about summary refs log tree commit diff
path: root/.venv/lib/python3.12/site-packages/networkx/algorithms
diff options
context:
space:
mode:
authorS. Solomon Darnell2025-03-28 21:52:21 -0500
committerS. Solomon Darnell2025-03-28 21:52:21 -0500
commit4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
treeee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/networkx/algorithms
parentcc961e04ba734dd72309fb548a2f97d67d578813 (diff)
downloadgn-ai-4a52a71956a8d46fcb7294ac71734504bb09bcc2.tar.gz
two version of R2R are here HEAD master
Diffstat (limited to '.venv/lib/python3.12/site-packages/networkx/algorithms')
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/__init__.py133
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__init__.py25
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clique.py259
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clustering_coefficient.py71
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/connectivity.py412
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/distance_measures.py150
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/dominating_set.py149
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/kcomponents.py369
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/matching.py44
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/maxcut.py143
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/ramsey.py53
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/steinertree.py231
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py41
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_clique.py112
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_connectivity.py199
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_distance_measures.py59
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_dominating_set.py78
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py303
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_matching.py8
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_maxcut.py94
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_ramsey.py31
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_steinertree.py265
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_traveling_salesman.py977
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py280
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_vertex_cover.py68
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/traveling_salesman.py1501
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/treewidth.py252
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/vertex_cover.py83
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__init__.py5
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/connectivity.py122
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/correlation.py302
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/mixing.py255
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/neighbor_degree.py160
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/pairs.py127
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/base_test.py81
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_connectivity.py143
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_correlation.py123
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_mixing.py176
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_neighbor_degree.py108
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_pairs.py87
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/asteroidal.py171
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__init__.py87
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/basic.py322
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/centrality.py290
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/cluster.py278
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/covering.py57
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/edgelist.py360
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/extendability.py105
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/generators.py604
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matching.py590
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matrix.py168
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/projection.py526
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/redundancy.py112
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/spectral.py69
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_basic.py125
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_centrality.py192
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_cluster.py84
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_covering.py33
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_edgelist.py240
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_extendability.py334
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_generators.py409
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matching.py327
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matrix.py84
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_project.py407
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_redundancy.py35
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py80
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/boundary.py168
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/bridges.py205
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/broadcasting.py155
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__init__.py20
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness.py436
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness_subset.py275
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/closeness.py282
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness.py342
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness_subset.py227
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_closeness.py96
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/degree_alg.py150
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/dispersion.py107
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/eigenvector.py357
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/flow_matrix.py130
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/group.py787
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/harmonic.py89
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/katz.py331
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/laplacian.py150
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/load.py200
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/percolation.py128
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/reaching.py209
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/second_order.py141
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/subgraph_alg.py340
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality.py780
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py340
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_closeness_centrality.py307
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py197
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py147
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_closeness.py43
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_degree_centrality.py144
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_dispersion.py73
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py187
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_group.py277
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_harmonic_centrality.py122
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_katz_centrality.py345
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_laplacian_centrality.py221
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_load_centrality.py344
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_percolation_centrality.py87
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_reaching.py140
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_second_order_centrality.py82
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_subgraph.py110
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_trophic.py302
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_voterank.py64
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/trophic.py163
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/voterank_alg.py95
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/chains.py172
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/chordal.py443
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/clique.py755
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/cluster.py609
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__init__.py4
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/equitable_coloring.py505
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/greedy_coloring.py565
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/test_coloring.py863
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/communicability_alg.py163
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/__init__.py26
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/asyn_fluid.py151
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/centrality.py171
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/community_utils.py30
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/divisive.py216
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/kclique.py79
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/kernighan_lin.py139
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/label_propagation.py338
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/louvain.py382
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/lukes.py227
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/modularity_max.py451
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/quality.py346
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_asyn_fluid.py136
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_centrality.py85
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_divisive.py106
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kclique.py91
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kernighan_lin.py92
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_label_propagation.py241
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_louvain.py264
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_lukes.py152
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_modularity_max.py340
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_quality.py139
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_utils.py26
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/components/__init__.py6
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/components/attracting.py115
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/components/biconnected.py394
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/components/connected.py216
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/components/semiconnected.py71
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/components/strongly_connected.py351
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_attracting.py70
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_biconnected.py248
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_connected.py138
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_semiconnected.py55
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_strongly_connected.py193
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_weakly_connected.py96
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/components/weakly_connected.py197
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__init__.py11
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/connectivity.py811
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/cuts.py612
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/disjoint_paths.py408
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_augmentation.py1270
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_kcomponents.py592
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcomponents.py223
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcutsets.py235
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/stoerwagner.py152
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_connectivity.py421
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_cuts.py309
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_disjoint_paths.py249
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_augmentation.py502
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py488
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcomponents.py296
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcutsets.py273
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_stoer_wagner.py102
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/utils.py88
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/core.py649
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/covering.py142
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/cuts.py398
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/cycles.py1230
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/d_separation.py722
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/dag.py1418
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/distance_measures.py1022
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/distance_regular.py238
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/dominance.py135
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/dominating.py95
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/efficiency_measures.py167
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/euler.py470
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__init__.py11
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/boykovkolmogorov.py370
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/capacityscaling.py407
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/dinitz_alg.py238
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/edmondskarp.py241
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/gomory_hu.py178
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/maxflow.py607
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/mincost.py356
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/networksimplex.py666
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/preflowpush.py425
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/shortestaugmentingpath.py300
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gl1.gpickle.bz2bin0 -> 44623 bytes
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gw1.gpickle.bz2bin0 -> 42248 bytes
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/netgen-2.gpickle.bz2bin0 -> 18972 bytes
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_gomory_hu.py128
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow.py573
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow_large_graph.py156
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_mincost.py476
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_networksimplex.py387
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/wlm3.gpickle.bz2bin0 -> 88132 bytes
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/flow/utils.py189
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/graph_hashing.py328
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/graphical.py483
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/hierarchy.py57
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/hybrid.py196
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isolate.py107
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__init__.py7
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/ismags.py1163
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorph.py249
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py1238
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/matchhelpers.py352
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py308
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99bin0 -> 1442 bytes
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99bin0 -> 1442 bytes
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99bin0 -> 310 bytes
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99bin0 -> 1602 bytes
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py327
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py48
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py410
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_match_helpers.py64
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py212
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py292
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py1608
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py3106
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py200
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tree_isomorphism.py284
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2pp.py1075
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py192
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__init__.py2
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/hits_alg.py337
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/pagerank_alg.py500
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_hits.py78
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_pagerank.py214
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/link_prediction.py687
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/lowest_common_ancestors.py269
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/matching.py1152
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/minors/__init__.py27
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/minors/contraction.py634
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/minors/tests/test_contraction.py446
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/mis.py78
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/moral.py59
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/node_classification.py219
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/non_randomness.py98
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__init__.py4
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/operators/all.py321
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/operators/binary.py450
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/operators/product.py633
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_all.py328
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_binary.py453
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_product.py491
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_unary.py55
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/operators/unary.py77
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/planar_drawing.py464
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/planarity.py1402
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/polynomials.py306
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/reciprocity.py98
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/regular.py215
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/richclub.py138
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__init__.py5
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/astar.py241
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/dense.py260
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/generic.py730
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_astar.py248
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense.py212
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py89
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_generic.py450
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_unweighted.py149
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_weighted.py972
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/unweighted.py579
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/weighted.py2520
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/similarity.py1780
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/simple_paths.py950
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/smallworld.py404
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/smetric.py30
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/sparsifiers.py296
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/structuralholes.py283
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/summarization.py564
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/swap.py406
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_asteroidal.py23
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_boundary.py154
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_bridges.py144
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_broadcasting.py82
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chains.py141
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chordal.py129
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_clique.py291
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cluster.py549
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_communicability.py80
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_core.py266
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_covering.py85
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cuts.py171
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cycles.py974
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_d_separation.py348
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dag.py835
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_measures.py774
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_regular.py85
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominance.py286
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominating.py46
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_efficiency.py58
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_euler.py314
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graph_hashing.py686
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graphical.py163
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hierarchy.py46
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hybrid.py24
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_isolate.py26
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_link_prediction.py586
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_lowest_common_ancestors.py427
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_matching.py605
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_max_weight_clique.py179
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_mis.py62
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_moral.py15
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_node_classification.py140
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_non_randomness.py42
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planar_drawing.py274
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planarity.py535
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_polynomials.py57
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_reciprocity.py37
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_regular.py92
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_richclub.py149
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_similarity.py946
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_simple_paths.py803
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smallworld.py78
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smetric.py8
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_sparsifiers.py138
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_structuralholes.py137
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_summarization.py642
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_swap.py179
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_threshold.py269
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_time_dependent.py431
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_tournament.py163
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_triads.py289
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_vitality.py41
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_voronoi.py103
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_walks.py54
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_wiener.py123
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/threshold.py980
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/time_dependent.py142
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tournament.py403
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__init__.py5
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/beamsearch.py90
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/breadth_first_search.py575
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/depth_first_search.py529
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgebfs.py178
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgedfs.py176
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_beamsearch.py25
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_bfs.py203
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_dfs.py305
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgebfs.py147
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgedfs.py131
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__init__.py6
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tree/branchings.py1042
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tree/coding.py413
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tree/decomposition.py88
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tree/mst.py1284
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tree/operations.py105
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tree/recognition.py273
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__init__.py0
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_branchings.py624
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_coding.py114
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_decomposition.py79
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_mst.py918
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_operations.py53
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_recognition.py174
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/triads.py604
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/vitality.py76
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/voronoi.py86
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/walks.py79
-rw-r--r--.venv/lib/python3.12/site-packages/networkx/algorithms/wiener.py226
388 files changed, 112182 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/__init__.py
new file mode 100644
index 00000000..56bfb14a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/__init__.py
@@ -0,0 +1,133 @@
+from networkx.algorithms.assortativity import *
+from networkx.algorithms.asteroidal import *
+from networkx.algorithms.boundary import *
+from networkx.algorithms.broadcasting import *
+from networkx.algorithms.bridges import *
+from networkx.algorithms.chains import *
+from networkx.algorithms.centrality import *
+from networkx.algorithms.chordal import *
+from networkx.algorithms.cluster import *
+from networkx.algorithms.clique import *
+from networkx.algorithms.communicability_alg import *
+from networkx.algorithms.components import *
+from networkx.algorithms.coloring import *
+from networkx.algorithms.core import *
+from networkx.algorithms.covering import *
+from networkx.algorithms.cycles import *
+from networkx.algorithms.cuts import *
+from networkx.algorithms.d_separation import *
+from networkx.algorithms.dag import *
+from networkx.algorithms.distance_measures import *
+from networkx.algorithms.distance_regular import *
+from networkx.algorithms.dominance import *
+from networkx.algorithms.dominating import *
+from networkx.algorithms.efficiency_measures import *
+from networkx.algorithms.euler import *
+from networkx.algorithms.graphical import *
+from networkx.algorithms.hierarchy import *
+from networkx.algorithms.hybrid import *
+from networkx.algorithms.link_analysis import *
+from networkx.algorithms.link_prediction import *
+from networkx.algorithms.lowest_common_ancestors import *
+from networkx.algorithms.isolate import *
+from networkx.algorithms.matching import *
+from networkx.algorithms.minors import *
+from networkx.algorithms.mis import *
+from networkx.algorithms.moral import *
+from networkx.algorithms.non_randomness import *
+from networkx.algorithms.operators import *
+from networkx.algorithms.planarity import *
+from networkx.algorithms.planar_drawing import *
+from networkx.algorithms.polynomials import *
+from networkx.algorithms.reciprocity import *
+from networkx.algorithms.regular import *
+from networkx.algorithms.richclub import *
+from networkx.algorithms.shortest_paths import *
+from networkx.algorithms.similarity import *
+from networkx.algorithms.graph_hashing import *
+from networkx.algorithms.simple_paths import *
+from networkx.algorithms.smallworld import *
+from networkx.algorithms.smetric import *
+from networkx.algorithms.structuralholes import *
+from networkx.algorithms.sparsifiers import *
+from networkx.algorithms.summarization import *
+from networkx.algorithms.swap import *
+from networkx.algorithms.time_dependent import *
+from networkx.algorithms.traversal import *
+from networkx.algorithms.triads import *
+from networkx.algorithms.vitality import *
+from networkx.algorithms.voronoi import *
+from networkx.algorithms.walks import *
+from networkx.algorithms.wiener import *
+
+# Make certain subpackages available to the user as direct imports from
+# the `networkx` namespace.
+from networkx.algorithms import approximation
+from networkx.algorithms import assortativity
+from networkx.algorithms import bipartite
+from networkx.algorithms import node_classification
+from networkx.algorithms import centrality
+from networkx.algorithms import chordal
+from networkx.algorithms import cluster
+from networkx.algorithms import clique
+from networkx.algorithms import components
+from networkx.algorithms import connectivity
+from networkx.algorithms import community
+from networkx.algorithms import coloring
+from networkx.algorithms import flow
+from networkx.algorithms import isomorphism
+from networkx.algorithms import link_analysis
+from networkx.algorithms import lowest_common_ancestors
+from networkx.algorithms import operators
+from networkx.algorithms import shortest_paths
+from networkx.algorithms import tournament
+from networkx.algorithms import traversal
+from networkx.algorithms import tree
+
+# Make certain functions from some of the previous subpackages available
+# to the user as direct imports from the `networkx` namespace.
+from networkx.algorithms.bipartite import complete_bipartite_graph
+from networkx.algorithms.bipartite import is_bipartite
+from networkx.algorithms.bipartite import projected_graph
+from networkx.algorithms.connectivity import all_pairs_node_connectivity
+from networkx.algorithms.connectivity import all_node_cuts
+from networkx.algorithms.connectivity import average_node_connectivity
+from networkx.algorithms.connectivity import edge_connectivity
+from networkx.algorithms.connectivity import edge_disjoint_paths
+from networkx.algorithms.connectivity import k_components
+from networkx.algorithms.connectivity import k_edge_components
+from networkx.algorithms.connectivity import k_edge_subgraphs
+from networkx.algorithms.connectivity import k_edge_augmentation
+from networkx.algorithms.connectivity import is_k_edge_connected
+from networkx.algorithms.connectivity import minimum_edge_cut
+from networkx.algorithms.connectivity import minimum_node_cut
+from networkx.algorithms.connectivity import node_connectivity
+from networkx.algorithms.connectivity import node_disjoint_paths
+from networkx.algorithms.connectivity import stoer_wagner
+from networkx.algorithms.flow import capacity_scaling
+from networkx.algorithms.flow import cost_of_flow
+from networkx.algorithms.flow import gomory_hu_tree
+from networkx.algorithms.flow import max_flow_min_cost
+from networkx.algorithms.flow import maximum_flow
+from networkx.algorithms.flow import maximum_flow_value
+from networkx.algorithms.flow import min_cost_flow
+from networkx.algorithms.flow import min_cost_flow_cost
+from networkx.algorithms.flow import minimum_cut
+from networkx.algorithms.flow import minimum_cut_value
+from networkx.algorithms.flow import network_simplex
+from networkx.algorithms.isomorphism import could_be_isomorphic
+from networkx.algorithms.isomorphism import fast_could_be_isomorphic
+from networkx.algorithms.isomorphism import faster_could_be_isomorphic
+from networkx.algorithms.isomorphism import is_isomorphic
+from networkx.algorithms.isomorphism.vf2pp import *
+from networkx.algorithms.tree.branchings import maximum_branching
+from networkx.algorithms.tree.branchings import maximum_spanning_arborescence
+from networkx.algorithms.tree.branchings import minimum_branching
+from networkx.algorithms.tree.branchings import minimum_spanning_arborescence
+from networkx.algorithms.tree.branchings import ArborescenceIterator
+from networkx.algorithms.tree.coding import *
+from networkx.algorithms.tree.decomposition import *
+from networkx.algorithms.tree.mst import *
+from networkx.algorithms.tree.operations import *
+from networkx.algorithms.tree.recognition import *
+from networkx.algorithms.tournament import is_tournament
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__init__.py
new file mode 100644
index 00000000..d12a8f8b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__init__.py
@@ -0,0 +1,25 @@
+"""Approximations of graph properties and Heuristic methods for optimization.
+
+The functions in this class are not imported into the top-level ``networkx``
+namespace so the easiest way to use them is with::
+
+    >>> from networkx.algorithms import approximation
+
+Another option is to import the specific function with
+``from networkx.algorithms.approximation import function_name``.
+
+"""
+
+from networkx.algorithms.approximation.clustering_coefficient import *
+from networkx.algorithms.approximation.clique import *
+from networkx.algorithms.approximation.connectivity import *
+from networkx.algorithms.approximation.distance_measures import *
+from networkx.algorithms.approximation.dominating_set import *
+from networkx.algorithms.approximation.kcomponents import *
+from networkx.algorithms.approximation.matching import *
+from networkx.algorithms.approximation.ramsey import *
+from networkx.algorithms.approximation.steinertree import *
+from networkx.algorithms.approximation.traveling_salesman import *
+from networkx.algorithms.approximation.treewidth import *
+from networkx.algorithms.approximation.vertex_cover import *
+from networkx.algorithms.approximation.maxcut import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clique.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clique.py
new file mode 100644
index 00000000..ed0f3506
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clique.py
@@ -0,0 +1,259 @@
+"""Functions for computing large cliques and maximum independent sets."""
+
+import networkx as nx
+from networkx.algorithms.approximation import ramsey
+from networkx.utils import not_implemented_for
+
+__all__ = [
+    "clique_removal",
+    "max_clique",
+    "large_clique_size",
+    "maximum_independent_set",
+]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def maximum_independent_set(G):
+    """Returns an approximate maximum independent set.
+
+    Independent set or stable set is a set of vertices in a graph, no two of
+    which are adjacent. That is, it is a set I of vertices such that for every
+    two vertices in I, there is no edge connecting the two. Equivalently, each
+    edge in the graph has at most one endpoint in I. The size of an independent
+    set is the number of vertices it contains [1]_.
+
+    A maximum independent set is a largest independent set for a given graph G
+    and its size is denoted $\\alpha(G)$. The problem of finding such a set is called
+    the maximum independent set problem and is an NP-hard optimization problem.
+    As such, it is unlikely that there exists an efficient algorithm for finding
+    a maximum independent set of a graph.
+
+    The Independent Set algorithm is based on [2]_.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph
+
+    Returns
+    -------
+    iset : Set
+        The apx-maximum independent set
+
+    Examples
+    --------
+    >>> G = nx.path_graph(10)
+    >>> nx.approximation.maximum_independent_set(G)
+    {0, 2, 4, 6, 9}
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the graph is directed or is a multigraph.
+
+    Notes
+    -----
+    Finds the $O(|V|/(log|V|)^2)$ apx of independent set in the worst case.
+
+    References
+    ----------
+    .. [1] `Wikipedia: Independent set
+        <https://en.wikipedia.org/wiki/Independent_set_(graph_theory)>`_
+    .. [2] Boppana, R., & Halldórsson, M. M. (1992).
+       Approximating maximum independent sets by excluding subgraphs.
+       BIT Numerical Mathematics, 32(2), 180–196. Springer.
+    """
+    iset, _ = clique_removal(G)
+    return iset
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def max_clique(G):
+    r"""Find the Maximum Clique
+
+    Finds the $O(|V|/(log|V|)^2)$ apx of maximum clique/independent set
+    in the worst case.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph
+
+    Returns
+    -------
+    clique : set
+        The apx-maximum clique of the graph
+
+    Examples
+    --------
+    >>> G = nx.path_graph(10)
+    >>> nx.approximation.max_clique(G)
+    {8, 9}
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the graph is directed or is a multigraph.
+
+    Notes
+    -----
+    A clique in an undirected graph G = (V, E) is a subset of the vertex set
+    `C \subseteq V` such that for every two vertices in C there exists an edge
+    connecting the two. This is equivalent to saying that the subgraph
+    induced by C is complete (in some cases, the term clique may also refer
+    to the subgraph).
+
+    A maximum clique is a clique of the largest possible size in a given graph.
+    The clique number `\omega(G)` of a graph G is the number of
+    vertices in a maximum clique in G. The intersection number of
+    G is the smallest number of cliques that together cover all edges of G.
+
+    https://en.wikipedia.org/wiki/Maximum_clique
+
+    References
+    ----------
+    .. [1] Boppana, R., & Halldórsson, M. M. (1992).
+        Approximating maximum independent sets by excluding subgraphs.
+        BIT Numerical Mathematics, 32(2), 180–196. Springer.
+        doi:10.1007/BF01994876
+    """
+    # finding the maximum clique in a graph is equivalent to finding
+    # the independent set in the complementary graph
+    cgraph = nx.complement(G)
+    iset, _ = clique_removal(cgraph)
+    return iset
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def clique_removal(G):
+    r"""Repeatedly remove cliques from the graph.
+
+    Results in a $O(|V|/(\log |V|)^2)$ approximation of maximum clique
+    and independent set. Returns the largest independent set found, along
+    with found maximal cliques.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph
+
+    Returns
+    -------
+    max_ind_cliques : (set, list) tuple
+        2-tuple of Maximal Independent Set and list of maximal cliques (sets).
+
+    Examples
+    --------
+    >>> G = nx.path_graph(10)
+    >>> nx.approximation.clique_removal(G)
+    ({0, 2, 4, 6, 9}, [{0, 1}, {2, 3}, {4, 5}, {6, 7}, {8, 9}])
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the graph is directed or is a multigraph.
+
+    References
+    ----------
+    .. [1] Boppana, R., & Halldórsson, M. M. (1992).
+        Approximating maximum independent sets by excluding subgraphs.
+        BIT Numerical Mathematics, 32(2), 180–196. Springer.
+    """
+    graph = G.copy()
+    c_i, i_i = ramsey.ramsey_R2(graph)
+    cliques = [c_i]
+    isets = [i_i]
+    while graph:
+        graph.remove_nodes_from(c_i)
+        c_i, i_i = ramsey.ramsey_R2(graph)
+        if c_i:
+            cliques.append(c_i)
+        if i_i:
+            isets.append(i_i)
+    # Determine the largest independent set as measured by cardinality.
+    maxiset = max(isets, key=len)
+    return maxiset, cliques
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def large_clique_size(G):
+    """Find the size of a large clique in a graph.
+
+    A *clique* is a subset of nodes in which each pair of nodes is
+    adjacent. This function is a heuristic for finding the size of a
+    large clique in the graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    Returns
+    -------
+    k: integer
+       The size of a large clique in the graph.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(10)
+    >>> nx.approximation.large_clique_size(G)
+    2
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the graph is directed or is a multigraph.
+
+    Notes
+    -----
+    This implementation is from [1]_. Its worst case time complexity is
+    :math:`O(n d^2)`, where *n* is the number of nodes in the graph and
+    *d* is the maximum degree.
+
+    This function is a heuristic, which means it may work well in
+    practice, but there is no rigorous mathematical guarantee on the
+    ratio between the returned number and the actual largest clique size
+    in the graph.
+
+    References
+    ----------
+    .. [1] Pattabiraman, Bharath, et al.
+       "Fast Algorithms for the Maximum Clique Problem on Massive Graphs
+       with Applications to Overlapping Community Detection."
+       *Internet Mathematics* 11.4-5 (2015): 421--448.
+       <https://doi.org/10.1080/15427951.2014.986778>
+
+    See also
+    --------
+
+    :func:`networkx.algorithms.approximation.clique.max_clique`
+        A function that returns an approximate maximum clique with a
+        guarantee on the approximation ratio.
+
+    :mod:`networkx.algorithms.clique`
+        Functions for finding the exact maximum clique in a graph.
+
+    """
+    degrees = G.degree
+
+    def _clique_heuristic(G, U, size, best_size):
+        if not U:
+            return max(best_size, size)
+        u = max(U, key=degrees)
+        U.remove(u)
+        N_prime = {v for v in G[u] if degrees[v] >= best_size}
+        return _clique_heuristic(G, U & N_prime, size + 1, best_size)
+
+    best_size = 0
+    nodes = (u for u in G if degrees[u] >= best_size)
+    for u in nodes:
+        neighbors = {v for v in G[u] if degrees[v] >= best_size}
+        best_size = _clique_heuristic(G, neighbors, 1, best_size)
+    return best_size
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clustering_coefficient.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clustering_coefficient.py
new file mode 100644
index 00000000..545fc655
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clustering_coefficient.py
@@ -0,0 +1,71 @@
+import networkx as nx
+from networkx.utils import not_implemented_for, py_random_state
+
+__all__ = ["average_clustering"]
+
+
+@not_implemented_for("directed")
+@py_random_state(2)
+@nx._dispatchable(name="approximate_average_clustering")
+def average_clustering(G, trials=1000, seed=None):
+    r"""Estimates the average clustering coefficient of G.
+
+    The local clustering of each node in `G` is the fraction of triangles
+    that actually exist over all possible triangles in its neighborhood.
+    The average clustering coefficient of a graph `G` is the mean of
+    local clusterings.
+
+    This function finds an approximate average clustering coefficient
+    for G by repeating `n` times (defined in `trials`) the following
+    experiment: choose a node at random, choose two of its neighbors
+    at random, and check if they are connected. The approximate
+    coefficient is the fraction of triangles found over the number
+    of trials [1]_.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    trials : integer
+        Number of trials to perform (default 1000).
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    c : float
+        Approximated average clustering coefficient.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import approximation
+    >>> G = nx.erdos_renyi_graph(10, 0.2, seed=10)
+    >>> approximation.average_clustering(G, trials=1000, seed=10)
+    0.214
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is directed.
+
+    References
+    ----------
+    .. [1] Schank, Thomas, and Dorothea Wagner. Approximating clustering
+       coefficient and transitivity. Universität Karlsruhe, Fakultät für
+       Informatik, 2004.
+       https://doi.org/10.5445/IR/1000001239
+
+    """
+    n = len(G)
+    triangles = 0
+    nodes = list(G)
+    for i in [int(seed.random() * n) for i in range(trials)]:
+        nbrs = list(G[nodes[i]])
+        if len(nbrs) < 2:
+            continue
+        u, v = seed.sample(nbrs, 2)
+        if u in G[v]:
+            triangles += 1
+    return triangles / trials
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/connectivity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/connectivity.py
new file mode 100644
index 00000000..0b596fdf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/connectivity.py
@@ -0,0 +1,412 @@
+"""Fast approximation for node connectivity"""
+
+import itertools
+from operator import itemgetter
+
+import networkx as nx
+
+__all__ = [
+    "local_node_connectivity",
+    "node_connectivity",
+    "all_pairs_node_connectivity",
+]
+
+
+@nx._dispatchable(name="approximate_local_node_connectivity")
+def local_node_connectivity(G, source, target, cutoff=None):
+    """Compute node connectivity between source and target.
+
+    Pairwise or local node connectivity between two distinct and nonadjacent
+    nodes is the minimum number of nodes that must be removed (minimum
+    separating cutset) to disconnect them. By Menger's theorem, this is equal
+    to the number of node independent paths (paths that share no nodes other
+    than source and target). Which is what we compute in this function.
+
+    This algorithm is a fast approximation that gives an strict lower
+    bound on the actual number of node independent paths between two nodes [1]_.
+    It works for both directed and undirected graphs.
+
+    Parameters
+    ----------
+
+    G : NetworkX graph
+
+    source : node
+        Starting node for node connectivity
+
+    target : node
+        Ending node for node connectivity
+
+    cutoff : integer
+        Maximum node connectivity to consider. If None, the minimum degree
+        of source or target is used as a cutoff. Default value None.
+
+    Returns
+    -------
+    k: integer
+       pairwise node connectivity
+
+    Examples
+    --------
+    >>> # Platonic octahedral graph has node connectivity 4
+    >>> # for each non adjacent node pair
+    >>> from networkx.algorithms import approximation as approx
+    >>> G = nx.octahedral_graph()
+    >>> approx.local_node_connectivity(G, 0, 5)
+    4
+
+    Notes
+    -----
+    This algorithm [1]_ finds node independents paths between two nodes by
+    computing their shortest path using BFS, marking the nodes of the path
+    found as 'used' and then searching other shortest paths excluding the
+    nodes marked as used until no more paths exist. It is not exact because
+    a shortest path could use nodes that, if the path were longer, may belong
+    to two different node independent paths. Thus it only guarantees an
+    strict lower bound on node connectivity.
+
+    Note that the authors propose a further refinement, losing accuracy and
+    gaining speed, which is not implemented yet.
+
+    See also
+    --------
+    all_pairs_node_connectivity
+    node_connectivity
+
+    References
+    ----------
+    .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
+        Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
+        http://eclectic.ss.uci.edu/~drwhite/working.pdf
+
+    """
+    if target == source:
+        raise nx.NetworkXError("source and target have to be different nodes.")
+
+    # Maximum possible node independent paths
+    if G.is_directed():
+        possible = min(G.out_degree(source), G.in_degree(target))
+    else:
+        possible = min(G.degree(source), G.degree(target))
+
+    K = 0
+    if not possible:
+        return K
+
+    if cutoff is None:
+        cutoff = float("inf")
+
+    exclude = set()
+    for i in range(min(possible, cutoff)):
+        try:
+            path = _bidirectional_shortest_path(G, source, target, exclude)
+            exclude.update(set(path))
+            K += 1
+        except nx.NetworkXNoPath:
+            break
+
+    return K
+
+
+@nx._dispatchable(name="approximate_node_connectivity")
+def node_connectivity(G, s=None, t=None):
+    r"""Returns an approximation for node connectivity for a graph or digraph G.
+
+    Node connectivity is equal to the minimum number of nodes that
+    must be removed to disconnect G or render it trivial. By Menger's theorem,
+    this is equal to the number of node independent paths (paths that
+    share no nodes other than source and target).
+
+    If source and target nodes are provided, this function returns the
+    local node connectivity: the minimum number of nodes that must be
+    removed to break all paths from source to target in G.
+
+    This algorithm is based on a fast approximation that gives an strict lower
+    bound on the actual number of node independent paths between two nodes [1]_.
+    It works for both directed and undirected graphs.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph
+
+    s : node
+        Source node. Optional. Default value: None.
+
+    t : node
+        Target node. Optional. Default value: None.
+
+    Returns
+    -------
+    K : integer
+        Node connectivity of G, or local node connectivity if source
+        and target are provided.
+
+    Examples
+    --------
+    >>> # Platonic octahedral graph is 4-node-connected
+    >>> from networkx.algorithms import approximation as approx
+    >>> G = nx.octahedral_graph()
+    >>> approx.node_connectivity(G)
+    4
+
+    Notes
+    -----
+    This algorithm [1]_ finds node independents paths between two nodes by
+    computing their shortest path using BFS, marking the nodes of the path
+    found as 'used' and then searching other shortest paths excluding the
+    nodes marked as used until no more paths exist. It is not exact because
+    a shortest path could use nodes that, if the path were longer, may belong
+    to two different node independent paths. Thus it only guarantees an
+    strict lower bound on node connectivity.
+
+    See also
+    --------
+    all_pairs_node_connectivity
+    local_node_connectivity
+
+    References
+    ----------
+    .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
+        Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
+        http://eclectic.ss.uci.edu/~drwhite/working.pdf
+
+    """
+    if (s is not None and t is None) or (s is None and t is not None):
+        raise nx.NetworkXError("Both source and target must be specified.")
+
+    # Local node connectivity
+    if s is not None and t is not None:
+        if s not in G:
+            raise nx.NetworkXError(f"node {s} not in graph")
+        if t not in G:
+            raise nx.NetworkXError(f"node {t} not in graph")
+        return local_node_connectivity(G, s, t)
+
+    # Global node connectivity
+    if G.is_directed():
+        connected_func = nx.is_weakly_connected
+        iter_func = itertools.permutations
+
+        def neighbors(v):
+            return itertools.chain(G.predecessors(v), G.successors(v))
+
+    else:
+        connected_func = nx.is_connected
+        iter_func = itertools.combinations
+        neighbors = G.neighbors
+
+    if not connected_func(G):
+        return 0
+
+    # Choose a node with minimum degree
+    v, minimum_degree = min(G.degree(), key=itemgetter(1))
+    # Node connectivity is bounded by minimum degree
+    K = minimum_degree
+    # compute local node connectivity with all non-neighbors nodes
+    # and store the minimum
+    for w in set(G) - set(neighbors(v)) - {v}:
+        K = min(K, local_node_connectivity(G, v, w, cutoff=K))
+    # Same for non adjacent pairs of neighbors of v
+    for x, y in iter_func(neighbors(v), 2):
+        if y not in G[x] and x != y:
+            K = min(K, local_node_connectivity(G, x, y, cutoff=K))
+    return K
+
+
+@nx._dispatchable(name="approximate_all_pairs_node_connectivity")
+def all_pairs_node_connectivity(G, nbunch=None, cutoff=None):
+    """Compute node connectivity between all pairs of nodes.
+
+    Pairwise or local node connectivity between two distinct and nonadjacent
+    nodes is the minimum number of nodes that must be removed (minimum
+    separating cutset) to disconnect them. By Menger's theorem, this is equal
+    to the number of node independent paths (paths that share no nodes other
+    than source and target). Which is what we compute in this function.
+
+    This algorithm is a fast approximation that gives an strict lower
+    bound on the actual number of node independent paths between two nodes [1]_.
+    It works for both directed and undirected graphs.
+
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    nbunch: container
+        Container of nodes. If provided node connectivity will be computed
+        only over pairs of nodes in nbunch.
+
+    cutoff : integer
+        Maximum node connectivity to consider. If None, the minimum degree
+        of source or target is used as a cutoff in each pair of nodes.
+        Default value None.
+
+    Returns
+    -------
+    K : dictionary
+        Dictionary, keyed by source and target, of pairwise node connectivity
+
+    Examples
+    --------
+    A 3 node cycle with one extra node attached has connectivity 2 between all
+    nodes in the cycle and connectivity 1 between the extra node and the rest:
+
+    >>> G = nx.cycle_graph(3)
+    >>> G.add_edge(2, 3)
+    >>> import pprint  # for nice dictionary formatting
+    >>> pprint.pprint(nx.all_pairs_node_connectivity(G))
+    {0: {1: 2, 2: 2, 3: 1},
+     1: {0: 2, 2: 2, 3: 1},
+     2: {0: 2, 1: 2, 3: 1},
+     3: {0: 1, 1: 1, 2: 1}}
+
+    See Also
+    --------
+    local_node_connectivity
+    node_connectivity
+
+    References
+    ----------
+    .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
+        Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
+        http://eclectic.ss.uci.edu/~drwhite/working.pdf
+    """
+    if nbunch is None:
+        nbunch = G
+    else:
+        nbunch = set(nbunch)
+
+    directed = G.is_directed()
+    if directed:
+        iter_func = itertools.permutations
+    else:
+        iter_func = itertools.combinations
+
+    all_pairs = {n: {} for n in nbunch}
+
+    for u, v in iter_func(nbunch, 2):
+        k = local_node_connectivity(G, u, v, cutoff=cutoff)
+        all_pairs[u][v] = k
+        if not directed:
+            all_pairs[v][u] = k
+
+    return all_pairs
+
+
+def _bidirectional_shortest_path(G, source, target, exclude):
+    """Returns shortest path between source and target ignoring nodes in the
+    container 'exclude'.
+
+    Parameters
+    ----------
+
+    G : NetworkX graph
+
+    source : node
+        Starting node for path
+
+    target : node
+        Ending node for path
+
+    exclude: container
+        Container for nodes to exclude from the search for shortest paths
+
+    Returns
+    -------
+    path: list
+        Shortest path between source and target ignoring nodes in 'exclude'
+
+    Raises
+    ------
+    NetworkXNoPath
+        If there is no path or if nodes are adjacent and have only one path
+        between them
+
+    Notes
+    -----
+    This function and its helper are originally from
+    networkx.algorithms.shortest_paths.unweighted and are modified to
+    accept the extra parameter 'exclude', which is a container for nodes
+    already used in other paths that should be ignored.
+
+    References
+    ----------
+    .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
+        Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
+        http://eclectic.ss.uci.edu/~drwhite/working.pdf
+
+    """
+    # call helper to do the real work
+    results = _bidirectional_pred_succ(G, source, target, exclude)
+    pred, succ, w = results
+
+    # build path from pred+w+succ
+    path = []
+    # from source to w
+    while w is not None:
+        path.append(w)
+        w = pred[w]
+    path.reverse()
+    # from w to target
+    w = succ[path[-1]]
+    while w is not None:
+        path.append(w)
+        w = succ[w]
+
+    return path
+
+
+def _bidirectional_pred_succ(G, source, target, exclude):
+    # does BFS from both source and target and meets in the middle
+    # excludes nodes in the container "exclude" from the search
+
+    # handle either directed or undirected
+    if G.is_directed():
+        Gpred = G.predecessors
+        Gsucc = G.successors
+    else:
+        Gpred = G.neighbors
+        Gsucc = G.neighbors
+
+    # predecessor and successors in search
+    pred = {source: None}
+    succ = {target: None}
+
+    # initialize fringes, start with forward
+    forward_fringe = [source]
+    reverse_fringe = [target]
+
+    level = 0
+
+    while forward_fringe and reverse_fringe:
+        # Make sure that we iterate one step forward and one step backwards
+        # thus source and target will only trigger "found path" when they are
+        # adjacent and then they can be safely included in the container 'exclude'
+        level += 1
+        if level % 2 != 0:
+            this_level = forward_fringe
+            forward_fringe = []
+            for v in this_level:
+                for w in Gsucc(v):
+                    if w in exclude:
+                        continue
+                    if w not in pred:
+                        forward_fringe.append(w)
+                        pred[w] = v
+                    if w in succ:
+                        return pred, succ, w  # found path
+        else:
+            this_level = reverse_fringe
+            reverse_fringe = []
+            for v in this_level:
+                for w in Gpred(v):
+                    if w in exclude:
+                        continue
+                    if w not in succ:
+                        succ[w] = v
+                        reverse_fringe.append(w)
+                    if w in pred:
+                        return pred, succ, w  # found path
+
+    raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/distance_measures.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/distance_measures.py
new file mode 100644
index 00000000..d5847e65
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/distance_measures.py
@@ -0,0 +1,150 @@
+"""Distance measures approximated metrics."""
+
+import networkx as nx
+from networkx.utils.decorators import py_random_state
+
+__all__ = ["diameter"]
+
+
+@py_random_state(1)
+@nx._dispatchable(name="approximate_diameter")
+def diameter(G, seed=None):
+    """Returns a lower bound on the diameter of the graph G.
+
+    The function computes a lower bound on the diameter (i.e., the maximum eccentricity)
+    of a directed or undirected graph G. The procedure used varies depending on the graph
+    being directed or not.
+
+    If G is an `undirected` graph, then the function uses the `2-sweep` algorithm [1]_.
+    The main idea is to pick the farthest node from a random node and return its eccentricity.
+
+    Otherwise, if G is a `directed` graph, the function uses the `2-dSweep` algorithm [2]_,
+    The procedure starts by selecting a random source node $s$ from which it performs a
+    forward and a backward BFS. Let $a_1$ and $a_2$ be the farthest nodes in the forward and
+    backward cases, respectively. Then, it computes the backward eccentricity of $a_1$ using
+    a backward BFS and the forward eccentricity of $a_2$ using a forward BFS.
+    Finally, it returns the best lower bound between the two.
+
+    In both cases, the time complexity is linear with respect to the size of G.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    d : integer
+       Lower Bound on the Diameter of G
+
+    Examples
+    --------
+    >>> G = nx.path_graph(10)  # undirected graph
+    >>> nx.diameter(G)
+    9
+    >>> G = nx.cycle_graph(3, create_using=nx.DiGraph)  # directed graph
+    >>> nx.diameter(G)
+    2
+
+    Raises
+    ------
+    NetworkXError
+        If the graph is empty or
+        If the graph is undirected and not connected or
+        If the graph is directed and not strongly connected.
+
+    See Also
+    --------
+    networkx.algorithms.distance_measures.diameter
+
+    References
+    ----------
+    .. [1] Magnien, Clémence, Matthieu Latapy, and Michel Habib.
+       *Fast computation of empirically tight bounds for the diameter of massive graphs.*
+       Journal of Experimental Algorithmics (JEA), 2009.
+       https://arxiv.org/pdf/0904.2728.pdf
+    .. [2] Crescenzi, Pierluigi, Roberto Grossi, Leonardo Lanzi, and Andrea Marino.
+       *On computing the diameter of real-world directed (weighted) graphs.*
+       International Symposium on Experimental Algorithms. Springer, Berlin, Heidelberg, 2012.
+       https://courses.cs.ut.ee/MTAT.03.238/2014_fall/uploads/Main/diameter.pdf
+    """
+    # if G is empty
+    if not G:
+        raise nx.NetworkXError("Expected non-empty NetworkX graph!")
+    # if there's only a node
+    if G.number_of_nodes() == 1:
+        return 0
+    # if G is directed
+    if G.is_directed():
+        return _two_sweep_directed(G, seed)
+    # else if G is undirected
+    return _two_sweep_undirected(G, seed)
+
+
+def _two_sweep_undirected(G, seed):
+    """Helper function for finding a lower bound on the diameter
+        for undirected Graphs.
+
+        The idea is to pick the farthest node from a random node
+        and return its eccentricity.
+
+        ``G`` is a NetworkX undirected graph.
+
+    .. note::
+
+        ``seed`` is a random.Random or numpy.random.RandomState instance
+    """
+    # select a random source node
+    source = seed.choice(list(G))
+    # get the distances to the other nodes
+    distances = nx.shortest_path_length(G, source)
+    # if some nodes have not been visited, then the graph is not connected
+    if len(distances) != len(G):
+        raise nx.NetworkXError("Graph not connected.")
+    # take a node that is (one of) the farthest nodes from the source
+    *_, node = distances
+    # return the eccentricity of the node
+    return nx.eccentricity(G, node)
+
+
+def _two_sweep_directed(G, seed):
+    """Helper function for finding a lower bound on the diameter
+        for directed Graphs.
+
+        It implements 2-dSweep, the directed version of the 2-sweep algorithm.
+        The algorithm follows the following steps.
+        1. Select a source node $s$ at random.
+        2. Perform a forward BFS from $s$ to select a node $a_1$ at the maximum
+        distance from the source, and compute $LB_1$, the backward eccentricity of $a_1$.
+        3. Perform a backward BFS from $s$ to select a node $a_2$ at the maximum
+        distance from the source, and compute $LB_2$, the forward eccentricity of $a_2$.
+        4. Return the maximum between $LB_1$ and $LB_2$.
+
+        ``G`` is a NetworkX directed graph.
+
+    .. note::
+
+        ``seed`` is a random.Random or numpy.random.RandomState instance
+    """
+    # get a new digraph G' with the edges reversed in the opposite direction
+    G_reversed = G.reverse()
+    # select a random source node
+    source = seed.choice(list(G))
+    # compute forward distances from source
+    forward_distances = nx.shortest_path_length(G, source)
+    # compute backward distances  from source
+    backward_distances = nx.shortest_path_length(G_reversed, source)
+    # if either the source can't reach every node or not every node
+    # can reach the source, then the graph is not strongly connected
+    n = len(G)
+    if len(forward_distances) != n or len(backward_distances) != n:
+        raise nx.NetworkXError("DiGraph not strongly connected.")
+    # take a node a_1 at the maximum distance from the source in G
+    *_, a_1 = forward_distances
+    # take a node a_2 at the maximum distance from the source in G_reversed
+    *_, a_2 = backward_distances
+    # return the max between the backward eccentricity of a_1 and the forward eccentricity of a_2
+    return max(nx.eccentricity(G_reversed, a_1), nx.eccentricity(G, a_2))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/dominating_set.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/dominating_set.py
new file mode 100644
index 00000000..e568a827
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/dominating_set.py
@@ -0,0 +1,149 @@
+"""Functions for finding node and edge dominating sets.
+
+A `dominating set`_ for an undirected graph *G* with vertex set *V*
+and edge set *E* is a subset *D* of *V* such that every vertex not in
+*D* is adjacent to at least one member of *D*. An `edge dominating set`_
+is a subset *F* of *E* such that every edge not in *F* is
+incident to an endpoint of at least one edge in *F*.
+
+.. _dominating set: https://en.wikipedia.org/wiki/Dominating_set
+.. _edge dominating set: https://en.wikipedia.org/wiki/Edge_dominating_set
+
+"""
+
+import networkx as nx
+
+from ...utils import not_implemented_for
+from ..matching import maximal_matching
+
+__all__ = ["min_weighted_dominating_set", "min_edge_dominating_set"]
+
+
+# TODO Why doesn't this algorithm work for directed graphs?
+@not_implemented_for("directed")
+@nx._dispatchable(node_attrs="weight")
+def min_weighted_dominating_set(G, weight=None):
+    r"""Returns a dominating set that approximates the minimum weight node
+    dominating set.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph.
+
+    weight : string
+        The node attribute storing the weight of an node. If provided,
+        the node attribute with this key must be a number for each
+        node. If not provided, each node is assumed to have weight one.
+
+    Returns
+    -------
+    min_weight_dominating_set : set
+        A set of nodes, the sum of whose weights is no more than `(\log
+        w(V)) w(V^*)`, where `w(V)` denotes the sum of the weights of
+        each node in the graph and `w(V^*)` denotes the sum of the
+        weights of each node in the minimum weight dominating set.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 4), (1, 4), (1, 2), (2, 3), (3, 4), (2, 5)])
+    >>> nx.approximation.min_weighted_dominating_set(G)
+    {1, 2, 4}
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is directed.
+
+    Notes
+    -----
+    This algorithm computes an approximate minimum weighted dominating
+    set for the graph `G`. The returned solution has weight `(\log
+    w(V)) w(V^*)`, where `w(V)` denotes the sum of the weights of each
+    node in the graph and `w(V^*)` denotes the sum of the weights of
+    each node in the minimum weight dominating set for the graph.
+
+    This implementation of the algorithm runs in $O(m)$ time, where $m$
+    is the number of edges in the graph.
+
+    References
+    ----------
+    .. [1] Vazirani, Vijay V.
+           *Approximation Algorithms*.
+           Springer Science & Business Media, 2001.
+
+    """
+    # The unique dominating set for the null graph is the empty set.
+    if len(G) == 0:
+        return set()
+
+    # This is the dominating set that will eventually be returned.
+    dom_set = set()
+
+    def _cost(node_and_neighborhood):
+        """Returns the cost-effectiveness of greedily choosing the given
+        node.
+
+        `node_and_neighborhood` is a two-tuple comprising a node and its
+        closed neighborhood.
+
+        """
+        v, neighborhood = node_and_neighborhood
+        return G.nodes[v].get(weight, 1) / len(neighborhood - dom_set)
+
+    # This is a set of all vertices not already covered by the
+    # dominating set.
+    vertices = set(G)
+    # This is a dictionary mapping each node to the closed neighborhood
+    # of that node.
+    neighborhoods = {v: {v} | set(G[v]) for v in G}
+
+    # Continue until all vertices are adjacent to some node in the
+    # dominating set.
+    while vertices:
+        # Find the most cost-effective node to add, along with its
+        # closed neighborhood.
+        dom_node, min_set = min(neighborhoods.items(), key=_cost)
+        # Add the node to the dominating set and reduce the remaining
+        # set of nodes to cover.
+        dom_set.add(dom_node)
+        del neighborhoods[dom_node]
+        vertices -= min_set
+
+    return dom_set
+
+
+@nx._dispatchable
+def min_edge_dominating_set(G):
+    r"""Returns minimum cardinality edge dominating set.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+      Undirected graph
+
+    Returns
+    -------
+    min_edge_dominating_set : set
+      Returns a set of dominating edges whose size is no more than 2 * OPT.
+
+    Examples
+    --------
+    >>> G = nx.petersen_graph()
+    >>> nx.approximation.min_edge_dominating_set(G)
+    {(0, 1), (4, 9), (6, 8), (5, 7), (2, 3)}
+
+    Raises
+    ------
+    ValueError
+        If the input graph `G` is empty.
+
+    Notes
+    -----
+    The algorithm computes an approximate solution to the edge dominating set
+    problem. The result is no more than 2 * OPT in terms of size of the set.
+    Runtime of the algorithm is $O(|E|)$.
+    """
+    if not G:
+        raise ValueError("Expected non-empty NetworkX graph!")
+    return maximal_matching(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/kcomponents.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/kcomponents.py
new file mode 100644
index 00000000..f726a4e6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/kcomponents.py
@@ -0,0 +1,369 @@
+"""Fast approximation for k-component structure"""
+
+import itertools
+from collections import defaultdict
+from collections.abc import Mapping
+from functools import cached_property
+
+import networkx as nx
+from networkx.algorithms.approximation import local_node_connectivity
+from networkx.exception import NetworkXError
+from networkx.utils import not_implemented_for
+
+__all__ = ["k_components"]
+
+
+@not_implemented_for("directed")
+@nx._dispatchable(name="approximate_k_components")
+def k_components(G, min_density=0.95):
+    r"""Returns the approximate k-component structure of a graph G.
+
+    A `k`-component is a maximal subgraph of a graph G that has, at least,
+    node connectivity `k`: we need to remove at least `k` nodes to break it
+    into more components. `k`-components have an inherent hierarchical
+    structure because they are nested in terms of connectivity: a connected
+    graph can contain several 2-components, each of which can contain
+    one or more 3-components, and so forth.
+
+    This implementation is based on the fast heuristics to approximate
+    the `k`-component structure of a graph [1]_. Which, in turn, it is based on
+    a fast approximation algorithm for finding good lower bounds of the number
+    of node independent paths between two nodes [2]_.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph
+
+    min_density : Float
+        Density relaxation threshold. Default value 0.95
+
+    Returns
+    -------
+    k_components : dict
+        Dictionary with connectivity level `k` as key and a list of
+        sets of nodes that form a k-component of level `k` as values.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is directed.
+
+    Examples
+    --------
+    >>> # Petersen graph has 10 nodes and it is triconnected, thus all
+    >>> # nodes are in a single component on all three connectivity levels
+    >>> from networkx.algorithms import approximation as apxa
+    >>> G = nx.petersen_graph()
+    >>> k_components = apxa.k_components(G)
+
+    Notes
+    -----
+    The logic of the approximation algorithm for computing the `k`-component
+    structure [1]_ is based on repeatedly applying simple and fast algorithms
+    for `k`-cores and biconnected components in order to narrow down the
+    number of pairs of nodes over which we have to compute White and Newman's
+    approximation algorithm for finding node independent paths [2]_. More
+    formally, this algorithm is based on Whitney's theorem, which states
+    an inclusion relation among node connectivity, edge connectivity, and
+    minimum degree for any graph G. This theorem implies that every
+    `k`-component is nested inside a `k`-edge-component, which in turn,
+    is contained in a `k`-core. Thus, this algorithm computes node independent
+    paths among pairs of nodes in each biconnected part of each `k`-core,
+    and repeats this procedure for each `k` from 3 to the maximal core number
+    of a node in the input graph.
+
+    Because, in practice, many nodes of the core of level `k` inside a
+    bicomponent actually are part of a component of level k, the auxiliary
+    graph needed for the algorithm is likely to be very dense. Thus, we use
+    a complement graph data structure (see `AntiGraph`) to save memory.
+    AntiGraph only stores information of the edges that are *not* present
+    in the actual auxiliary graph. When applying algorithms to this
+    complement graph data structure, it behaves as if it were the dense
+    version.
+
+    See also
+    --------
+    k_components
+
+    References
+    ----------
+    .. [1]  Torrents, J. and F. Ferraro (2015) Structural Cohesion:
+            Visualization and Heuristics for Fast Computation.
+            https://arxiv.org/pdf/1503.04476v1
+
+    .. [2]  White, Douglas R., and Mark Newman (2001) A Fast Algorithm for
+            Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
+            https://www.santafe.edu/research/results/working-papers/fast-approximation-algorithms-for-finding-node-ind
+
+    .. [3]  Moody, J. and D. White (2003). Social cohesion and embeddedness:
+            A hierarchical conception of social groups.
+            American Sociological Review 68(1), 103--28.
+            https://doi.org/10.2307/3088904
+
+    """
+    # Dictionary with connectivity level (k) as keys and a list of
+    # sets of nodes that form a k-component as values
+    k_components = defaultdict(list)
+    # make a few functions local for speed
+    node_connectivity = local_node_connectivity
+    k_core = nx.k_core
+    core_number = nx.core_number
+    biconnected_components = nx.biconnected_components
+    combinations = itertools.combinations
+    # Exact solution for k = {1,2}
+    # There is a linear time algorithm for triconnectivity, if we had an
+    # implementation available we could start from k = 4.
+    for component in nx.connected_components(G):
+        # isolated nodes have connectivity 0
+        comp = set(component)
+        if len(comp) > 1:
+            k_components[1].append(comp)
+    for bicomponent in nx.biconnected_components(G):
+        # avoid considering dyads as bicomponents
+        bicomp = set(bicomponent)
+        if len(bicomp) > 2:
+            k_components[2].append(bicomp)
+    # There is no k-component of k > maximum core number
+    # \kappa(G) <= \lambda(G) <= \delta(G)
+    g_cnumber = core_number(G)
+    max_core = max(g_cnumber.values())
+    for k in range(3, max_core + 1):
+        C = k_core(G, k, core_number=g_cnumber)
+        for nodes in biconnected_components(C):
+            # Build a subgraph SG induced by the nodes that are part of
+            # each biconnected component of the k-core subgraph C.
+            if len(nodes) < k:
+                continue
+            SG = G.subgraph(nodes)
+            # Build auxiliary graph
+            H = _AntiGraph()
+            H.add_nodes_from(SG.nodes())
+            for u, v in combinations(SG, 2):
+                K = node_connectivity(SG, u, v, cutoff=k)
+                if k > K:
+                    H.add_edge(u, v)
+            for h_nodes in biconnected_components(H):
+                if len(h_nodes) <= k:
+                    continue
+                SH = H.subgraph(h_nodes)
+                for Gc in _cliques_heuristic(SG, SH, k, min_density):
+                    for k_nodes in biconnected_components(Gc):
+                        Gk = nx.k_core(SG.subgraph(k_nodes), k)
+                        if len(Gk) <= k:
+                            continue
+                        k_components[k].append(set(Gk))
+    return k_components
+
+
+def _cliques_heuristic(G, H, k, min_density):
+    h_cnumber = nx.core_number(H)
+    for i, c_value in enumerate(sorted(set(h_cnumber.values()), reverse=True)):
+        cands = {n for n, c in h_cnumber.items() if c == c_value}
+        # Skip checking for overlap for the highest core value
+        if i == 0:
+            overlap = False
+        else:
+            overlap = set.intersection(
+                *[{x for x in H[n] if x not in cands} for n in cands]
+            )
+        if overlap and len(overlap) < k:
+            SH = H.subgraph(cands | overlap)
+        else:
+            SH = H.subgraph(cands)
+        sh_cnumber = nx.core_number(SH)
+        SG = nx.k_core(G.subgraph(SH), k)
+        while not (_same(sh_cnumber) and nx.density(SH) >= min_density):
+            # This subgraph must be writable => .copy()
+            SH = H.subgraph(SG).copy()
+            if len(SH) <= k:
+                break
+            sh_cnumber = nx.core_number(SH)
+            sh_deg = dict(SH.degree())
+            min_deg = min(sh_deg.values())
+            SH.remove_nodes_from(n for n, d in sh_deg.items() if d == min_deg)
+            SG = nx.k_core(G.subgraph(SH), k)
+        else:
+            yield SG
+
+
+def _same(measure, tol=0):
+    vals = set(measure.values())
+    if (max(vals) - min(vals)) <= tol:
+        return True
+    return False
+
+
+class _AntiGraph(nx.Graph):
+    """
+    Class for complement graphs.
+
+    The main goal is to be able to work with big and dense graphs with
+    a low memory footprint.
+
+    In this class you add the edges that *do not exist* in the dense graph,
+    the report methods of the class return the neighbors, the edges and
+    the degree as if it was the dense graph. Thus it's possible to use
+    an instance of this class with some of NetworkX functions. In this
+    case we only use k-core, connected_components, and biconnected_components.
+    """
+
+    all_edge_dict = {"weight": 1}
+
+    def single_edge_dict(self):
+        return self.all_edge_dict
+
+    edge_attr_dict_factory = single_edge_dict  # type: ignore[assignment]
+
+    def __getitem__(self, n):
+        """Returns a dict of neighbors of node n in the dense graph.
+
+        Parameters
+        ----------
+        n : node
+           A node in the graph.
+
+        Returns
+        -------
+        adj_dict : dictionary
+           The adjacency dictionary for nodes connected to n.
+
+        """
+        all_edge_dict = self.all_edge_dict
+        return {
+            node: all_edge_dict for node in set(self._adj) - set(self._adj[n]) - {n}
+        }
+
+    def neighbors(self, n):
+        """Returns an iterator over all neighbors of node n in the
+        dense graph.
+        """
+        try:
+            return iter(set(self._adj) - set(self._adj[n]) - {n})
+        except KeyError as err:
+            raise NetworkXError(f"The node {n} is not in the graph.") from err
+
+    class AntiAtlasView(Mapping):
+        """An adjacency inner dict for AntiGraph"""
+
+        def __init__(self, graph, node):
+            self._graph = graph
+            self._atlas = graph._adj[node]
+            self._node = node
+
+        def __len__(self):
+            return len(self._graph) - len(self._atlas) - 1
+
+        def __iter__(self):
+            return (n for n in self._graph if n not in self._atlas and n != self._node)
+
+        def __getitem__(self, nbr):
+            nbrs = set(self._graph._adj) - set(self._atlas) - {self._node}
+            if nbr in nbrs:
+                return self._graph.all_edge_dict
+            raise KeyError(nbr)
+
+    class AntiAdjacencyView(AntiAtlasView):
+        """An adjacency outer dict for AntiGraph"""
+
+        def __init__(self, graph):
+            self._graph = graph
+            self._atlas = graph._adj
+
+        def __len__(self):
+            return len(self._atlas)
+
+        def __iter__(self):
+            return iter(self._graph)
+
+        def __getitem__(self, node):
+            if node not in self._graph:
+                raise KeyError(node)
+            return self._graph.AntiAtlasView(self._graph, node)
+
+    @cached_property
+    def adj(self):
+        return self.AntiAdjacencyView(self)
+
+    def subgraph(self, nodes):
+        """This subgraph method returns a full AntiGraph. Not a View"""
+        nodes = set(nodes)
+        G = _AntiGraph()
+        G.add_nodes_from(nodes)
+        for n in G:
+            Gnbrs = G.adjlist_inner_dict_factory()
+            G._adj[n] = Gnbrs
+            for nbr, d in self._adj[n].items():
+                if nbr in G._adj:
+                    Gnbrs[nbr] = d
+                    G._adj[nbr][n] = d
+        G.graph = self.graph
+        return G
+
+    class AntiDegreeView(nx.reportviews.DegreeView):
+        def __iter__(self):
+            all_nodes = set(self._succ)
+            for n in self._nodes:
+                nbrs = all_nodes - set(self._succ[n]) - {n}
+                yield (n, len(nbrs))
+
+        def __getitem__(self, n):
+            nbrs = set(self._succ) - set(self._succ[n]) - {n}
+            # AntiGraph is a ThinGraph so all edges have weight 1
+            return len(nbrs) + (n in nbrs)
+
+    @cached_property
+    def degree(self):
+        """Returns an iterator for (node, degree) and degree for single node.
+
+        The node degree is the number of edges adjacent to the node.
+
+        Parameters
+        ----------
+        nbunch : iterable container, optional (default=all nodes)
+            A container of nodes.  The container will be iterated
+            through once.
+
+        weight : string or None, optional (default=None)
+           The edge attribute that holds the numerical value used
+           as a weight.  If None, then each edge has weight 1.
+           The degree is the sum of the edge weights adjacent to the node.
+
+        Returns
+        -------
+        deg:
+            Degree of the node, if a single node is passed as argument.
+        nd_iter : an iterator
+            The iterator returns two-tuples of (node, degree).
+
+        See Also
+        --------
+        degree
+
+        Examples
+        --------
+        >>> G = nx.path_graph(4)
+        >>> G.degree(0)  # node 0 with degree 1
+        1
+        >>> list(G.degree([0, 1]))
+        [(0, 1), (1, 2)]
+
+        """
+        return self.AntiDegreeView(self)
+
+    def adjacency(self):
+        """Returns an iterator of (node, adjacency set) tuples for all nodes
+           in the dense graph.
+
+        This is the fastest way to look at every edge.
+        For directed graphs, only outgoing adjacencies are included.
+
+        Returns
+        -------
+        adj_iter : iterator
+           An iterator of (node, adjacency set) for all nodes in
+           the graph.
+
+        """
+        for n in self._adj:
+            yield (n, set(self._adj) - set(self._adj[n]) - {n})
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/matching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/matching.py
new file mode 100644
index 00000000..dc089194
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/matching.py
@@ -0,0 +1,44 @@
+"""
+**************
+Graph Matching
+**************
+
+Given a graph G = (V,E), a matching M in G is a set of pairwise non-adjacent
+edges; that is, no two edges share a common vertex.
+
+`Wikipedia: Matching <https://en.wikipedia.org/wiki/Matching_(graph_theory)>`_
+"""
+
+import networkx as nx
+
+__all__ = ["min_maximal_matching"]
+
+
+@nx._dispatchable
+def min_maximal_matching(G):
+    r"""Returns the minimum maximal matching of G. That is, out of all maximal
+    matchings of the graph G, the smallest is returned.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+      Undirected graph
+
+    Returns
+    -------
+    min_maximal_matching : set
+      Returns a set of edges such that no two edges share a common endpoint
+      and every edge not in the set shares some common endpoint in the set.
+      Cardinality will be 2*OPT in the worst case.
+
+    Notes
+    -----
+    The algorithm computes an approximate solution for the minimum maximal
+    cardinality matching problem. The solution is no more than 2 * OPT in size.
+    Runtime is $O(|E|)$.
+
+    References
+    ----------
+    .. [1] Vazirani, Vijay Approximation Algorithms (2001)
+    """
+    return nx.maximal_matching(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/maxcut.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/maxcut.py
new file mode 100644
index 00000000..f4e1da87
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/maxcut.py
@@ -0,0 +1,143 @@
+import networkx as nx
+from networkx.utils.decorators import not_implemented_for, py_random_state
+
+__all__ = ["randomized_partitioning", "one_exchange"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@py_random_state(1)
+@nx._dispatchable(edge_attrs="weight")
+def randomized_partitioning(G, seed=None, p=0.5, weight=None):
+    """Compute a random partitioning of the graph nodes and its cut value.
+
+    A partitioning is calculated by observing each node
+    and deciding to add it to the partition with probability `p`,
+    returning a random cut and its corresponding value (the
+    sum of weights of edges connecting different partitions).
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    p : scalar
+        Probability for each node to be part of the first partition.
+        Should be in [0,1]
+
+    weight : object
+        Edge attribute key to use as weight. If not specified, edges
+        have weight one.
+
+    Returns
+    -------
+    cut_size : scalar
+        Value of the random cut.
+
+    partition : pair of node sets
+        A partitioning of the nodes that defines the random cut.
+
+    Examples
+    --------
+    >>> G = nx.complete_graph(5)
+    >>> cut_size, partition = nx.approximation.randomized_partitioning(G, seed=1)
+    >>> cut_size
+    6
+    >>> partition
+    ({0, 3, 4}, {1, 2})
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the graph is directed or is a multigraph.
+    """
+    cut = {node for node in G.nodes() if seed.random() < p}
+    cut_size = nx.algorithms.cut_size(G, cut, weight=weight)
+    partition = (cut, G.nodes - cut)
+    return cut_size, partition
+
+
+def _swap_node_partition(cut, node):
+    return cut - {node} if node in cut else cut.union({node})
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@py_random_state(2)
+@nx._dispatchable(edge_attrs="weight")
+def one_exchange(G, initial_cut=None, seed=None, weight=None):
+    """Compute a partitioning of the graph's nodes and the corresponding cut value.
+
+    Use a greedy one exchange strategy to find a locally maximal cut
+    and its value, it works by finding the best node (one that gives
+    the highest gain to the cut value) to add to the current cut
+    and repeats this process until no improvement can be made.
+
+    Parameters
+    ----------
+    G : networkx Graph
+        Graph to find a maximum cut for.
+
+    initial_cut : set
+        Cut to use as a starting point. If not supplied the algorithm
+        starts with an empty cut.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    weight : object
+        Edge attribute key to use as weight. If not specified, edges
+        have weight one.
+
+    Returns
+    -------
+    cut_value : scalar
+        Value of the maximum cut.
+
+    partition : pair of node sets
+        A partitioning of the nodes that defines a maximum cut.
+
+    Examples
+    --------
+    >>> G = nx.complete_graph(5)
+    >>> curr_cut_size, partition = nx.approximation.one_exchange(G, seed=1)
+    >>> curr_cut_size
+    6
+    >>> partition
+    ({0, 2}, {1, 3, 4})
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the graph is directed or is a multigraph.
+    """
+    if initial_cut is None:
+        initial_cut = set()
+    cut = set(initial_cut)
+    current_cut_size = nx.algorithms.cut_size(G, cut, weight=weight)
+    while True:
+        nodes = list(G.nodes())
+        # Shuffling the nodes ensures random tie-breaks in the following call to max
+        seed.shuffle(nodes)
+        best_node_to_swap = max(
+            nodes,
+            key=lambda v: nx.algorithms.cut_size(
+                G, _swap_node_partition(cut, v), weight=weight
+            ),
+            default=None,
+        )
+        potential_cut = _swap_node_partition(cut, best_node_to_swap)
+        potential_cut_size = nx.algorithms.cut_size(G, potential_cut, weight=weight)
+
+        if potential_cut_size > current_cut_size:
+            cut = potential_cut
+            current_cut_size = potential_cut_size
+        else:
+            break
+
+    partition = (cut, G.nodes - cut)
+    return current_cut_size, partition
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/ramsey.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/ramsey.py
new file mode 100644
index 00000000..0552e4a9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/ramsey.py
@@ -0,0 +1,53 @@
+"""
+Ramsey numbers.
+"""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+from ...utils import arbitrary_element
+
+__all__ = ["ramsey_R2"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def ramsey_R2(G):
+    r"""Compute the largest clique and largest independent set in `G`.
+
+    This can be used to estimate bounds for the 2-color
+    Ramsey number `R(2;s,t)` for `G`.
+
+    This is a recursive implementation which could run into trouble
+    for large recursions. Note that self-loop edges are ignored.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph
+
+    Returns
+    -------
+    max_pair : (set, set) tuple
+        Maximum clique, Maximum independent set.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the graph is directed or is a multigraph.
+    """
+    if not G:
+        return set(), set()
+
+    node = arbitrary_element(G)
+    nbrs = (nbr for nbr in nx.all_neighbors(G, node) if nbr != node)
+    nnbrs = nx.non_neighbors(G, node)
+    c_1, i_1 = ramsey_R2(G.subgraph(nbrs).copy())
+    c_2, i_2 = ramsey_R2(G.subgraph(nnbrs).copy())
+
+    c_1.add(node)
+    i_2.add(node)
+    # Choose the larger of the two cliques and the larger of the two
+    # independent sets, according to cardinality.
+    return max(c_1, c_2, key=len), max(i_1, i_2, key=len)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/steinertree.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/steinertree.py
new file mode 100644
index 00000000..f4840eff
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/steinertree.py
@@ -0,0 +1,231 @@
+from itertools import chain
+
+import networkx as nx
+from networkx.utils import not_implemented_for, pairwise
+
+__all__ = ["metric_closure", "steiner_tree"]
+
+
+@not_implemented_for("directed")
+@nx._dispatchable(edge_attrs="weight", returns_graph=True)
+def metric_closure(G, weight="weight"):
+    """Return the metric closure of a graph.
+
+    The metric closure of a graph *G* is the complete graph in which each edge
+    is weighted by the shortest path distance between the nodes in *G* .
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    Returns
+    -------
+    NetworkX graph
+        Metric closure of the graph `G`.
+
+    """
+    M = nx.Graph()
+
+    Gnodes = set(G)
+
+    # check for connected graph while processing first node
+    all_paths_iter = nx.all_pairs_dijkstra(G, weight=weight)
+    u, (distance, path) = next(all_paths_iter)
+    if Gnodes - set(distance):
+        msg = "G is not a connected graph. metric_closure is not defined."
+        raise nx.NetworkXError(msg)
+    Gnodes.remove(u)
+    for v in Gnodes:
+        M.add_edge(u, v, distance=distance[v], path=path[v])
+
+    # first node done -- now process the rest
+    for u, (distance, path) in all_paths_iter:
+        Gnodes.remove(u)
+        for v in Gnodes:
+            M.add_edge(u, v, distance=distance[v], path=path[v])
+
+    return M
+
+
+def _mehlhorn_steiner_tree(G, terminal_nodes, weight):
+    paths = nx.multi_source_dijkstra_path(G, terminal_nodes)
+
+    d_1 = {}
+    s = {}
+    for v in G.nodes():
+        s[v] = paths[v][0]
+        d_1[(v, s[v])] = len(paths[v]) - 1
+
+    # G1-G4 names match those from the Mehlhorn 1988 paper.
+    G_1_prime = nx.Graph()
+    for u, v, data in G.edges(data=True):
+        su, sv = s[u], s[v]
+        weight_here = d_1[(u, su)] + data.get(weight, 1) + d_1[(v, sv)]
+        if not G_1_prime.has_edge(su, sv):
+            G_1_prime.add_edge(su, sv, weight=weight_here)
+        else:
+            new_weight = min(weight_here, G_1_prime[su][sv]["weight"])
+            G_1_prime.add_edge(su, sv, weight=new_weight)
+
+    G_2 = nx.minimum_spanning_edges(G_1_prime, data=True)
+
+    G_3 = nx.Graph()
+    for u, v, d in G_2:
+        path = nx.shortest_path(G, u, v, weight)
+        for n1, n2 in pairwise(path):
+            G_3.add_edge(n1, n2)
+
+    G_3_mst = list(nx.minimum_spanning_edges(G_3, data=False))
+    if G.is_multigraph():
+        G_3_mst = (
+            (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight])) for u, v in G_3_mst
+        )
+    G_4 = G.edge_subgraph(G_3_mst).copy()
+    _remove_nonterminal_leaves(G_4, terminal_nodes)
+    return G_4.edges()
+
+
+def _kou_steiner_tree(G, terminal_nodes, weight):
+    # H is the subgraph induced by terminal_nodes in the metric closure M of G.
+    M = metric_closure(G, weight=weight)
+    H = M.subgraph(terminal_nodes)
+
+    # Use the 'distance' attribute of each edge provided by M.
+    mst_edges = nx.minimum_spanning_edges(H, weight="distance", data=True)
+
+    # Create an iterator over each edge in each shortest path; repeats are okay
+    mst_all_edges = chain.from_iterable(pairwise(d["path"]) for u, v, d in mst_edges)
+    if G.is_multigraph():
+        mst_all_edges = (
+            (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight]))
+            for u, v in mst_all_edges
+        )
+
+    # Find the MST again, over this new set of edges
+    G_S = G.edge_subgraph(mst_all_edges)
+    T_S = nx.minimum_spanning_edges(G_S, weight="weight", data=False)
+
+    # Leaf nodes that are not terminal might still remain; remove them here
+    T_H = G.edge_subgraph(T_S).copy()
+    _remove_nonterminal_leaves(T_H, terminal_nodes)
+
+    return T_H.edges()
+
+
+def _remove_nonterminal_leaves(G, terminals):
+    terminal_set = set(terminals)
+    leaves = {n for n in G if len(set(G[n]) - {n}) == 1}
+    nonterminal_leaves = leaves - terminal_set
+
+    while nonterminal_leaves:
+        # Removing a node may create new non-terminal leaves, so we limit
+        # search for candidate non-terminal nodes to neighbors of current
+        # non-terminal nodes
+        candidate_leaves = set.union(*(set(G[n]) for n in nonterminal_leaves))
+        candidate_leaves -= nonterminal_leaves | terminal_set
+        # Remove current set of non-terminal nodes
+        G.remove_nodes_from(nonterminal_leaves)
+        # Find any new non-terminal nodes from the set of candidates
+        leaves = {n for n in candidate_leaves if len(set(G[n]) - {n}) == 1}
+        nonterminal_leaves = leaves - terminal_set
+
+
+ALGORITHMS = {
+    "kou": _kou_steiner_tree,
+    "mehlhorn": _mehlhorn_steiner_tree,
+}
+
+
+@not_implemented_for("directed")
+@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
+def steiner_tree(G, terminal_nodes, weight="weight", method=None):
+    r"""Return an approximation to the minimum Steiner tree of a graph.
+
+    The minimum Steiner tree of `G` w.r.t a set of `terminal_nodes` (also *S*)
+    is a tree within `G` that spans those nodes and has minimum size (sum of
+    edge weights) among all such trees.
+
+    The approximation algorithm is specified with the `method` keyword
+    argument. All three available algorithms produce a tree whose weight is
+    within a ``(2 - (2 / l))`` factor of the weight of the optimal Steiner tree,
+    where ``l`` is the minimum number of leaf nodes across all possible Steiner
+    trees.
+
+    * ``"kou"`` [2]_ (runtime $O(|S| |V|^2)$) computes the minimum spanning tree of
+      the subgraph of the metric closure of *G* induced by the terminal nodes,
+      where the metric closure of *G* is the complete graph in which each edge is
+      weighted by the shortest path distance between the nodes in *G*.
+
+    * ``"mehlhorn"`` [3]_ (runtime $O(|E|+|V|\log|V|)$) modifies Kou et al.'s
+      algorithm, beginning by finding the closest terminal node for each
+      non-terminal. This data is used to create a complete graph containing only
+      the terminal nodes, in which each edge is weighted with the shortest path
+      distance between them. The algorithm then proceeds in the same way as Kou
+      et al..
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    terminal_nodes : list
+         A list of terminal nodes for which minimum steiner tree is
+         to be found.
+
+    weight : string (default = 'weight')
+        Use the edge attribute specified by this string as the edge weight.
+        Any edge attribute not present defaults to 1.
+
+    method : string, optional (default = 'mehlhorn')
+        The algorithm to use to approximate the Steiner tree.
+        Supported options: 'kou', 'mehlhorn'.
+        Other inputs produce a ValueError.
+
+    Returns
+    -------
+    NetworkX graph
+        Approximation to the minimum steiner tree of `G` induced by
+        `terminal_nodes` .
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is directed.
+
+    ValueError
+        If the specified `method` is not supported.
+
+    Notes
+    -----
+    For multigraphs, the edge between two nodes with minimum weight is the
+    edge put into the Steiner tree.
+
+
+    References
+    ----------
+    .. [1] Steiner_tree_problem on Wikipedia.
+           https://en.wikipedia.org/wiki/Steiner_tree_problem
+    .. [2] Kou, L., G. Markowsky, and L. Berman. 1981.
+           ‘A Fast Algorithm for Steiner Trees’.
+           Acta Informatica 15 (2): 141–45.
+           https://doi.org/10.1007/BF00288961.
+    .. [3] Mehlhorn, Kurt. 1988.
+           ‘A Faster Approximation Algorithm for the Steiner Problem in Graphs’.
+           Information Processing Letters 27 (3): 125–28.
+           https://doi.org/10.1016/0020-0190(88)90066-X.
+    """
+    if method is None:
+        method = "mehlhorn"
+
+    try:
+        algo = ALGORITHMS[method]
+    except KeyError as e:
+        raise ValueError(f"{method} is not a valid choice for an algorithm.") from e
+
+    edges = algo(G, terminal_nodes, weight)
+    # For multigraph we should add the minimal weight edge keys
+    if G.is_multigraph():
+        edges = (
+            (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight])) for u, v in edges
+        )
+    T = G.edge_subgraph(edges)
+    return T
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py
new file mode 100644
index 00000000..5eab5c1e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py
@@ -0,0 +1,41 @@
+import networkx as nx
+from networkx.algorithms.approximation import average_clustering
+
+# This approximation has to be exact in regular graphs
+# with no triangles or with all possible triangles.
+
+
+def test_petersen():
+    # Actual coefficient is 0
+    G = nx.petersen_graph()
+    assert average_clustering(G, trials=len(G) // 2) == nx.average_clustering(G)
+
+
+def test_petersen_seed():
+    # Actual coefficient is 0
+    G = nx.petersen_graph()
+    assert average_clustering(G, trials=len(G) // 2, seed=1) == nx.average_clustering(G)
+
+
+def test_tetrahedral():
+    # Actual coefficient is 1
+    G = nx.tetrahedral_graph()
+    assert average_clustering(G, trials=len(G) // 2) == nx.average_clustering(G)
+
+
+def test_dodecahedral():
+    # Actual coefficient is 0
+    G = nx.dodecahedral_graph()
+    assert average_clustering(G, trials=len(G) // 2) == nx.average_clustering(G)
+
+
+def test_empty():
+    G = nx.empty_graph(5)
+    assert average_clustering(G, trials=len(G) // 2) == 0
+
+
+def test_complete():
+    G = nx.complete_graph(5)
+    assert average_clustering(G, trials=len(G) // 2) == 1
+    G = nx.complete_graph(7)
+    assert average_clustering(G, trials=len(G) // 2) == 1
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_clique.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_clique.py
new file mode 100644
index 00000000..b40dcb90
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_clique.py
@@ -0,0 +1,112 @@
+"""Unit tests for the :mod:`networkx.algorithms.approximation.clique` module."""
+
+import networkx as nx
+from networkx.algorithms.approximation import (
+    clique_removal,
+    large_clique_size,
+    max_clique,
+    maximum_independent_set,
+)
+
+
+def is_independent_set(G, nodes):
+    """Returns True if and only if `nodes` is an independent set in `G`.
+
+    `G` is a NetworkX graph. `nodes` is an iterable of nodes in
+    `G`.
+
+    """
+    return G.subgraph(nodes).number_of_edges() == 0
+
+
+def is_clique(G, nodes):
+    """Returns True if and only if `nodes` is a clique
+    in `G`.
+
+    `G` is an undirected simple graph. `nodes` is an iterable of
+    nodes in `G`.
+
+    """
+    H = G.subgraph(nodes)
+    n = len(H)
+    return H.number_of_edges() == n * (n - 1) // 2
+
+
+class TestCliqueRemoval:
+    """Unit tests for the
+    :func:`~networkx.algorithms.approximation.clique_removal` function.
+
+    """
+
+    def test_trivial_graph(self):
+        G = nx.trivial_graph()
+        independent_set, cliques = clique_removal(G)
+        assert is_independent_set(G, independent_set)
+        assert all(is_clique(G, clique) for clique in cliques)
+        # In fact, we should only have 1-cliques, that is, singleton nodes.
+        assert all(len(clique) == 1 for clique in cliques)
+
+    def test_complete_graph(self):
+        G = nx.complete_graph(10)
+        independent_set, cliques = clique_removal(G)
+        assert is_independent_set(G, independent_set)
+        assert all(is_clique(G, clique) for clique in cliques)
+
+    def test_barbell_graph(self):
+        G = nx.barbell_graph(10, 5)
+        independent_set, cliques = clique_removal(G)
+        assert is_independent_set(G, independent_set)
+        assert all(is_clique(G, clique) for clique in cliques)
+
+
+class TestMaxClique:
+    """Unit tests for the :func:`networkx.algorithms.approximation.max_clique`
+    function.
+
+    """
+
+    def test_null_graph(self):
+        G = nx.null_graph()
+        assert len(max_clique(G)) == 0
+
+    def test_complete_graph(self):
+        graph = nx.complete_graph(30)
+        # this should return the entire graph
+        mc = max_clique(graph)
+        assert 30 == len(mc)
+
+    def test_maximal_by_cardinality(self):
+        """Tests that the maximal clique is computed according to maximum
+        cardinality of the sets.
+
+        For more information, see pull request #1531.
+
+        """
+        G = nx.complete_graph(5)
+        G.add_edge(4, 5)
+        clique = max_clique(G)
+        assert len(clique) > 1
+
+        G = nx.lollipop_graph(30, 2)
+        clique = max_clique(G)
+        assert len(clique) > 2
+
+
+def test_large_clique_size():
+    G = nx.complete_graph(9)
+    nx.add_cycle(G, [9, 10, 11])
+    G.add_edge(8, 9)
+    G.add_edge(1, 12)
+    G.add_node(13)
+
+    assert large_clique_size(G) == 9
+    G.remove_node(5)
+    assert large_clique_size(G) == 8
+    G.remove_edge(2, 3)
+    assert large_clique_size(G) == 7
+
+
+def test_independent_set():
+    # smoke test
+    G = nx.Graph()
+    assert len(maximum_independent_set(G)) == 0
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_connectivity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_connectivity.py
new file mode 100644
index 00000000..887db20b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_connectivity.py
@@ -0,0 +1,199 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms import approximation as approx
+
+
+def test_global_node_connectivity():
+    # Figure 1 chapter on Connectivity
+    G = nx.Graph()
+    G.add_edges_from(
+        [
+            (1, 2),
+            (1, 3),
+            (1, 4),
+            (1, 5),
+            (2, 3),
+            (2, 6),
+            (3, 4),
+            (3, 6),
+            (4, 6),
+            (4, 7),
+            (5, 7),
+            (6, 8),
+            (6, 9),
+            (7, 8),
+            (7, 10),
+            (8, 11),
+            (9, 10),
+            (9, 11),
+            (10, 11),
+        ]
+    )
+    assert 2 == approx.local_node_connectivity(G, 1, 11)
+    assert 2 == approx.node_connectivity(G)
+    assert 2 == approx.node_connectivity(G, 1, 11)
+
+
+def test_white_harary1():
+    # Figure 1b white and harary (2001)
+    # A graph with high adhesion (edge connectivity) and low cohesion
+    # (node connectivity)
+    G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4))
+    G.remove_node(7)
+    for i in range(4, 7):
+        G.add_edge(0, i)
+    G = nx.disjoint_union(G, nx.complete_graph(4))
+    G.remove_node(G.order() - 1)
+    for i in range(7, 10):
+        G.add_edge(0, i)
+    assert 1 == approx.node_connectivity(G)
+
+
+def test_complete_graphs():
+    for n in range(5, 25, 5):
+        G = nx.complete_graph(n)
+        assert n - 1 == approx.node_connectivity(G)
+        assert n - 1 == approx.node_connectivity(G, 0, 3)
+
+
+def test_empty_graphs():
+    for k in range(5, 25, 5):
+        G = nx.empty_graph(k)
+        assert 0 == approx.node_connectivity(G)
+        assert 0 == approx.node_connectivity(G, 0, 3)
+
+
+def test_petersen():
+    G = nx.petersen_graph()
+    assert 3 == approx.node_connectivity(G)
+    assert 3 == approx.node_connectivity(G, 0, 5)
+
+
+# Approximation fails with tutte graph
+# def test_tutte():
+#    G = nx.tutte_graph()
+#    assert_equal(3, approx.node_connectivity(G))
+
+
+def test_dodecahedral():
+    G = nx.dodecahedral_graph()
+    assert 3 == approx.node_connectivity(G)
+    assert 3 == approx.node_connectivity(G, 0, 5)
+
+
+def test_octahedral():
+    G = nx.octahedral_graph()
+    assert 4 == approx.node_connectivity(G)
+    assert 4 == approx.node_connectivity(G, 0, 5)
+
+
+# Approximation can fail with icosahedral graph depending
+# on iteration order.
+# def test_icosahedral():
+#    G=nx.icosahedral_graph()
+#    assert_equal(5, approx.node_connectivity(G))
+#    assert_equal(5, approx.node_connectivity(G, 0, 5))
+
+
+def test_only_source():
+    G = nx.complete_graph(5)
+    pytest.raises(nx.NetworkXError, approx.node_connectivity, G, s=0)
+
+
+def test_only_target():
+    G = nx.complete_graph(5)
+    pytest.raises(nx.NetworkXError, approx.node_connectivity, G, t=0)
+
+
+def test_missing_source():
+    G = nx.path_graph(4)
+    pytest.raises(nx.NetworkXError, approx.node_connectivity, G, 10, 1)
+
+
+def test_missing_target():
+    G = nx.path_graph(4)
+    pytest.raises(nx.NetworkXError, approx.node_connectivity, G, 1, 10)
+
+
+def test_source_equals_target():
+    G = nx.complete_graph(5)
+    pytest.raises(nx.NetworkXError, approx.local_node_connectivity, G, 0, 0)
+
+
+def test_directed_node_connectivity():
+    G = nx.cycle_graph(10, create_using=nx.DiGraph())  # only one direction
+    D = nx.cycle_graph(10).to_directed()  # 2 reciprocal edges
+    assert 1 == approx.node_connectivity(G)
+    assert 1 == approx.node_connectivity(G, 1, 4)
+    assert 2 == approx.node_connectivity(D)
+    assert 2 == approx.node_connectivity(D, 1, 4)
+
+
+class TestAllPairsNodeConnectivityApprox:
+    @classmethod
+    def setup_class(cls):
+        cls.path = nx.path_graph(7)
+        cls.directed_path = nx.path_graph(7, create_using=nx.DiGraph())
+        cls.cycle = nx.cycle_graph(7)
+        cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph())
+        cls.gnp = nx.gnp_random_graph(30, 0.1)
+        cls.directed_gnp = nx.gnp_random_graph(30, 0.1, directed=True)
+        cls.K20 = nx.complete_graph(20)
+        cls.K10 = nx.complete_graph(10)
+        cls.K5 = nx.complete_graph(5)
+        cls.G_list = [
+            cls.path,
+            cls.directed_path,
+            cls.cycle,
+            cls.directed_cycle,
+            cls.gnp,
+            cls.directed_gnp,
+            cls.K10,
+            cls.K5,
+            cls.K20,
+        ]
+
+    def test_cycles(self):
+        K_undir = approx.all_pairs_node_connectivity(self.cycle)
+        for source in K_undir:
+            for target, k in K_undir[source].items():
+                assert k == 2
+        K_dir = approx.all_pairs_node_connectivity(self.directed_cycle)
+        for source in K_dir:
+            for target, k in K_dir[source].items():
+                assert k == 1
+
+    def test_complete(self):
+        for G in [self.K10, self.K5, self.K20]:
+            K = approx.all_pairs_node_connectivity(G)
+            for source in K:
+                for target, k in K[source].items():
+                    assert k == len(G) - 1
+
+    def test_paths(self):
+        K_undir = approx.all_pairs_node_connectivity(self.path)
+        for source in K_undir:
+            for target, k in K_undir[source].items():
+                assert k == 1
+        K_dir = approx.all_pairs_node_connectivity(self.directed_path)
+        for source in K_dir:
+            for target, k in K_dir[source].items():
+                if source < target:
+                    assert k == 1
+                else:
+                    assert k == 0
+
+    def test_cutoff(self):
+        for G in [self.K10, self.K5, self.K20]:
+            for mp in [2, 3, 4]:
+                paths = approx.all_pairs_node_connectivity(G, cutoff=mp)
+                for source in paths:
+                    for target, K in paths[source].items():
+                        assert K == mp
+
+    def test_all_pairs_connectivity_nbunch(self):
+        G = nx.complete_graph(5)
+        nbunch = [0, 2, 3]
+        C = approx.all_pairs_node_connectivity(G, nbunch=nbunch)
+        assert len(C) == len(nbunch)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_distance_measures.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_distance_measures.py
new file mode 100644
index 00000000..3809a8fc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_distance_measures.py
@@ -0,0 +1,59 @@
+"""Unit tests for the :mod:`networkx.algorithms.approximation.distance_measures` module."""
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms.approximation import diameter
+
+
+class TestDiameter:
+    """Unit tests for the approximate diameter function
+    :func:`~networkx.algorithms.approximation.distance_measures.diameter`.
+    """
+
+    def test_null_graph(self):
+        """Test empty graph."""
+        G = nx.null_graph()
+        with pytest.raises(
+            nx.NetworkXError, match="Expected non-empty NetworkX graph!"
+        ):
+            diameter(G)
+
+    def test_undirected_non_connected(self):
+        """Test an undirected disconnected graph."""
+        graph = nx.path_graph(10)
+        graph.remove_edge(3, 4)
+        with pytest.raises(nx.NetworkXError, match="Graph not connected."):
+            diameter(graph)
+
+    def test_directed_non_strongly_connected(self):
+        """Test a directed non strongly connected graph."""
+        graph = nx.path_graph(10, create_using=nx.DiGraph())
+        with pytest.raises(nx.NetworkXError, match="DiGraph not strongly connected."):
+            diameter(graph)
+
+    def test_complete_undirected_graph(self):
+        """Test a complete undirected graph."""
+        graph = nx.complete_graph(10)
+        assert diameter(graph) == 1
+
+    def test_complete_directed_graph(self):
+        """Test a complete directed graph."""
+        graph = nx.complete_graph(10, create_using=nx.DiGraph())
+        assert diameter(graph) == 1
+
+    def test_undirected_path_graph(self):
+        """Test an undirected path graph with 10 nodes."""
+        graph = nx.path_graph(10)
+        assert diameter(graph) == 9
+
+    def test_directed_path_graph(self):
+        """Test a directed path graph with 10 nodes."""
+        graph = nx.path_graph(10).to_directed()
+        assert diameter(graph) == 9
+
+    def test_single_node(self):
+        """Test a graph which contains just a node."""
+        graph = nx.Graph()
+        graph.add_node(1)
+        assert diameter(graph) == 0
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_dominating_set.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_dominating_set.py
new file mode 100644
index 00000000..6b90d85e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_dominating_set.py
@@ -0,0 +1,78 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms.approximation import (
+    min_edge_dominating_set,
+    min_weighted_dominating_set,
+)
+
+
+class TestMinWeightDominatingSet:
+    def test_min_weighted_dominating_set(self):
+        graph = nx.Graph()
+        graph.add_edge(1, 2)
+        graph.add_edge(1, 5)
+        graph.add_edge(2, 3)
+        graph.add_edge(2, 5)
+        graph.add_edge(3, 4)
+        graph.add_edge(3, 6)
+        graph.add_edge(5, 6)
+
+        vertices = {1, 2, 3, 4, 5, 6}
+        # due to ties, this might be hard to test tight bounds
+        dom_set = min_weighted_dominating_set(graph)
+        for vertex in vertices - dom_set:
+            neighbors = set(graph.neighbors(vertex))
+            assert len(neighbors & dom_set) > 0, "Non dominating set found!"
+
+    def test_star_graph(self):
+        """Tests that an approximate dominating set for the star graph,
+        even when the center node does not have the smallest integer
+        label, gives just the center node.
+
+        For more information, see #1527.
+
+        """
+        # Create a star graph in which the center node has the highest
+        # label instead of the lowest.
+        G = nx.star_graph(10)
+        G = nx.relabel_nodes(G, {0: 9, 9: 0})
+        assert min_weighted_dominating_set(G) == {9}
+
+    def test_null_graph(self):
+        """Tests that the unique dominating set for the null graph is an empty set"""
+        G = nx.Graph()
+        assert min_weighted_dominating_set(G) == set()
+
+    def test_min_edge_dominating_set(self):
+        graph = nx.path_graph(5)
+        dom_set = min_edge_dominating_set(graph)
+
+        # this is a crappy way to test, but good enough for now.
+        for edge in graph.edges():
+            if edge in dom_set:
+                continue
+            else:
+                u, v = edge
+                found = False
+                for dom_edge in dom_set:
+                    found |= u == dom_edge[0] or u == dom_edge[1]
+                assert found, "Non adjacent edge found!"
+
+        graph = nx.complete_graph(10)
+        dom_set = min_edge_dominating_set(graph)
+
+        # this is a crappy way to test, but good enough for now.
+        for edge in graph.edges():
+            if edge in dom_set:
+                continue
+            else:
+                u, v = edge
+                found = False
+                for dom_edge in dom_set:
+                    found |= u == dom_edge[0] or u == dom_edge[1]
+                assert found, "Non adjacent edge found!"
+
+        graph = nx.Graph()  # empty Networkx graph
+        with pytest.raises(ValueError, match="Expected non-empty NetworkX graph!"):
+            min_edge_dominating_set(graph)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py
new file mode 100644
index 00000000..65ba8021
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py
@@ -0,0 +1,303 @@
+# Test for approximation to k-components algorithm
+import pytest
+
+import networkx as nx
+from networkx.algorithms.approximation import k_components
+from networkx.algorithms.approximation.kcomponents import _AntiGraph, _same
+
+
+def build_k_number_dict(k_components):
+    k_num = {}
+    for k, comps in sorted(k_components.items()):
+        for comp in comps:
+            for node in comp:
+                k_num[node] = k
+    return k_num
+
+
+##
+# Some nice synthetic graphs
+##
+
+
+def graph_example_1():
+    G = nx.convert_node_labels_to_integers(
+        nx.grid_graph([5, 5]), label_attribute="labels"
+    )
+    rlabels = nx.get_node_attributes(G, "labels")
+    labels = {v: k for k, v in rlabels.items()}
+
+    for nodes in [
+        (labels[(0, 0)], labels[(1, 0)]),
+        (labels[(0, 4)], labels[(1, 4)]),
+        (labels[(3, 0)], labels[(4, 0)]),
+        (labels[(3, 4)], labels[(4, 4)]),
+    ]:
+        new_node = G.order() + 1
+        # Petersen graph is triconnected
+        P = nx.petersen_graph()
+        G = nx.disjoint_union(G, P)
+        # Add two edges between the grid and P
+        G.add_edge(new_node + 1, nodes[0])
+        G.add_edge(new_node, nodes[1])
+        # K5 is 4-connected
+        K = nx.complete_graph(5)
+        G = nx.disjoint_union(G, K)
+        # Add three edges between P and K5
+        G.add_edge(new_node + 2, new_node + 11)
+        G.add_edge(new_node + 3, new_node + 12)
+        G.add_edge(new_node + 4, new_node + 13)
+        # Add another K5 sharing a node
+        G = nx.disjoint_union(G, K)
+        nbrs = G[new_node + 10]
+        G.remove_node(new_node + 10)
+        for nbr in nbrs:
+            G.add_edge(new_node + 17, nbr)
+        G.add_edge(new_node + 16, new_node + 5)
+    return G
+
+
+def torrents_and_ferraro_graph():
+    G = nx.convert_node_labels_to_integers(
+        nx.grid_graph([5, 5]), label_attribute="labels"
+    )
+    rlabels = nx.get_node_attributes(G, "labels")
+    labels = {v: k for k, v in rlabels.items()}
+
+    for nodes in [(labels[(0, 4)], labels[(1, 4)]), (labels[(3, 4)], labels[(4, 4)])]:
+        new_node = G.order() + 1
+        # Petersen graph is triconnected
+        P = nx.petersen_graph()
+        G = nx.disjoint_union(G, P)
+        # Add two edges between the grid and P
+        G.add_edge(new_node + 1, nodes[0])
+        G.add_edge(new_node, nodes[1])
+        # K5 is 4-connected
+        K = nx.complete_graph(5)
+        G = nx.disjoint_union(G, K)
+        # Add three edges between P and K5
+        G.add_edge(new_node + 2, new_node + 11)
+        G.add_edge(new_node + 3, new_node + 12)
+        G.add_edge(new_node + 4, new_node + 13)
+        # Add another K5 sharing a node
+        G = nx.disjoint_union(G, K)
+        nbrs = G[new_node + 10]
+        G.remove_node(new_node + 10)
+        for nbr in nbrs:
+            G.add_edge(new_node + 17, nbr)
+        # Commenting this makes the graph not biconnected !!
+        # This stupid mistake made one reviewer very angry :P
+        G.add_edge(new_node + 16, new_node + 8)
+
+    for nodes in [(labels[(0, 0)], labels[(1, 0)]), (labels[(3, 0)], labels[(4, 0)])]:
+        new_node = G.order() + 1
+        # Petersen graph is triconnected
+        P = nx.petersen_graph()
+        G = nx.disjoint_union(G, P)
+        # Add two edges between the grid and P
+        G.add_edge(new_node + 1, nodes[0])
+        G.add_edge(new_node, nodes[1])
+        # K5 is 4-connected
+        K = nx.complete_graph(5)
+        G = nx.disjoint_union(G, K)
+        # Add three edges between P and K5
+        G.add_edge(new_node + 2, new_node + 11)
+        G.add_edge(new_node + 3, new_node + 12)
+        G.add_edge(new_node + 4, new_node + 13)
+        # Add another K5 sharing two nodes
+        G = nx.disjoint_union(G, K)
+        nbrs = G[new_node + 10]
+        G.remove_node(new_node + 10)
+        for nbr in nbrs:
+            G.add_edge(new_node + 17, nbr)
+        nbrs2 = G[new_node + 9]
+        G.remove_node(new_node + 9)
+        for nbr in nbrs2:
+            G.add_edge(new_node + 18, nbr)
+    return G
+
+
+# Helper function
+
+
+def _check_connectivity(G):
+    result = k_components(G)
+    for k, components in result.items():
+        if k < 3:
+            continue
+        for component in components:
+            C = G.subgraph(component)
+            K = nx.node_connectivity(C)
+            assert K >= k
+
+
+def test_torrents_and_ferraro_graph():
+    G = torrents_and_ferraro_graph()
+    _check_connectivity(G)
+
+
+def test_example_1():
+    G = graph_example_1()
+    _check_connectivity(G)
+
+
+def test_karate_0():
+    G = nx.karate_club_graph()
+    _check_connectivity(G)
+
+
+def test_karate_1():
+    karate_k_num = {
+        0: 4,
+        1: 4,
+        2: 4,
+        3: 4,
+        4: 3,
+        5: 3,
+        6: 3,
+        7: 4,
+        8: 4,
+        9: 2,
+        10: 3,
+        11: 1,
+        12: 2,
+        13: 4,
+        14: 2,
+        15: 2,
+        16: 2,
+        17: 2,
+        18: 2,
+        19: 3,
+        20: 2,
+        21: 2,
+        22: 2,
+        23: 3,
+        24: 3,
+        25: 3,
+        26: 2,
+        27: 3,
+        28: 3,
+        29: 3,
+        30: 4,
+        31: 3,
+        32: 4,
+        33: 4,
+    }
+    approx_karate_k_num = karate_k_num.copy()
+    approx_karate_k_num[24] = 2
+    approx_karate_k_num[25] = 2
+    G = nx.karate_club_graph()
+    k_comps = k_components(G)
+    k_num = build_k_number_dict(k_comps)
+    assert k_num in (karate_k_num, approx_karate_k_num)
+
+
+def test_example_1_detail_3_and_4():
+    G = graph_example_1()
+    result = k_components(G)
+    # In this example graph there are 8 3-components, 4 with 15 nodes
+    # and 4 with 5 nodes.
+    assert len(result[3]) == 8
+    assert len([c for c in result[3] if len(c) == 15]) == 4
+    assert len([c for c in result[3] if len(c) == 5]) == 4
+    # There are also 8 4-components all with 5 nodes.
+    assert len(result[4]) == 8
+    assert all(len(c) == 5 for c in result[4])
+    # Finally check that the k-components detected have actually node
+    # connectivity >= k.
+    for k, components in result.items():
+        if k < 3:
+            continue
+        for component in components:
+            K = nx.node_connectivity(G.subgraph(component))
+            assert K >= k
+
+
+def test_directed():
+    with pytest.raises(nx.NetworkXNotImplemented):
+        G = nx.gnp_random_graph(10, 0.4, directed=True)
+        kc = k_components(G)
+
+
+def test_same():
+    equal = {"A": 2, "B": 2, "C": 2}
+    slightly_different = {"A": 2, "B": 1, "C": 2}
+    different = {"A": 2, "B": 8, "C": 18}
+    assert _same(equal)
+    assert not _same(slightly_different)
+    assert _same(slightly_different, tol=1)
+    assert not _same(different)
+    assert not _same(different, tol=4)
+
+
+class TestAntiGraph:
+    @classmethod
+    def setup_class(cls):
+        cls.Gnp = nx.gnp_random_graph(20, 0.8, seed=42)
+        cls.Anp = _AntiGraph(nx.complement(cls.Gnp))
+        cls.Gd = nx.davis_southern_women_graph()
+        cls.Ad = _AntiGraph(nx.complement(cls.Gd))
+        cls.Gk = nx.karate_club_graph()
+        cls.Ak = _AntiGraph(nx.complement(cls.Gk))
+        cls.GA = [(cls.Gnp, cls.Anp), (cls.Gd, cls.Ad), (cls.Gk, cls.Ak)]
+
+    def test_size(self):
+        for G, A in self.GA:
+            n = G.order()
+            s = len(list(G.edges())) + len(list(A.edges()))
+            assert s == (n * (n - 1)) / 2
+
+    def test_degree(self):
+        for G, A in self.GA:
+            assert sorted(G.degree()) == sorted(A.degree())
+
+    def test_core_number(self):
+        for G, A in self.GA:
+            assert nx.core_number(G) == nx.core_number(A)
+
+    def test_connected_components(self):
+        # ccs are same unless isolated nodes or any node has degree=len(G)-1
+        # graphs in self.GA avoid this problem
+        for G, A in self.GA:
+            gc = [set(c) for c in nx.connected_components(G)]
+            ac = [set(c) for c in nx.connected_components(A)]
+            for comp in ac:
+                assert comp in gc
+
+    def test_adj(self):
+        for G, A in self.GA:
+            for n, nbrs in G.adj.items():
+                a_adj = sorted((n, sorted(ad)) for n, ad in A.adj.items())
+                g_adj = sorted((n, sorted(ad)) for n, ad in G.adj.items())
+                assert a_adj == g_adj
+
+    def test_adjacency(self):
+        for G, A in self.GA:
+            a_adj = list(A.adjacency())
+            for n, nbrs in G.adjacency():
+                assert (n, set(nbrs)) in a_adj
+
+    def test_neighbors(self):
+        for G, A in self.GA:
+            node = list(G.nodes())[0]
+            assert set(G.neighbors(node)) == set(A.neighbors(node))
+
+    def test_node_not_in_graph(self):
+        for G, A in self.GA:
+            node = "non_existent_node"
+            pytest.raises(nx.NetworkXError, A.neighbors, node)
+            pytest.raises(nx.NetworkXError, G.neighbors, node)
+
+    def test_degree_thingraph(self):
+        for G, A in self.GA:
+            node = list(G.nodes())[0]
+            nodes = list(G.nodes())[1:4]
+            assert G.degree(node) == A.degree(node)
+            assert sum(d for n, d in G.degree()) == sum(d for n, d in A.degree())
+            # AntiGraph is a ThinGraph, so all the weights are 1
+            assert sum(d for n, d in A.degree()) == sum(
+                d for n, d in A.degree(weight="weight")
+            )
+            assert sum(d for n, d in G.degree(nodes)) == sum(
+                d for n, d in A.degree(nodes)
+            )
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_matching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_matching.py
new file mode 100644
index 00000000..f50da3d2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_matching.py
@@ -0,0 +1,8 @@
+import networkx as nx
+import networkx.algorithms.approximation as a
+
+
+def test_min_maximal_matching():
+    # smoke test
+    G = nx.Graph()
+    assert len(a.min_maximal_matching(G)) == 0
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_maxcut.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_maxcut.py
new file mode 100644
index 00000000..ef042440
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_maxcut.py
@@ -0,0 +1,94 @@
+import random
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms.approximation import maxcut
+
+
+@pytest.mark.parametrize(
+    "f", (nx.approximation.randomized_partitioning, nx.approximation.one_exchange)
+)
+@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph))
+def test_raises_on_directed_and_multigraphs(f, graph_constructor):
+    G = graph_constructor([(0, 1), (1, 2)])
+    with pytest.raises(nx.NetworkXNotImplemented):
+        f(G)
+
+
+def _is_valid_cut(G, set1, set2):
+    union = set1.union(set2)
+    assert union == set(G.nodes)
+    assert len(set1) + len(set2) == G.number_of_nodes()
+
+
+def _cut_is_locally_optimal(G, cut_size, set1):
+    # test if cut can be locally improved
+    for i, node in enumerate(set1):
+        cut_size_without_node = nx.algorithms.cut_size(
+            G, set1 - {node}, weight="weight"
+        )
+        assert cut_size_without_node <= cut_size
+
+
+def test_random_partitioning():
+    G = nx.complete_graph(5)
+    _, (set1, set2) = maxcut.randomized_partitioning(G, seed=5)
+    _is_valid_cut(G, set1, set2)
+
+
+def test_random_partitioning_all_to_one():
+    G = nx.complete_graph(5)
+    _, (set1, set2) = maxcut.randomized_partitioning(G, p=1)
+    _is_valid_cut(G, set1, set2)
+    assert len(set1) == G.number_of_nodes()
+    assert len(set2) == 0
+
+
+def test_one_exchange_basic():
+    G = nx.complete_graph(5)
+    random.seed(5)
+    for u, v, w in G.edges(data=True):
+        w["weight"] = random.randrange(-100, 100, 1) / 10
+
+    initial_cut = set(random.sample(sorted(G.nodes()), k=5))
+    cut_size, (set1, set2) = maxcut.one_exchange(
+        G, initial_cut, weight="weight", seed=5
+    )
+
+    _is_valid_cut(G, set1, set2)
+    _cut_is_locally_optimal(G, cut_size, set1)
+
+
+def test_one_exchange_optimal():
+    # Greedy one exchange should find the optimal solution for this graph (14)
+    G = nx.Graph()
+    G.add_edge(1, 2, weight=3)
+    G.add_edge(1, 3, weight=3)
+    G.add_edge(1, 4, weight=3)
+    G.add_edge(1, 5, weight=3)
+    G.add_edge(2, 3, weight=5)
+
+    cut_size, (set1, set2) = maxcut.one_exchange(G, weight="weight", seed=5)
+
+    _is_valid_cut(G, set1, set2)
+    _cut_is_locally_optimal(G, cut_size, set1)
+    # check global optimality
+    assert cut_size == 14
+
+
+def test_negative_weights():
+    G = nx.complete_graph(5)
+    random.seed(5)
+    for u, v, w in G.edges(data=True):
+        w["weight"] = -1 * random.random()
+
+    initial_cut = set(random.sample(sorted(G.nodes()), k=5))
+    cut_size, (set1, set2) = maxcut.one_exchange(G, initial_cut, weight="weight")
+
+    # make sure it is a valid cut
+    _is_valid_cut(G, set1, set2)
+    # check local optimality
+    _cut_is_locally_optimal(G, cut_size, set1)
+    # test that all nodes are in the same partition
+    assert len(set1) == len(G.nodes) or len(set2) == len(G.nodes)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_ramsey.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_ramsey.py
new file mode 100644
index 00000000..32fe1fb8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_ramsey.py
@@ -0,0 +1,31 @@
+import networkx as nx
+import networkx.algorithms.approximation as apxa
+
+
+def test_ramsey():
+    # this should only find the complete graph
+    graph = nx.complete_graph(10)
+    c, i = apxa.ramsey_R2(graph)
+    cdens = nx.density(graph.subgraph(c))
+    assert cdens == 1.0, "clique not correctly found by ramsey!"
+    idens = nx.density(graph.subgraph(i))
+    assert idens == 0.0, "i-set not correctly found by ramsey!"
+
+    # this trivial graph has no cliques. should just find i-sets
+    graph = nx.trivial_graph()
+    c, i = apxa.ramsey_R2(graph)
+    assert c == {0}, "clique not correctly found by ramsey!"
+    assert i == {0}, "i-set not correctly found by ramsey!"
+
+    graph = nx.barbell_graph(10, 5, nx.Graph())
+    c, i = apxa.ramsey_R2(graph)
+    cdens = nx.density(graph.subgraph(c))
+    assert cdens == 1.0, "clique not correctly found by ramsey!"
+    idens = nx.density(graph.subgraph(i))
+    assert idens == 0.0, "i-set not correctly found by ramsey!"
+
+    # add self-loops and test again
+    graph.add_edges_from([(n, n) for n in range(0, len(graph), 2)])
+    cc, ii = apxa.ramsey_R2(graph)
+    assert cc == c
+    assert ii == i
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_steinertree.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_steinertree.py
new file mode 100644
index 00000000..1b074757
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_steinertree.py
@@ -0,0 +1,265 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms.approximation.steinertree import (
+    _remove_nonterminal_leaves,
+    metric_closure,
+    steiner_tree,
+)
+from networkx.utils import edges_equal
+
+
+class TestSteinerTree:
+    @classmethod
+    def setup_class(cls):
+        G1 = nx.Graph()
+        G1.add_edge(1, 2, weight=10)
+        G1.add_edge(2, 3, weight=10)
+        G1.add_edge(3, 4, weight=10)
+        G1.add_edge(4, 5, weight=10)
+        G1.add_edge(5, 6, weight=10)
+        G1.add_edge(2, 7, weight=1)
+        G1.add_edge(7, 5, weight=1)
+
+        G2 = nx.Graph()
+        G2.add_edge(0, 5, weight=6)
+        G2.add_edge(1, 2, weight=2)
+        G2.add_edge(1, 5, weight=3)
+        G2.add_edge(2, 4, weight=4)
+        G2.add_edge(3, 5, weight=5)
+        G2.add_edge(4, 5, weight=1)
+
+        G3 = nx.Graph()
+        G3.add_edge(1, 2, weight=8)
+        G3.add_edge(1, 9, weight=3)
+        G3.add_edge(1, 8, weight=6)
+        G3.add_edge(1, 10, weight=2)
+        G3.add_edge(1, 14, weight=3)
+        G3.add_edge(2, 3, weight=6)
+        G3.add_edge(3, 4, weight=3)
+        G3.add_edge(3, 10, weight=2)
+        G3.add_edge(3, 11, weight=1)
+        G3.add_edge(4, 5, weight=1)
+        G3.add_edge(4, 11, weight=1)
+        G3.add_edge(5, 6, weight=4)
+        G3.add_edge(5, 11, weight=2)
+        G3.add_edge(5, 12, weight=1)
+        G3.add_edge(5, 13, weight=3)
+        G3.add_edge(6, 7, weight=2)
+        G3.add_edge(6, 12, weight=3)
+        G3.add_edge(6, 13, weight=1)
+        G3.add_edge(7, 8, weight=3)
+        G3.add_edge(7, 9, weight=3)
+        G3.add_edge(7, 11, weight=5)
+        G3.add_edge(7, 13, weight=2)
+        G3.add_edge(7, 14, weight=4)
+        G3.add_edge(8, 9, weight=2)
+        G3.add_edge(9, 14, weight=1)
+        G3.add_edge(10, 11, weight=2)
+        G3.add_edge(10, 14, weight=1)
+        G3.add_edge(11, 12, weight=1)
+        G3.add_edge(11, 14, weight=7)
+        G3.add_edge(12, 14, weight=3)
+        G3.add_edge(12, 15, weight=1)
+        G3.add_edge(13, 14, weight=4)
+        G3.add_edge(13, 15, weight=1)
+        G3.add_edge(14, 15, weight=2)
+
+        cls.G1 = G1
+        cls.G2 = G2
+        cls.G3 = G3
+        cls.G1_term_nodes = [1, 2, 3, 4, 5]
+        cls.G2_term_nodes = [0, 2, 3]
+        cls.G3_term_nodes = [1, 3, 5, 6, 8, 10, 11, 12, 13]
+
+        cls.methods = ["kou", "mehlhorn"]
+
+    def test_connected_metric_closure(self):
+        G = self.G1.copy()
+        G.add_node(100)
+        pytest.raises(nx.NetworkXError, metric_closure, G)
+
+    def test_metric_closure(self):
+        M = metric_closure(self.G1)
+        mc = [
+            (1, 2, {"distance": 10, "path": [1, 2]}),
+            (1, 3, {"distance": 20, "path": [1, 2, 3]}),
+            (1, 4, {"distance": 22, "path": [1, 2, 7, 5, 4]}),
+            (1, 5, {"distance": 12, "path": [1, 2, 7, 5]}),
+            (1, 6, {"distance": 22, "path": [1, 2, 7, 5, 6]}),
+            (1, 7, {"distance": 11, "path": [1, 2, 7]}),
+            (2, 3, {"distance": 10, "path": [2, 3]}),
+            (2, 4, {"distance": 12, "path": [2, 7, 5, 4]}),
+            (2, 5, {"distance": 2, "path": [2, 7, 5]}),
+            (2, 6, {"distance": 12, "path": [2, 7, 5, 6]}),
+            (2, 7, {"distance": 1, "path": [2, 7]}),
+            (3, 4, {"distance": 10, "path": [3, 4]}),
+            (3, 5, {"distance": 12, "path": [3, 2, 7, 5]}),
+            (3, 6, {"distance": 22, "path": [3, 2, 7, 5, 6]}),
+            (3, 7, {"distance": 11, "path": [3, 2, 7]}),
+            (4, 5, {"distance": 10, "path": [4, 5]}),
+            (4, 6, {"distance": 20, "path": [4, 5, 6]}),
+            (4, 7, {"distance": 11, "path": [4, 5, 7]}),
+            (5, 6, {"distance": 10, "path": [5, 6]}),
+            (5, 7, {"distance": 1, "path": [5, 7]}),
+            (6, 7, {"distance": 11, "path": [6, 5, 7]}),
+        ]
+        assert edges_equal(list(M.edges(data=True)), mc)
+
+    def test_steiner_tree(self):
+        valid_steiner_trees = [
+            [
+                [
+                    (1, 2, {"weight": 10}),
+                    (2, 3, {"weight": 10}),
+                    (2, 7, {"weight": 1}),
+                    (3, 4, {"weight": 10}),
+                    (5, 7, {"weight": 1}),
+                ],
+                [
+                    (1, 2, {"weight": 10}),
+                    (2, 7, {"weight": 1}),
+                    (3, 4, {"weight": 10}),
+                    (4, 5, {"weight": 10}),
+                    (5, 7, {"weight": 1}),
+                ],
+                [
+                    (1, 2, {"weight": 10}),
+                    (2, 3, {"weight": 10}),
+                    (2, 7, {"weight": 1}),
+                    (4, 5, {"weight": 10}),
+                    (5, 7, {"weight": 1}),
+                ],
+            ],
+            [
+                [
+                    (0, 5, {"weight": 6}),
+                    (1, 2, {"weight": 2}),
+                    (1, 5, {"weight": 3}),
+                    (3, 5, {"weight": 5}),
+                ],
+                [
+                    (0, 5, {"weight": 6}),
+                    (4, 2, {"weight": 4}),
+                    (4, 5, {"weight": 1}),
+                    (3, 5, {"weight": 5}),
+                ],
+            ],
+            [
+                [
+                    (1, 10, {"weight": 2}),
+                    (3, 10, {"weight": 2}),
+                    (3, 11, {"weight": 1}),
+                    (5, 12, {"weight": 1}),
+                    (6, 13, {"weight": 1}),
+                    (8, 9, {"weight": 2}),
+                    (9, 14, {"weight": 1}),
+                    (10, 14, {"weight": 1}),
+                    (11, 12, {"weight": 1}),
+                    (12, 15, {"weight": 1}),
+                    (13, 15, {"weight": 1}),
+                ]
+            ],
+        ]
+        for method in self.methods:
+            for G, term_nodes, valid_trees in zip(
+                [self.G1, self.G2, self.G3],
+                [self.G1_term_nodes, self.G2_term_nodes, self.G3_term_nodes],
+                valid_steiner_trees,
+            ):
+                S = steiner_tree(G, term_nodes, method=method)
+                assert any(
+                    edges_equal(list(S.edges(data=True)), valid_tree)
+                    for valid_tree in valid_trees
+                )
+
+    def test_multigraph_steiner_tree(self):
+        G = nx.MultiGraph()
+        G.add_edges_from(
+            [
+                (1, 2, 0, {"weight": 1}),
+                (2, 3, 0, {"weight": 999}),
+                (2, 3, 1, {"weight": 1}),
+                (3, 4, 0, {"weight": 1}),
+                (3, 5, 0, {"weight": 1}),
+            ]
+        )
+        terminal_nodes = [2, 4, 5]
+        expected_edges = [
+            (2, 3, 1, {"weight": 1}),  # edge with key 1 has lower weight
+            (3, 4, 0, {"weight": 1}),
+            (3, 5, 0, {"weight": 1}),
+        ]
+        for method in self.methods:
+            S = steiner_tree(G, terminal_nodes, method=method)
+            assert edges_equal(S.edges(data=True, keys=True), expected_edges)
+
+    def test_remove_nonterminal_leaves(self):
+        G = nx.path_graph(10)
+        _remove_nonterminal_leaves(G, [4, 5, 6])
+
+        assert list(G) == [4, 5, 6]  # only the terminal nodes are left
+
+
+@pytest.mark.parametrize("method", ("kou", "mehlhorn"))
+def test_steiner_tree_weight_attribute(method):
+    G = nx.star_graph(4)
+    # Add an edge attribute that is named something other than "weight"
+    nx.set_edge_attributes(G, {e: 10 for e in G.edges}, name="distance")
+    H = nx.approximation.steiner_tree(G, [1, 3], method=method, weight="distance")
+    assert nx.utils.edges_equal(H.edges, [(0, 1), (0, 3)])
+
+
+@pytest.mark.parametrize("method", ("kou", "mehlhorn"))
+def test_steiner_tree_multigraph_weight_attribute(method):
+    G = nx.cycle_graph(3, create_using=nx.MultiGraph)
+    nx.set_edge_attributes(G, {e: 10 for e in G.edges}, name="distance")
+    G.add_edge(2, 0, distance=5)
+    H = nx.approximation.steiner_tree(G, list(G), method=method, weight="distance")
+    assert len(H.edges) == 2 and H.has_edge(2, 0, key=1)
+    assert sum(dist for *_, dist in H.edges(data="distance")) == 15
+
+
+@pytest.mark.parametrize("method", (None, "mehlhorn", "kou"))
+def test_steiner_tree_methods(method):
+    G = nx.star_graph(4)
+    expected = nx.Graph([(0, 1), (0, 3)])
+    st = nx.approximation.steiner_tree(G, [1, 3], method=method)
+    assert nx.utils.edges_equal(st.edges, expected.edges)
+
+
+def test_steiner_tree_method_invalid():
+    G = nx.star_graph(4)
+    with pytest.raises(
+        ValueError, match="invalid_method is not a valid choice for an algorithm."
+    ):
+        nx.approximation.steiner_tree(G, terminal_nodes=[1, 3], method="invalid_method")
+
+
+def test_steiner_tree_remove_non_terminal_leaves_self_loop_edges():
+    # To verify that the last step of the steiner tree approximation
+    # behaves correctly in the case where a non-terminal leaf has a self loop edge
+    G = nx.path_graph(10)
+
+    # Add self loops to a mix of terminal and non-terminal nodes
+    G.add_edges_from([(2, 2), (3, 3), (4, 4), (7, 7), (8, 8)])
+
+    # Remove non-terminal leaves
+    _remove_nonterminal_leaves(G, [4, 5, 6, 7])
+
+    # The terminal nodes should be left
+    assert list(G) == [4, 5, 6, 7]  # only the terminal nodes are left
+
+
+def test_steiner_tree_non_terminal_leaves_multigraph_self_loop_edges():
+    # To verify that the last step of the steiner tree approximation
+    # behaves correctly in the case where a non-terminal leaf has a self loop edge
+    G = nx.MultiGraph()
+    G.add_edges_from([(i, i + 1) for i in range(10)])
+    G.add_edges_from([(2, 2), (3, 3), (4, 4), (4, 4), (7, 7)])
+
+    # Remove non-terminal leaves
+    _remove_nonterminal_leaves(G, [4, 5, 6, 7])
+
+    # Only the terminal nodes should be left
+    assert list(G) == [4, 5, 6, 7]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_traveling_salesman.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_traveling_salesman.py
new file mode 100644
index 00000000..2084c19a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_traveling_salesman.py
@@ -0,0 +1,977 @@
+"""Unit tests for the traveling_salesman module."""
+
+import random
+
+import pytest
+
+import networkx as nx
+import networkx.algorithms.approximation as nx_app
+
+pairwise = nx.utils.pairwise
+
+
+def test_christofides_hamiltonian():
+    random.seed(42)
+    G = nx.complete_graph(20)
+    for u, v in G.edges():
+        G[u][v]["weight"] = random.randint(0, 10)
+
+    H = nx.Graph()
+    H.add_edges_from(pairwise(nx_app.christofides(G)))
+    H.remove_edges_from(nx.find_cycle(H))
+    assert len(H.edges) == 0
+
+    tree = nx.minimum_spanning_tree(G, weight="weight")
+    H = nx.Graph()
+    H.add_edges_from(pairwise(nx_app.christofides(G, tree)))
+    H.remove_edges_from(nx.find_cycle(H))
+    assert len(H.edges) == 0
+
+
+def test_christofides_incomplete_graph():
+    G = nx.complete_graph(10)
+    G.remove_edge(0, 1)
+    pytest.raises(nx.NetworkXError, nx_app.christofides, G)
+
+
+def test_christofides_ignore_selfloops():
+    G = nx.complete_graph(5)
+    G.add_edge(3, 3)
+    cycle = nx_app.christofides(G)
+    assert len(cycle) - 1 == len(G) == len(set(cycle))
+
+
+# set up graphs for other tests
+class TestBase:
+    @classmethod
+    def setup_class(cls):
+        cls.DG = nx.DiGraph()
+        cls.DG.add_weighted_edges_from(
+            {
+                ("A", "B", 3),
+                ("A", "C", 17),
+                ("A", "D", 14),
+                ("B", "A", 3),
+                ("B", "C", 12),
+                ("B", "D", 16),
+                ("C", "A", 13),
+                ("C", "B", 12),
+                ("C", "D", 4),
+                ("D", "A", 14),
+                ("D", "B", 15),
+                ("D", "C", 2),
+            }
+        )
+        cls.DG_cycle = ["D", "C", "B", "A", "D"]
+        cls.DG_cost = 31.0
+
+        cls.DG2 = nx.DiGraph()
+        cls.DG2.add_weighted_edges_from(
+            {
+                ("A", "B", 3),
+                ("A", "C", 17),
+                ("A", "D", 14),
+                ("B", "A", 30),
+                ("B", "C", 2),
+                ("B", "D", 16),
+                ("C", "A", 33),
+                ("C", "B", 32),
+                ("C", "D", 34),
+                ("D", "A", 14),
+                ("D", "B", 15),
+                ("D", "C", 2),
+            }
+        )
+        cls.DG2_cycle = ["D", "A", "B", "C", "D"]
+        cls.DG2_cost = 53.0
+
+        cls.unweightedUG = nx.complete_graph(5, nx.Graph())
+        cls.unweightedDG = nx.complete_graph(5, nx.DiGraph())
+
+        cls.incompleteUG = nx.Graph()
+        cls.incompleteUG.add_weighted_edges_from({(0, 1, 1), (1, 2, 3)})
+        cls.incompleteDG = nx.DiGraph()
+        cls.incompleteDG.add_weighted_edges_from({(0, 1, 1), (1, 2, 3)})
+
+        cls.UG = nx.Graph()
+        cls.UG.add_weighted_edges_from(
+            {
+                ("A", "B", 3),
+                ("A", "C", 17),
+                ("A", "D", 14),
+                ("B", "C", 12),
+                ("B", "D", 16),
+                ("C", "D", 4),
+            }
+        )
+        cls.UG_cycle = ["D", "C", "B", "A", "D"]
+        cls.UG_cost = 33.0
+
+        cls.UG2 = nx.Graph()
+        cls.UG2.add_weighted_edges_from(
+            {
+                ("A", "B", 1),
+                ("A", "C", 15),
+                ("A", "D", 5),
+                ("B", "C", 16),
+                ("B", "D", 8),
+                ("C", "D", 3),
+            }
+        )
+        cls.UG2_cycle = ["D", "C", "B", "A", "D"]
+        cls.UG2_cost = 25.0
+
+
+def validate_solution(soln, cost, exp_soln, exp_cost):
+    assert soln == exp_soln
+    assert cost == exp_cost
+
+
+def validate_symmetric_solution(soln, cost, exp_soln, exp_cost):
+    assert soln == exp_soln or soln == exp_soln[::-1]
+    assert cost == exp_cost
+
+
+class TestGreedyTSP(TestBase):
+    def test_greedy(self):
+        cycle = nx_app.greedy_tsp(self.DG, source="D")
+        cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_solution(cycle, cost, ["D", "C", "B", "A", "D"], 31.0)
+
+        cycle = nx_app.greedy_tsp(self.DG2, source="D")
+        cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_solution(cycle, cost, ["D", "C", "B", "A", "D"], 78.0)
+
+        cycle = nx_app.greedy_tsp(self.UG, source="D")
+        cost = sum(self.UG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_solution(cycle, cost, ["D", "C", "B", "A", "D"], 33.0)
+
+        cycle = nx_app.greedy_tsp(self.UG2, source="D")
+        cost = sum(self.UG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_solution(cycle, cost, ["D", "C", "A", "B", "D"], 27.0)
+
+    def test_not_complete_graph(self):
+        pytest.raises(nx.NetworkXError, nx_app.greedy_tsp, self.incompleteUG)
+        pytest.raises(nx.NetworkXError, nx_app.greedy_tsp, self.incompleteDG)
+
+    def test_not_weighted_graph(self):
+        nx_app.greedy_tsp(self.unweightedUG)
+        nx_app.greedy_tsp(self.unweightedDG)
+
+    def test_two_nodes(self):
+        G = nx.Graph()
+        G.add_weighted_edges_from({(1, 2, 1)})
+        cycle = nx_app.greedy_tsp(G)
+        cost = sum(G[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_solution(cycle, cost, [1, 2, 1], 2)
+
+    def test_ignore_selfloops(self):
+        G = nx.complete_graph(5)
+        G.add_edge(3, 3)
+        cycle = nx_app.greedy_tsp(G)
+        assert len(cycle) - 1 == len(G) == len(set(cycle))
+
+
+class TestSimulatedAnnealingTSP(TestBase):
+    tsp = staticmethod(nx_app.simulated_annealing_tsp)
+
+    def test_simulated_annealing_directed(self):
+        cycle = self.tsp(self.DG, "greedy", source="D", seed=42)
+        cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_solution(cycle, cost, self.DG_cycle, self.DG_cost)
+
+        initial_sol = ["D", "B", "A", "C", "D"]
+        cycle = self.tsp(self.DG, initial_sol, source="D", seed=42)
+        cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_solution(cycle, cost, self.DG_cycle, self.DG_cost)
+
+        initial_sol = ["D", "A", "C", "B", "D"]
+        cycle = self.tsp(self.DG, initial_sol, move="1-0", source="D", seed=42)
+        cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_solution(cycle, cost, self.DG_cycle, self.DG_cost)
+
+        cycle = self.tsp(self.DG2, "greedy", source="D", seed=42)
+        cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_solution(cycle, cost, self.DG2_cycle, self.DG2_cost)
+
+        cycle = self.tsp(self.DG2, "greedy", move="1-0", source="D", seed=42)
+        cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_solution(cycle, cost, self.DG2_cycle, self.DG2_cost)
+
+    def test_simulated_annealing_undirected(self):
+        cycle = self.tsp(self.UG, "greedy", source="D", seed=42)
+        cost = sum(self.UG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_solution(cycle, cost, self.UG_cycle, self.UG_cost)
+
+        cycle = self.tsp(self.UG2, "greedy", source="D", seed=42)
+        cost = sum(self.UG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_symmetric_solution(cycle, cost, self.UG2_cycle, self.UG2_cost)
+
+        cycle = self.tsp(self.UG2, "greedy", move="1-0", source="D", seed=42)
+        cost = sum(self.UG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_symmetric_solution(cycle, cost, self.UG2_cycle, self.UG2_cost)
+
+    def test_error_on_input_order_mistake(self):
+        # see issue #4846 https://github.com/networkx/networkx/issues/4846
+        pytest.raises(TypeError, self.tsp, self.UG, weight="weight")
+        pytest.raises(nx.NetworkXError, self.tsp, self.UG, "weight")
+
+    def test_not_complete_graph(self):
+        pytest.raises(nx.NetworkXError, self.tsp, self.incompleteUG, "greedy", source=0)
+        pytest.raises(nx.NetworkXError, self.tsp, self.incompleteDG, "greedy", source=0)
+
+    def test_ignore_selfloops(self):
+        G = nx.complete_graph(5)
+        G.add_edge(3, 3)
+        cycle = self.tsp(G, "greedy")
+        assert len(cycle) - 1 == len(G) == len(set(cycle))
+
+    def test_not_weighted_graph(self):
+        self.tsp(self.unweightedUG, "greedy")
+        self.tsp(self.unweightedDG, "greedy")
+
+    def test_two_nodes(self):
+        G = nx.Graph()
+        G.add_weighted_edges_from({(1, 2, 1)})
+
+        cycle = self.tsp(G, "greedy", source=1, seed=42)
+        cost = sum(G[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_solution(cycle, cost, [1, 2, 1], 2)
+
+        cycle = self.tsp(G, [1, 2, 1], source=1, seed=42)
+        cost = sum(G[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        validate_solution(cycle, cost, [1, 2, 1], 2)
+
+    def test_failure_of_costs_too_high_when_iterations_low(self):
+        # Simulated Annealing Version:
+        # set number of moves low and alpha high
+        cycle = self.tsp(
+            self.DG2, "greedy", source="D", move="1-0", alpha=1, N_inner=1, seed=42
+        )
+        cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        print(cycle, cost)
+        assert cost > self.DG2_cost
+
+        # Try with an incorrect initial guess
+        initial_sol = ["D", "A", "B", "C", "D"]
+        cycle = self.tsp(
+            self.DG,
+            initial_sol,
+            source="D",
+            move="1-0",
+            alpha=0.1,
+            N_inner=1,
+            max_iterations=1,
+            seed=42,
+        )
+        cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        print(cycle, cost)
+        assert cost > self.DG_cost
+
+
+class TestThresholdAcceptingTSP(TestSimulatedAnnealingTSP):
+    tsp = staticmethod(nx_app.threshold_accepting_tsp)
+
+    def test_failure_of_costs_too_high_when_iterations_low(self):
+        # Threshold Version:
+        # set number of moves low and number of iterations low
+        cycle = self.tsp(
+            self.DG2,
+            "greedy",
+            source="D",
+            move="1-0",
+            N_inner=1,
+            max_iterations=1,
+            seed=4,
+        )
+        cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        assert cost > self.DG2_cost
+
+        # set threshold too low
+        initial_sol = ["D", "A", "B", "C", "D"]
+        cycle = self.tsp(
+            self.DG, initial_sol, source="D", move="1-0", threshold=-3, seed=42
+        )
+        cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
+        assert cost > self.DG_cost
+
+
+# Tests for function traveling_salesman_problem
+def test_TSP_method():
+    G = nx.cycle_graph(9)
+    G[4][5]["weight"] = 10
+
+    # Test using the old currying method
+    sa_tsp = lambda G, weight: nx_app.simulated_annealing_tsp(
+        G, "greedy", weight, source=4, seed=1
+    )
+
+    path = nx_app.traveling_salesman_problem(
+        G,
+        method=sa_tsp,
+        cycle=False,
+    )
+    print(path)
+    assert path == [4, 3, 2, 1, 0, 8, 7, 6, 5]
+
+
+def test_TSP_unweighted():
+    G = nx.cycle_graph(9)
+    path = nx_app.traveling_salesman_problem(G, nodes=[3, 6], cycle=False)
+    assert path in ([3, 4, 5, 6], [6, 5, 4, 3])
+
+    cycle = nx_app.traveling_salesman_problem(G, nodes=[3, 6])
+    assert cycle in ([3, 4, 5, 6, 5, 4, 3], [6, 5, 4, 3, 4, 5, 6])
+
+
+def test_TSP_weighted():
+    G = nx.cycle_graph(9)
+    G[0][1]["weight"] = 2
+    G[1][2]["weight"] = 2
+    G[2][3]["weight"] = 2
+    G[3][4]["weight"] = 4
+    G[4][5]["weight"] = 5
+    G[5][6]["weight"] = 4
+    G[6][7]["weight"] = 2
+    G[7][8]["weight"] = 2
+    G[8][0]["weight"] = 2
+    tsp = nx_app.traveling_salesman_problem
+
+    # path between 3 and 6
+    expected_paths = ([3, 2, 1, 0, 8, 7, 6], [6, 7, 8, 0, 1, 2, 3])
+    # cycle between 3 and 6
+    expected_cycles = (
+        [3, 2, 1, 0, 8, 7, 6, 7, 8, 0, 1, 2, 3],
+        [6, 7, 8, 0, 1, 2, 3, 2, 1, 0, 8, 7, 6],
+    )
+    # path through all nodes
+    expected_tourpaths = ([5, 6, 7, 8, 0, 1, 2, 3, 4], [4, 3, 2, 1, 0, 8, 7, 6, 5])
+
+    # Check default method
+    cycle = tsp(G, nodes=[3, 6], weight="weight")
+    assert cycle in expected_cycles
+
+    path = tsp(G, nodes=[3, 6], weight="weight", cycle=False)
+    assert path in expected_paths
+
+    tourpath = tsp(G, weight="weight", cycle=False)
+    assert tourpath in expected_tourpaths
+
+    # Check all methods
+    methods = [
+        (nx_app.christofides, {}),
+        (nx_app.greedy_tsp, {}),
+        (
+            nx_app.simulated_annealing_tsp,
+            {"init_cycle": "greedy"},
+        ),
+        (
+            nx_app.threshold_accepting_tsp,
+            {"init_cycle": "greedy"},
+        ),
+    ]
+    for method, kwargs in methods:
+        cycle = tsp(G, nodes=[3, 6], weight="weight", method=method, **kwargs)
+        assert cycle in expected_cycles
+
+        path = tsp(
+            G, nodes=[3, 6], weight="weight", method=method, cycle=False, **kwargs
+        )
+        assert path in expected_paths
+
+        tourpath = tsp(G, weight="weight", method=method, cycle=False, **kwargs)
+        assert tourpath in expected_tourpaths
+
+
+def test_TSP_incomplete_graph_short_path():
+    G = nx.cycle_graph(9)
+    G.add_edges_from([(4, 9), (9, 10), (10, 11), (11, 0)])
+    G[4][5]["weight"] = 5
+
+    cycle = nx_app.traveling_salesman_problem(G)
+    print(cycle)
+    assert len(cycle) == 17 and len(set(cycle)) == 12
+
+    # make sure that cutting one edge out of complete graph formulation
+    # cuts out many edges out of the path of the TSP
+    path = nx_app.traveling_salesman_problem(G, cycle=False)
+    print(path)
+    assert len(path) == 13 and len(set(path)) == 12
+
+
+def test_held_karp_ascent():
+    """
+    Test the Held-Karp relaxation with the ascent method
+    """
+    import networkx.algorithms.approximation.traveling_salesman as tsp
+
+    np = pytest.importorskip("numpy")
+    pytest.importorskip("scipy")
+
+    # Adjacency matrix from page 1153 of the 1970 Held and Karp paper
+    # which has been edited to be directional, but also symmetric
+    G_array = np.array(
+        [
+            [0, 97, 60, 73, 17, 52],
+            [97, 0, 41, 52, 90, 30],
+            [60, 41, 0, 21, 35, 41],
+            [73, 52, 21, 0, 95, 46],
+            [17, 90, 35, 95, 0, 81],
+            [52, 30, 41, 46, 81, 0],
+        ]
+    )
+
+    solution_edges = [(1, 3), (2, 4), (3, 2), (4, 0), (5, 1), (0, 5)]
+
+    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
+    opt_hk, z_star = tsp.held_karp_ascent(G)
+
+    # Check that the optimal weights are the same
+    assert round(opt_hk, 2) == 207.00
+    # Check that the z_stars are the same
+    solution = nx.DiGraph()
+    solution.add_edges_from(solution_edges)
+    assert nx.utils.edges_equal(z_star.edges, solution.edges)
+
+
+def test_ascent_fractional_solution():
+    """
+    Test the ascent method using a modified version of Figure 2 on page 1140
+    in 'The Traveling Salesman Problem and Minimum Spanning Trees' by Held and
+    Karp
+    """
+    import networkx.algorithms.approximation.traveling_salesman as tsp
+
+    np = pytest.importorskip("numpy")
+    pytest.importorskip("scipy")
+
+    # This version of Figure 2 has all of the edge weights multiplied by 100
+    # and is a complete directed graph with infinite edge weights for the
+    # edges not listed in the original graph
+    G_array = np.array(
+        [
+            [0, 100, 100, 100000, 100000, 1],
+            [100, 0, 100, 100000, 1, 100000],
+            [100, 100, 0, 1, 100000, 100000],
+            [100000, 100000, 1, 0, 100, 100],
+            [100000, 1, 100000, 100, 0, 100],
+            [1, 100000, 100000, 100, 100, 0],
+        ]
+    )
+
+    solution_z_star = {
+        (0, 1): 5 / 12,
+        (0, 2): 5 / 12,
+        (0, 5): 5 / 6,
+        (1, 0): 5 / 12,
+        (1, 2): 1 / 3,
+        (1, 4): 5 / 6,
+        (2, 0): 5 / 12,
+        (2, 1): 1 / 3,
+        (2, 3): 5 / 6,
+        (3, 2): 5 / 6,
+        (3, 4): 1 / 3,
+        (3, 5): 1 / 2,
+        (4, 1): 5 / 6,
+        (4, 3): 1 / 3,
+        (4, 5): 1 / 2,
+        (5, 0): 5 / 6,
+        (5, 3): 1 / 2,
+        (5, 4): 1 / 2,
+    }
+
+    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
+    opt_hk, z_star = tsp.held_karp_ascent(G)
+
+    # Check that the optimal weights are the same
+    assert round(opt_hk, 2) == 303.00
+    # Check that the z_stars are the same
+    assert {key: round(z_star[key], 4) for key in z_star} == {
+        key: round(solution_z_star[key], 4) for key in solution_z_star
+    }
+
+
+def test_ascent_method_asymmetric():
+    """
+    Tests the ascent method using a truly asymmetric graph for which the
+    solution has been brute forced
+    """
+    import networkx.algorithms.approximation.traveling_salesman as tsp
+
+    np = pytest.importorskip("numpy")
+    pytest.importorskip("scipy")
+
+    G_array = np.array(
+        [
+            [0, 26, 63, 59, 69, 31, 41],
+            [62, 0, 91, 53, 75, 87, 47],
+            [47, 82, 0, 90, 15, 9, 18],
+            [68, 19, 5, 0, 58, 34, 93],
+            [11, 58, 53, 55, 0, 61, 79],
+            [88, 75, 13, 76, 98, 0, 40],
+            [41, 61, 55, 88, 46, 45, 0],
+        ]
+    )
+
+    solution_edges = [(0, 1), (1, 3), (3, 2), (2, 5), (5, 6), (4, 0), (6, 4)]
+
+    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
+    opt_hk, z_star = tsp.held_karp_ascent(G)
+
+    # Check that the optimal weights are the same
+    assert round(opt_hk, 2) == 190.00
+    # Check that the z_stars match.
+    solution = nx.DiGraph()
+    solution.add_edges_from(solution_edges)
+    assert nx.utils.edges_equal(z_star.edges, solution.edges)
+
+
+def test_ascent_method_asymmetric_2():
+    """
+    Tests the ascent method using a truly asymmetric graph for which the
+    solution has been brute forced
+    """
+    import networkx.algorithms.approximation.traveling_salesman as tsp
+
+    np = pytest.importorskip("numpy")
+    pytest.importorskip("scipy")
+
+    G_array = np.array(
+        [
+            [0, 45, 39, 92, 29, 31],
+            [72, 0, 4, 12, 21, 60],
+            [81, 6, 0, 98, 70, 53],
+            [49, 71, 59, 0, 98, 94],
+            [74, 95, 24, 43, 0, 47],
+            [56, 43, 3, 65, 22, 0],
+        ]
+    )
+
+    solution_edges = [(0, 5), (5, 4), (1, 3), (3, 0), (2, 1), (4, 2)]
+
+    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
+    opt_hk, z_star = tsp.held_karp_ascent(G)
+
+    # Check that the optimal weights are the same
+    assert round(opt_hk, 2) == 144.00
+    # Check that the z_stars match.
+    solution = nx.DiGraph()
+    solution.add_edges_from(solution_edges)
+    assert nx.utils.edges_equal(z_star.edges, solution.edges)
+
+
+def test_held_karp_ascent_asymmetric_3():
+    """
+    Tests the ascent method using a truly asymmetric graph with a fractional
+    solution for which the solution has been brute forced.
+
+    In this graph there are two different optimal, integral solutions (which
+    are also the overall atsp solutions) to the Held Karp relaxation. However,
+    this particular graph has two different tours of optimal value and the
+    possible solutions in the held_karp_ascent function are not stored in an
+    ordered data structure.
+    """
+    import networkx.algorithms.approximation.traveling_salesman as tsp
+
+    np = pytest.importorskip("numpy")
+    pytest.importorskip("scipy")
+
+    G_array = np.array(
+        [
+            [0, 1, 5, 2, 7, 4],
+            [7, 0, 7, 7, 1, 4],
+            [4, 7, 0, 9, 2, 1],
+            [7, 2, 7, 0, 4, 4],
+            [5, 5, 4, 4, 0, 3],
+            [3, 9, 1, 3, 4, 0],
+        ]
+    )
+
+    solution1_edges = [(0, 3), (1, 4), (2, 5), (3, 1), (4, 2), (5, 0)]
+
+    solution2_edges = [(0, 3), (3, 1), (1, 4), (4, 5), (2, 0), (5, 2)]
+
+    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
+    opt_hk, z_star = tsp.held_karp_ascent(G)
+
+    assert round(opt_hk, 2) == 13.00
+    # Check that the z_stars are the same
+    solution1 = nx.DiGraph()
+    solution1.add_edges_from(solution1_edges)
+    solution2 = nx.DiGraph()
+    solution2.add_edges_from(solution2_edges)
+    assert nx.utils.edges_equal(z_star.edges, solution1.edges) or nx.utils.edges_equal(
+        z_star.edges, solution2.edges
+    )
+
+
+def test_held_karp_ascent_fractional_asymmetric():
+    """
+    Tests the ascent method using a truly asymmetric graph with a fractional
+    solution for which the solution has been brute forced
+    """
+    import networkx.algorithms.approximation.traveling_salesman as tsp
+
+    np = pytest.importorskip("numpy")
+    pytest.importorskip("scipy")
+
+    G_array = np.array(
+        [
+            [0, 100, 150, 100000, 100000, 1],
+            [150, 0, 100, 100000, 1, 100000],
+            [100, 150, 0, 1, 100000, 100000],
+            [100000, 100000, 1, 0, 150, 100],
+            [100000, 2, 100000, 100, 0, 150],
+            [2, 100000, 100000, 150, 100, 0],
+        ]
+    )
+
+    solution_z_star = {
+        (0, 1): 5 / 12,
+        (0, 2): 5 / 12,
+        (0, 5): 5 / 6,
+        (1, 0): 5 / 12,
+        (1, 2): 5 / 12,
+        (1, 4): 5 / 6,
+        (2, 0): 5 / 12,
+        (2, 1): 5 / 12,
+        (2, 3): 5 / 6,
+        (3, 2): 5 / 6,
+        (3, 4): 5 / 12,
+        (3, 5): 5 / 12,
+        (4, 1): 5 / 6,
+        (4, 3): 5 / 12,
+        (4, 5): 5 / 12,
+        (5, 0): 5 / 6,
+        (5, 3): 5 / 12,
+        (5, 4): 5 / 12,
+    }
+
+    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
+    opt_hk, z_star = tsp.held_karp_ascent(G)
+
+    # Check that the optimal weights are the same
+    assert round(opt_hk, 2) == 304.00
+    # Check that the z_stars are the same
+    assert {key: round(z_star[key], 4) for key in z_star} == {
+        key: round(solution_z_star[key], 4) for key in solution_z_star
+    }
+
+
+def test_spanning_tree_distribution():
+    """
+    Test that we can create an exponential distribution of spanning trees such
+    that the probability of each tree is proportional to the product of edge
+    weights.
+
+    Results of this test have been confirmed with hypothesis testing from the
+    created distribution.
+
+    This test uses the symmetric, fractional Held Karp solution.
+    """
+    import networkx.algorithms.approximation.traveling_salesman as tsp
+
+    pytest.importorskip("numpy")
+    pytest.importorskip("scipy")
+
+    z_star = {
+        (0, 1): 5 / 12,
+        (0, 2): 5 / 12,
+        (0, 5): 5 / 6,
+        (1, 0): 5 / 12,
+        (1, 2): 1 / 3,
+        (1, 4): 5 / 6,
+        (2, 0): 5 / 12,
+        (2, 1): 1 / 3,
+        (2, 3): 5 / 6,
+        (3, 2): 5 / 6,
+        (3, 4): 1 / 3,
+        (3, 5): 1 / 2,
+        (4, 1): 5 / 6,
+        (4, 3): 1 / 3,
+        (4, 5): 1 / 2,
+        (5, 0): 5 / 6,
+        (5, 3): 1 / 2,
+        (5, 4): 1 / 2,
+    }
+
+    solution_gamma = {
+        (0, 1): -0.6383,
+        (0, 2): -0.6827,
+        (0, 5): 0,
+        (1, 2): -1.0781,
+        (1, 4): 0,
+        (2, 3): 0,
+        (5, 3): -0.2820,
+        (5, 4): -0.3327,
+        (4, 3): -0.9927,
+    }
+
+    # The undirected support of z_star
+    G = nx.MultiGraph()
+    for u, v in z_star:
+        if (u, v) in G.edges or (v, u) in G.edges:
+            continue
+        G.add_edge(u, v)
+
+    gamma = tsp.spanning_tree_distribution(G, z_star)
+
+    assert {key: round(gamma[key], 4) for key in gamma} == solution_gamma
+
+
+def test_asadpour_tsp():
+    """
+    Test the complete asadpour tsp algorithm with the fractional, symmetric
+    Held Karp solution. This test also uses an incomplete graph as input.
+    """
+    # This version of Figure 2 has all of the edge weights multiplied by 100
+    # and the 0 weight edges have a weight of 1.
+    pytest.importorskip("numpy")
+    pytest.importorskip("scipy")
+
+    edge_list = [
+        (0, 1, 100),
+        (0, 2, 100),
+        (0, 5, 1),
+        (1, 2, 100),
+        (1, 4, 1),
+        (2, 3, 1),
+        (3, 4, 100),
+        (3, 5, 100),
+        (4, 5, 100),
+        (1, 0, 100),
+        (2, 0, 100),
+        (5, 0, 1),
+        (2, 1, 100),
+        (4, 1, 1),
+        (3, 2, 1),
+        (4, 3, 100),
+        (5, 3, 100),
+        (5, 4, 100),
+    ]
+
+    G = nx.DiGraph()
+    G.add_weighted_edges_from(edge_list)
+
+    tour = nx_app.traveling_salesman_problem(
+        G, weight="weight", method=nx_app.asadpour_atsp, seed=19
+    )
+
+    # Check that the returned list is a valid tour. Because this is an
+    # incomplete graph, the conditions are not as strict. We need the tour to
+    #
+    #   Start and end at the same node
+    #   Pass through every vertex at least once
+    #   Have a total cost at most ln(6) / ln(ln(6)) = 3.0723 times the optimal
+    #
+    # For the second condition it is possible to have the tour pass through the
+    # same vertex more than once. Imagine that the tour on the complete version takes
+    # an edge not in the original graph. In the output this is substituted with
+    # the shortest path between those vertices, allowing vertices to appear more
+    # than once.
+    #
+    # Even though we are using a fixed seed, multiple tours have been known to
+    # be returned. The first two are from the original development of this test,
+    # and the third one from issue #5913 on GitHub. If other tours are returned,
+    # add it on the list of expected tours.
+    expected_tours = [
+        [1, 4, 5, 0, 2, 3, 2, 1],
+        [3, 2, 0, 1, 4, 5, 3],
+        [3, 2, 1, 0, 5, 4, 3],
+    ]
+
+    assert tour in expected_tours
+
+
+def test_asadpour_real_world():
+    """
+    This test uses airline prices between the six largest cities in the US.
+
+        * New York City -> JFK
+        * Los Angeles -> LAX
+        * Chicago -> ORD
+        * Houston -> IAH
+        * Phoenix -> PHX
+        * Philadelphia -> PHL
+
+    Flight prices from August 2021 using Delta or American airlines to get
+    nonstop flight. The brute force solution found the optimal tour to cost $872
+
+    This test also uses the `source` keyword argument to ensure that the tour
+    always starts at city 0.
+    """
+    np = pytest.importorskip("numpy")
+    pytest.importorskip("scipy")
+
+    G_array = np.array(
+        [
+            # JFK  LAX  ORD  IAH  PHX  PHL
+            [0, 243, 199, 208, 169, 183],  # JFK
+            [277, 0, 217, 123, 127, 252],  # LAX
+            [297, 197, 0, 197, 123, 177],  # ORD
+            [303, 169, 197, 0, 117, 117],  # IAH
+            [257, 127, 160, 117, 0, 319],  # PHX
+            [183, 332, 217, 117, 319, 0],  # PHL
+        ]
+    )
+
+    node_list = ["JFK", "LAX", "ORD", "IAH", "PHX", "PHL"]
+
+    expected_tours = [
+        ["JFK", "LAX", "PHX", "ORD", "IAH", "PHL", "JFK"],
+        ["JFK", "ORD", "PHX", "LAX", "IAH", "PHL", "JFK"],
+    ]
+
+    G = nx.from_numpy_array(G_array, nodelist=node_list, create_using=nx.DiGraph)
+
+    tour = nx_app.traveling_salesman_problem(
+        G, weight="weight", method=nx_app.asadpour_atsp, seed=37, source="JFK"
+    )
+
+    assert tour in expected_tours
+
+
+def test_asadpour_real_world_path():
+    """
+    This test uses airline prices between the six largest cities in the US. This
+    time using a path, not a cycle.
+
+        * New York City -> JFK
+        * Los Angeles -> LAX
+        * Chicago -> ORD
+        * Houston -> IAH
+        * Phoenix -> PHX
+        * Philadelphia -> PHL
+
+    Flight prices from August 2021 using Delta or American airlines to get
+    nonstop flight. The brute force solution found the optimal tour to cost $872
+    """
+    np = pytest.importorskip("numpy")
+    pytest.importorskip("scipy")
+
+    G_array = np.array(
+        [
+            # JFK  LAX  ORD  IAH  PHX  PHL
+            [0, 243, 199, 208, 169, 183],  # JFK
+            [277, 0, 217, 123, 127, 252],  # LAX
+            [297, 197, 0, 197, 123, 177],  # ORD
+            [303, 169, 197, 0, 117, 117],  # IAH
+            [257, 127, 160, 117, 0, 319],  # PHX
+            [183, 332, 217, 117, 319, 0],  # PHL
+        ]
+    )
+
+    node_list = ["JFK", "LAX", "ORD", "IAH", "PHX", "PHL"]
+
+    expected_paths = [
+        ["ORD", "PHX", "LAX", "IAH", "PHL", "JFK"],
+        ["JFK", "PHL", "IAH", "ORD", "PHX", "LAX"],
+    ]
+
+    G = nx.from_numpy_array(G_array, nodelist=node_list, create_using=nx.DiGraph)
+
+    path = nx_app.traveling_salesman_problem(
+        G, weight="weight", cycle=False, method=nx_app.asadpour_atsp, seed=56
+    )
+
+    assert path in expected_paths
+
+
+def test_asadpour_disconnected_graph():
+    """
+    Test that the proper exception is raised when asadpour_atsp is given a
+    disconnected graph.
+    """
+
+    G = nx.complete_graph(4, create_using=nx.DiGraph)
+    # have to set edge weights so that if the exception is not raised, the
+    # function will complete and we will fail the test
+    nx.set_edge_attributes(G, 1, "weight")
+    G.add_node(5)
+
+    pytest.raises(nx.NetworkXError, nx_app.asadpour_atsp, G)
+
+
+def test_asadpour_incomplete_graph():
+    """
+    Test that the proper exception is raised when asadpour_atsp is given an
+    incomplete graph
+    """
+
+    G = nx.complete_graph(4, create_using=nx.DiGraph)
+    # have to set edge weights so that if the exception is not raised, the
+    # function will complete and we will fail the test
+    nx.set_edge_attributes(G, 1, "weight")
+    G.remove_edge(0, 1)
+
+    pytest.raises(nx.NetworkXError, nx_app.asadpour_atsp, G)
+
+
+def test_asadpour_empty_graph():
+    """
+    Test the asadpour_atsp function with an empty graph
+    """
+    G = nx.DiGraph()
+
+    pytest.raises(nx.NetworkXError, nx_app.asadpour_atsp, G)
+
+
+@pytest.mark.slow
+def test_asadpour_integral_held_karp():
+    """
+    This test uses an integral held karp solution and the held karp function
+    will return a graph rather than a dict, bypassing most of the asadpour
+    algorithm.
+
+    At first glance, this test probably doesn't look like it ensures that we
+    skip the rest of the asadpour algorithm, but it does. We are not fixing a
+    see for the random number generator, so if we sample any spanning trees
+    the approximation would be different basically every time this test is
+    executed but it is not since held karp is deterministic and we do not
+    reach the portion of the code with the dependence on random numbers.
+    """
+    np = pytest.importorskip("numpy")
+
+    G_array = np.array(
+        [
+            [0, 26, 63, 59, 69, 31, 41],
+            [62, 0, 91, 53, 75, 87, 47],
+            [47, 82, 0, 90, 15, 9, 18],
+            [68, 19, 5, 0, 58, 34, 93],
+            [11, 58, 53, 55, 0, 61, 79],
+            [88, 75, 13, 76, 98, 0, 40],
+            [41, 61, 55, 88, 46, 45, 0],
+        ]
+    )
+
+    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
+
+    for _ in range(2):
+        tour = nx_app.traveling_salesman_problem(G, method=nx_app.asadpour_atsp)
+
+        assert [1, 3, 2, 5, 2, 6, 4, 0, 1] == tour
+
+
+def test_directed_tsp_impossible():
+    """
+    Test the asadpour algorithm with a graph without a hamiltonian circuit
+    """
+    pytest.importorskip("numpy")
+
+    # In this graph, once we leave node 0 we cannot return
+    edges = [
+        (0, 1, 10),
+        (0, 2, 11),
+        (0, 3, 12),
+        (1, 2, 4),
+        (1, 3, 6),
+        (2, 1, 3),
+        (2, 3, 2),
+        (3, 1, 5),
+        (3, 2, 1),
+    ]
+
+    G = nx.DiGraph()
+    G.add_weighted_edges_from(edges)
+
+    pytest.raises(nx.NetworkXError, nx_app.traveling_salesman_problem, G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py
new file mode 100644
index 00000000..461b0f2e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py
@@ -0,0 +1,280 @@
+import itertools
+
+import networkx as nx
+from networkx.algorithms.approximation import (
+    treewidth_min_degree,
+    treewidth_min_fill_in,
+)
+from networkx.algorithms.approximation.treewidth import (
+    MinDegreeHeuristic,
+    min_fill_in_heuristic,
+)
+
+
+def is_tree_decomp(graph, decomp):
+    """Check if the given tree decomposition is valid."""
+    for x in graph.nodes():
+        appear_once = False
+        for bag in decomp.nodes():
+            if x in bag:
+                appear_once = True
+                break
+        assert appear_once
+
+    # Check if each connected pair of nodes are at least once together in a bag
+    for x, y in graph.edges():
+        appear_together = False
+        for bag in decomp.nodes():
+            if x in bag and y in bag:
+                appear_together = True
+                break
+        assert appear_together
+
+    # Check if the nodes associated with vertex v form a connected subset of T
+    for v in graph.nodes():
+        subset = []
+        for bag in decomp.nodes():
+            if v in bag:
+                subset.append(bag)
+        sub_graph = decomp.subgraph(subset)
+        assert nx.is_connected(sub_graph)
+
+
+class TestTreewidthMinDegree:
+    """Unit tests for the min_degree function"""
+
+    @classmethod
+    def setup_class(cls):
+        """Setup for different kinds of trees"""
+        cls.complete = nx.Graph()
+        cls.complete.add_edge(1, 2)
+        cls.complete.add_edge(2, 3)
+        cls.complete.add_edge(1, 3)
+
+        cls.small_tree = nx.Graph()
+        cls.small_tree.add_edge(1, 3)
+        cls.small_tree.add_edge(4, 3)
+        cls.small_tree.add_edge(2, 3)
+        cls.small_tree.add_edge(3, 5)
+        cls.small_tree.add_edge(5, 6)
+        cls.small_tree.add_edge(5, 7)
+        cls.small_tree.add_edge(6, 7)
+
+        cls.deterministic_graph = nx.Graph()
+        cls.deterministic_graph.add_edge(0, 1)  # deg(0) = 1
+
+        cls.deterministic_graph.add_edge(1, 2)  # deg(1) = 2
+
+        cls.deterministic_graph.add_edge(2, 3)
+        cls.deterministic_graph.add_edge(2, 4)  # deg(2) = 3
+
+        cls.deterministic_graph.add_edge(3, 4)
+        cls.deterministic_graph.add_edge(3, 5)
+        cls.deterministic_graph.add_edge(3, 6)  # deg(3) = 4
+
+        cls.deterministic_graph.add_edge(4, 5)
+        cls.deterministic_graph.add_edge(4, 6)
+        cls.deterministic_graph.add_edge(4, 7)  # deg(4) = 5
+
+        cls.deterministic_graph.add_edge(5, 6)
+        cls.deterministic_graph.add_edge(5, 7)
+        cls.deterministic_graph.add_edge(5, 8)
+        cls.deterministic_graph.add_edge(5, 9)  # deg(5) = 6
+
+        cls.deterministic_graph.add_edge(6, 7)
+        cls.deterministic_graph.add_edge(6, 8)
+        cls.deterministic_graph.add_edge(6, 9)  # deg(6) = 6
+
+        cls.deterministic_graph.add_edge(7, 8)
+        cls.deterministic_graph.add_edge(7, 9)  # deg(7) = 5
+
+        cls.deterministic_graph.add_edge(8, 9)  # deg(8) = 4
+
+    def test_petersen_graph(self):
+        """Test Petersen graph tree decomposition result"""
+        G = nx.petersen_graph()
+        _, decomp = treewidth_min_degree(G)
+        is_tree_decomp(G, decomp)
+
+    def test_small_tree_treewidth(self):
+        """Test small tree
+
+        Test if the computed treewidth of the known self.small_tree is 2.
+        As we know which value we can expect from our heuristic, values other
+        than two are regressions
+        """
+        G = self.small_tree
+        # the order of removal should be [1,2,4]3[5,6,7]
+        # (with [] denoting any order of the containing nodes)
+        # resulting in treewidth 2 for the heuristic
+        treewidth, _ = treewidth_min_fill_in(G)
+        assert treewidth == 2
+
+    def test_heuristic_abort(self):
+        """Test heuristic abort condition for fully connected graph"""
+        graph = {}
+        for u in self.complete:
+            graph[u] = set()
+            for v in self.complete[u]:
+                if u != v:  # ignore self-loop
+                    graph[u].add(v)
+
+        deg_heuristic = MinDegreeHeuristic(graph)
+        node = deg_heuristic.best_node(graph)
+        if node is None:
+            pass
+        else:
+            assert False
+
+    def test_empty_graph(self):
+        """Test empty graph"""
+        G = nx.Graph()
+        _, _ = treewidth_min_degree(G)
+
+    def test_two_component_graph(self):
+        G = nx.Graph()
+        G.add_node(1)
+        G.add_node(2)
+        treewidth, _ = treewidth_min_degree(G)
+        assert treewidth == 0
+
+    def test_not_sortable_nodes(self):
+        G = nx.Graph([(0, "a")])
+        treewidth_min_degree(G)
+
+    def test_heuristic_first_steps(self):
+        """Test first steps of min_degree heuristic"""
+        graph = {
+            n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph
+        }
+        deg_heuristic = MinDegreeHeuristic(graph)
+        elim_node = deg_heuristic.best_node(graph)
+        print(f"Graph {graph}:")
+        steps = []
+
+        while elim_node is not None:
+            print(f"Removing {elim_node}:")
+            steps.append(elim_node)
+            nbrs = graph[elim_node]
+
+            for u, v in itertools.permutations(nbrs, 2):
+                if v not in graph[u]:
+                    graph[u].add(v)
+
+            for u in graph:
+                if elim_node in graph[u]:
+                    graph[u].remove(elim_node)
+
+            del graph[elim_node]
+            print(f"Graph {graph}:")
+            elim_node = deg_heuristic.best_node(graph)
+
+        # check only the first 5 elements for equality
+        assert steps[:5] == [0, 1, 2, 3, 4]
+
+
+class TestTreewidthMinFillIn:
+    """Unit tests for the treewidth_min_fill_in function."""
+
+    @classmethod
+    def setup_class(cls):
+        """Setup for different kinds of trees"""
+        cls.complete = nx.Graph()
+        cls.complete.add_edge(1, 2)
+        cls.complete.add_edge(2, 3)
+        cls.complete.add_edge(1, 3)
+
+        cls.small_tree = nx.Graph()
+        cls.small_tree.add_edge(1, 2)
+        cls.small_tree.add_edge(2, 3)
+        cls.small_tree.add_edge(3, 4)
+        cls.small_tree.add_edge(1, 4)
+        cls.small_tree.add_edge(2, 4)
+        cls.small_tree.add_edge(4, 5)
+        cls.small_tree.add_edge(5, 6)
+        cls.small_tree.add_edge(5, 7)
+        cls.small_tree.add_edge(6, 7)
+
+        cls.deterministic_graph = nx.Graph()
+        cls.deterministic_graph.add_edge(1, 2)
+        cls.deterministic_graph.add_edge(1, 3)
+        cls.deterministic_graph.add_edge(3, 4)
+        cls.deterministic_graph.add_edge(2, 4)
+        cls.deterministic_graph.add_edge(3, 5)
+        cls.deterministic_graph.add_edge(4, 5)
+        cls.deterministic_graph.add_edge(3, 6)
+        cls.deterministic_graph.add_edge(5, 6)
+
+    def test_petersen_graph(self):
+        """Test Petersen graph tree decomposition result"""
+        G = nx.petersen_graph()
+        _, decomp = treewidth_min_fill_in(G)
+        is_tree_decomp(G, decomp)
+
+    def test_small_tree_treewidth(self):
+        """Test if the computed treewidth of the known self.small_tree is 2"""
+        G = self.small_tree
+        # the order of removal should be [1,2,4]3[5,6,7]
+        # (with [] denoting any order of the containing nodes)
+        # resulting in treewidth 2 for the heuristic
+        treewidth, _ = treewidth_min_fill_in(G)
+        assert treewidth == 2
+
+    def test_heuristic_abort(self):
+        """Test if min_fill_in returns None for fully connected graph"""
+        graph = {}
+        for u in self.complete:
+            graph[u] = set()
+            for v in self.complete[u]:
+                if u != v:  # ignore self-loop
+                    graph[u].add(v)
+        next_node = min_fill_in_heuristic(graph)
+        if next_node is None:
+            pass
+        else:
+            assert False
+
+    def test_empty_graph(self):
+        """Test empty graph"""
+        G = nx.Graph()
+        _, _ = treewidth_min_fill_in(G)
+
+    def test_two_component_graph(self):
+        G = nx.Graph()
+        G.add_node(1)
+        G.add_node(2)
+        treewidth, _ = treewidth_min_fill_in(G)
+        assert treewidth == 0
+
+    def test_not_sortable_nodes(self):
+        G = nx.Graph([(0, "a")])
+        treewidth_min_fill_in(G)
+
+    def test_heuristic_first_steps(self):
+        """Test first steps of min_fill_in heuristic"""
+        graph = {
+            n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph
+        }
+        print(f"Graph {graph}:")
+        elim_node = min_fill_in_heuristic(graph)
+        steps = []
+
+        while elim_node is not None:
+            print(f"Removing {elim_node}:")
+            steps.append(elim_node)
+            nbrs = graph[elim_node]
+
+            for u, v in itertools.permutations(nbrs, 2):
+                if v not in graph[u]:
+                    graph[u].add(v)
+
+            for u in graph:
+                if elim_node in graph[u]:
+                    graph[u].remove(elim_node)
+
+            del graph[elim_node]
+            print(f"Graph {graph}:")
+            elim_node = min_fill_in_heuristic(graph)
+
+        # check only the first 2 elements for equality
+        assert steps[:2] == [6, 5]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_vertex_cover.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_vertex_cover.py
new file mode 100644
index 00000000..5cc5a38d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_vertex_cover.py
@@ -0,0 +1,68 @@
+import networkx as nx
+from networkx.algorithms.approximation import min_weighted_vertex_cover
+
+
+def is_cover(G, node_cover):
+    return all({u, v} & node_cover for u, v in G.edges())
+
+
+class TestMWVC:
+    """Unit tests for the approximate minimum weighted vertex cover
+    function,
+    :func:`~networkx.algorithms.approximation.vertex_cover.min_weighted_vertex_cover`.
+
+    """
+
+    def test_unweighted_directed(self):
+        # Create a star graph in which half the nodes are directed in
+        # and half are directed out.
+        G = nx.DiGraph()
+        G.add_edges_from((0, v) for v in range(1, 26))
+        G.add_edges_from((v, 0) for v in range(26, 51))
+        cover = min_weighted_vertex_cover(G)
+        assert 1 == len(cover)
+        assert is_cover(G, cover)
+
+    def test_unweighted_undirected(self):
+        # create a simple star graph
+        size = 50
+        sg = nx.star_graph(size)
+        cover = min_weighted_vertex_cover(sg)
+        assert 1 == len(cover)
+        assert is_cover(sg, cover)
+
+    def test_weighted(self):
+        wg = nx.Graph()
+        wg.add_node(0, weight=10)
+        wg.add_node(1, weight=1)
+        wg.add_node(2, weight=1)
+        wg.add_node(3, weight=1)
+        wg.add_node(4, weight=1)
+
+        wg.add_edge(0, 1)
+        wg.add_edge(0, 2)
+        wg.add_edge(0, 3)
+        wg.add_edge(0, 4)
+
+        wg.add_edge(1, 2)
+        wg.add_edge(2, 3)
+        wg.add_edge(3, 4)
+        wg.add_edge(4, 1)
+
+        cover = min_weighted_vertex_cover(wg, weight="weight")
+        csum = sum(wg.nodes[node]["weight"] for node in cover)
+        assert 4 == csum
+        assert is_cover(wg, cover)
+
+    def test_unweighted_self_loop(self):
+        slg = nx.Graph()
+        slg.add_node(0)
+        slg.add_node(1)
+        slg.add_node(2)
+
+        slg.add_edge(0, 1)
+        slg.add_edge(2, 2)
+
+        cover = min_weighted_vertex_cover(slg)
+        assert 2 == len(cover)
+        assert is_cover(slg, cover)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/traveling_salesman.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/traveling_salesman.py
new file mode 100644
index 00000000..2080c99a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/traveling_salesman.py
@@ -0,0 +1,1501 @@
+"""
+=================================
+Travelling Salesman Problem (TSP)
+=================================
+
+Implementation of approximate algorithms
+for solving and approximating the TSP problem.
+
+Categories of algorithms which are implemented:
+
+- Christofides (provides a 3/2-approximation of TSP)
+- Greedy
+- Simulated Annealing (SA)
+- Threshold Accepting (TA)
+- Asadpour Asymmetric Traveling Salesman Algorithm
+
+The Travelling Salesman Problem tries to find, given the weight
+(distance) between all points where a salesman has to visit, the
+route so that:
+
+- The total distance (cost) which the salesman travels is minimized.
+- The salesman returns to the starting point.
+- Note that for a complete graph, the salesman visits each point once.
+
+The function `travelling_salesman_problem` allows for incomplete
+graphs by finding all-pairs shortest paths, effectively converting
+the problem to a complete graph problem. It calls one of the
+approximate methods on that problem and then converts the result
+back to the original graph using the previously found shortest paths.
+
+TSP is an NP-hard problem in combinatorial optimization,
+important in operations research and theoretical computer science.
+
+http://en.wikipedia.org/wiki/Travelling_salesman_problem
+"""
+
+import math
+
+import networkx as nx
+from networkx.algorithms.tree.mst import random_spanning_tree
+from networkx.utils import not_implemented_for, pairwise, py_random_state
+
+__all__ = [
+    "traveling_salesman_problem",
+    "christofides",
+    "asadpour_atsp",
+    "greedy_tsp",
+    "simulated_annealing_tsp",
+    "threshold_accepting_tsp",
+]
+
+
+def swap_two_nodes(soln, seed):
+    """Swap two nodes in `soln` to give a neighbor solution.
+
+    Parameters
+    ----------
+    soln : list of nodes
+        Current cycle of nodes
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    list
+        The solution after move is applied. (A neighbor solution.)
+
+    Notes
+    -----
+        This function assumes that the incoming list `soln` is a cycle
+        (that the first and last element are the same) and also that
+        we don't want any move to change the first node in the list
+        (and thus not the last node either).
+
+        The input list is changed as well as returned. Make a copy if needed.
+
+    See Also
+    --------
+        move_one_node
+    """
+    a, b = seed.sample(range(1, len(soln) - 1), k=2)
+    soln[a], soln[b] = soln[b], soln[a]
+    return soln
+
+
+def move_one_node(soln, seed):
+    """Move one node to another position to give a neighbor solution.
+
+    The node to move and the position to move to are chosen randomly.
+    The first and last nodes are left untouched as soln must be a cycle
+    starting at that node.
+
+    Parameters
+    ----------
+    soln : list of nodes
+        Current cycle of nodes
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    list
+        The solution after move is applied. (A neighbor solution.)
+
+    Notes
+    -----
+        This function assumes that the incoming list `soln` is a cycle
+        (that the first and last element are the same) and also that
+        we don't want any move to change the first node in the list
+        (and thus not the last node either).
+
+        The input list is changed as well as returned. Make a copy if needed.
+
+    See Also
+    --------
+        swap_two_nodes
+    """
+    a, b = seed.sample(range(1, len(soln) - 1), k=2)
+    soln.insert(b, soln.pop(a))
+    return soln
+
+
+@not_implemented_for("directed")
+@nx._dispatchable(edge_attrs="weight")
+def christofides(G, weight="weight", tree=None):
+    """Approximate a solution of the traveling salesman problem
+
+    Compute a 3/2-approximation of the traveling salesman problem
+    in a complete undirected graph using Christofides [1]_ algorithm.
+
+    Parameters
+    ----------
+    G : Graph
+        `G` should be a complete weighted undirected graph.
+        The distance between all pairs of nodes should be included.
+
+    weight : string, optional (default="weight")
+        Edge data key corresponding to the edge weight.
+        If any edge does not have this attribute the weight is set to 1.
+
+    tree : NetworkX graph or None (default: None)
+        A minimum spanning tree of G. Or, if None, the minimum spanning
+        tree is computed using :func:`networkx.minimum_spanning_tree`
+
+    Returns
+    -------
+    list
+        List of nodes in `G` along a cycle with a 3/2-approximation of
+        the minimal Hamiltonian cycle.
+
+    References
+    ----------
+    .. [1] Christofides, Nicos. "Worst-case analysis of a new heuristic for
+       the travelling salesman problem." No. RR-388. Carnegie-Mellon Univ
+       Pittsburgh Pa Management Sciences Research Group, 1976.
+    """
+    # Remove selfloops if necessary
+    loop_nodes = nx.nodes_with_selfloops(G)
+    try:
+        node = next(loop_nodes)
+    except StopIteration:
+        pass
+    else:
+        G = G.copy()
+        G.remove_edge(node, node)
+        G.remove_edges_from((n, n) for n in loop_nodes)
+    # Check that G is a complete graph
+    N = len(G) - 1
+    # This check ignores selfloops which is what we want here.
+    if any(len(nbrdict) != N for n, nbrdict in G.adj.items()):
+        raise nx.NetworkXError("G must be a complete graph.")
+
+    if tree is None:
+        tree = nx.minimum_spanning_tree(G, weight=weight)
+    L = G.copy()
+    L.remove_nodes_from([v for v, degree in tree.degree if not (degree % 2)])
+    MG = nx.MultiGraph()
+    MG.add_edges_from(tree.edges)
+    edges = nx.min_weight_matching(L, weight=weight)
+    MG.add_edges_from(edges)
+    return _shortcutting(nx.eulerian_circuit(MG))
+
+
+def _shortcutting(circuit):
+    """Remove duplicate nodes in the path"""
+    nodes = []
+    for u, v in circuit:
+        if v in nodes:
+            continue
+        if not nodes:
+            nodes.append(u)
+        nodes.append(v)
+    nodes.append(nodes[0])
+    return nodes
+
+
+@nx._dispatchable(edge_attrs="weight")
+def traveling_salesman_problem(
+    G, weight="weight", nodes=None, cycle=True, method=None, **kwargs
+):
+    """Find the shortest path in `G` connecting specified nodes
+
+    This function allows approximate solution to the traveling salesman
+    problem on networks that are not complete graphs and/or where the
+    salesman does not need to visit all nodes.
+
+    This function proceeds in two steps. First, it creates a complete
+    graph using the all-pairs shortest_paths between nodes in `nodes`.
+    Edge weights in the new graph are the lengths of the paths
+    between each pair of nodes in the original graph.
+    Second, an algorithm (default: `christofides` for undirected and
+    `asadpour_atsp` for directed) is used to approximate the minimal Hamiltonian
+    cycle on this new graph. The available algorithms are:
+
+     - christofides
+     - greedy_tsp
+     - simulated_annealing_tsp
+     - threshold_accepting_tsp
+     - asadpour_atsp
+
+    Once the Hamiltonian Cycle is found, this function post-processes to
+    accommodate the structure of the original graph. If `cycle` is ``False``,
+    the biggest weight edge is removed to make a Hamiltonian path.
+    Then each edge on the new complete graph used for that analysis is
+    replaced by the shortest_path between those nodes on the original graph.
+    If the input graph `G` includes edges with weights that do not adhere to
+    the triangle inequality, such as when `G` is not a complete graph (i.e.
+    length of non-existent edges is infinity), then the returned path may
+    contain some repeating nodes (other than the starting node).
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A possibly weighted graph
+
+    nodes : collection of nodes (default=G.nodes)
+        collection (list, set, etc.) of nodes to visit
+
+    weight : string, optional (default="weight")
+        Edge data key corresponding to the edge weight.
+        If any edge does not have this attribute the weight is set to 1.
+
+    cycle : bool (default: True)
+        Indicates whether a cycle should be returned, or a path.
+        Note: the cycle is the approximate minimal cycle.
+        The path simply removes the biggest edge in that cycle.
+
+    method : function (default: None)
+        A function that returns a cycle on all nodes and approximates
+        the solution to the traveling salesman problem on a complete
+        graph. The returned cycle is then used to find a corresponding
+        solution on `G`. `method` should be callable; take inputs
+        `G`, and `weight`; and return a list of nodes along the cycle.
+
+        Provided options include :func:`christofides`, :func:`greedy_tsp`,
+        :func:`simulated_annealing_tsp` and :func:`threshold_accepting_tsp`.
+
+        If `method is None`: use :func:`christofides` for undirected `G` and
+        :func:`asadpour_atsp` for directed `G`.
+
+    **kwargs : dict
+        Other keyword arguments to be passed to the `method` function passed in.
+
+    Returns
+    -------
+    list
+        List of nodes in `G` along a path with an approximation of the minimal
+        path through `nodes`.
+
+    Raises
+    ------
+    NetworkXError
+        If `G` is a directed graph it has to be strongly connected or the
+        complete version cannot be generated.
+
+    Examples
+    --------
+    >>> tsp = nx.approximation.traveling_salesman_problem
+    >>> G = nx.cycle_graph(9)
+    >>> G[4][5]["weight"] = 5  # all other weights are 1
+    >>> tsp(G, nodes=[3, 6])
+    [3, 2, 1, 0, 8, 7, 6, 7, 8, 0, 1, 2, 3]
+    >>> path = tsp(G, cycle=False)
+    >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
+    True
+
+    While no longer required, you can still build (curry) your own function
+    to provide parameter values to the methods.
+
+    >>> SA_tsp = nx.approximation.simulated_annealing_tsp
+    >>> method = lambda G, weight: SA_tsp(G, "greedy", weight=weight, temp=500)
+    >>> path = tsp(G, cycle=False, method=method)
+    >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
+    True
+
+    Otherwise, pass other keyword arguments directly into the tsp function.
+
+    >>> path = tsp(
+    ...     G,
+    ...     cycle=False,
+    ...     method=nx.approximation.simulated_annealing_tsp,
+    ...     init_cycle="greedy",
+    ...     temp=500,
+    ... )
+    >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
+    True
+    """
+    if method is None:
+        if G.is_directed():
+            method = asadpour_atsp
+        else:
+            method = christofides
+    if nodes is None:
+        nodes = list(G.nodes)
+
+    dist = {}
+    path = {}
+    for n, (d, p) in nx.all_pairs_dijkstra(G, weight=weight):
+        dist[n] = d
+        path[n] = p
+
+    if G.is_directed():
+        # If the graph is not strongly connected, raise an exception
+        if not nx.is_strongly_connected(G):
+            raise nx.NetworkXError("G is not strongly connected")
+        GG = nx.DiGraph()
+    else:
+        GG = nx.Graph()
+    for u in nodes:
+        for v in nodes:
+            if u == v:
+                continue
+            GG.add_edge(u, v, weight=dist[u][v])
+
+    best_GG = method(GG, weight=weight, **kwargs)
+
+    if not cycle:
+        # find and remove the biggest edge
+        (u, v) = max(pairwise(best_GG), key=lambda x: dist[x[0]][x[1]])
+        pos = best_GG.index(u) + 1
+        while best_GG[pos] != v:
+            pos = best_GG[pos:].index(u) + 1
+        best_GG = best_GG[pos:-1] + best_GG[:pos]
+
+    best_path = []
+    for u, v in pairwise(best_GG):
+        best_path.extend(path[u][v][:-1])
+    best_path.append(v)
+    return best_path
+
+
+@not_implemented_for("undirected")
+@py_random_state(2)
+@nx._dispatchable(edge_attrs="weight", mutates_input=True)
+def asadpour_atsp(G, weight="weight", seed=None, source=None):
+    """
+    Returns an approximate solution to the traveling salesman problem.
+
+    This approximate solution is one of the best known approximations for the
+    asymmetric traveling salesman problem developed by Asadpour et al,
+    [1]_. The algorithm first solves the Held-Karp relaxation to find a lower
+    bound for the weight of the cycle. Next, it constructs an exponential
+    distribution of undirected spanning trees where the probability of an
+    edge being in the tree corresponds to the weight of that edge using a
+    maximum entropy rounding scheme. Next we sample that distribution
+    $2 \\lceil \\ln n \\rceil$ times and save the minimum sampled tree once the
+    direction of the arcs is added back to the edges. Finally, we augment
+    then short circuit that graph to find the approximate tour for the
+    salesman.
+
+    Parameters
+    ----------
+    G : nx.DiGraph
+        The graph should be a complete weighted directed graph. The
+        distance between all pairs of nodes should be included and the triangle
+        inequality should hold. That is, the direct edge between any two nodes
+        should be the path of least cost.
+
+    weight : string, optional (default="weight")
+        Edge data key corresponding to the edge weight.
+        If any edge does not have this attribute the weight is set to 1.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    source : node label (default=`None`)
+        If given, return the cycle starting and ending at the given node.
+
+    Returns
+    -------
+    cycle : list of nodes
+        Returns the cycle (list of nodes) that a salesman can follow to minimize
+        the total weight of the trip.
+
+    Raises
+    ------
+    NetworkXError
+        If `G` is not complete or has less than two nodes, the algorithm raises
+        an exception.
+
+    NetworkXError
+        If `source` is not `None` and is not a node in `G`, the algorithm raises
+        an exception.
+
+    NetworkXNotImplemented
+        If `G` is an undirected graph.
+
+    References
+    ----------
+    .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi,
+       An o(log n/log log n)-approximation algorithm for the asymmetric
+       traveling salesman problem, Operations research, 65 (2017),
+       pp. 1043–1061
+
+    Examples
+    --------
+    >>> import networkx as nx
+    >>> import networkx.algorithms.approximation as approx
+    >>> G = nx.complete_graph(3, create_using=nx.DiGraph)
+    >>> nx.set_edge_attributes(
+    ...     G,
+    ...     {(0, 1): 2, (1, 2): 2, (2, 0): 2, (0, 2): 1, (2, 1): 1, (1, 0): 1},
+    ...     "weight",
+    ... )
+    >>> tour = approx.asadpour_atsp(G, source=0)
+    >>> tour
+    [0, 2, 1, 0]
+    """
+    from math import ceil, exp
+    from math import log as ln
+
+    # Check that G is a complete graph
+    N = len(G) - 1
+    if N < 2:
+        raise nx.NetworkXError("G must have at least two nodes")
+    # This check ignores selfloops which is what we want here.
+    if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
+        raise nx.NetworkXError("G is not a complete DiGraph")
+    # Check that the source vertex, if given, is in the graph
+    if source is not None and source not in G.nodes:
+        raise nx.NetworkXError("Given source node not in G.")
+
+    opt_hk, z_star = held_karp_ascent(G, weight)
+
+    # Test to see if the ascent method found an integer solution or a fractional
+    # solution. If it is integral then z_star is a nx.Graph, otherwise it is
+    # a dict
+    if not isinstance(z_star, dict):
+        # Here we are using the shortcutting method to go from the list of edges
+        # returned from eulerian_circuit to a list of nodes
+        return _shortcutting(nx.eulerian_circuit(z_star, source=source))
+
+    # Create the undirected support of z_star
+    z_support = nx.MultiGraph()
+    for u, v in z_star:
+        if (u, v) not in z_support.edges:
+            edge_weight = min(G[u][v][weight], G[v][u][weight])
+            z_support.add_edge(u, v, **{weight: edge_weight})
+
+    # Create the exponential distribution of spanning trees
+    gamma = spanning_tree_distribution(z_support, z_star)
+
+    # Write the lambda values to the edges of z_support
+    z_support = nx.Graph(z_support)
+    lambda_dict = {(u, v): exp(gamma[(u, v)]) for u, v in z_support.edges()}
+    nx.set_edge_attributes(z_support, lambda_dict, "weight")
+    del gamma, lambda_dict
+
+    # Sample 2 * ceil( ln(n) ) spanning trees and record the minimum one
+    minimum_sampled_tree = None
+    minimum_sampled_tree_weight = math.inf
+    for _ in range(2 * ceil(ln(G.number_of_nodes()))):
+        sampled_tree = random_spanning_tree(z_support, "weight", seed=seed)
+        sampled_tree_weight = sampled_tree.size(weight)
+        if sampled_tree_weight < minimum_sampled_tree_weight:
+            minimum_sampled_tree = sampled_tree.copy()
+            minimum_sampled_tree_weight = sampled_tree_weight
+
+    # Orient the edges in that tree to keep the cost of the tree the same.
+    t_star = nx.MultiDiGraph()
+    for u, v, d in minimum_sampled_tree.edges(data=weight):
+        if d == G[u][v][weight]:
+            t_star.add_edge(u, v, **{weight: d})
+        else:
+            t_star.add_edge(v, u, **{weight: d})
+
+    # Find the node demands needed to neutralize the flow of t_star in G
+    node_demands = {n: t_star.out_degree(n) - t_star.in_degree(n) for n in t_star}
+    nx.set_node_attributes(G, node_demands, "demand")
+
+    # Find the min_cost_flow
+    flow_dict = nx.min_cost_flow(G, "demand")
+
+    # Build the flow into t_star
+    for source, values in flow_dict.items():
+        for target in values:
+            if (source, target) not in t_star.edges and values[target] > 0:
+                # IF values[target] > 0 we have to add that many edges
+                for _ in range(values[target]):
+                    t_star.add_edge(source, target)
+
+    # Return the shortcut eulerian circuit
+    circuit = nx.eulerian_circuit(t_star, source=source)
+    return _shortcutting(circuit)
+
+
+@nx._dispatchable(edge_attrs="weight", mutates_input=True, returns_graph=True)
+def held_karp_ascent(G, weight="weight"):
+    """
+    Minimizes the Held-Karp relaxation of the TSP for `G`
+
+    Solves the Held-Karp relaxation of the input complete digraph and scales
+    the output solution for use in the Asadpour [1]_ ASTP algorithm.
+
+    The Held-Karp relaxation defines the lower bound for solutions to the
+    ATSP, although it does return a fractional solution. This is used in the
+    Asadpour algorithm as an initial solution which is later rounded to a
+    integral tree within the spanning tree polytopes. This function solves
+    the relaxation with the branch and bound method in [2]_.
+
+    Parameters
+    ----------
+    G : nx.DiGraph
+        The graph should be a complete weighted directed graph.
+        The distance between all pairs of nodes should be included.
+
+    weight : string, optional (default="weight")
+        Edge data key corresponding to the edge weight.
+        If any edge does not have this attribute the weight is set to 1.
+
+    Returns
+    -------
+    OPT : float
+        The cost for the optimal solution to the Held-Karp relaxation
+    z : dict or nx.Graph
+        A symmetrized and scaled version of the optimal solution to the
+        Held-Karp relaxation for use in the Asadpour algorithm.
+
+        If an integral solution is found, then that is an optimal solution for
+        the ATSP problem and that is returned instead.
+
+    References
+    ----------
+    .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi,
+       An o(log n/log log n)-approximation algorithm for the asymmetric
+       traveling salesman problem, Operations research, 65 (2017),
+       pp. 1043–1061
+
+    .. [2] M. Held, R. M. Karp, The traveling-salesman problem and minimum
+           spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
+           pp.1138-1162
+    """
+    import numpy as np
+    from scipy import optimize
+
+    def k_pi():
+        """
+        Find the set of minimum 1-Arborescences for G at point pi.
+
+        Returns
+        -------
+        Set
+            The set of minimum 1-Arborescences
+        """
+        # Create a copy of G without vertex 1.
+        G_1 = G.copy()
+        minimum_1_arborescences = set()
+        minimum_1_arborescence_weight = math.inf
+
+        # node is node '1' in the Held and Karp paper
+        n = next(G.__iter__())
+        G_1.remove_node(n)
+
+        # Iterate over the spanning arborescences of the graph until we know
+        # that we have found the minimum 1-arborescences. My proposed strategy
+        # is to find the most extensive root to connect to from 'node 1' and
+        # the least expensive one. We then iterate over arborescences until
+        # the cost of the basic arborescence is the cost of the minimum one
+        # plus the difference between the most and least expensive roots,
+        # that way the cost of connecting 'node 1' will by definition not by
+        # minimum
+        min_root = {"node": None, weight: math.inf}
+        max_root = {"node": None, weight: -math.inf}
+        for u, v, d in G.edges(n, data=True):
+            if d[weight] < min_root[weight]:
+                min_root = {"node": v, weight: d[weight]}
+            if d[weight] > max_root[weight]:
+                max_root = {"node": v, weight: d[weight]}
+
+        min_in_edge = min(G.in_edges(n, data=True), key=lambda x: x[2][weight])
+        min_root[weight] = min_root[weight] + min_in_edge[2][weight]
+        max_root[weight] = max_root[weight] + min_in_edge[2][weight]
+
+        min_arb_weight = math.inf
+        for arb in nx.ArborescenceIterator(G_1):
+            arb_weight = arb.size(weight)
+            if min_arb_weight == math.inf:
+                min_arb_weight = arb_weight
+            elif arb_weight > min_arb_weight + max_root[weight] - min_root[weight]:
+                break
+            # We have to pick the root node of the arborescence for the out
+            # edge of the first vertex as that is the only node without an
+            # edge directed into it.
+            for N, deg in arb.in_degree:
+                if deg == 0:
+                    # root found
+                    arb.add_edge(n, N, **{weight: G[n][N][weight]})
+                    arb_weight += G[n][N][weight]
+                    break
+
+            # We can pick the minimum weight in-edge for the vertex with
+            # a cycle. If there are multiple edges with the same, minimum
+            # weight, We need to add all of them.
+            #
+            # Delete the edge (N, v) so that we cannot pick it.
+            edge_data = G[N][n]
+            G.remove_edge(N, n)
+            min_weight = min(G.in_edges(n, data=weight), key=lambda x: x[2])[2]
+            min_edges = [
+                (u, v, d) for u, v, d in G.in_edges(n, data=weight) if d == min_weight
+            ]
+            for u, v, d in min_edges:
+                new_arb = arb.copy()
+                new_arb.add_edge(u, v, **{weight: d})
+                new_arb_weight = arb_weight + d
+                # Check to see the weight of the arborescence, if it is a
+                # new minimum, clear all of the old potential minimum
+                # 1-arborescences and add this is the only one. If its
+                # weight is above the known minimum, do not add it.
+                if new_arb_weight < minimum_1_arborescence_weight:
+                    minimum_1_arborescences.clear()
+                    minimum_1_arborescence_weight = new_arb_weight
+                # We have a 1-arborescence, add it to the set
+                if new_arb_weight == minimum_1_arborescence_weight:
+                    minimum_1_arborescences.add(new_arb)
+            G.add_edge(N, n, **edge_data)
+
+        return minimum_1_arborescences
+
+    def direction_of_ascent():
+        """
+        Find the direction of ascent at point pi.
+
+        See [1]_ for more information.
+
+        Returns
+        -------
+        dict
+            A mapping from the nodes of the graph which represents the direction
+            of ascent.
+
+        References
+        ----------
+        .. [1] M. Held, R. M. Karp, The traveling-salesman problem and minimum
+           spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
+           pp.1138-1162
+        """
+        # 1. Set d equal to the zero n-vector.
+        d = {}
+        for n in G:
+            d[n] = 0
+        del n
+        # 2. Find a 1-Arborescence T^k such that k is in K(pi, d).
+        minimum_1_arborescences = k_pi()
+        while True:
+            # Reduce K(pi) to K(pi, d)
+            # Find the arborescence in K(pi) which increases the lest in
+            # direction d
+            min_k_d_weight = math.inf
+            min_k_d = None
+            for arborescence in minimum_1_arborescences:
+                weighted_cost = 0
+                for n, deg in arborescence.degree:
+                    weighted_cost += d[n] * (deg - 2)
+                if weighted_cost < min_k_d_weight:
+                    min_k_d_weight = weighted_cost
+                    min_k_d = arborescence
+
+            # 3. If sum of d_i * v_{i, k} is greater than zero, terminate
+            if min_k_d_weight > 0:
+                return d, min_k_d
+            # 4. d_i = d_i + v_{i, k}
+            for n, deg in min_k_d.degree:
+                d[n] += deg - 2
+            # Check that we do not need to terminate because the direction
+            # of ascent does not exist. This is done with linear
+            # programming.
+            c = np.full(len(minimum_1_arborescences), -1, dtype=int)
+            a_eq = np.empty((len(G) + 1, len(minimum_1_arborescences)), dtype=int)
+            b_eq = np.zeros(len(G) + 1, dtype=int)
+            b_eq[len(G)] = 1
+            for arb_count, arborescence in enumerate(minimum_1_arborescences):
+                n_count = len(G) - 1
+                for n, deg in arborescence.degree:
+                    a_eq[n_count][arb_count] = deg - 2
+                    n_count -= 1
+                a_eq[len(G)][arb_count] = 1
+            program_result = optimize.linprog(
+                c, A_eq=a_eq, b_eq=b_eq, method="highs-ipm"
+            )
+            # If the constants exist, then the direction of ascent doesn't
+            if program_result.success:
+                # There is no direction of ascent
+                return None, minimum_1_arborescences
+
+            # 5. GO TO 2
+
+    def find_epsilon(k, d):
+        """
+        Given the direction of ascent at pi, find the maximum distance we can go
+        in that direction.
+
+        Parameters
+        ----------
+        k_xy : set
+            The set of 1-arborescences which have the minimum rate of increase
+            in the direction of ascent
+
+        d : dict
+            The direction of ascent
+
+        Returns
+        -------
+        float
+            The distance we can travel in direction `d`
+        """
+        min_epsilon = math.inf
+        for e_u, e_v, e_w in G.edges(data=weight):
+            if (e_u, e_v) in k.edges:
+                continue
+            # Now, I have found a condition which MUST be true for the edges to
+            # be a valid substitute. The edge in the graph which is the
+            # substitute is the one with the same terminated end. This can be
+            # checked rather simply.
+            #
+            # Find the edge within k which is the substitute. Because k is a
+            # 1-arborescence, we know that they is only one such edges
+            # leading into every vertex.
+            if len(k.in_edges(e_v, data=weight)) > 1:
+                raise Exception
+            sub_u, sub_v, sub_w = next(k.in_edges(e_v, data=weight).__iter__())
+            k.add_edge(e_u, e_v, **{weight: e_w})
+            k.remove_edge(sub_u, sub_v)
+            if (
+                max(d for n, d in k.in_degree()) <= 1
+                and len(G) == k.number_of_edges()
+                and nx.is_weakly_connected(k)
+            ):
+                # Ascent method calculation
+                if d[sub_u] == d[e_u] or sub_w == e_w:
+                    # Revert to the original graph
+                    k.remove_edge(e_u, e_v)
+                    k.add_edge(sub_u, sub_v, **{weight: sub_w})
+                    continue
+                epsilon = (sub_w - e_w) / (d[e_u] - d[sub_u])
+                if 0 < epsilon < min_epsilon:
+                    min_epsilon = epsilon
+            # Revert to the original graph
+            k.remove_edge(e_u, e_v)
+            k.add_edge(sub_u, sub_v, **{weight: sub_w})
+
+        return min_epsilon
+
+    # I have to know that the elements in pi correspond to the correct elements
+    # in the direction of ascent, even if the node labels are not integers.
+    # Thus, I will use dictionaries to made that mapping.
+    pi_dict = {}
+    for n in G:
+        pi_dict[n] = 0
+    del n
+    original_edge_weights = {}
+    for u, v, d in G.edges(data=True):
+        original_edge_weights[(u, v)] = d[weight]
+    dir_ascent, k_d = direction_of_ascent()
+    while dir_ascent is not None:
+        max_distance = find_epsilon(k_d, dir_ascent)
+        for n, v in dir_ascent.items():
+            pi_dict[n] += max_distance * v
+        for u, v, d in G.edges(data=True):
+            d[weight] = original_edge_weights[(u, v)] + pi_dict[u]
+        dir_ascent, k_d = direction_of_ascent()
+    nx._clear_cache(G)
+    # k_d is no longer an individual 1-arborescence but rather a set of
+    # minimal 1-arborescences at the maximum point of the polytope and should
+    # be reflected as such
+    k_max = k_d
+
+    # Search for a cycle within k_max. If a cycle exists, return it as the
+    # solution
+    for k in k_max:
+        if len([n for n in k if k.degree(n) == 2]) == G.order():
+            # Tour found
+            # TODO: this branch does not restore original_edge_weights of G!
+            return k.size(weight), k
+
+    # Write the original edge weights back to G and every member of k_max at
+    # the maximum point. Also average the number of times that edge appears in
+    # the set of minimal 1-arborescences.
+    x_star = {}
+    size_k_max = len(k_max)
+    for u, v, d in G.edges(data=True):
+        edge_count = 0
+        d[weight] = original_edge_weights[(u, v)]
+        for k in k_max:
+            if (u, v) in k.edges():
+                edge_count += 1
+                k[u][v][weight] = original_edge_weights[(u, v)]
+        x_star[(u, v)] = edge_count / size_k_max
+    # Now symmetrize the edges in x_star and scale them according to (5) in
+    # reference [1]
+    z_star = {}
+    scale_factor = (G.order() - 1) / G.order()
+    for u, v in x_star:
+        frequency = x_star[(u, v)] + x_star[(v, u)]
+        if frequency > 0:
+            z_star[(u, v)] = scale_factor * frequency
+    del x_star
+    # Return the optimal weight and the z dict
+    return next(k_max.__iter__()).size(weight), z_star
+
+
+@nx._dispatchable
+def spanning_tree_distribution(G, z):
+    """
+    Find the asadpour exponential distribution of spanning trees.
+
+    Solves the Maximum Entropy Convex Program in the Asadpour algorithm [1]_
+    using the approach in section 7 to build an exponential distribution of
+    undirected spanning trees.
+
+    This algorithm ensures that the probability of any edge in a spanning
+    tree is proportional to the sum of the probabilities of the trees
+    containing that edge over the sum of the probabilities of all spanning
+    trees of the graph.
+
+    Parameters
+    ----------
+    G : nx.MultiGraph
+        The undirected support graph for the Held Karp relaxation
+
+    z : dict
+        The output of `held_karp_ascent()`, a scaled version of the Held-Karp
+        solution.
+
+    Returns
+    -------
+    gamma : dict
+        The probability distribution which approximately preserves the marginal
+        probabilities of `z`.
+    """
+    from math import exp
+    from math import log as ln
+
+    def q(e):
+        """
+        The value of q(e), as described in the Asadpour paper, is "the
+        probability that edge e will be included in a spanning tree T that is
+        chosen with probability proportional to exp(gamma(T))" which
+        basically means that it is the total probability of the edge appearing
+        across the whole distribution.
+
+        Parameters
+        ----------
+        e : tuple
+            The `(u, v)` tuple describing the edge we are interested in
+
+        Returns
+        -------
+        float
+            The probability that a spanning tree chosen according to the
+            current values of gamma will include edge `e`.
+        """
+        # Create the laplacian matrices
+        for u, v, d in G.edges(data=True):
+            d[lambda_key] = exp(gamma[(u, v)])
+        G_Kirchhoff = nx.total_spanning_tree_weight(G, lambda_key)
+        G_e = nx.contracted_edge(G, e, self_loops=False)
+        G_e_Kirchhoff = nx.total_spanning_tree_weight(G_e, lambda_key)
+
+        # Multiply by the weight of the contracted edge since it is not included
+        # in the total weight of the contracted graph.
+        return exp(gamma[(e[0], e[1])]) * G_e_Kirchhoff / G_Kirchhoff
+
+    # initialize gamma to the zero dict
+    gamma = {}
+    for u, v, _ in G.edges:
+        gamma[(u, v)] = 0
+
+    # set epsilon
+    EPSILON = 0.2
+
+    # pick an edge attribute name that is unlikely to be in the graph
+    lambda_key = "spanning_tree_distribution's secret attribute name for lambda"
+
+    while True:
+        # We need to know that no values of q_e are greater than
+        # (1 + epsilon) * z_e, however changing one gamma value can increase the
+        # value of a different q_e, so we have to complete the for loop without
+        # changing anything for the condition to be met
+        in_range_count = 0
+        # Search for an edge with q_e > (1 + epsilon) * z_e
+        for u, v in gamma:
+            e = (u, v)
+            q_e = q(e)
+            z_e = z[e]
+            if q_e > (1 + EPSILON) * z_e:
+                delta = ln(
+                    (q_e * (1 - (1 + EPSILON / 2) * z_e))
+                    / ((1 - q_e) * (1 + EPSILON / 2) * z_e)
+                )
+                gamma[e] -= delta
+                # Check that delta had the desired effect
+                new_q_e = q(e)
+                desired_q_e = (1 + EPSILON / 2) * z_e
+                if round(new_q_e, 8) != round(desired_q_e, 8):
+                    raise nx.NetworkXError(
+                        f"Unable to modify probability for edge ({u}, {v})"
+                    )
+            else:
+                in_range_count += 1
+        # Check if the for loop terminated without changing any gamma
+        if in_range_count == len(gamma):
+            break
+
+    # Remove the new edge attributes
+    for _, _, d in G.edges(data=True):
+        if lambda_key in d:
+            del d[lambda_key]
+
+    return gamma
+
+
+@nx._dispatchable(edge_attrs="weight")
+def greedy_tsp(G, weight="weight", source=None):
+    """Return a low cost cycle starting at `source` and its cost.
+
+    This approximates a solution to the traveling salesman problem.
+    It finds a cycle of all the nodes that a salesman can visit in order
+    to visit many nodes while minimizing total distance.
+    It uses a simple greedy algorithm.
+    In essence, this function returns a large cycle given a source point
+    for which the total cost of the cycle is minimized.
+
+    Parameters
+    ----------
+    G : Graph
+        The Graph should be a complete weighted undirected graph.
+        The distance between all pairs of nodes should be included.
+
+    weight : string, optional (default="weight")
+        Edge data key corresponding to the edge weight.
+        If any edge does not have this attribute the weight is set to 1.
+
+    source : node, optional (default: first node in list(G))
+        Starting node.  If None, defaults to ``next(iter(G))``
+
+    Returns
+    -------
+    cycle : list of nodes
+        Returns the cycle (list of nodes) that a salesman
+        can follow to minimize total weight of the trip.
+
+    Raises
+    ------
+    NetworkXError
+        If `G` is not complete, the algorithm raises an exception.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import approximation as approx
+    >>> G = nx.DiGraph()
+    >>> G.add_weighted_edges_from(
+    ...     {
+    ...         ("A", "B", 3),
+    ...         ("A", "C", 17),
+    ...         ("A", "D", 14),
+    ...         ("B", "A", 3),
+    ...         ("B", "C", 12),
+    ...         ("B", "D", 16),
+    ...         ("C", "A", 13),
+    ...         ("C", "B", 12),
+    ...         ("C", "D", 4),
+    ...         ("D", "A", 14),
+    ...         ("D", "B", 15),
+    ...         ("D", "C", 2),
+    ...     }
+    ... )
+    >>> cycle = approx.greedy_tsp(G, source="D")
+    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
+    >>> cycle
+    ['D', 'C', 'B', 'A', 'D']
+    >>> cost
+    31
+
+    Notes
+    -----
+    This implementation of a greedy algorithm is based on the following:
+
+    - The algorithm adds a node to the solution at every iteration.
+    - The algorithm selects a node not already in the cycle whose connection
+      to the previous node adds the least cost to the cycle.
+
+    A greedy algorithm does not always give the best solution.
+    However, it can construct a first feasible solution which can
+    be passed as a parameter to an iterative improvement algorithm such
+    as Simulated Annealing, or Threshold Accepting.
+
+    Time complexity: It has a running time $O(|V|^2)$
+    """
+    # Check that G is a complete graph
+    N = len(G) - 1
+    # This check ignores selfloops which is what we want here.
+    if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
+        raise nx.NetworkXError("G must be a complete graph.")
+
+    if source is None:
+        source = nx.utils.arbitrary_element(G)
+
+    # With exactly two nodes the only tour is source -> neighbor -> source.
+    if G.number_of_nodes() == 2:
+        neighbor = next(G.neighbors(source))
+        return [source, neighbor, source]
+
+    # `nodeset` tracks the nodes not yet placed on the cycle.
+    nodeset = set(G)
+    nodeset.remove(source)
+    cycle = [source]
+    next_node = source
+    while nodeset:
+        nbrdict = G[next_node]
+        # Greedy step: extend the cycle with the cheapest unvisited neighbor.
+        # Missing weight attributes default to 1, matching the docstring.
+        next_node = min(nodeset, key=lambda n: nbrdict[n].get(weight, 1))
+        cycle.append(next_node)
+        nodeset.remove(next_node)
+    # Close the tour by returning to the starting node.
+    cycle.append(cycle[0])
+    return cycle
+
+
+@py_random_state(9)
+@nx._dispatchable(edge_attrs="weight")
+def simulated_annealing_tsp(
+    G,
+    init_cycle,
+    weight="weight",
+    source=None,
+    temp=100,
+    move="1-1",
+    max_iterations=10,
+    N_inner=100,
+    alpha=0.01,
+    seed=None,
+):
+    """Returns an approximate solution to the traveling salesman problem.
+
+    This function uses simulated annealing to approximate the minimal cost
+    cycle through the nodes. Starting from a suboptimal solution, simulated
+    annealing perturbs that solution, occasionally accepting changes that make
+    the solution worse to escape from a locally optimal solution. The chance
+    of accepting such changes decreases over the iterations to encourage
+    an optimal result.  In summary, the function returns a cycle starting
+    at `source` for which the total cost is minimized. It also returns the cost.
+
+    The chance of accepting a proposed change is related to a parameter called
+    the temperature (annealing has a physical analogue of steel hardening
+    as it cools). As the temperature is reduced, the chance of moves that
+    increase cost goes down.
+
+    Parameters
+    ----------
+    G : Graph
+        `G` should be a complete weighted graph.
+        The distance between all pairs of nodes should be included.
+
+    init_cycle : list of all nodes or "greedy"
+        The initial solution (a cycle through all nodes returning to the start).
+        This argument has no default to make you think about it.
+        If "greedy", use `greedy_tsp(G, weight)`.
+        Other common starting cycles are `list(G) + [next(iter(G))]` or the final
+        result of `simulated_annealing_tsp` when doing `threshold_accepting_tsp`.
+
+    weight : string, optional (default="weight")
+        Edge data key corresponding to the edge weight.
+        If any edge does not have this attribute the weight is set to 1.
+
+    source : node, optional (default: first node in list(G))
+        Starting node.  If None, defaults to ``next(iter(G))``
+
+    temp : int, optional (default=100)
+        The algorithm's temperature parameter. It represents the initial
+        value of temperature
+
+    move : "1-1" or "1-0" or function, optional (default="1-1")
+        Indicator of what move to use when finding new trial solutions.
+        Strings indicate two special built-in moves:
+
+        - "1-1": 1-1 exchange which transposes the position
+          of two elements of the current solution.
+          The function called is :func:`swap_two_nodes`.
+          For example if we apply 1-1 exchange in the solution
+          ``A = [3, 2, 1, 4, 3]``
+          we can get the following by the transposition of 1 and 4 elements:
+          ``A' = [3, 2, 4, 1, 3]``
+        - "1-0": 1-0 exchange which moves an node in the solution
+          to a new position.
+          The function called is :func:`move_one_node`.
+          For example if we apply 1-0 exchange in the solution
+          ``A = [3, 2, 1, 4, 3]``
+          we can transfer the fourth element to the second position:
+          ``A' = [3, 4, 2, 1, 3]``
+
+        You may provide your own functions to enact a move from
+        one solution to a neighbor solution. The function must take
+        the solution as input along with a `seed` input to control
+        random number generation (see the `seed` input here).
+        Your function should maintain the solution as a cycle with
+        equal first and last node and all others appearing once.
+        Your function should return the new solution.
+
+    max_iterations : int, optional (default=10)
+        Declared done when this number of consecutive iterations of
+        the outer loop occurs without any change in the best cost solution.
+
+    N_inner : int, optional (default=100)
+        The number of iterations of the inner loop.
+
+    alpha : float between (0, 1), optional (default=0.01)
+        Percentage of temperature decrease in each iteration
+        of outer loop
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    cycle : list of nodes
+        Returns the cycle (list of nodes) that a salesman
+        can follow to minimize total weight of the trip.
+
+    Raises
+    ------
+    NetworkXError
+        If `G` is not complete the algorithm raises an exception.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import approximation as approx
+    >>> G = nx.DiGraph()
+    >>> G.add_weighted_edges_from(
+    ...     {
+    ...         ("A", "B", 3),
+    ...         ("A", "C", 17),
+    ...         ("A", "D", 14),
+    ...         ("B", "A", 3),
+    ...         ("B", "C", 12),
+    ...         ("B", "D", 16),
+    ...         ("C", "A", 13),
+    ...         ("C", "B", 12),
+    ...         ("C", "D", 4),
+    ...         ("D", "A", 14),
+    ...         ("D", "B", 15),
+    ...         ("D", "C", 2),
+    ...     }
+    ... )
+    >>> cycle = approx.simulated_annealing_tsp(G, "greedy", source="D")
+    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
+    >>> cycle
+    ['D', 'C', 'B', 'A', 'D']
+    >>> cost
+    31
+    >>> incycle = ["D", "B", "A", "C", "D"]
+    >>> cycle = approx.simulated_annealing_tsp(G, incycle, source="D")
+    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
+    >>> cycle
+    ['D', 'C', 'B', 'A', 'D']
+    >>> cost
+    31
+
+    Notes
+    -----
+    Simulated Annealing is a metaheuristic local search algorithm.
+    The main characteristic of this algorithm is that it accepts
+    even solutions which lead to the increase of the cost in order
+    to escape from low quality local optimal solutions.
+
+    This algorithm needs an initial solution. If not provided, it is
+    constructed by a simple greedy algorithm. At every iteration, the
+    algorithm selects thoughtfully a neighbor solution.
+    Consider $c(x)$ cost of current solution and $c(x')$ cost of a
+    neighbor solution.
+    If $c(x') - c(x) <= 0$ then the neighbor solution becomes the current
+    solution for the next iteration. Otherwise, the algorithm accepts
+    the neighbor solution with probability $p = exp - ([c(x') - c(x)] / temp)$.
+    Otherwise the current solution is retained.
+
+    `temp` is a parameter of the algorithm and represents temperature.
+
+    Time complexity:
+    For $N_i$ iterations of the inner loop and $N_o$ iterations of the
+    outer loop, this algorithm has running time $O(N_i * N_o * |V|)$.
+
+    For more information and how the algorithm is inspired see:
+    http://en.wikipedia.org/wiki/Simulated_annealing
+    """
+    # Resolve the string move names to the built-in move functions.
+    if move == "1-1":
+        move = swap_two_nodes
+    elif move == "1-0":
+        move = move_one_node
+    if init_cycle == "greedy":
+        # Construct an initial solution using a greedy algorithm.
+        cycle = greedy_tsp(G, weight=weight, source=source)
+        # greedy_tsp already returns the only possible 2-node tour.
+        if G.number_of_nodes() == 2:
+            return cycle
+
+    else:
+        # Validate the caller-supplied cycle: it must start at `source`,
+        # return to its first node, and cover every node of G exactly once.
+        cycle = list(init_cycle)
+        if source is None:
+            source = cycle[0]
+        elif source != cycle[0]:
+            raise nx.NetworkXError("source must be first node in init_cycle")
+        if cycle[0] != cycle[-1]:
+            raise nx.NetworkXError("init_cycle must be a cycle. (return to start)")
+
+        if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G):
+            raise nx.NetworkXError("init_cycle should be a cycle over all nodes in G.")
+
+        # Check that G is a complete graph
+        N = len(G) - 1
+        # This check ignores selfloops which is what we want here.
+        if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
+            raise nx.NetworkXError("G must be a complete graph.")
+
+        if G.number_of_nodes() == 2:
+            neighbor = next(G.neighbors(source))
+            return [source, neighbor, source]
+
+    # Find the cost of initial solution
+    cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(cycle))
+
+    # `count` tracks consecutive outer iterations without improvement;
+    # it is reset to 0 whenever a new best solution is found.
+    count = 0
+    best_cycle = cycle.copy()
+    best_cost = cost
+    while count <= max_iterations and temp > 0:
+        count += 1
+        for i in range(N_inner):
+            adj_sol = move(cycle, seed)
+            adj_cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(adj_sol))
+            delta = adj_cost - cost
+            if delta <= 0:
+                # Set current solution the adjacent solution.
+                cycle = adj_sol
+                cost = adj_cost
+
+                if cost < best_cost:
+                    count = 0
+                    best_cycle = cycle.copy()
+                    best_cost = cost
+            else:
+                # Accept even a worse solution with probability p
+                # (Metropolis criterion: p shrinks as temp decreases).
+                p = math.exp(-delta / temp)
+                if p >= seed.random():
+                    cycle = adj_sol
+                    cost = adj_cost
+        # Geometric cooling schedule: reduce temperature by a factor of alpha.
+        temp -= temp * alpha
+
+    return best_cycle
+
+
+@py_random_state(9)
+@nx._dispatchable(edge_attrs="weight")
+def threshold_accepting_tsp(
+    G,
+    init_cycle,
+    weight="weight",
+    source=None,
+    threshold=1,
+    move="1-1",
+    max_iterations=10,
+    N_inner=100,
+    alpha=0.1,
+    seed=None,
+):
+    """Returns an approximate solution to the traveling salesman problem.
+
+    This function uses threshold accepting methods to approximate the minimal cost
+    cycle through the nodes. Starting from a suboptimal solution, threshold
+    accepting methods perturb that solution, accepting any changes that make
+    the solution no worse than increasing by a threshold amount. Improvements
+    in cost are accepted, but so are changes leading to small increases in cost.
+    This allows the solution to leave suboptimal local minima in solution space.
+    The threshold is decreased slowly as iterations proceed helping to ensure
+    an optimum. In summary, the function returns a cycle starting at `source`
+    for which the total cost is minimized.
+
+    Parameters
+    ----------
+    G : Graph
+        `G` should be a complete weighted graph.
+        The distance between all pairs of nodes should be included.
+
+    init_cycle : list or "greedy"
+        The initial solution (a cycle through all nodes returning to the start).
+        This argument has no default to make you think about it.
+        If "greedy", use `greedy_tsp(G, weight)`.
+        Other common starting cycles are `list(G) + [next(iter(G))]` or the final
+        result of `simulated_annealing_tsp` when doing `threshold_accepting_tsp`.
+
+    weight : string, optional (default="weight")
+        Edge data key corresponding to the edge weight.
+        If any edge does not have this attribute the weight is set to 1.
+
+    source : node, optional (default: first node in list(G))
+        Starting node.  If None, defaults to ``next(iter(G))``
+
+    threshold : int, optional (default=1)
+        The algorithm's threshold parameter. It represents the initial
+        threshold's value
+
+    move : "1-1" or "1-0" or function, optional (default="1-1")
+        Indicator of what move to use when finding new trial solutions.
+        Strings indicate two special built-in moves:
+
+        - "1-1": 1-1 exchange which transposes the position
+          of two elements of the current solution.
+          The function called is :func:`swap_two_nodes`.
+          For example if we apply 1-1 exchange in the solution
+          ``A = [3, 2, 1, 4, 3]``
+          we can get the following by the transposition of 1 and 4 elements:
+          ``A' = [3, 2, 4, 1, 3]``
+        - "1-0": 1-0 exchange which moves an node in the solution
+          to a new position.
+          The function called is :func:`move_one_node`.
+          For example if we apply 1-0 exchange in the solution
+          ``A = [3, 2, 1, 4, 3]``
+          we can transfer the fourth element to the second position:
+          ``A' = [3, 4, 2, 1, 3]``
+
+        You may provide your own functions to enact a move from
+        one solution to a neighbor solution. The function must take
+        the solution as input along with a `seed` input to control
+        random number generation (see the `seed` input here).
+        Your function should maintain the solution as a cycle with
+        equal first and last node and all others appearing once.
+        Your function should return the new solution.
+
+    max_iterations : int, optional (default=10)
+        Declared done when this number of consecutive iterations of
+        the outer loop occurs without any change in the best cost solution.
+
+    N_inner : int, optional (default=100)
+        The number of iterations of the inner loop.
+
+    alpha : float between (0, 1), optional (default=0.1)
+        Percentage of threshold decrease when there is at
+        least one acceptance of a neighbor solution.
+        If no inner loop moves are accepted the threshold remains unchanged.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    cycle : list of nodes
+        Returns the cycle (list of nodes) that a salesman
+        can follow to minimize total weight of the trip.
+
+    Raises
+    ------
+    NetworkXError
+        If `G` is not complete the algorithm raises an exception.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import approximation as approx
+    >>> G = nx.DiGraph()
+    >>> G.add_weighted_edges_from(
+    ...     {
+    ...         ("A", "B", 3),
+    ...         ("A", "C", 17),
+    ...         ("A", "D", 14),
+    ...         ("B", "A", 3),
+    ...         ("B", "C", 12),
+    ...         ("B", "D", 16),
+    ...         ("C", "A", 13),
+    ...         ("C", "B", 12),
+    ...         ("C", "D", 4),
+    ...         ("D", "A", 14),
+    ...         ("D", "B", 15),
+    ...         ("D", "C", 2),
+    ...     }
+    ... )
+    >>> cycle = approx.threshold_accepting_tsp(G, "greedy", source="D")
+    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
+    >>> cycle
+    ['D', 'C', 'B', 'A', 'D']
+    >>> cost
+    31
+    >>> incycle = ["D", "B", "A", "C", "D"]
+    >>> cycle = approx.threshold_accepting_tsp(G, incycle, source="D")
+    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
+    >>> cycle
+    ['D', 'C', 'B', 'A', 'D']
+    >>> cost
+    31
+
+    Notes
+    -----
+    Threshold Accepting is a metaheuristic local search algorithm.
+    The main characteristic of this algorithm is that it accepts
+    even solutions which lead to the increase of the cost in order
+    to escape from low quality local optimal solutions.
+
+    This algorithm needs an initial solution. This solution can be
+    constructed by a simple greedy algorithm. At every iteration, it
+    selects thoughtfully a neighbor solution.
+    Consider $c(x)$ cost of current solution and $c(x')$ cost of
+    neighbor solution.
+    If $c(x') - c(x) <= threshold$ then the neighbor solution becomes the
+    current solution for the next iteration, where `threshold` is the
+    current acceptance threshold value.
+
+    In comparison to the Simulated Annealing algorithm, the Threshold
+    Accepting algorithm does not accept very low quality solutions
+    (due to the presence of the threshold value). In the case of
+    Simulated Annealing, even a very low quality solution can
+    be accepted with probability $p$.
+
+    Time complexity:
+    It has a running time $O(m * n * |V|)$ where $m$ and $n$ are the number
+    of times the outer and inner loop run respectively.
+
+    For more information and how algorithm is inspired see:
+    https://doi.org/10.1016/0021-9991(90)90201-B
+
+    See Also
+    --------
+    simulated_annealing_tsp
+
+    """
+    if move == "1-1":
+        move = swap_two_nodes
+    elif move == "1-0":
+        move = move_one_node
+    if init_cycle == "greedy":
+        # Construct an initial solution using a greedy algorithm.
+        cycle = greedy_tsp(G, weight=weight, source=source)
+        if G.number_of_nodes() == 2:
+            return cycle
+
+    else:
+        cycle = list(init_cycle)
+        if source is None:
+            source = cycle[0]
+        elif source != cycle[0]:
+            raise nx.NetworkXError("source must be first node in init_cycle")
+        if cycle[0] != cycle[-1]:
+            raise nx.NetworkXError("init_cycle must be a cycle. (return to start)")
+
+        if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G):
+            raise nx.NetworkXError("init_cycle is not all and only nodes.")
+
+        # Check that G is a complete graph
+        N = len(G) - 1
+        # This check ignores selfloops which is what we want here.
+        if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
+            raise nx.NetworkXError("G must be a complete graph.")
+
+        if G.number_of_nodes() == 2:
+            neighbor = list(G.neighbors(source))[0]
+            return [source, neighbor, source]
+
+    # Find the cost of initial solution
+    cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(cycle))
+
+    count = 0
+    best_cycle = cycle.copy()
+    best_cost = cost
+    while count <= max_iterations:
+        count += 1
+        accepted = False
+        for i in range(N_inner):
+            adj_sol = move(cycle, seed)
+            adj_cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(adj_sol))
+            delta = adj_cost - cost
+            if delta <= threshold:
+                accepted = True
+
+                # Set current solution the adjacent solution.
+                cycle = adj_sol
+                cost = adj_cost
+
+                if cost < best_cost:
+                    count = 0
+                    best_cycle = cycle.copy()
+                    best_cost = cost
+        if accepted:
+            threshold -= threshold * alpha
+
+    return best_cycle
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/treewidth.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/treewidth.py
new file mode 100644
index 00000000..31d73f63
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/treewidth.py
@@ -0,0 +1,252 @@
+"""Functions for computing treewidth decomposition.
+
+Treewidth of an undirected graph is a number associated with the graph.
+It can be defined as the size of the largest vertex set (bag) in a tree
+decomposition of the graph minus one.
+
+`Wikipedia: Treewidth <https://en.wikipedia.org/wiki/Treewidth>`_
+
+The notions of treewidth and tree decomposition have gained their
+attractiveness partly because many graph and network problems that are
+intractable (e.g., NP-hard) on arbitrary graphs become efficiently
+solvable (e.g., with a linear time algorithm) when the treewidth of the
+input graphs is bounded by a constant [1]_ [2]_.
+
+There are two different functions for computing a tree decomposition:
+:func:`treewidth_min_degree` and :func:`treewidth_min_fill_in`.
+
+.. [1] Hans L. Bodlaender and Arie M. C. A. Koster. 2010. "Treewidth
+      computations I.Upper bounds". Inf. Comput. 208, 3 (March 2010),259-275.
+      http://dx.doi.org/10.1016/j.ic.2009.03.008
+
+.. [2] Hans L. Bodlaender. "Discovering Treewidth". Institute of Information
+      and Computing Sciences, Utrecht University.
+      Technical Report UU-CS-2005-018.
+      http://www.cs.uu.nl
+
+.. [3] K. Wang, Z. Lu, and J. Hicks *Treewidth*.
+      https://web.archive.org/web/20210507025929/http://web.eecs.utk.edu/~cphill25/cs594_spring2015_projects/treewidth.pdf
+
+"""
+
+import itertools
+import sys
+from heapq import heapify, heappop, heappush
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["treewidth_min_degree", "treewidth_min_fill_in"]
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(returns_graph=True)
def treewidth_min_degree(G):
    """Returns a treewidth decomposition using the Minimum Degree heuristic.

    At each step the node with the smallest degree is eliminated: the
    graph is updated, the node removed, and the next smallest-degree
    node is chosen from the reduced graph, and so on.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    Treewidth decomposition : (int, Graph) tuple
          2-tuple with treewidth and the corresponding decomposed tree.
    """
    heuristic = MinDegreeHeuristic(G)

    def pick_next(graph):
        # Delegate each elimination choice to the stateful heuristic.
        return heuristic.best_node(graph)

    return treewidth_decomp(G, pick_next)
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(returns_graph=True)
def treewidth_min_fill_in(G):
    """Returns a treewidth decomposition using the Minimum Fill-in heuristic.

    At each step this heuristic eliminates the node for which completing
    its neighborhood into a clique adds the fewest new edges.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    Treewidth decomposition : (int, Graph) tuple
        2-tuple with treewidth and the corresponding decomposed tree.
    """
    return treewidth_decomp(G, heuristic=min_fill_in_heuristic)
+
+
class MinDegreeHeuristic:
    """Stateful implementation of the Minimum Degree elimination heuristic.

    Repeatedly selects the node of smallest degree from a shrinking
    graph. A lazy heap is used: stale entries are left in the heap and
    skipped when popped, while the neighbors of the previously chosen
    node are re-pushed with their current degrees before each selection.
    """

    def __init__(self, graph):
        self._graph = graph

        # Neighbors of the last eliminated node; their degrees changed
        # and must be re-inserted into the heap before the next pick.
        self._update_nodes = []

        self.count = itertools.count()

        # Heap of (degree, tie-breaker, node); the counter keeps tuples
        # comparable even when degrees collide.
        self._degreeq = [
            (len(graph[node]), next(self.count), node) for node in graph
        ]
        heapify(self._degreeq)

    def best_node(self, graph):
        # Refresh heap entries for nodes whose degree changed last round.
        for node in self._update_nodes:
            heappush(self._degreeq, (len(graph[node]), next(self.count), node))

        while self._degreeq:
            degree, _, node = heappop(self._degreeq)
            if node not in graph or len(graph[node]) != degree:
                # Lazy deletion: skip removed nodes and outdated degrees.
                continue
            if degree == len(graph) - 1:
                # Remaining graph is fully connected: abort condition.
                return None

            # These neighbors need a degree refresh on the next call.
            self._update_nodes = graph[node]
            return node

        # Heap exhausted: nothing left to eliminate.
        return None
+
+
def min_fill_in_heuristic(graph):
    """Implements the Minimum Fill-in heuristic.

    Returns the node from the graph, where the number of edges added when
    turning the neighborhood of the chosen node into clique is as small as
    possible. This algorithm chooses the nodes using the Minimum Fill-In
    heuristic. The running time of the algorithm is :math:`O(V^3)` and it uses
    additional constant memory.

    Parameters
    ----------
    graph : dict
        Mapping of each node to the set of its neighbors (no self-loops).

    Returns
    -------
    node or None
        The node whose elimination adds the fewest fill-in edges, or
        None when the graph is empty or already complete (abort
        condition).
    """
    if len(graph) == 0:
        return None

    min_fill_in_node = None

    min_fill_in = sys.maxsize

    # Scan nodes by increasing degree: low-degree nodes tend to need
    # little fill-in, which makes the early-exit below effective.
    nodes_by_degree = sorted(graph, key=lambda x: len(graph[x]))
    min_degree = len(graph[nodes_by_degree[0]])

    # Abort condition: a complete graph needs no further elimination.
    if min_degree == len(graph) - 1:
        return None

    for node in nodes_by_degree:
        num_fill_in = 0
        nbrs = graph[node]
        for nbr in nbrs:
            # Count how many nodes in nbrs the current nbr is not connected
            # to; subtract 1 for the node itself.  Each missing edge is seen
            # from both endpoints, hence the factor of 2 in the cutoff.
            num_fill_in += len(nbrs - graph[nbr]) - 1
            if num_fill_in >= 2 * min_fill_in:
                break  # cannot beat the current best; stop counting early

        num_fill_in /= 2  # divide by 2 because of double counting

        if num_fill_in < min_fill_in:  # update min-fill-in node
            if num_fill_in == 0:
                return node  # zero fill-in is optimal; return immediately
            min_fill_in = num_fill_in
            min_fill_in_node = node

    return min_fill_in_node
+
+
@nx._dispatchable(returns_graph=True)
def treewidth_decomp(G, heuristic=min_fill_in_heuristic):
    """Returns a treewidth decomposition using the passed heuristic.

    Parameters
    ----------
    G : NetworkX graph
    heuristic : heuristic function

    Returns
    -------
    Treewidth decomposition : (int, Graph) tuple
        2-tuple with treewidth and the corresponding decomposed tree.
    """
    # Work on a dict-of-sets copy so G itself is never mutated; discard
    # self-loops, which play no role in the decomposition.
    graph = {node: set(G[node]) - {node} for node in G}

    # Elimination order chosen by the heuristic, paired with each node's
    # neighborhood at the moment of its removal.
    node_stack = []

    elim_node = heuristic(graph)
    while elim_node is not None:
        nbrs = graph[elim_node]

        # Turn the neighborhood of the eliminated node into a clique.
        for u, v in itertools.permutations(nbrs, 2):
            if v not in graph[u]:
                graph[u].add(v)

        node_stack.append((elim_node, nbrs))

        # Detach the eliminated node from its neighbors and drop it.
        for u in nbrs:
            graph[u].remove(elim_node)
        del graph[elim_node]

        elim_node = heuristic(graph)

    # Heuristic signaled the abort condition: all remaining nodes form
    # the root bag of the decomposition.
    decomp = nx.Graph()
    first_bag = frozenset(graph)
    decomp.add_node(first_bag)

    treewidth = len(first_bag) - 1

    # Reinsert eliminated nodes in reverse order, attaching each new bag
    # to an existing bag that contains all of that node's neighbors.
    while node_stack:
        curr_node, nbrs = node_stack.pop()

        # Fall back to the root bag when no bag covers the neighborhood.
        old_bag = next((bag for bag in decomp.nodes if nbrs <= bag), first_bag)

        # The new bag is the node together with its neighborhood.
        nbrs.add(curr_node)
        new_bag = frozenset(nbrs)

        treewidth = max(treewidth, len(new_bag) - 1)

        # Adding the edge implicitly adds the new bag as a node.
        decomp.add_edge(old_bag, new_bag)

    return treewidth, decomp
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/vertex_cover.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/vertex_cover.py
new file mode 100644
index 00000000..13d7167c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/vertex_cover.py
@@ -0,0 +1,83 @@
+"""Functions for computing an approximate minimum weight vertex cover.
+
+A |vertex cover|_ is a subset of nodes such that each edge in the graph
+is incident to at least one node in the subset.
+
+.. _vertex cover: https://en.wikipedia.org/wiki/Vertex_cover
+.. |vertex cover| replace:: *vertex cover*
+
+"""
+
+import networkx as nx
+
+__all__ = ["min_weighted_vertex_cover"]
+
+
@nx._dispatchable(node_attrs="weight")
def min_weighted_vertex_cover(G, weight=None):
    r"""Returns an approximate minimum weighted vertex cover.

    The returned set of nodes is guaranteed to cover every edge, and its
    total weight is at most twice the total weight of an optimal cover.
    In other words,

    .. math::

       w(S) \leq 2 * w(S^*),

    where $S$ is the vertex cover returned by this function,
    $S^*$ is a minimum-weight vertex cover of the graph, and $w$
    sums the weights of the nodes in a given set.

    Parameters
    ----------
    G : NetworkX graph

    weight : string, optional (default = None)
        If None, every node has weight 1. If a string, use this node
        attribute as the node weight. A node without this attribute is
        assumed to have weight 1.

    Returns
    -------
    min_weighted_cover : set
        Returns a set of nodes whose weight sum is no more than twice
        the weight sum of the minimum weight vertex cover.

    Notes
    -----
    Directed graphs are handled identically: a vertex cover is a set of
    nodes touching at least one endpoint of every edge, and whether a
    node is the head or tail of an edge is ignored.

    This is the local-ratio algorithm for computing an approximate
    vertex cover: it scans the edges, and for every uncovered edge it
    pays the smaller residual cost of the two endpoints, adds that
    endpoint to the cover, and charges the payment against the other
    endpoint. The worst-case runtime of this implementation is
    $O(m \log n)$, where $n$ is the number of nodes and $m$ the number
    of edges in the graph.

    References
    ----------
    .. [1] Bar-Yehuda, R., and Even, S. (1985). "A local-ratio theorem for
       approximating the weighted vertex cover problem."
       *Annals of Discrete Mathematics*, 25, 27–46
       <http://www.cs.technion.ac.il/~reuven/PDF/vc_lr.pdf>

    """
    residual = dict(G.nodes(data=weight, default=1))
    cover = set()
    for u, v in G.edges():
        # Edge already covered: nothing to pay.
        if u in cover or v in cover:
            continue
        # Pick the cheaper endpoint; charge its cost to the other one.
        if residual[u] <= residual[v]:
            chosen, other = u, v
        else:
            chosen, other = v, u
        cover.add(chosen)
        residual[other] -= residual[chosen]
    return cover
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__init__.py
new file mode 100644
index 00000000..4d988860
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__init__.py
@@ -0,0 +1,5 @@
+from networkx.algorithms.assortativity.connectivity import *
+from networkx.algorithms.assortativity.correlation import *
+from networkx.algorithms.assortativity.mixing import *
+from networkx.algorithms.assortativity.neighbor_degree import *
+from networkx.algorithms.assortativity.pairs import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/connectivity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/connectivity.py
new file mode 100644
index 00000000..c3fde0da
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/connectivity.py
@@ -0,0 +1,122 @@
+from collections import defaultdict
+
+import networkx as nx
+
+__all__ = ["average_degree_connectivity"]
+
+
@nx._dispatchable(edge_attrs="weight")
def average_degree_connectivity(
    G, source="in+out", target="in+out", nodes=None, weight=None
):
    r"""Compute the average degree connectivity of graph.

    The average degree connectivity is the average nearest neighbor degree of
    nodes with degree k. For weighted graphs, an analogous measure can
    be computed using the weighted average neighbors degree defined in
    [1]_, for a node `i`, as

    .. math::

        k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j

    where `s_i` is the weighted degree of node `i`,
    `w_{ij}` is the weight of the edge that links `i` and `j`,
    and `N(i)` are the neighbors of node `i`.

    Parameters
    ----------
    G : NetworkX graph

    source :  "in"|"out"|"in+out" (default:"in+out")
       Directed graphs only. Use "in"- or "out"-degree for source node.

    target : "in"|"out"|"in+out" (default:"in+out")
       Directed graphs only. Use "in"- or "out"-degree for target node.

    nodes : list or iterable (optional)
        Compute neighbor connectivity for these nodes. The default is all
        nodes.

    weight : string or None, optional (default=None)
       The edge attribute that holds the numerical value used as a weight.
       If None, then each edge has weight 1.

    Returns
    -------
    d : dict
       A dictionary keyed by degree k with the value of average connectivity.

    Raises
    ------
    NetworkXError
        If either `source` or `target` are not one of 'in',
        'out', or 'in+out'.
        If either `source` or `target` is passed for an undirected graph.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> G.edges[1, 2]["weight"] = 3
    >>> nx.average_degree_connectivity(G)
    {1: 2.0, 2: 1.5}
    >>> nx.average_degree_connectivity(G, weight="weight")
    {1: 2.0, 2: 1.75}

    See Also
    --------
    average_neighbor_degree

    References
    ----------
    .. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani,
       "The architecture of complex weighted networks".
       PNAS 101 (11): 3747–3752 (2004).
    """
    # First, determine the type of neighbors and the type of degree to use.
    if G.is_directed():
        if source not in ("in", "out", "in+out"):
            raise nx.NetworkXError('source must be one of "in", "out", or "in+out"')
        if target not in ("in", "out", "in+out"):
            raise nx.NetworkXError('target must be one of "in", "out", or "in+out"')
        direction = {"out": G.out_degree, "in": G.in_degree, "in+out": G.degree}
        neighbor_funcs = {
            "out": G.successors,
            "in": G.predecessors,
            "in+out": G.neighbors,
        }
        source_degree = direction[source]
        target_degree = direction[target]
        neighbors = neighbor_funcs[source]
        # `reverse` indicates whether to look at the in-edge when
        # computing the weight of an edge.
        reverse = source == "in"
    else:
        if source != "in+out" or target != "in+out":
            raise nx.NetworkXError(
                "source and target arguments are only supported for directed graphs"
            )
        source_degree = G.degree
        target_degree = G.degree
        neighbors = G.neighbors
        reverse = False
    dsum = defaultdict(int)
    dnorm = defaultdict(int)
    # Check if `nodes` is actually a single node in the graph; if so,
    # restrict the computation to that one node.
    source_nodes = source_degree(nodes)
    if nodes in G:
        source_nodes = [(nodes, source_degree(nodes))]
    for n, k in source_nodes:
        nbrdeg = target_degree(neighbors(n))
        if weight is None:
            s = sum(d for n, d in nbrdeg)
        else:  # weight nbr degree by weight of (n,nbr) edge
            if reverse:
                s = sum(G[nbr][n].get(weight, 1) * d for nbr, d in nbrdeg)
            else:
                s = sum(G[n][nbr].get(weight, 1) * d for nbr, d in nbrdeg)
        dnorm[k] += source_degree(n, weight=weight)
        dsum[k] += s

    # Normalize each per-degree sum by the total (possibly weighted) degree
    # of the degree-k source nodes; keep the raw sum when the norm is 0.
    return {
        k: total if dnorm[k] == 0 else total / dnorm[k] for k, total in dsum.items()
    }
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/correlation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/correlation.py
new file mode 100644
index 00000000..52ae7a12
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/correlation.py
@@ -0,0 +1,302 @@
+"""Node assortativity coefficients and correlation measures."""
+
+import networkx as nx
+from networkx.algorithms.assortativity.mixing import (
+    attribute_mixing_matrix,
+    degree_mixing_matrix,
+)
+from networkx.algorithms.assortativity.pairs import node_degree_xy
+
+__all__ = [
+    "degree_pearson_correlation_coefficient",
+    "degree_assortativity_coefficient",
+    "attribute_assortativity_coefficient",
+    "numeric_assortativity_coefficient",
+]
+
+
@nx._dispatchable(edge_attrs="weight")
def degree_assortativity_coefficient(G, x="out", y="in", weight=None, nodes=None):
    """Compute degree assortativity of graph.

    Assortativity measures the similarity of connections in the graph
    with respect to the node degree.

    Parameters
    ----------
    G : NetworkX graph

    x: string ('in','out')
       The degree type for source node (directed graphs only).

    y: string ('in','out')
       The degree type for target node (directed graphs only).

    weight: string or None, optional (default=None)
       The edge attribute that holds the numerical value used
       as a weight.  If None, then each edge has weight 1.
       The degree is the sum of the edge weights adjacent to the node.

    nodes: list or iterable (optional)
        Compute degree assortativity only for nodes in container.
        The default is all nodes.

    Returns
    -------
    r : float
       Assortativity of graph by degree.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> r = nx.degree_assortativity_coefficient(G)
    >>> print(f"{r:3.1f}")
    -0.5

    See Also
    --------
    attribute_assortativity_coefficient
    numeric_assortativity_coefficient
    degree_mixing_dict
    degree_mixing_matrix

    Notes
    -----
    This computes Eq. (21) in Ref. [1]_ , where e is the joint
    probability distribution (mixing matrix) of the degrees.  For a
    directed graph, e is the joint probability of the user-specified
    degree types for source and target.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks,
       Physical Review E, 67 026126, 2003
    .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M.
       Edge direction and the structure of networks, PNAS 107, 10815-20 (2010).
    """
    if nodes is None:
        nodes = G.nodes

    # Collect every degree value that can occur on either end of an edge
    # so the mixing matrix gets one row/column per observed degree.
    if G.is_directed():
        degrees = set()
        if "in" in (x, y):
            degrees.update(d for _, d in G.in_degree(nodes, weight=weight))
        if "out" in (x, y):
            degrees.update(d for _, d in G.out_degree(nodes, weight=weight))
    else:
        degrees = {d for _, d in G.degree(nodes, weight=weight)}

    mapping = {deg: i for i, deg in enumerate(degrees)}
    M = degree_mixing_matrix(G, x=x, y=y, nodes=nodes, weight=weight, mapping=mapping)

    return _numeric_ac(M, mapping=mapping)
+
+
@nx._dispatchable(edge_attrs="weight")
def degree_pearson_correlation_coefficient(G, x="out", y="in", weight=None, nodes=None):
    """Compute degree assortativity of graph.

    Assortativity measures the similarity of connections in the graph
    with respect to the node degree.

    This computes the same quantity as degree_assortativity_coefficient,
    but uses the potentially faster scipy.stats.pearsonr function.

    Parameters
    ----------
    G : NetworkX graph

    x: string ('in','out')
       The degree type for source node (directed graphs only).

    y: string ('in','out')
       The degree type for target node (directed graphs only).

    weight: string or None, optional (default=None)
       The edge attribute that holds the numerical value used
       as a weight.  If None, then each edge has weight 1.
       The degree is the sum of the edge weights adjacent to the node.

    nodes: list or iterable (optional)
        Compute pearson correlation of degrees only for specified nodes.
        The default is all nodes.

    Returns
    -------
    r : float
       Assortativity of graph by degree.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> r = nx.degree_pearson_correlation_coefficient(G)
    >>> print(f"{r:3.1f}")
    -0.5

    Notes
    -----
    This calls scipy.stats.pearsonr.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks
           Physical Review E, 67 026126, 2003
    .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M.
       Edge direction and the structure of networks, PNAS 107, 10815-20 (2010).
    """
    import scipy as sp

    # Pair up the source/target degree of every edge, then correlate.
    src_deg, tgt_deg = zip(*node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight))
    return float(sp.stats.pearsonr(src_deg, tgt_deg)[0])
+
+
@nx._dispatchable(node_attrs="attribute")
def attribute_assortativity_coefficient(G, attribute, nodes=None):
    """Compute assortativity for node attributes.

    Assortativity measures the similarity of connections in the graph
    with respect to the given attribute.

    Parameters
    ----------
    G : NetworkX graph

    attribute : string
        Node attribute key

    nodes: list or iterable (optional)
        Compute attribute assortativity for nodes in container.
        The default is all nodes.

    Returns
    -------
    r: float
       Assortativity of graph for given attribute

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_nodes_from([0, 1], color="red")
    >>> G.add_nodes_from([2, 3], color="blue")
    >>> G.add_edges_from([(0, 1), (2, 3)])
    >>> print(nx.attribute_assortativity_coefficient(G, "color"))
    1.0

    Notes
    -----
    This computes Eq. (2) in Ref. [1]_ , (trace(M)-sum(M^2))/(1-sum(M^2)),
    where M is the joint probability distribution (mixing matrix)
    of the specified attribute.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks,
       Physical Review E, 67 026126, 2003
    """
    # Build the normalized mixing matrix, then reduce it to a scalar.
    return attribute_ac(attribute_mixing_matrix(G, attribute, nodes))
+
+
@nx._dispatchable(node_attrs="attribute")
def numeric_assortativity_coefficient(G, attribute, nodes=None):
    """Compute assortativity for numerical node attributes.

    Assortativity measures the similarity of connections in the graph
    with respect to the given numeric attribute.

    Parameters
    ----------
    G : NetworkX graph

    attribute : string
        Node attribute key.

    nodes: list or iterable (optional)
        Compute numeric assortativity only for attributes of nodes in
        container. The default is all nodes.

    Returns
    -------
    r: float
       Assortativity of graph for given attribute

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_nodes_from([0, 1], size=2)
    >>> G.add_nodes_from([2, 3], size=3)
    >>> G.add_edges_from([(0, 1), (2, 3)])
    >>> print(nx.numeric_assortativity_coefficient(G, "size"))
    1.0

    Notes
    -----
    This computes Eq. (21) in Ref. [1]_ , which is the Pearson correlation
    coefficient of the specified (scalar valued) attribute across edges.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks
           Physical Review E, 67 026126, 2003
    """
    if nodes is None:
        nodes = G.nodes
    # Index each distinct attribute value with a row/column of the
    # mixing matrix.
    distinct_vals = {G.nodes[n][attribute] for n in nodes}
    mapping = {val: i for i, val in enumerate(distinct_vals)}
    mix = attribute_mixing_matrix(G, attribute, nodes, mapping)
    return _numeric_ac(mix, mapping)
+
+
def attribute_ac(M):
    """Compute assortativity for attribute matrix M.

    Parameters
    ----------
    M : numpy.ndarray
        2D ndarray representing the attribute mixing matrix.

    Notes
    -----
    This computes Eq. (2) in Ref. [1]_ , (trace(e)-sum(e^2))/(1-sum(e^2)),
    where e is the joint probability distribution (mixing matrix)
    of the specified attribute.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks,
       Physical Review E, 67 026126, 2003
    """
    total = M.sum()
    # Normalize to a joint probability distribution when needed.
    if total != 1.0:
        M = M / total
    sq_sum = (M @ M).sum()
    tr = M.trace()
    return float((tr - sq_sum) / (1 - sq_sum))
+
+
def _numeric_ac(M, mapping):
    """Pearson-style assortativity coefficient for a numeric mixing matrix.

    ``M`` is a 2D numpy array (normalized here to a joint distribution if
    needed) and ``mapping`` maps each attribute value to its row/column
    index in ``M``.
    """
    import numpy as np

    total = M.sum()
    if total != 1.0:
        M = M / total
    # Attribute values and their matrix indices; the x and y margins
    # share the same support.
    vals = np.array(list(mapping.keys()))
    idx = list(mapping.values())
    a = M.sum(axis=0)[idx]
    b = M.sum(axis=1)[idx]
    var_a = (a * vals**2).sum() - ((a * vals).sum()) ** 2
    var_b = (b * vals**2).sum() - ((b * vals).sum()) ** 2
    # Covariance of the attribute values over edges, via the deviation of
    # the joint distribution from the product of its marginals.
    cov = (np.outer(vals, vals) * (M - np.outer(a, b))).sum()
    return float(cov / np.sqrt(var_a * var_b))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/mixing.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/mixing.py
new file mode 100644
index 00000000..1762d4e5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/mixing.py
@@ -0,0 +1,255 @@
+"""
+Mixing matrices for node attributes and degree.
+"""
+
+import networkx as nx
+from networkx.algorithms.assortativity.pairs import node_attribute_xy, node_degree_xy
+from networkx.utils import dict_to_numpy_array
+
+__all__ = [
+    "attribute_mixing_matrix",
+    "attribute_mixing_dict",
+    "degree_mixing_matrix",
+    "degree_mixing_dict",
+    "mixing_dict",
+]
+
+
+@nx._dispatchable(node_attrs="attribute")
+def attribute_mixing_dict(G, attribute, nodes=None, normalized=False):
+    """Returns dictionary representation of mixing matrix for attribute.
+
+    Parameters
+    ----------
+    G : graph
+       NetworkX graph object.
+
+    attribute : string
+       Node attribute key.
+
+    nodes: list or iterable (optional)
+        Use nodes in container to build the dict. The default is all nodes.
+
+    normalized : bool (default=False)
+       Return counts if False or probabilities if True.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> G.add_nodes_from([0, 1], color="red")
+    >>> G.add_nodes_from([2, 3], color="blue")
+    >>> G.add_edge(1, 3)
+    >>> d = nx.attribute_mixing_dict(G, "color")
+    >>> print(d["red"]["blue"])
+    1
+    >>> print(d["blue"]["red"])  # d symmetric for undirected graphs
+    1
+
+    Returns
+    -------
+    d : dictionary
+       Counts or joint probability of occurrence of attribute pairs.
+    """
+    xy_iter = node_attribute_xy(G, attribute, nodes)
+    return mixing_dict(xy_iter, normalized=normalized)
+
+
+@nx._dispatchable(node_attrs="attribute")
+def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, normalized=True):
+    """Returns mixing matrix for attribute.
+
+    Parameters
+    ----------
+    G : graph
+       NetworkX graph object.
+
+    attribute : string
+       Node attribute key.
+
+    nodes: list or iterable (optional)
+        Use only nodes in container to build the matrix. The default is
+        all nodes.
+
+    mapping : dictionary, optional
+       Mapping from node attribute to integer index in matrix.
+       If not specified, an arbitrary ordering will be used.
+
+    normalized : bool (default=True)
+       Return counts if False or probabilities if True.
+
+    Returns
+    -------
+    m: numpy array
+       Counts or joint probability of occurrence of attribute pairs.
+
+    Notes
+    -----
+    If each node has a unique attribute value, the unnormalized mixing matrix
+    will be equal to the adjacency matrix. To get a denser mixing matrix,
+    the rounding can be performed to form groups of nodes with equal values.
+    For example, the exact height of persons in cm (180.79155222, 163.9080892,
+    163.30095355, 167.99016217, 168.21590163, ...) can be rounded to (180, 163,
+    163, 168, 168, ...).
+
+    Definitions of attribute mixing matrix vary on whether the matrix
+    should include rows for attribute values that don't arise. Here we
+    do not include such empty-rows. But you can force them to appear
+    by inputting a `mapping` that includes those values.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(3)
+    >>> gender = {0: "male", 1: "female", 2: "female"}
+    >>> nx.set_node_attributes(G, gender, "gender")
+    >>> mapping = {"male": 0, "female": 1}
+    >>> mix_mat = nx.attribute_mixing_matrix(G, "gender", mapping=mapping)
+    >>> mix_mat
+    array([[0.  , 0.25],
+           [0.25, 0.5 ]])
+    """
+    d = attribute_mixing_dict(G, attribute, nodes)
+    a = dict_to_numpy_array(d, mapping=mapping)
+    if normalized:
+        a = a / a.sum()
+    return a
+
+
+@nx._dispatchable(edge_attrs="weight")
+def degree_mixing_dict(G, x="out", y="in", weight=None, nodes=None, normalized=False):
+    """Returns dictionary representation of mixing matrix for degree.
+
+    Parameters
+    ----------
+    G : graph
+        NetworkX graph object.
+
+    x: string ('in','out')
+       The degree type for source node (directed graphs only).
+
+    y: string ('in','out')
+       The degree type for target node (directed graphs only).
+
+    weight: string or None, optional (default=None)
+       The edge attribute that holds the numerical value used
+       as a weight.  If None, then each edge has weight 1.
+       The degree is the sum of the edge weights adjacent to the node.
+
+    normalized : bool (default=False)
+        Return counts if False or probabilities if True.
+
+    Returns
+    -------
+    d: dictionary
+       Counts or joint probability of occurrence of degree pairs.
+    """
+    xy_iter = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight)
+    return mixing_dict(xy_iter, normalized=normalized)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def degree_mixing_matrix(
+    G, x="out", y="in", weight=None, nodes=None, normalized=True, mapping=None
+):
+    """Returns mixing matrix for degree.
+
+    Parameters
+    ----------
+    G : graph
+       NetworkX graph object.
+
+    x: string ('in','out')
+       The degree type for source node (directed graphs only).
+
+    y: string ('in','out')
+       The degree type for target node (directed graphs only).
+
+    nodes: list or iterable (optional)
+        Build the matrix using only nodes in container.
+        The default is all nodes.
+
+    weight: string or None, optional (default=None)
+       The edge attribute that holds the numerical value used
+       as a weight.  If None, then each edge has weight 1.
+       The degree is the sum of the edge weights adjacent to the node.
+
+    normalized : bool (default=True)
+       Return counts if False or probabilities if True.
+
+    mapping : dictionary, optional
+       Mapping from node degree to integer index in matrix.
+       If not specified, an arbitrary ordering will be used.
+
+    Returns
+    -------
+    m: numpy array
+       Counts, or joint probability, of occurrence of node degree.
+
+    Notes
+    -----
+    Definitions of degree mixing matrix vary on whether the matrix
+    should include rows for degree values that don't arise. Here we
+    do not include such empty-rows. But you can force them to appear
+    by inputting a `mapping` that includes those values. See examples.
+
+    Examples
+    --------
+    >>> G = nx.star_graph(3)
+    >>> mix_mat = nx.degree_mixing_matrix(G)
+    >>> mix_mat
+    array([[0. , 0.5],
+           [0.5, 0. ]])
+
+    If you want every possible degree to appear as a row, even if no nodes
+    have that degree, use `mapping` as follows,
+
+    >>> max_degree = max(deg for n, deg in G.degree)
+    >>> mapping = {x: x for x in range(max_degree + 1)}  # identity mapping
+    >>> mix_mat = nx.degree_mixing_matrix(G, mapping=mapping)
+    >>> mix_mat
+    array([[0. , 0. , 0. , 0. ],
+           [0. , 0. , 0. , 0.5],
+           [0. , 0. , 0. , 0. ],
+           [0. , 0.5, 0. , 0. ]])
+    """
+    d = degree_mixing_dict(G, x=x, y=y, nodes=nodes, weight=weight)
+    a = dict_to_numpy_array(d, mapping=mapping)
+    if normalized:
+        a = a / a.sum()
+    return a
+
+
+def mixing_dict(xy, normalized=False):
+    """Returns a dictionary representation of mixing matrix.
+
+    Parameters
+    ----------
+    xy : list or container of two-tuples
+       Pairs of (x,y) items.
+
+    attribute : string
+       Node attribute key
+
+    normalized : bool (default=False)
+       Return counts if False or probabilities if True.
+
+    Returns
+    -------
+    d: dictionary
+       Counts or joint probability of occurrence of values in xy.
+    """
+    d = {}
+    psum = 0.0
+    for x, y in xy:
+        if x not in d:
+            d[x] = {}
+        if y not in d:
+            d[y] = {}
+        v = d[x].get(y, 0)
+        d[x][y] = v + 1
+        psum += 1
+
+    if normalized:
+        for _, jdict in d.items():
+            for j in jdict:
+                jdict[j] /= psum
+    return d
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/neighbor_degree.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/neighbor_degree.py
new file mode 100644
index 00000000..6488d041
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/neighbor_degree.py
@@ -0,0 +1,160 @@
+import networkx as nx
+
+__all__ = ["average_neighbor_degree"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def average_neighbor_degree(G, source="out", target="out", nodes=None, weight=None):
+    r"""Returns the average degree of the neighborhood of each node.
+
+    In an undirected graph, the neighborhood `N(i)` of node `i` contains the
+    nodes that are connected to `i` by an edge.
+
+    For directed graphs, `N(i)` is defined according to the parameter `source`:
+
+        - if source is 'in', then `N(i)` consists of predecessors of node `i`.
+        - if source is 'out', then `N(i)` consists of successors of node `i`.
+        - if source is 'in+out', then `N(i)` is both predecessors and successors.
+
+    The average neighborhood degree of a node `i` is
+
+    .. math::
+
+        k_{nn,i} = \frac{1}{|N(i)|} \sum_{j \in N(i)} k_j
+
+    where `N(i)` are the neighbors of node `i` and `k_j` is
+    the degree of node `j` which belongs to `N(i)`. For weighted
+    graphs, an analogous measure can be defined [1]_,
+
+    .. math::
+
+        k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j
+
+    where `s_i` is the weighted degree of node `i`, `w_{ij}`
+    is the weight of the edge that links `i` and `j` and
+    `N(i)` are the neighbors of node `i`.
+
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : string ("in"|"out"|"in+out"), optional (default="out")
+       Directed graphs only.
+       Use "in"- or "out"-neighbors of source node.
+
+    target : string ("in"|"out"|"in+out"), optional (default="out")
+       Directed graphs only.
+       Use "in"- or "out"-degree for target node.
+
+    nodes : list or iterable, optional (default=G.nodes)
+        Compute neighbor degree only for specified nodes.
+
+    weight : string or None, optional (default=None)
+       The edge attribute that holds the numerical value used as a weight.
+       If None, then each edge has weight 1.
+
+    Returns
+    -------
+    d: dict
+       A dictionary keyed by node to the average degree of its neighbors.
+
+    Raises
+    ------
+    NetworkXError
+        If either `source` or `target` are not one of 'in', 'out', or 'in+out'.
+        If either `source` or `target` is passed for an undirected graph.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4)
+    >>> G.edges[0, 1]["weight"] = 5
+    >>> G.edges[2, 3]["weight"] = 3
+
+    >>> nx.average_neighbor_degree(G)
+    {0: 2.0, 1: 1.5, 2: 1.5, 3: 2.0}
+    >>> nx.average_neighbor_degree(G, weight="weight")
+    {0: 2.0, 1: 1.1666666666666667, 2: 1.25, 3: 2.0}
+
+    >>> G = nx.DiGraph()
+    >>> nx.add_path(G, [0, 1, 2, 3])
+    >>> nx.average_neighbor_degree(G, source="in", target="in")
+    {0: 0.0, 1: 0.0, 2: 1.0, 3: 1.0}
+
+    >>> nx.average_neighbor_degree(G, source="out", target="out")
+    {0: 1.0, 1: 1.0, 2: 0.0, 3: 0.0}
+
+    See Also
+    --------
+    average_degree_connectivity
+
+    References
+    ----------
+    .. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani,
+       "The architecture of complex weighted networks".
+       PNAS 101 (11): 3747–3752 (2004).
+    """
+    if G.is_directed():
+        if source == "in":
+            source_degree = G.in_degree
+        elif source == "out":
+            source_degree = G.out_degree
+        elif source == "in+out":
+            source_degree = G.degree
+        else:
+            raise nx.NetworkXError(
+                f"source argument {source} must be 'in', 'out' or 'in+out'"
+            )
+
+        if target == "in":
+            target_degree = G.in_degree
+        elif target == "out":
+            target_degree = G.out_degree
+        elif target == "in+out":
+            target_degree = G.degree
+        else:
+            raise nx.NetworkXError(
+                f"target argument {target} must be 'in', 'out' or 'in+out'"
+            )
+    else:
+        if source != "out" or target != "out":
+            raise nx.NetworkXError(
+                f"source and target arguments are only supported for directed graphs"
+            )
+        source_degree = target_degree = G.degree
+
+    # precompute target degrees -- should *not* be weighted degree
+    t_deg = dict(target_degree())
+
+    # Set up both predecessor and successor neighbor dicts leaving empty if not needed
+    G_P = G_S = {n: {} for n in G}
+    if G.is_directed():
+        # "in" or "in+out" cases: G_P contains predecessors
+        if "in" in source:
+            G_P = G.pred
+        # "out" or "in+out" cases: G_S contains successors
+        if "out" in source:
+            G_S = G.succ
+    else:
+        # undirected leave G_P empty but G_S is the adjacency
+        G_S = G.adj
+
+    # Main loop: Compute average degree of neighbors
+    avg = {}
+    for n, deg in source_degree(nodes, weight=weight):
+        # handle degree zero average
+        if deg == 0:
+            avg[n] = 0.0
+            continue
+
+        # we sum over both G_P and G_S, but one of the two is usually empty.
+        if weight is None:
+            avg[n] = (
+                sum(t_deg[nbr] for nbr in G_S[n]) + sum(t_deg[nbr] for nbr in G_P[n])
+            ) / deg
+        else:
+            avg[n] = (
+                sum(dd.get(weight, 1) * t_deg[nbr] for nbr, dd in G_S[n].items())
+                + sum(dd.get(weight, 1) * t_deg[nbr] for nbr, dd in G_P[n].items())
+            ) / deg
+    return avg
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/pairs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/pairs.py
new file mode 100644
index 00000000..ea5fd287
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/pairs.py
@@ -0,0 +1,127 @@
+"""Generators of x-y pairs of node data."""
+
+import networkx as nx
+
+__all__ = ["node_attribute_xy", "node_degree_xy"]
+
+
+@nx._dispatchable(node_attrs="attribute")
+def node_attribute_xy(G, attribute, nodes=None):
+    """Yields 2-tuples of node attribute values for all edges in `G`.
+
+    This generator yields, for each edge in `G` incident to a node in `nodes`,
+    a 2-tuple of form ``(attribute value, attribute value)`` for the
+    specified node attribute.
+
+    Parameters
+    ----------
+    G: NetworkX graph
+
+    attribute: key
+        The node attribute key.
+
+    nodes: list or iterable (optional)
+        Use only edges that are incident to specified nodes.
+        The default is all nodes.
+
+    Yields
+    ------
+    (x, y): 2-tuple
+        Generates 2-tuple of (attribute, attribute) values.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> G.add_node(1, color="red")
+    >>> G.add_node(2, color="blue")
+    >>> G.add_node(3, color="green")
+    >>> G.add_edge(1, 2)
+    >>> list(nx.node_attribute_xy(G, "color"))
+    [('red', 'blue')]
+
+    Notes
+    -----
+    For undirected graphs, each edge is produced twice, once for each edge
+    representation (u, v) and (v, u), with the exception of self-loop edges
+    which only appear once.
+    """
+    if nodes is None:
+        nodes = set(G)
+    else:
+        nodes = set(nodes)
+    Gnodes = G.nodes
+    for u, nbrsdict in G.adjacency():
+        if u not in nodes:
+            continue
+        uattr = Gnodes[u].get(attribute, None)
+        if G.is_multigraph():
+            for v, keys in nbrsdict.items():
+                vattr = Gnodes[v].get(attribute, None)
+                for _ in keys:
+                    yield (uattr, vattr)
+        else:
+            for v in nbrsdict:
+                vattr = Gnodes[v].get(attribute, None)
+                yield (uattr, vattr)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def node_degree_xy(G, x="out", y="in", weight=None, nodes=None):
+    """Yields 2-tuples of ``(degree, degree)`` values for edges in `G`.
+
+    This generator yields, for each edge in `G` incident to a node in `nodes`,
+    a 2-tuple of form ``(degree, degree)``. The node degrees are weighted
+    when a `weight` attribute is specified.
+
+    Parameters
+    ----------
+    G: NetworkX graph
+
+    x: string ('in','out')
+       The degree type for source node (directed graphs only).
+
+    y: string ('in','out')
+       The degree type for target node (directed graphs only).
+
+    weight: string or None, optional (default=None)
+       The edge attribute that holds the numerical value used
+       as a weight.  If None, then each edge has weight 1.
+       The degree is the sum of the edge weights adjacent to the node.
+
+    nodes: list or iterable (optional)
+        Use only edges that are adjacent to specified nodes.
+        The default is all nodes.
+
+    Yields
+    ------
+    (x, y): 2-tuple
+        Generates 2-tuple of (degree, degree) values.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> G.add_edge(1, 2)
+    >>> list(nx.node_degree_xy(G, x="out", y="in"))
+    [(1, 1)]
+    >>> list(nx.node_degree_xy(G, x="in", y="out"))
+    [(0, 0)]
+
+    Notes
+    -----
+    For undirected graphs, each edge is produced twice, once for each edge
+    representation (u, v) and (v, u), with the exception of self-loop edges
+    which only appear once.
+    """
+    nodes = set(G) if nodes is None else set(nodes)
+    if G.is_directed():
+        direction = {"out": G.out_degree, "in": G.in_degree}
+        xdeg = direction[x]
+        ydeg = direction[y]
+    else:
+        xdeg = ydeg = G.degree
+
+    for u, degu in xdeg(nodes, weight=weight):
+        # use G.edges to treat multigraphs correctly
+        neighbors = (nbr for _, nbr in G.edges(u) if nbr in nodes)
+        for _, degv in ydeg(neighbors, weight=weight):
+            yield degu, degv
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/base_test.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/base_test.py
new file mode 100644
index 00000000..46d63006
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/base_test.py
@@ -0,0 +1,81 @@
+import networkx as nx
+
+
+class BaseTestAttributeMixing:
+    @classmethod
+    def setup_class(cls):
+        G = nx.Graph()
+        G.add_nodes_from([0, 1], fish="one")
+        G.add_nodes_from([2, 3], fish="two")
+        G.add_nodes_from([4], fish="red")
+        G.add_nodes_from([5], fish="blue")
+        G.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
+        cls.G = G
+
+        D = nx.DiGraph()
+        D.add_nodes_from([0, 1], fish="one")
+        D.add_nodes_from([2, 3], fish="two")
+        D.add_nodes_from([4], fish="red")
+        D.add_nodes_from([5], fish="blue")
+        D.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
+        cls.D = D
+
+        M = nx.MultiGraph()
+        M.add_nodes_from([0, 1], fish="one")
+        M.add_nodes_from([2, 3], fish="two")
+        M.add_nodes_from([4], fish="red")
+        M.add_nodes_from([5], fish="blue")
+        M.add_edges_from([(0, 1), (0, 1), (2, 3)])
+        cls.M = M
+
+        S = nx.Graph()
+        S.add_nodes_from([0, 1], fish="one")
+        S.add_nodes_from([2, 3], fish="two")
+        S.add_nodes_from([4], fish="red")
+        S.add_nodes_from([5], fish="blue")
+        S.add_edge(0, 0)
+        S.add_edge(2, 2)
+        cls.S = S
+
+        N = nx.Graph()
+        N.add_nodes_from([0, 1], margin=-2)
+        N.add_nodes_from([2, 3], margin=-2)
+        N.add_nodes_from([4], margin=-3)
+        N.add_nodes_from([5], margin=-4)
+        N.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
+        cls.N = N
+
+        F = nx.Graph()
+        F.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5)
+        F.add_edge(0, 2, weight=1)
+        nx.set_node_attributes(F, dict(F.degree(weight="weight")), "margin")
+        cls.F = F
+
+        K = nx.Graph()
+        K.add_nodes_from([1, 2], margin=-1)
+        K.add_nodes_from([3], margin=1)
+        K.add_nodes_from([4], margin=2)
+        K.add_edges_from([(3, 4), (1, 2), (1, 3)])
+        cls.K = K
+
+
+class BaseTestDegreeMixing:
+    @classmethod
+    def setup_class(cls):
+        cls.P4 = nx.path_graph(4)
+        cls.D = nx.DiGraph()
+        cls.D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)])
+        cls.D2 = nx.DiGraph()
+        cls.D2.add_edges_from([(0, 3), (1, 0), (1, 2), (2, 4), (4, 1), (4, 3), (4, 2)])
+        cls.M = nx.MultiGraph()
+        nx.add_path(cls.M, range(4))
+        cls.M.add_edge(0, 1)
+        cls.S = nx.Graph()
+        cls.S.add_edges_from([(0, 0), (1, 1)])
+        cls.W = nx.Graph()
+        cls.W.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5)
+        cls.W.add_edge(0, 2, weight=1)
+        S1 = nx.star_graph(4)
+        S2 = nx.star_graph(4)
+        cls.DS = nx.disjoint_union(S1, S2)
+        cls.DS.add_edge(4, 5)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_connectivity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_connectivity.py
new file mode 100644
index 00000000..21c6287b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_connectivity.py
@@ -0,0 +1,143 @@
+from itertools import permutations
+
+import pytest
+
+import networkx as nx
+
+
+class TestNeighborConnectivity:
+    def test_degree_p4(self):
+        G = nx.path_graph(4)
+        answer = {1: 2.0, 2: 1.5}
+        nd = nx.average_degree_connectivity(G)
+        assert nd == answer
+
+        D = G.to_directed()
+        answer = {2: 2.0, 4: 1.5}
+        nd = nx.average_degree_connectivity(D)
+        assert nd == answer
+
+        answer = {1: 2.0, 2: 1.5}
+        D = G.to_directed()
+        nd = nx.average_degree_connectivity(D, source="in", target="in")
+        assert nd == answer
+
+        D = G.to_directed()
+        nd = nx.average_degree_connectivity(D, source="in", target="in")
+        assert nd == answer
+
+    def test_degree_p4_weighted(self):
+        G = nx.path_graph(4)
+        G[1][2]["weight"] = 4
+        answer = {1: 2.0, 2: 1.8}
+        nd = nx.average_degree_connectivity(G, weight="weight")
+        assert nd == answer
+        answer = {1: 2.0, 2: 1.5}
+        nd = nx.average_degree_connectivity(G)
+        assert nd == answer
+
+        D = G.to_directed()
+        answer = {2: 2.0, 4: 1.8}
+        nd = nx.average_degree_connectivity(D, weight="weight")
+        assert nd == answer
+
+        answer = {1: 2.0, 2: 1.8}
+        D = G.to_directed()
+        nd = nx.average_degree_connectivity(
+            D, weight="weight", source="in", target="in"
+        )
+        assert nd == answer
+
+        D = G.to_directed()
+        nd = nx.average_degree_connectivity(
+            D, source="in", target="out", weight="weight"
+        )
+        assert nd == answer
+
+    def test_weight_keyword(self):
+        G = nx.path_graph(4)
+        G[1][2]["other"] = 4
+        answer = {1: 2.0, 2: 1.8}
+        nd = nx.average_degree_connectivity(G, weight="other")
+        assert nd == answer
+        answer = {1: 2.0, 2: 1.5}
+        nd = nx.average_degree_connectivity(G, weight=None)
+        assert nd == answer
+
+        D = G.to_directed()
+        answer = {2: 2.0, 4: 1.8}
+        nd = nx.average_degree_connectivity(D, weight="other")
+        assert nd == answer
+
+        answer = {1: 2.0, 2: 1.8}
+        D = G.to_directed()
+        nd = nx.average_degree_connectivity(D, weight="other", source="in", target="in")
+        assert nd == answer
+
+        D = G.to_directed()
+        nd = nx.average_degree_connectivity(D, weight="other", source="in", target="in")
+        assert nd == answer
+
+    def test_degree_barrat(self):
+        G = nx.star_graph(5)
+        G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)])
+        G[0][5]["weight"] = 5
+        nd = nx.average_degree_connectivity(G)[5]
+        assert nd == 1.8
+        nd = nx.average_degree_connectivity(G, weight="weight")[5]
+        assert nd == pytest.approx(3.222222, abs=1e-5)
+
+    def test_zero_deg(self):
+        G = nx.DiGraph()
+        G.add_edge(1, 2)
+        G.add_edge(1, 3)
+        G.add_edge(1, 4)
+        c = nx.average_degree_connectivity(G)
+        assert c == {1: 0, 3: 1}
+        c = nx.average_degree_connectivity(G, source="in", target="in")
+        assert c == {0: 0, 1: 0}
+        c = nx.average_degree_connectivity(G, source="in", target="out")
+        assert c == {0: 0, 1: 3}
+        c = nx.average_degree_connectivity(G, source="in", target="in+out")
+        assert c == {0: 0, 1: 3}
+        c = nx.average_degree_connectivity(G, source="out", target="out")
+        assert c == {0: 0, 3: 0}
+        c = nx.average_degree_connectivity(G, source="out", target="in")
+        assert c == {0: 0, 3: 1}
+        c = nx.average_degree_connectivity(G, source="out", target="in+out")
+        assert c == {0: 0, 3: 1}
+
+    def test_in_out_weight(self):
+        G = nx.DiGraph()
+        G.add_edge(1, 2, weight=1)
+        G.add_edge(1, 3, weight=1)
+        G.add_edge(3, 1, weight=1)
+        for s, t in permutations(["in", "out", "in+out"], 2):
+            c = nx.average_degree_connectivity(G, source=s, target=t)
+            cw = nx.average_degree_connectivity(G, source=s, target=t, weight="weight")
+            assert c == cw
+
+    def test_invalid_source(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.DiGraph()
+            nx.average_degree_connectivity(G, source="bogus")
+
+    def test_invalid_target(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.DiGraph()
+            nx.average_degree_connectivity(G, target="bogus")
+
+    def test_invalid_undirected_graph(self):
+        G = nx.Graph()
+        with pytest.raises(nx.NetworkXError):
+            nx.average_degree_connectivity(G, target="bogus")
+        with pytest.raises(nx.NetworkXError):
+            nx.average_degree_connectivity(G, source="bogus")
+
+    def test_single_node(self):
+        # TODO Is this really the intended behavior for providing a
+        # single node as the argument `nodes`? Shouldn't the function
+        # just return the connectivity value itself?
+        G = nx.trivial_graph()
+        conn = nx.average_degree_connectivity(G, nodes=0)
+        assert conn == {0: 0}
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_correlation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_correlation.py
new file mode 100644
index 00000000..5203f944
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_correlation.py
@@ -0,0 +1,123 @@
+import pytest
+
+np = pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+
+import networkx as nx
+from networkx.algorithms.assortativity.correlation import attribute_ac
+
+from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
+
+
+class TestDegreeMixingCorrelation(BaseTestDegreeMixing):
+    def test_degree_assortativity_undirected(self):
+        r = nx.degree_assortativity_coefficient(self.P4)
+        np.testing.assert_almost_equal(r, -1.0 / 2, decimal=4)
+
+    def test_degree_assortativity_node_kwargs(self):
+        G = nx.Graph()
+        edges = [(0, 1), (0, 3), (1, 2), (1, 3), (1, 4), (5, 9), (9, 0)]
+        G.add_edges_from(edges)
+        r = nx.degree_assortativity_coefficient(G, nodes=[1, 2, 4])
+        np.testing.assert_almost_equal(r, -1.0, decimal=4)
+
+    def test_degree_assortativity_directed(self):
+        r = nx.degree_assortativity_coefficient(self.D)
+        np.testing.assert_almost_equal(r, -0.57735, decimal=4)
+
+    def test_degree_assortativity_directed2(self):
+        """Test degree assortativity for a directed graph where the set of
+        in/out degree does not equal the total degree."""
+        r = nx.degree_assortativity_coefficient(self.D2)
+        np.testing.assert_almost_equal(r, 0.14852, decimal=4)
+
+    def test_degree_assortativity_multigraph(self):
+        r = nx.degree_assortativity_coefficient(self.M)
+        np.testing.assert_almost_equal(r, -1.0 / 7.0, decimal=4)
+
+    def test_degree_pearson_assortativity_undirected(self):
+        r = nx.degree_pearson_correlation_coefficient(self.P4)
+        np.testing.assert_almost_equal(r, -1.0 / 2, decimal=4)
+
+    def test_degree_pearson_assortativity_directed(self):
+        r = nx.degree_pearson_correlation_coefficient(self.D)
+        np.testing.assert_almost_equal(r, -0.57735, decimal=4)
+
+    def test_degree_pearson_assortativity_directed2(self):
+        """Test degree assortativity with Pearson for a directed graph where
+        the set of in/out degree does not equal the total degree."""
+        r = nx.degree_pearson_correlation_coefficient(self.D2)
+        np.testing.assert_almost_equal(r, 0.14852, decimal=4)
+
+    def test_degree_pearson_assortativity_multigraph(self):
+        r = nx.degree_pearson_correlation_coefficient(self.M)
+        np.testing.assert_almost_equal(r, -1.0 / 7.0, decimal=4)
+
+    def test_degree_assortativity_weighted(self):
+        r = nx.degree_assortativity_coefficient(self.W, weight="weight")
+        np.testing.assert_almost_equal(r, -0.1429, decimal=4)
+
+    def test_degree_assortativity_double_star(self):
+        r = nx.degree_assortativity_coefficient(self.DS)
+        np.testing.assert_almost_equal(r, -0.9339, decimal=4)
+
+
+class TestAttributeMixingCorrelation(BaseTestAttributeMixing):
+    def test_attribute_assortativity_undirected(self):
+        r = nx.attribute_assortativity_coefficient(self.G, "fish")
+        assert r == 6.0 / 22.0
+
+    def test_attribute_assortativity_directed(self):
+        r = nx.attribute_assortativity_coefficient(self.D, "fish")
+        assert r == 1.0 / 3.0
+
+    def test_attribute_assortativity_multigraph(self):
+        r = nx.attribute_assortativity_coefficient(self.M, "fish")
+        assert r == 1.0
+
+    def test_attribute_assortativity_coefficient(self):
+        # from "Mixing patterns in networks"
+        # fmt: off
+        a = np.array([[0.258, 0.016, 0.035, 0.013],
+                      [0.012, 0.157, 0.058, 0.019],
+                      [0.013, 0.023, 0.306, 0.035],
+                      [0.005, 0.007, 0.024, 0.016]])
+        # fmt: on
+        r = attribute_ac(a)
+        np.testing.assert_almost_equal(r, 0.623, decimal=3)
+
+    def test_attribute_assortativity_coefficient2(self):
+        # fmt: off
+        a = np.array([[0.18, 0.02, 0.01, 0.03],
+                      [0.02, 0.20, 0.03, 0.02],
+                      [0.01, 0.03, 0.16, 0.01],
+                      [0.03, 0.02, 0.01, 0.22]])
+        # fmt: on
+        r = attribute_ac(a)
+        np.testing.assert_almost_equal(r, 0.68, decimal=2)
+
+    def test_attribute_assortativity(self):
+        a = np.array([[50, 50, 0], [50, 50, 0], [0, 0, 2]])
+        r = attribute_ac(a)
+        np.testing.assert_almost_equal(r, 0.029, decimal=3)
+
+    def test_attribute_assortativity_negative(self):
+        r = nx.numeric_assortativity_coefficient(self.N, "margin")
+        np.testing.assert_almost_equal(r, -0.2903, decimal=4)
+
+    def test_assortativity_node_kwargs(self):
+        G = nx.Graph()
+        G.add_nodes_from([0, 1], size=2)
+        G.add_nodes_from([2, 3], size=3)
+        G.add_edges_from([(0, 1), (2, 3)])
+        r = nx.numeric_assortativity_coefficient(G, "size", nodes=[0, 3])
+        np.testing.assert_almost_equal(r, 1.0, decimal=4)
+
+    def test_attribute_assortativity_float(self):
+        r = nx.numeric_assortativity_coefficient(self.F, "margin")
+        np.testing.assert_almost_equal(r, -0.1429, decimal=4)
+
+    def test_attribute_assortativity_mixed(self):
+        r = nx.numeric_assortativity_coefficient(self.K, "margin")
+        np.testing.assert_almost_equal(r, 0.4340, decimal=4)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_mixing.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_mixing.py
new file mode 100644
index 00000000..9af09867
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_mixing.py
@@ -0,0 +1,176 @@
+import pytest
+
+np = pytest.importorskip("numpy")
+
+
+import networkx as nx
+
+from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
+
+
+class TestDegreeMixingDict(BaseTestDegreeMixing):
+    def test_degree_mixing_dict_undirected(self):
+        d = nx.degree_mixing_dict(self.P4)
+        d_result = {1: {2: 2}, 2: {1: 2, 2: 2}}
+        assert d == d_result
+
+    def test_degree_mixing_dict_undirected_normalized(self):
+        d = nx.degree_mixing_dict(self.P4, normalized=True)
+        d_result = {1: {2: 1.0 / 3}, 2: {1: 1.0 / 3, 2: 1.0 / 3}}
+        assert d == d_result
+
+    def test_degree_mixing_dict_directed(self):
+        d = nx.degree_mixing_dict(self.D)
+        print(d)
+        d_result = {1: {3: 2}, 2: {1: 1, 3: 1}, 3: {}}
+        assert d == d_result
+
+    def test_degree_mixing_dict_multigraph(self):
+        d = nx.degree_mixing_dict(self.M)
+        d_result = {1: {2: 1}, 2: {1: 1, 3: 3}, 3: {2: 3}}
+        assert d == d_result
+
+    def test_degree_mixing_dict_weighted(self):
+        d = nx.degree_mixing_dict(self.W, weight="weight")
+        d_result = {0.5: {1.5: 1}, 1.5: {1.5: 6, 0.5: 1}}
+        assert d == d_result
+
+
+class TestDegreeMixingMatrix(BaseTestDegreeMixing):
+    def test_degree_mixing_matrix_undirected(self):
+        # fmt: off
+        a_result = np.array([[0, 2],
+                             [2, 2]]
+                            )
+        # fmt: on
+        a = nx.degree_mixing_matrix(self.P4, normalized=False)
+        np.testing.assert_equal(a, a_result)
+        a = nx.degree_mixing_matrix(self.P4)
+        np.testing.assert_equal(a, a_result / a_result.sum())
+
+    def test_degree_mixing_matrix_directed(self):
+        # fmt: off
+        a_result = np.array([[0, 0, 2],
+                             [1, 0, 1],
+                             [0, 0, 0]]
+                            )
+        # fmt: on
+        a = nx.degree_mixing_matrix(self.D, normalized=False)
+        np.testing.assert_equal(a, a_result)
+        a = nx.degree_mixing_matrix(self.D)
+        np.testing.assert_equal(a, a_result / a_result.sum())
+
+    def test_degree_mixing_matrix_multigraph(self):
+        # fmt: off
+        a_result = np.array([[0, 1, 0],
+                             [1, 0, 3],
+                             [0, 3, 0]]
+                            )
+        # fmt: on
+        a = nx.degree_mixing_matrix(self.M, normalized=False)
+        np.testing.assert_equal(a, a_result)
+        a = nx.degree_mixing_matrix(self.M)
+        np.testing.assert_equal(a, a_result / a_result.sum())
+
+    def test_degree_mixing_matrix_selfloop(self):
+        # fmt: off
+        a_result = np.array([[2]])
+        # fmt: on
+        a = nx.degree_mixing_matrix(self.S, normalized=False)
+        np.testing.assert_equal(a, a_result)
+        a = nx.degree_mixing_matrix(self.S)
+        np.testing.assert_equal(a, a_result / a_result.sum())
+
+    def test_degree_mixing_matrix_weighted(self):
+        a_result = np.array([[0.0, 1.0], [1.0, 6.0]])
+        a = nx.degree_mixing_matrix(self.W, weight="weight", normalized=False)
+        np.testing.assert_equal(a, a_result)
+        a = nx.degree_mixing_matrix(self.W, weight="weight")
+        np.testing.assert_equal(a, a_result / float(a_result.sum()))
+
+    def test_degree_mixing_matrix_mapping(self):
+        a_result = np.array([[6.0, 1.0], [1.0, 0.0]])
+        mapping = {0.5: 1, 1.5: 0}
+        a = nx.degree_mixing_matrix(
+            self.W, weight="weight", normalized=False, mapping=mapping
+        )
+        np.testing.assert_equal(a, a_result)
+
+
+class TestAttributeMixingDict(BaseTestAttributeMixing):
+    def test_attribute_mixing_dict_undirected(self):
+        d = nx.attribute_mixing_dict(self.G, "fish")
+        d_result = {
+            "one": {"one": 2, "red": 1},
+            "two": {"two": 2, "blue": 1},
+            "red": {"one": 1},
+            "blue": {"two": 1},
+        }
+        assert d == d_result
+
+    def test_attribute_mixing_dict_directed(self):
+        d = nx.attribute_mixing_dict(self.D, "fish")
+        d_result = {
+            "one": {"one": 1, "red": 1},
+            "two": {"two": 1, "blue": 1},
+            "red": {},
+            "blue": {},
+        }
+        assert d == d_result
+
+    def test_attribute_mixing_dict_multigraph(self):
+        d = nx.attribute_mixing_dict(self.M, "fish")
+        d_result = {"one": {"one": 4}, "two": {"two": 2}}
+        assert d == d_result
+
+
+class TestAttributeMixingMatrix(BaseTestAttributeMixing):
+    def test_attribute_mixing_matrix_undirected(self):
+        mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
+        a_result = np.array([[2, 0, 1, 0], [0, 2, 0, 1], [1, 0, 0, 0], [0, 1, 0, 0]])
+        a = nx.attribute_mixing_matrix(
+            self.G, "fish", mapping=mapping, normalized=False
+        )
+        np.testing.assert_equal(a, a_result)
+        a = nx.attribute_mixing_matrix(self.G, "fish", mapping=mapping)
+        np.testing.assert_equal(a, a_result / a_result.sum())
+
+    def test_attribute_mixing_matrix_directed(self):
+        mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
+        a_result = np.array([[1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0]])
+        a = nx.attribute_mixing_matrix(
+            self.D, "fish", mapping=mapping, normalized=False
+        )
+        np.testing.assert_equal(a, a_result)
+        a = nx.attribute_mixing_matrix(self.D, "fish", mapping=mapping)
+        np.testing.assert_equal(a, a_result / a_result.sum())
+
+    def test_attribute_mixing_matrix_multigraph(self):
+        mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
+        a_result = np.array([[4, 0, 0, 0], [0, 2, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]])
+        a = nx.attribute_mixing_matrix(
+            self.M, "fish", mapping=mapping, normalized=False
+        )
+        np.testing.assert_equal(a, a_result)
+        a = nx.attribute_mixing_matrix(self.M, "fish", mapping=mapping)
+        np.testing.assert_equal(a, a_result / a_result.sum())
+
+    def test_attribute_mixing_matrix_negative(self):
+        mapping = {-2: 0, -3: 1, -4: 2}
+        a_result = np.array([[4.0, 1.0, 1.0], [1.0, 0.0, 0.0], [1.0, 0.0, 0.0]])
+        a = nx.attribute_mixing_matrix(
+            self.N, "margin", mapping=mapping, normalized=False
+        )
+        np.testing.assert_equal(a, a_result)
+        a = nx.attribute_mixing_matrix(self.N, "margin", mapping=mapping)
+        np.testing.assert_equal(a, a_result / float(a_result.sum()))
+
+    def test_attribute_mixing_matrix_float(self):
+        mapping = {0.5: 1, 1.5: 0}
+        a_result = np.array([[6.0, 1.0], [1.0, 0.0]])
+        a = nx.attribute_mixing_matrix(
+            self.F, "margin", mapping=mapping, normalized=False
+        )
+        np.testing.assert_equal(a, a_result)
+        a = nx.attribute_mixing_matrix(self.F, "margin", mapping=mapping)
+        np.testing.assert_equal(a, a_result / a_result.sum())
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_neighbor_degree.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_neighbor_degree.py
new file mode 100644
index 00000000..bf1252d5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_neighbor_degree.py
@@ -0,0 +1,108 @@
+import pytest
+
+import networkx as nx
+
+
+class TestAverageNeighbor:
+    def test_degree_p4(self):
+        G = nx.path_graph(4)
+        answer = {0: 2, 1: 1.5, 2: 1.5, 3: 2}
+        nd = nx.average_neighbor_degree(G)
+        assert nd == answer
+
+        D = G.to_directed()
+        nd = nx.average_neighbor_degree(D)
+        assert nd == answer
+
+        D = nx.DiGraph(G.edges(data=True))
+        nd = nx.average_neighbor_degree(D)
+        assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
+        nd = nx.average_neighbor_degree(D, "in", "out")
+        assert nd == {0: 0, 1: 1, 2: 1, 3: 1}
+        nd = nx.average_neighbor_degree(D, "out", "in")
+        assert nd == {0: 1, 1: 1, 2: 1, 3: 0}
+        nd = nx.average_neighbor_degree(D, "in", "in")
+        assert nd == {0: 0, 1: 0, 2: 1, 3: 1}
+
+    def test_degree_p4_weighted(self):
+        G = nx.path_graph(4)
+        G[1][2]["weight"] = 4
+        answer = {0: 2, 1: 1.8, 2: 1.8, 3: 2}
+        nd = nx.average_neighbor_degree(G, weight="weight")
+        assert nd == answer
+
+        D = G.to_directed()
+        nd = nx.average_neighbor_degree(D, weight="weight")
+        assert nd == answer
+
+        D = nx.DiGraph(G.edges(data=True))
+        print(D.edges(data=True))
+        nd = nx.average_neighbor_degree(D, weight="weight")
+        assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
+        nd = nx.average_neighbor_degree(D, "out", "out", weight="weight")
+        assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
+        nd = nx.average_neighbor_degree(D, "in", "in", weight="weight")
+        assert nd == {0: 0, 1: 0, 2: 1, 3: 1}
+        nd = nx.average_neighbor_degree(D, "in", "out", weight="weight")
+        assert nd == {0: 0, 1: 1, 2: 1, 3: 1}
+        nd = nx.average_neighbor_degree(D, "out", "in", weight="weight")
+        assert nd == {0: 1, 1: 1, 2: 1, 3: 0}
+        nd = nx.average_neighbor_degree(D, source="in+out", weight="weight")
+        assert nd == {0: 1.0, 1: 1.0, 2: 0.8, 3: 1.0}
+        nd = nx.average_neighbor_degree(D, target="in+out", weight="weight")
+        assert nd == {0: 2.0, 1: 2.0, 2: 1.0, 3: 0.0}
+
+        D = G.to_directed()
+        nd = nx.average_neighbor_degree(D, weight="weight")
+        assert nd == answer
+        nd = nx.average_neighbor_degree(D, source="out", target="out", weight="weight")
+        assert nd == answer
+
+        D = G.to_directed()
+        nd = nx.average_neighbor_degree(D, source="in", target="in", weight="weight")
+        assert nd == answer
+
+    def test_degree_k4(self):
+        G = nx.complete_graph(4)
+        answer = {0: 3, 1: 3, 2: 3, 3: 3}
+        nd = nx.average_neighbor_degree(G)
+        assert nd == answer
+
+        D = G.to_directed()
+        nd = nx.average_neighbor_degree(D)
+        assert nd == answer
+
+        D = G.to_directed()
+        nd = nx.average_neighbor_degree(D)
+        assert nd == answer
+
+        D = G.to_directed()
+        nd = nx.average_neighbor_degree(D, source="in", target="in")
+        assert nd == answer
+
+    def test_degree_k4_nodes(self):
+        G = nx.complete_graph(4)
+        answer = {1: 3.0, 2: 3.0}
+        nd = nx.average_neighbor_degree(G, nodes=[1, 2])
+        assert nd == answer
+
+    def test_degree_barrat(self):
+        G = nx.star_graph(5)
+        G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)])
+        G[0][5]["weight"] = 5
+        nd = nx.average_neighbor_degree(G)[5]
+        assert nd == 1.8
+        nd = nx.average_neighbor_degree(G, weight="weight")[5]
+        assert nd == pytest.approx(3.222222, abs=1e-5)
+
+    def test_error_invalid_source_target(self):
+        G = nx.path_graph(4)
+        with pytest.raises(nx.NetworkXError):
+            nx.average_neighbor_degree(G, "error")
+        with pytest.raises(nx.NetworkXError):
+            nx.average_neighbor_degree(G, "in", "error")
+        G = G.to_directed()
+        with pytest.raises(nx.NetworkXError):
+            nx.average_neighbor_degree(G, "error")
+        with pytest.raises(nx.NetworkXError):
+            nx.average_neighbor_degree(G, "in", "error")
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_pairs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_pairs.py
new file mode 100644
index 00000000..3984292b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_pairs.py
@@ -0,0 +1,87 @@
+import networkx as nx
+
+from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
+
+
+class TestAttributeMixingXY(BaseTestAttributeMixing):
+    def test_node_attribute_xy_undirected(self):
+        attrxy = sorted(nx.node_attribute_xy(self.G, "fish"))
+        attrxy_result = sorted(
+            [
+                ("one", "one"),
+                ("one", "one"),
+                ("two", "two"),
+                ("two", "two"),
+                ("one", "red"),
+                ("red", "one"),
+                ("blue", "two"),
+                ("two", "blue"),
+            ]
+        )
+        assert attrxy == attrxy_result
+
+    def test_node_attribute_xy_undirected_nodes(self):
+        attrxy = sorted(nx.node_attribute_xy(self.G, "fish", nodes=["one", "yellow"]))
+        attrxy_result = sorted([])
+        assert attrxy == attrxy_result
+
+    def test_node_attribute_xy_directed(self):
+        attrxy = sorted(nx.node_attribute_xy(self.D, "fish"))
+        attrxy_result = sorted(
+            [("one", "one"), ("two", "two"), ("one", "red"), ("two", "blue")]
+        )
+        assert attrxy == attrxy_result
+
+    def test_node_attribute_xy_multigraph(self):
+        attrxy = sorted(nx.node_attribute_xy(self.M, "fish"))
+        attrxy_result = [
+            ("one", "one"),
+            ("one", "one"),
+            ("one", "one"),
+            ("one", "one"),
+            ("two", "two"),
+            ("two", "two"),
+        ]
+        assert attrxy == attrxy_result
+
+    def test_node_attribute_xy_selfloop(self):
+        attrxy = sorted(nx.node_attribute_xy(self.S, "fish"))
+        attrxy_result = [("one", "one"), ("two", "two")]
+        assert attrxy == attrxy_result
+
+
+class TestDegreeMixingXY(BaseTestDegreeMixing):
+    def test_node_degree_xy_undirected(self):
+        xy = sorted(nx.node_degree_xy(self.P4))
+        xy_result = sorted([(1, 2), (2, 1), (2, 2), (2, 2), (1, 2), (2, 1)])
+        assert xy == xy_result
+
+    def test_node_degree_xy_undirected_nodes(self):
+        xy = sorted(nx.node_degree_xy(self.P4, nodes=[0, 1, -1]))
+        xy_result = sorted([(1, 2), (2, 1)])
+        assert xy == xy_result
+
+    def test_node_degree_xy_directed(self):
+        xy = sorted(nx.node_degree_xy(self.D))
+        xy_result = sorted([(2, 1), (2, 3), (1, 3), (1, 3)])
+        assert xy == xy_result
+
+    def test_node_degree_xy_multigraph(self):
+        xy = sorted(nx.node_degree_xy(self.M))
+        xy_result = sorted(
+            [(2, 3), (2, 3), (3, 2), (3, 2), (2, 3), (3, 2), (1, 2), (2, 1)]
+        )
+        assert xy == xy_result
+
+    def test_node_degree_xy_selfloop(self):
+        xy = sorted(nx.node_degree_xy(self.S))
+        xy_result = sorted([(2, 2), (2, 2)])
+        assert xy == xy_result
+
+    def test_node_degree_xy_weighted(self):
+        G = nx.Graph()
+        G.add_edge(1, 2, weight=7)
+        G.add_edge(2, 3, weight=10)
+        xy = sorted(nx.node_degree_xy(G, weight="weight"))
+        xy_result = sorted([(7, 17), (17, 10), (17, 7), (10, 17)])
+        assert xy == xy_result
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/asteroidal.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/asteroidal.py
new file mode 100644
index 00000000..3f9b2ab5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/asteroidal.py
@@ -0,0 +1,171 @@
+"""
+Algorithms for asteroidal triples and asteroidal numbers in graphs.
+
+An asteroidal triple in a graph G is a set of three non-adjacent vertices
+u, v and w such that there exist a path between any two of them that avoids
+closed neighborhood of the third. More formally, v_j, v_k belongs to the same
+connected component of G - N[v_i], where N[v_i] denotes the closed neighborhood
+of v_i. A graph which does not contain any asteroidal triples is called
+an AT-free graph. The class of AT-free graphs is a graph class for which
+many NP-complete problems are solvable in polynomial time. Amongst them,
+independent set and coloring.
+"""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["is_at_free", "find_asteroidal_triple"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def find_asteroidal_triple(G):
+    r"""Find an asteroidal triple in the given graph.
+
+    An asteroidal triple is a triple of non-adjacent vertices such that
+    there exists a path between any two of them which avoids the closed
+    neighborhood of the third. It checks all independent triples of vertices
+    and whether they are an asteroidal triple or not. This is done with the
+    help of a data structure called a component structure.
+    A component structure encodes information about which vertices belongs to
+    the same connected component when the closed neighborhood of a given vertex
+    is removed from the graph. The algorithm used to check is the trivial
+    one, outlined in [1]_, which has a runtime of
+    :math:`O(|V||\overline{E} + |V||E|)`, where the second term is the
+    creation of the component structure.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        The graph to check whether is AT-free or not
+
+    Returns
+    -------
+    list or None
+        An asteroidal triple is returned as a list of nodes. If no asteroidal
+        triple exists, i.e. the graph is AT-free, then None is returned.
+        The returned value depends on the certificate parameter. The default
+        option is a bool which is True if the graph is AT-free, i.e. the
+        given graph contains no asteroidal triples, and False otherwise, i.e.
+        if the graph contains at least one asteroidal triple.
+
+    Notes
+    -----
+    The component structure and the algorithm is described in [1]_. The current
+    implementation implements the trivial algorithm for simple graphs.
+
+    References
+    ----------
+    .. [1] Ekkehard Köhler,
+       "Recognizing Graphs without asteroidal triples",
+       Journal of Discrete Algorithms 2, pages 439-452, 2004.
+       https://www.sciencedirect.com/science/article/pii/S157086670400019X
+    """
+    V = set(G.nodes)
+
+    if len(V) < 6:
+        # An asteroidal triple cannot exist in a graph with 5 or less vertices.
+        return None
+
+    component_structure = create_component_structure(G)
+    E_complement = set(nx.complement(G).edges)
+
+    for e in E_complement:
+        u = e[0]
+        v = e[1]
+        u_neighborhood = set(G[u]).union([u])
+        v_neighborhood = set(G[v]).union([v])
+        union_of_neighborhoods = u_neighborhood.union(v_neighborhood)
+        for w in V - union_of_neighborhoods:
+            # Check for each pair of vertices whether they belong to the
+            # same connected component when the closed neighborhood of the
+            # third is removed.
+            if (
+                component_structure[u][v] == component_structure[u][w]
+                and component_structure[v][u] == component_structure[v][w]
+                and component_structure[w][u] == component_structure[w][v]
+            ):
+                return [u, v, w]
+    return None
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def is_at_free(G):
+    """Check if a graph is AT-free.
+
+    The method uses the `find_asteroidal_triple` method to recognize
+    an AT-free graph. If no asteroidal triple is found the graph is
+    AT-free and True is returned. If at least one asteroidal triple is
+    found the graph is not AT-free and False is returned.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        The graph to check whether is AT-free or not.
+
+    Returns
+    -------
+    bool
+        True if G is AT-free and False otherwise.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
+    >>> nx.is_at_free(G)
+    True
+
+    >>> G = nx.cycle_graph(6)
+    >>> nx.is_at_free(G)
+    False
+    """
+    return find_asteroidal_triple(G) is None
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def create_component_structure(G):
+    r"""Create component structure for G.
+
+    A *component structure* is an `nxn` array, denoted `c`, where `n` is
+    the number of vertices,  where each row and column corresponds to a vertex.
+
+    .. math::
+        c_{uv} = \begin{cases} 0, if v \in N[u] \\
+            k, if v \in component k of G \setminus N[u] \end{cases}
+
+    Where `k` is an arbitrary label for each component. The structure is used
+    to simplify the detection of asteroidal triples.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        Undirected, simple graph.
+
+    Returns
+    -------
+    component_structure : dictionary
+        A dictionary of dictionaries, keyed by pairs of vertices.
+
+    """
+    V = set(G.nodes)
+    component_structure = {}
+    for v in V:
+        label = 0
+        closed_neighborhood = set(G[v]).union({v})
+        row_dict = {}
+        for u in closed_neighborhood:
+            row_dict[u] = 0
+
+        G_reduced = G.subgraph(set(G.nodes) - closed_neighborhood)
+        for cc in nx.connected_components(G_reduced):
+            label += 1
+            for u in cc:
+                row_dict[u] = label
+
+        component_structure[v] = row_dict
+
+    return component_structure
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__init__.py
new file mode 100644
index 00000000..7839db96
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__init__.py
@@ -0,0 +1,87 @@
+r"""This module provides functions and operations for bipartite
+graphs.  Bipartite graphs `B = (U, V, E)` have two node sets `U,V` and edges in
+`E` that only connect nodes from opposite sets. It is common in the literature
+to use an spatial analogy referring to the two node sets as top and bottom nodes.
+
+The bipartite algorithms are not imported into the networkx namespace
+at the top level so the easiest way to use them is with:
+
+>>> from networkx.algorithms import bipartite
+
+NetworkX does not have a custom bipartite graph class but the Graph()
+or DiGraph() classes can be used to represent bipartite graphs. However,
+you have to keep track of which set each node belongs to, and make
+sure that there is no edge between nodes of the same set. The convention used
+in NetworkX is to use a node attribute named `bipartite` with values 0 or 1 to
+identify the sets each node belongs to. This convention is not enforced in
+the source code of bipartite functions, it's only a recommendation.
+
+For example:
+
+>>> B = nx.Graph()
+>>> # Add nodes with the node attribute "bipartite"
+>>> B.add_nodes_from([1, 2, 3, 4], bipartite=0)
+>>> B.add_nodes_from(["a", "b", "c"], bipartite=1)
+>>> # Add edges only between nodes of opposite node sets
+>>> B.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
+
+Many algorithms of the bipartite module of NetworkX require, as an argument, a
+container with all the nodes that belong to one set, in addition to the bipartite
+graph `B`. The functions in the bipartite package do not check that the node set
+is actually correct nor that the input graph is actually bipartite.
+If `B` is connected, you can find the two node sets using a two-coloring
+algorithm:
+
+>>> nx.is_connected(B)
+True
+>>> bottom_nodes, top_nodes = bipartite.sets(B)
+
+However, if the input graph is not connected, there are more than one possible
+colorations. This is the reason why we require the user to pass a container
+with all nodes of one bipartite node set as an argument to most bipartite
+functions. In the face of ambiguity, we refuse the temptation to guess and
+raise an :exc:`AmbiguousSolution <networkx.AmbiguousSolution>`
+Exception if the input graph for
+:func:`bipartite.sets <networkx.algorithms.bipartite.basic.sets>`
+is disconnected.
+
+Using the `bipartite` node attribute, you can easily get the two node sets:
+
+>>> top_nodes = {n for n, d in B.nodes(data=True) if d["bipartite"] == 0}
+>>> bottom_nodes = set(B) - top_nodes
+
+So you can easily use the bipartite algorithms that require, as an argument, a
+container with all nodes that belong to one node set:
+
+>>> print(round(bipartite.density(B, bottom_nodes), 2))
+0.5
+>>> G = bipartite.projected_graph(B, top_nodes)
+
+All bipartite graph generators in NetworkX build bipartite graphs with the
+`bipartite` node attribute. Thus, you can use the same approach:
+
+>>> RB = bipartite.random_graph(5, 7, 0.2)
+>>> RB_top = {n for n, d in RB.nodes(data=True) if d["bipartite"] == 0}
+>>> RB_bottom = set(RB) - RB_top
+>>> list(RB_top)
+[0, 1, 2, 3, 4]
+>>> list(RB_bottom)
+[5, 6, 7, 8, 9, 10, 11]
+
+For other bipartite graph generators see
+:mod:`Generators <networkx.algorithms.bipartite.generators>`.
+
+"""
+
+from networkx.algorithms.bipartite.basic import *
+from networkx.algorithms.bipartite.centrality import *
+from networkx.algorithms.bipartite.cluster import *
+from networkx.algorithms.bipartite.covering import *
+from networkx.algorithms.bipartite.edgelist import *
+from networkx.algorithms.bipartite.matching import *
+from networkx.algorithms.bipartite.matrix import *
+from networkx.algorithms.bipartite.projection import *
+from networkx.algorithms.bipartite.redundancy import *
+from networkx.algorithms.bipartite.spectral import *
+from networkx.algorithms.bipartite.generators import *
+from networkx.algorithms.bipartite.extendability import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/basic.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/basic.py
new file mode 100644
index 00000000..8d9a4d5b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/basic.py
@@ -0,0 +1,322 @@
+"""
+==========================
+Bipartite Graph Algorithms
+==========================
+"""
+
+import networkx as nx
+from networkx.algorithms.components import connected_components
+from networkx.exception import AmbiguousSolution
+
+__all__ = [
+    "is_bipartite",
+    "is_bipartite_node_set",
+    "color",
+    "sets",
+    "density",
+    "degrees",
+]
+
+
+@nx._dispatchable
+def color(G):
+    """Returns a two-coloring of the graph.
+
+    Raises an exception if the graph is not bipartite.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    Returns
+    -------
+    color : dictionary
+        A dictionary keyed by node with a 1 or 0 as data for each node color.
+
+    Raises
+    ------
+    NetworkXError
+        If the graph is not two-colorable.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import bipartite
+    >>> G = nx.path_graph(4)
+    >>> c = bipartite.color(G)
+    >>> print(c)
+    {0: 1, 1: 0, 2: 1, 3: 0}
+
+    You can use this to set a node attribute indicating the bipartite set:
+
+    >>> nx.set_node_attributes(G, c, "bipartite")
+    >>> print(G.nodes[0]["bipartite"])
+    1
+    >>> print(G.nodes[1]["bipartite"])
+    0
+    """
+    if G.is_directed():
+        import itertools
+
+        def neighbors(v):
+            return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)])
+
+    else:
+        neighbors = G.neighbors
+
+    color = {}
+    for n in G:  # handle disconnected graphs
+        if n in color or len(G[n]) == 0:  # skip isolates
+            continue
+        queue = [n]
+        color[n] = 1  # nodes seen with color (1 or 0)
+        while queue:
+            v = queue.pop()
+            c = 1 - color[v]  # opposite color of node v
+            for w in neighbors(v):
+                if w in color:
+                    if color[w] == color[v]:
+                        raise nx.NetworkXError("Graph is not bipartite.")
+                else:
+                    color[w] = c
+                    queue.append(w)
+    # color isolates with 0
+    color.update(dict.fromkeys(nx.isolates(G), 0))
+    return color
+
+
+@nx._dispatchable
+def is_bipartite(G):
+    """Returns True if graph G is bipartite, False if not.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    Examples
+    --------
+    >>> from networkx.algorithms import bipartite
+    >>> G = nx.path_graph(4)
+    >>> print(bipartite.is_bipartite(G))
+    True
+
+    See Also
+    --------
+    color, is_bipartite_node_set
+    """
+    try:
+        color(G)
+        return True
+    except nx.NetworkXError:
+        return False
+
+
+@nx._dispatchable
+def is_bipartite_node_set(G, nodes):
+    """Returns True if nodes and G/nodes are a bipartition of G.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    nodes: list or container
+      Check if nodes are a one of a bipartite set.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import bipartite
+    >>> G = nx.path_graph(4)
+    >>> X = set([1, 3])
+    >>> bipartite.is_bipartite_node_set(G, X)
+    True
+
+    Notes
+    -----
+    An exception is raised if the input nodes are not distinct, because in this
+    case some bipartite algorithms will yield incorrect results.
+    For connected graphs the bipartite sets are unique.  This function handles
+    disconnected graphs.
+    """
+    S = set(nodes)
+
+    if len(S) < len(nodes):
+        # this should maybe just return False?
+        raise AmbiguousSolution(
+            "The input node set contains duplicates.\n"
+            "This may lead to incorrect results when using it in bipartite algorithms.\n"
+            "Consider using set(nodes) as the input"
+        )
+
+    for CC in (G.subgraph(c).copy() for c in connected_components(G)):
+        X, Y = sets(CC)
+        if not (
+            (X.issubset(S) and Y.isdisjoint(S)) or (Y.issubset(S) and X.isdisjoint(S))
+        ):
+            return False
+    return True
+
+
+@nx._dispatchable
+def sets(G, top_nodes=None):
+    """Returns bipartite node sets of graph G.
+
+    Raises an exception if the graph is not bipartite or if the input
+    graph is disconnected and thus more than one valid solution exists.
+    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
+    for further details on how bipartite graphs are handled in NetworkX.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    top_nodes : container, optional
+      Container with all nodes in one bipartite node set. If not supplied
+      it will be computed. But if more than one solution exists an exception
+      will be raised.
+
+    Returns
+    -------
+    X : set
+      Nodes from one side of the bipartite graph.
+    Y : set
+      Nodes from the other side.
+
+    Raises
+    ------
+    AmbiguousSolution
+      Raised if the input bipartite graph is disconnected and no container
+      with all nodes in one bipartite set is provided. When determining
+      the nodes in each bipartite set more than one valid solution is
+      possible if the input graph is disconnected.
+    NetworkXError
+      Raised if the input graph is not bipartite.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import bipartite
+    >>> G = nx.path_graph(4)
+    >>> X, Y = bipartite.sets(G)
+    >>> list(X)
+    [0, 2]
+    >>> list(Y)
+    [1, 3]
+
+    See Also
+    --------
+    color
+
+    """
+    if G.is_directed():
+        is_connected = nx.is_weakly_connected
+    else:
+        is_connected = nx.is_connected
+    if top_nodes is not None:
+        X = set(top_nodes)
+        Y = set(G) - X
+    else:
+        if not is_connected(G):
+            msg = "Disconnected graph: Ambiguous solution for bipartite sets."
+            raise nx.AmbiguousSolution(msg)
+        c = color(G)
+        X = {n for n, is_top in c.items() if is_top}
+        Y = {n for n, is_top in c.items() if not is_top}
+    return (X, Y)
+
+
+@nx._dispatchable(graphs="B")
+def density(B, nodes):
+    """Returns density of bipartite graph B.
+
+    Parameters
+    ----------
+    B : NetworkX graph
+
+    nodes: list or container
+      Nodes in one node set of the bipartite graph.
+
+    Returns
+    -------
+    d : float
+       The bipartite density
+
+    Examples
+    --------
+    >>> from networkx.algorithms import bipartite
+    >>> G = nx.complete_bipartite_graph(3, 2)
+    >>> X = set([0, 1, 2])
+    >>> bipartite.density(G, X)
+    1.0
+    >>> Y = set([3, 4])
+    >>> bipartite.density(G, Y)
+    1.0
+
+    Notes
+    -----
+    The container of nodes passed as argument must contain all nodes
+    in one of the two bipartite node sets to avoid ambiguity in the
+    case of disconnected graphs.
+    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
+    for further details on how bipartite graphs are handled in NetworkX.
+
+    See Also
+    --------
+    color
+    """
+    n = len(B)
+    m = nx.number_of_edges(B)
+    nb = len(nodes)
+    nt = n - nb
+    if m == 0:  # includes cases n==0 and n==1
+        d = 0.0
+    else:
+        if B.is_directed():
+            d = m / (2 * nb * nt)
+        else:
+            d = m / (nb * nt)
+    return d
+
+
+@nx._dispatchable(graphs="B", edge_attrs="weight")
+def degrees(B, nodes, weight=None):
+    """Returns the degrees of the two node sets in the bipartite graph B.
+
+    Parameters
+    ----------
+    B : NetworkX graph
+
+    nodes: list or container
+      Nodes in one node set of the bipartite graph.
+
+    weight : string or None, optional (default=None)
+       The edge attribute that holds the numerical value used as a weight.
+       If None, then each edge has weight 1.
+       The degree is the sum of the edge weights adjacent to the node.
+
+    Returns
+    -------
+    (degX,degY) : tuple of dictionaries
+       The degrees of the two bipartite sets as dictionaries keyed by node.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import bipartite
+    >>> G = nx.complete_bipartite_graph(3, 2)
+    >>> Y = set([3, 4])
+    >>> degX, degY = bipartite.degrees(G, Y)
+    >>> dict(degX)
+    {0: 2, 1: 2, 2: 2}
+
+    Notes
+    -----
+    The container of nodes passed as argument must contain all nodes
+    in one of the two bipartite node sets to avoid ambiguity in the
+    case of disconnected graphs.
+    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
+    for further details on how bipartite graphs are handled in NetworkX.
+
+    See Also
+    --------
+    color, density
+    """
+    bottom = set(nodes)
+    top = set(B) - bottom
+    return (B.degree(top, weight), B.degree(bottom, weight))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/centrality.py
new file mode 100644
index 00000000..42d7270e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/centrality.py
@@ -0,0 +1,290 @@
+import networkx as nx
+
+__all__ = ["degree_centrality", "betweenness_centrality", "closeness_centrality"]
+
+
@nx._dispatchable(name="bipartite_degree_centrality")
def degree_centrality(G, nodes):
    r"""Compute the degree centrality for nodes in a bipartite network.

    The degree centrality for a node `v` is the fraction of nodes
    connected to it.

    Parameters
    ----------
    G : graph
       A bipartite network

    nodes : list or container
      Container with all nodes in one bipartite node set.

    Returns
    -------
    centrality : dictionary
       Dictionary keyed by node with bipartite degree centrality as the value.

    Examples
    --------
    >>> G = nx.wheel_graph(5)
    >>> top_nodes = {0, 1, 2}
    >>> nx.bipartite.degree_centrality(G, nodes=top_nodes)
    {0: 2.0, 1: 1.5, 2: 1.5, 3: 1.0, 4: 1.0}

    See Also
    --------
    betweenness_centrality
    closeness_centrality
    :func:`~networkx.algorithms.bipartite.basic.sets`
    :func:`~networkx.algorithms.bipartite.basic.is_bipartite`

    Notes
    -----
    The nodes input parameter must contain all nodes in one bipartite node set,
    but the dictionary returned contains all nodes from both bipartite node
    sets. See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    For unipartite networks, the degree centrality values are
    normalized by dividing by the maximum possible degree (which is
    `n-1` where `n` is the number of nodes in G).

    In the bipartite case, the maximum possible degree of a node in a
    bipartite node set is the number of nodes in the opposite node set
    [1]_.  The degree centrality for a node `v` in the bipartite
    sets `U` with `n` nodes and `V` with `m` nodes is

    .. math::

        d_{v} = \frac{deg(v)}{m}, \mbox{for} v \in U ,

        d_{v} = \frac{deg(v)}{n}, \mbox{for} v \in V ,


    where `deg(v)` is the degree of node `v`.

    References
    ----------
    .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
        Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
        of Social Network Analysis. Sage Publications.
        https://dx.doi.org/10.4135/9781446294413.n28
    """
    top = set(nodes)
    bottom = set(G) - top  # the opposite set is everything not in `nodes`
    # Max possible degree of a top node is the size of the bottom set;
    # NOTE(review): raises ZeroDivisionError if either set is empty — confirm
    # callers guarantee a non-degenerate bipartition.
    s = 1.0 / len(bottom)
    centrality = {n: d * s for n, d in G.degree(top)}
    s = 1.0 / len(top)  # and vice versa for bottom nodes
    centrality.update({n: d * s for n, d in G.degree(bottom)})
    return centrality
+
+
@nx._dispatchable(name="bipartite_betweenness_centrality")
def betweenness_centrality(G, nodes):
    r"""Compute betweenness centrality for nodes in a bipartite network.

    Betweenness centrality of a node `v` is the sum of the
    fraction of all-pairs shortest paths that pass through `v`.

    Values of betweenness are normalized by the maximum possible
    value which for bipartite graphs is limited by the relative size
    of the two node sets [1]_.

    Let `n` be the number of nodes in the node set `U` and
    `m` be the number of nodes in the node set `V`, then
    nodes in `U` are normalized by dividing by

    .. math::

       \frac{1}{2} [m^2 (s + 1)^2 + m (s + 1)(2t - s - 1) - t (2s - t + 3)] ,

    where

    .. math::

        s = (n - 1) \div m , t = (n - 1) \mod m ,

    and nodes in `V` are normalized by dividing by

    .. math::

        \frac{1}{2} [n^2 (p + 1)^2 + n (p + 1)(2r - p - 1) - r (2p - r + 3)] ,

    where,

    .. math::

        p = (m - 1) \div n , r = (m - 1) \mod n .

    Parameters
    ----------
    G : graph
        A bipartite graph

    nodes : list or container
        Container with all nodes in one bipartite node set.

    Returns
    -------
    betweenness : dictionary
        Dictionary keyed by node with bipartite betweenness centrality
        as the value.

    Examples
    --------
    >>> G = nx.cycle_graph(4)
    >>> top_nodes = {1, 2}
    >>> nx.bipartite.betweenness_centrality(G, nodes=top_nodes)
    {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}

    See Also
    --------
    degree_centrality
    closeness_centrality
    :func:`~networkx.algorithms.bipartite.basic.sets`
    :func:`~networkx.algorithms.bipartite.basic.is_bipartite`

    Notes
    -----
    The nodes input parameter must contain all nodes in one bipartite node set,
    but the dictionary returned contains all nodes from both node sets.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.


    References
    ----------
    .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
        Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
        of Social Network Analysis. Sage Publications.
        https://dx.doi.org/10.4135/9781446294413.n28
    """
    top = set(nodes)
    bottom = set(G) - top
    n = len(top)
    m = len(bottom)
    # s, t are the quotient and remainder of (n - 1) / m; bet_max_top is the
    # maximum possible (unnormalized) betweenness of a top-set node — the
    # first normalization formula from the docstring.
    s, t = divmod(n - 1, m)
    bet_max_top = (
        ((m**2) * ((s + 1) ** 2))
        + (m * (s + 1) * (2 * t - s - 1))
        - (t * ((2 * s) - t + 3))
    ) / 2.0
    # Same computation with the roles of the two sets swapped.
    p, r = divmod(m - 1, n)
    bet_max_bot = (
        ((n**2) * ((p + 1) ** 2))
        + (n * (p + 1) * (2 * r - p - 1))
        - (r * ((2 * p) - r + 3))
    ) / 2.0
    # Raw betweenness for every node, then rescale each bipartite set by
    # its own theoretical maximum.
    betweenness = nx.betweenness_centrality(G, normalized=False, weight=None)
    for node in top:
        betweenness[node] /= bet_max_top
    for node in bottom:
        betweenness[node] /= bet_max_bot
    return betweenness
+
+
@nx._dispatchable(name="bipartite_closeness_centrality")
def closeness_centrality(G, nodes, normalized=True):
    r"""Compute the closeness centrality for nodes in a bipartite network.

    The closeness of a node is the distance to all other nodes in the
    graph, or, when the graph is not connected, to all other nodes in the
    connected component containing that node.

    Parameters
    ----------
    G : graph
        A bipartite network

    nodes : list or container
        Container with all nodes in one bipartite node set.

    normalized : bool, optional
        If True (default) normalize by connected component size.

    Returns
    -------
    closeness : dictionary
        Dictionary keyed by node with bipartite closeness centrality
        as the value.

    Examples
    --------
    >>> G = nx.wheel_graph(5)
    >>> top_nodes = {0, 1, 2}
    >>> nx.bipartite.closeness_centrality(G, nodes=top_nodes)
    {0: 1.5, 1: 1.2, 2: 1.2, 3: 1.0, 4: 1.0}

    See Also
    --------
    betweenness_centrality
    degree_centrality
    :func:`~networkx.algorithms.bipartite.basic.sets`
    :func:`~networkx.algorithms.bipartite.basic.is_bipartite`

    Notes
    -----
    The nodes input parameter must contain all nodes in one bipartite node set,
    but the dictionary returned contains all nodes from both node sets.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    Closeness centrality is normalized by the minimum distance possible.
    In the bipartite case the minimum distance for a node in one bipartite
    node set is 1 from all nodes in the other node set and 2 from all
    other nodes in its own set [1]_. Thus the closeness centrality
    for node `v` in the two bipartite sets `U` with `n` nodes and
    `V` with `m` nodes is

    .. math::

        c_{v} = \frac{m + 2(n - 1)}{d}, \mbox{for} v \in U,

        c_{v} = \frac{n + 2(m - 1)}{d}, \mbox{for} v \in V,

    where `d` is the sum of the distances from `v` to all other nodes.

    Higher values of closeness indicate higher centrality.

    As in the unipartite case, setting normalized=True causes the
    values to be further normalized by (number of reachable nodes - 1)
    divided by (size of G - 1); if the graph is not completely connected
    each connected part is handled separately.

    References
    ----------
    .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
        Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
        of Social Network Analysis. Sage Publications.
        https://dx.doi.org/10.4135/9781446294413.n28
    """
    top = set(nodes)
    bottom = set(G) - top
    n, m = len(top), len(bottom)
    order = len(G)
    closeness = {}
    # Both bipartite sets are handled by the same loop; only the ideal
    # (minimum possible) distance sum differs between the two sets.
    for node_set, ideal in ((top, m + 2 * (n - 1)), (bottom, n + 2 * (m - 1))):
        for v in node_set:
            dist = nx.single_source_shortest_path_length(G, v)
            total = sum(dist.values())
            if total > 0.0 and order > 1:
                value = ideal / total
                if normalized:
                    # rescale by the fraction of the graph v can reach
                    value *= (len(dist) - 1) / (order - 1)
                closeness[v] = value
            else:
                # isolated node (or trivial graph): closeness is zero
                closeness[v] = 0.0
    return closeness
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/cluster.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/cluster.py
new file mode 100644
index 00000000..5b66b280
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/cluster.py
@@ -0,0 +1,278 @@
+"""Functions for computing clustering of pairs"""
+
+import itertools
+
+import networkx as nx
+
+__all__ = [
+    "clustering",
+    "average_clustering",
+    "latapy_clustering",
+    "robins_alexander_clustering",
+]
+
+
def cc_dot(nu, nv):
    # Jaccard coefficient of the two neighborhood sets.
    shared = nu & nv
    return len(shared) / len(nu | nv)
+
+
def cc_max(nu, nv):
    # Overlap normalized by the larger of the two neighborhoods.
    shared = nu & nv
    return len(shared) / max(len(nu), len(nv))
+
+
def cc_min(nu, nv):
    # Overlap normalized by the smaller of the two neighborhoods.
    shared = nu & nv
    return len(shared) / min(len(nu), len(nv))
+
+
+modes = {"dot": cc_dot, "min": cc_min, "max": cc_max}
+
+
@nx._dispatchable
def latapy_clustering(G, nodes=None, mode="dot"):
    r"""Compute a bipartite clustering coefficient for nodes.

    The bipartite clustering coefficient is a measure of local density
    of connections defined as [1]_:

    .. math::

       c_u = \frac{\sum_{v \in N(N(u))} c_{uv} }{|N(N(u))|}

    where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`,
    and `c_{uv}` is the pairwise clustering coefficient between nodes
    `u` and `v`.

    The mode selects the function for `c_{uv}` which can be:

    `dot`:

    .. math::

       c_{uv}=\frac{|N(u)\cap N(v)|}{|N(u) \cup N(v)|}

    `min`:

    .. math::

       c_{uv}=\frac{|N(u)\cap N(v)|}{min(|N(u)|,|N(v)|)}

    `max`:

    .. math::

       c_{uv}=\frac{|N(u)\cap N(v)|}{max(|N(u)|,|N(v)|)}


    Parameters
    ----------
    G : graph
        A bipartite graph

    nodes : list or iterable (optional)
        Compute bipartite clustering for these nodes. The default
        is all nodes in G.

    mode : string
        The pairwise bipartite clustering method to be used in the computation.
        It must be "dot", "max", or "min".

    Returns
    -------
    clustering : dictionary
        A dictionary keyed by node with the clustering coefficient value.


    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)  # path graphs are bipartite
    >>> c = bipartite.clustering(G)
    >>> c[0]
    0.5
    >>> c = bipartite.clustering(G, mode="min")
    >>> c[0]
    1.0

    See Also
    --------
    robins_alexander_clustering
    average_clustering
    networkx.algorithms.cluster.square_clustering

    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
       Basic notions for the analysis of large two-mode networks.
       Social Networks 30(1), 31--48.
    """
    if not nx.algorithms.bipartite.is_bipartite(G):
        raise nx.NetworkXError("Graph is not bipartite")

    try:
        cc_func = modes[mode]
    except KeyError as err:
        raise nx.NetworkXError(
            "Mode for bipartite clustering must be: dot, min or max"
        ) from err

    if nodes is None:
        nodes = G  # iterating a graph yields all of its nodes
    ccs = {}
    for v in nodes:
        cc = 0.0
        # Second-order neighbors of v (nodes in the same bipartite set),
        # excluding v itself.
        nbrs2 = {u for nbr in G[v] for u in G[nbr]} - {v}
        for u in nbrs2:
            cc += cc_func(set(G[u]), set(G[v]))
        if cc > 0.0:  # len(nbrs2)>0
            cc /= len(nbrs2)
        ccs[v] = cc
    return ccs
+
+
+clustering = latapy_clustering
+
+
@nx._dispatchable(name="bipartite_average_clustering")
def average_clustering(G, nodes=None, mode="dot"):
    r"""Compute the average bipartite clustering coefficient.

    A clustering coefficient for the whole graph is the average,

    .. math::

       C = \frac{1}{n}\sum_{v \in G} c_v,

    where `n` is the number of nodes in `G`.

    Similar measures for the two bipartite sets can be defined [1]_

    .. math::

       C_X = \frac{1}{|X|}\sum_{v \in X} c_v,

    where `X` is a bipartite set of `G`.

    Parameters
    ----------
    G : graph
        a bipartite graph

    nodes : list or iterable, optional
        A container of nodes to average over: either the entire graph
        (the default) or one of the two bipartite sets.

    mode : string
        The pairwise bipartite clustering method.
        It must be "dot", "max", or "min"

    Returns
    -------
    clustering : float
        The average bipartite clustering over `nodes` (or over the whole
        graph when `nodes` is not given).

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.star_graph(3)  # star graphs are bipartite
    >>> bipartite.average_clustering(G)
    0.75
    >>> X, Y = bipartite.sets(G)
    >>> bipartite.average_clustering(G, X)
    0.0
    >>> bipartite.average_clustering(G, Y)
    1.0

    See Also
    --------
    clustering

    Notes
    -----
    The container of nodes passed to this function must contain all of the
    nodes in one of the bipartite sets ("top" or "bottom") in order to
    compute the correct average bipartite clustering coefficients.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.


    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
        Basic notions for the analysis of large two-mode networks.
        Social Networks 30(1), 31--48.
    """
    if nodes is None:
        nodes = G  # average over every node of the graph
    per_node = latapy_clustering(G, nodes=nodes, mode=mode)
    total = sum(per_node[v] for v in nodes)
    return total / len(nodes)
+
+
@nx._dispatchable
def robins_alexander_clustering(G):
    r"""Compute the bipartite clustering of G.

    Robins and Alexander [1]_ defined bipartite clustering coefficient as
    four times the number of four cycles `C_4` divided by the number of
    three paths `L_3` in a bipartite graph:

    .. math::

       CC_4 = \frac{4 * C_4}{L_3}

    Parameters
    ----------
    G : graph
        a bipartite graph

    Returns
    -------
    clustering : float
       The Robins and Alexander bipartite clustering for the input graph.
       ``0.0`` when the graph is too small to contain a three-path or
       contains none.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.davis_southern_women_graph()
    >>> print(round(bipartite.robins_alexander_clustering(G), 3))
    0.468

    See Also
    --------
    latapy_clustering
    networkx.algorithms.cluster.square_clustering

    References
    ----------
    .. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking
           directors: Network structure and distance in bipartite graphs.
           Computational & Mathematical Organization Theory 10(1), 69–94.

    """
    # Fewer than 4 nodes or 3 edges cannot contain a three-path; return
    # 0.0 (float) so the return type is consistent with the general case,
    # which previously mixed int 0 and float results.
    if G.order() < 4 or G.size() < 3:
        return 0.0
    L_3 = _threepaths(G)
    if L_3 == 0:  # no three-paths: the coefficient is defined as zero
        return 0.0
    C_4 = _four_cycles(G)
    return (4.0 * C_4) / L_3
+
+
+def _four_cycles(G):
+    cycles = 0
+    for v in G:
+        for u, w in itertools.combinations(G[v], 2):
+            cycles += len((set(G[u]) & set(G[w])) - {v})
+    return cycles / 4
+
+
+def _threepaths(G):
+    paths = 0
+    for v in G:
+        for u in G[v]:
+            for w in set(G[u]) - {v}:
+                paths += len(set(G[w]) - {v, u})
+    # Divide by two because we count each three path twice
+    # one for each possible starting point
+    return paths / 2
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/covering.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/covering.py
new file mode 100644
index 00000000..f937903e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/covering.py
@@ -0,0 +1,57 @@
+"""Functions related to graph covers."""
+
+import networkx as nx
+from networkx.algorithms.bipartite.matching import hopcroft_karp_matching
+from networkx.algorithms.covering import min_edge_cover as _min_edge_cover
+from networkx.utils import not_implemented_for
+
+__all__ = ["min_edge_cover"]
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(name="bipartite_min_edge_cover")
def min_edge_cover(G, matching_algorithm=None):
    """Return a set of edges forming a minimum edge cover of the graph.

    The smallest edge cover can be found in polynomial time: compute a
    maximum matching, then greedily extend it until every node is covered.

    Parameters
    ----------
    G : NetworkX graph
        An undirected bipartite graph.

    matching_algorithm : function
        A function that returns a maximum cardinality matching of a
        bipartite graph; it takes the graph ``G`` and returns a
        dictionary mapping each node to its mate. Defaults to
        :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`;
        :func:`~networkx.algorithms.bipartite.matching.eppstein_matching`
        is another option.

    Returns
    -------
    set
        The edges of a minimum edge cover, given as pairs of nodes. Both
        orientations `(u, v)` and `(v, u)` of each covering edge are
        included.

    Notes
    -----
    An edge cover of a graph is a set of edges such that every node is
    incident to at least one edge of the set; a minimum edge cover is one
    of smallest cardinality.

    The worst-case running time of this function is bounded by that of
    ``matching_algorithm``.
    """
    if G.order() == 0:
        # The empty graph is covered by the empty edge set.
        return set()
    algorithm = (
        hopcroft_karp_matching if matching_algorithm is None else matching_algorithm
    )
    return _min_edge_cover(G, matching_algorithm=algorithm)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/edgelist.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/edgelist.py
new file mode 100644
index 00000000..db6ef9d8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/edgelist.py
@@ -0,0 +1,360 @@
+"""
+********************
+Bipartite Edge Lists
+********************
+Read and write NetworkX graphs as bipartite edge lists.
+
+Format
+------
+You can read or write three formats of edge lists with these functions.
+
+Node pairs with no data::
+
+ 1 2
+
+Python dictionary as data::
+
+ 1 2 {'weight':7, 'color':'green'}
+
+Arbitrary data::
+
+ 1 2 7 green
+
+For each edge (u, v) the node u is assigned to part 0 and the node v to part 1.
+"""
+
+__all__ = ["generate_edgelist", "write_edgelist", "parse_edgelist", "read_edgelist"]
+
+import networkx as nx
+from networkx.utils import not_implemented_for, open_file
+
+
@open_file(1, mode="wb")
def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="utf-8"):
    """Write a bipartite graph as a list of edges.

    Parameters
    ----------
    G : Graph
       A NetworkX bipartite graph
    path : file or string
       File or filename to write. If a file is provided, it must be
       opened in 'wb' mode. Filenames ending in .gz or .bz2 will be compressed.
    comments : string, optional
       The character used to indicate the start of a comment
    delimiter : string, optional
       The string used to separate values.  The default is whitespace.
    data : bool or list, optional
       If False write no edge data.
       If True write a string representation of the edge data dictionary..
       If a list (or other iterable) is provided, write the  keys specified
       in the list.
    encoding: string, optional
       Specify which encoding to use when writing file.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> G.add_nodes_from([0, 2], bipartite=0)
    >>> G.add_nodes_from([1, 3], bipartite=1)
    >>> nx.write_edgelist(G, "test.edgelist")
    >>> fh = open("test.edgelist", "wb")
    >>> nx.write_edgelist(G, fh)
    >>> nx.write_edgelist(G, "test.edgelist.gz")
    >>> nx.write_edgelist(G, "test.edgelist.gz", data=False)

    >>> G = nx.Graph()
    >>> G.add_edge(1, 2, weight=7, color="red")
    >>> nx.write_edgelist(G, "test.edgelist", data=False)
    >>> nx.write_edgelist(G, "test.edgelist", data=["color"])
    >>> nx.write_edgelist(G, "test.edgelist", data=["color", "weight"])

    See Also
    --------
    read_edgelist
    generate_edgelist
    """
    # The @open_file decorator guarantees `path` is a binary-mode handle,
    # so each formatted line must be encoded before writing.
    for line in generate_edgelist(G, delimiter, data):
        line += "\n"
        path.write(line.encode(encoding))
+
+
@not_implemented_for("directed")
def generate_edgelist(G, delimiter=" ", data=True):
    """Generate lines of the bipartite graph G in edge list format.

    Parameters
    ----------
    G : NetworkX graph
       The graph is assumed to have node attribute `bipartite` set to 0,1
       representing the two graph parts

    delimiter : string, optional
       Separator for node labels

    data : bool or list of keys
       If False generate no edge data.  If True use a dictionary
       representation of edge data.  If a list of keys use a list of data
       values corresponding to the keys.

    Yields
    ------
    lines : string
        One line per edge, in adjlist format.

    Raises
    ------
    AttributeError
        If a node is missing the `bipartite` attribute.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> G.add_nodes_from([0, 2], bipartite=0)
    >>> G.add_nodes_from([1, 3], bipartite=1)
    >>> G[1][2]["weight"] = 3
    >>> G[2][3]["capacity"] = 12
    >>> for line in bipartite.generate_edgelist(G, data=False):
    ...     print(line)
    0 1
    2 1
    2 3

    >>> for line in bipartite.generate_edgelist(G):
    ...     print(line)
    0 1 {}
    2 1 {'weight': 3}
    2 3 {'capacity': 12}

    >>> for line in bipartite.generate_edgelist(G, data=["weight"]):
    ...     print(line)
    0 1
    2 1 3
    2 3
    """
    # Each edge is emitted once, anchored at its part-0 endpoint.
    try:
        part0 = [n for n, d in G.nodes.items() if d["bipartite"] == 0]
    except KeyError as err:
        # Narrowed from `except BaseException`: only a missing "bipartite"
        # key is expected here, and KeyboardInterrupt/SystemExit must not
        # be swallowed and re-labelled.
        raise AttributeError("Missing node attribute `bipartite`") from err
    if data is True or data is False:
        for n in part0:
            for edge in G.edges(n, data=data):
                yield delimiter.join(map(str, edge))
    else:
        for n in part0:
            for u, v, d in G.edges(n, data=True):
                edge = [u, v]
                try:
                    edge.extend(d[k] for k in data)
                except KeyError:
                    pass  # missing data for this edge, should warn?
                yield delimiter.join(map(str, edge))
+
+
@nx._dispatchable(name="bipartite_parse_edgelist", graphs=None, returns_graph=True)
def parse_edgelist(
    lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True
):
    """Parse lines of an edge list representation of a bipartite graph.

    Parameters
    ----------
    lines : list or iterator of strings
        Input data in edgelist format
    comments : string, optional
       Marker for comment lines
    delimiter : string, optional
       Separator for node labels
    create_using: NetworkX graph container, optional
       Use given NetworkX graph for holding nodes or edges.
    nodetype : Python type, optional
       Convert nodes to this type.
    data : bool or list of (label,type) tuples
       If False generate no edge data or if True use a dictionary
       representation of edge data or a list tuples specifying dictionary
       key names and types for edge data.

    Returns
    -------
    G: NetworkX Graph
        The bipartite graph corresponding to lines

    Raises
    ------
    TypeError
        If a node or an edge datum cannot be converted to the requested type.
    IndexError
        If a line has a different number of data fields than `data` has keys.

    Examples
    --------
    Edgelist with no data:

    >>> from networkx.algorithms import bipartite
    >>> lines = ["1 2", "2 3", "3 4"]
    >>> G = bipartite.parse_edgelist(lines, nodetype=int)
    >>> sorted(G.nodes())
    [1, 2, 3, 4]
    >>> sorted(G.nodes(data=True))
    [(1, {'bipartite': 0}), (2, {'bipartite': 0}), (3, {'bipartite': 0}), (4, {'bipartite': 1})]
    >>> sorted(G.edges())
    [(1, 2), (2, 3), (3, 4)]

    Edgelist with data in Python dictionary representation:

    >>> lines = ["1 2 {'weight':3}", "2 3 {'weight':27}", "3 4 {'weight':3.0}"]
    >>> G = bipartite.parse_edgelist(lines, nodetype=int)
    >>> sorted(G.nodes())
    [1, 2, 3, 4]
    >>> sorted(G.edges(data=True))
    [(1, 2, {'weight': 3}), (2, 3, {'weight': 27}), (3, 4, {'weight': 3.0})]

    Edgelist with data in a list:

    >>> lines = ["1 2 3", "2 3 27", "3 4 3.0"]
    >>> G = bipartite.parse_edgelist(lines, nodetype=int, data=(("weight", float),))
    >>> sorted(G.nodes())
    [1, 2, 3, 4]
    >>> sorted(G.edges(data=True))
    [(1, 2, {'weight': 3.0}), (2, 3, {'weight': 27.0}), (3, 4, {'weight': 3.0})]

    See Also
    --------
    read_edgelist
    """
    from ast import literal_eval

    G = nx.empty_graph(0, create_using)
    for line in lines:
        # Strip trailing comments, then skip blank lines.
        p = line.find(comments)
        if p >= 0:
            line = line[:p]
        if not line:
            continue
        # split line, should have 2 or more fields
        s = line.rstrip("\n").split(delimiter)
        if len(s) < 2:
            continue
        u = s.pop(0)
        v = s.pop(0)
        d = s
        if nodetype is not None:
            try:
                u = nodetype(u)
                v = nodetype(v)
            except Exception as err:
                # Narrowed from BaseException so KeyboardInterrupt and
                # SystemExit propagate instead of being re-raised as TypeError.
                raise TypeError(
                    f"Failed to convert nodes {u},{v} to type {nodetype}."
                ) from err

        if len(d) == 0 or data is False:
            # no data or data type specified
            edgedata = {}
        elif data is True:
            # no edge types specified
            try:  # try to evaluate as dictionary
                edgedata = dict(literal_eval(" ".join(d)))
            except Exception as err:
                raise TypeError(
                    f"Failed to convert edge data ({d}) to dictionary."
                ) from err
        else:
            # convert edge data to dictionary with specified keys and type
            if len(d) != len(data):
                raise IndexError(
                    f"Edge data {d} and data_keys {data} are not the same length"
                )
            edgedata = {}
            for (edge_key, edge_type), edge_value in zip(data, d):
                try:
                    edge_value = edge_type(edge_value)
                except Exception as err:
                    raise TypeError(
                        f"Failed to convert {edge_key} data "
                        f"{edge_value} to type {edge_type}."
                    ) from err
                edgedata.update({edge_key: edge_value})
        # First field goes in part 0, second in part 1.
        G.add_node(u, bipartite=0)
        G.add_node(v, bipartite=1)
        G.add_edge(u, v, **edgedata)
    return G
+
+
@open_file(0, mode="rb")
@nx._dispatchable(name="bipartite_read_edgelist", graphs=None, returns_graph=True)
def read_edgelist(
    path,
    comments="#",
    delimiter=None,
    create_using=None,
    nodetype=None,
    data=True,
    edgetype=None,
    encoding="utf-8",
):
    """Read a bipartite graph from a list of edges.

    Parameters
    ----------
    path : file or string
       File or filename to read. If a file is provided, it must be
       opened in 'rb' mode.
       Filenames ending in .gz or .bz2 will be uncompressed.
    comments : string, optional
       The character used to indicate the start of a comment.
    delimiter : string, optional
       The string used to separate values.  The default is whitespace.
    create_using : Graph container, optional,
       Use specified container to build graph.  The default is networkx.Graph,
       an undirected graph.
    nodetype : int, float, str, Python type, optional
       Convert node data from strings to specified type
    data : bool or list of (label,type) tuples
       Tuples specifying dictionary key names and types for edge data
    edgetype : int, float, str, Python type, optional OBSOLETE
       Convert edge data from strings to specified type and use as 'weight'
    encoding: string, optional
       Specify which encoding to use when reading file.

    Returns
    -------
    G : graph
       A networkx Graph or other type specified with create_using

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> G.add_nodes_from([0, 2], bipartite=0)
    >>> G.add_nodes_from([1, 3], bipartite=1)
    >>> bipartite.write_edgelist(G, "test.edgelist")
    >>> G = bipartite.read_edgelist("test.edgelist")

    >>> fh = open("test.edgelist", "rb")
    >>> G = bipartite.read_edgelist(fh)
    >>> fh.close()

    >>> G = bipartite.read_edgelist("test.edgelist", nodetype=int)

    Edgelist with data in a list:

    >>> textline = "1 2 3"
    >>> fh = open("test.edgelist", "w")
    >>> d = fh.write(textline)
    >>> fh.close()
    >>> G = bipartite.read_edgelist(
    ...     "test.edgelist", nodetype=int, data=(("weight", float),)
    ... )
    >>> list(G)
    [1, 2]
    >>> list(G.edges(data=True))
    [(1, 2, {'weight': 3.0})]

    See parse_edgelist() for more examples of formatting.

    See Also
    --------
    parse_edgelist

    Notes
    -----
    Since nodes must be hashable, the function nodetype must return hashable
    types (e.g. int, float, str, frozenset - or tuples of those, etc.)
    """
    # The @open_file decorator supplies a binary handle; decode each raw
    # line lazily and delegate all parsing to parse_edgelist.
    # NOTE(review): `edgetype` is accepted but unused here — presumably kept
    # only for backward signature compatibility; confirm before removing.
    lines = (line.decode(encoding) for line in path)
    return parse_edgelist(
        lines,
        comments=comments,
        delimiter=delimiter,
        create_using=create_using,
        nodetype=nodetype,
        data=data,
    )
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/extendability.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/extendability.py
new file mode 100644
index 00000000..61d8d067
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/extendability.py
@@ -0,0 +1,105 @@
+"""Provides a function for computing the extendability of a graph which is
+undirected, simple, connected and bipartite and contains at least one perfect matching."""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["maximal_extendability"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def maximal_extendability(G):
+    """Computes the extendability of a graph.
+
+    The extendability of a graph is defined as the maximum $k$ for which `G`
+    is $k$-extendable. Graph `G` is $k$-extendable if and only if `G` has a
+    perfect matching and every set of $k$ independent edges can be extended
+    to a perfect matching in `G`.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        A simple, connected bipartite graph without self-loops
+
+    Returns
+    -------
+    extendability : int
+
+    Raises
+    ------
+    NetworkXError
+       If the graph `G` is disconnected.
+       If the graph `G` is not bipartite.
+       If the graph `G` does not contain a perfect matching.
+       If the residual graph of `G` is not strongly connected.
+
+    Notes
+    -----
+    Definition:
+    Let `G` be a simple, connected, undirected and bipartite graph with a perfect
+    matching M and bipartition (U,V). The residual graph of `G`, denoted by $G_M$,
+    is the graph obtained from G by directing the edges of M from V to U and the
+    edges that do not belong to M from U to V.
+
+    Lemma [1]_ :
+    Let M be a perfect matching of `G`. `G` is $k$-extendable if and only if its residual
+    graph $G_M$ is strongly connected and there are $k$ vertex-disjoint directed
+    paths between every vertex of U and every vertex of V.
+
+    Assuming that input graph `G` is undirected, simple, connected, bipartite and contains
+    a perfect matching M, this function constructs the residual graph $G_M$ of G and
+    returns the minimum value among the maximum vertex-disjoint directed paths between
+    every vertex of U and every vertex of V in $G_M$. By combining the definitions
+    and the lemma, this value represents the extendability of the graph `G`.
+
+    Time complexity O($n^3$ $m^2$) where $n$ is the number of vertices
+    and $m$ is the number of edges.
+
+    References
+    ----------
+    .. [1] "A polynomial algorithm for the extendability problem in bipartite graphs",
+          J. Lakhal, L. Litzler, Information Processing Letters, 1998.
+    .. [2] "On n-extendible graphs", M. D. Plummer, Discrete Mathematics, 31:201–210, 1980
+          https://doi.org/10.1016/0012-365X(80)90037-0
+
+    """
+    if not nx.is_connected(G):
+        raise nx.NetworkXError("Graph G is not connected")
+
+    if not nx.bipartite.is_bipartite(G):
+        raise nx.NetworkXError("Graph G is not bipartite")
+
+    U, V = nx.bipartite.sets(G)
+
+    maximum_matching = nx.bipartite.hopcroft_karp_matching(G)
+
+    if not nx.is_perfect_matching(G, maximum_matching):
+        raise nx.NetworkXError("Graph G does not contain a perfect matching")
+
+    # list of edges in perfect matching, directed from V to U
+    pm = [(node, maximum_matching[node]) for node in V & maximum_matching.keys()]
+
+    # Direct all the edges of G, from V to U if in matching, else from U to V
+    directed_edges = [
+        (x, y) if (x in V and (x, y) in pm) or (x in U and (y, x) not in pm) else (y, x)
+        for x, y in G.edges
+    ]
+
+    # Construct the residual graph of G
+    residual_G = nx.DiGraph()
+    residual_G.add_nodes_from(G)
+    residual_G.add_edges_from(directed_edges)
+
+    if not nx.is_strongly_connected(residual_G):
+        raise nx.NetworkXError("The residual graph of G is not strongly connected")
+
+    # For node-pairs between V & U, keep min of max number of node-disjoint paths
+    # Variable $k$ stands for the extendability of graph G
+    k = float("inf")
+    for u in U:
+        for v in V:
+            num_paths = sum(1 for _ in nx.node_disjoint_paths(residual_G, u, v))
+            k = k if k < num_paths else num_paths
+    return k
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/generators.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/generators.py
new file mode 100644
index 00000000..e8428f6b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/generators.py
@@ -0,0 +1,604 @@
+"""
+Generators and functions for bipartite graphs.
+"""
+
+import math
+import numbers
+from functools import reduce
+
+import networkx as nx
+from networkx.utils import nodes_or_number, py_random_state
+
+__all__ = [
+    "configuration_model",
+    "havel_hakimi_graph",
+    "reverse_havel_hakimi_graph",
+    "alternating_havel_hakimi_graph",
+    "preferential_attachment_graph",
+    "random_graph",
+    "gnmk_random_graph",
+    "complete_bipartite_graph",
+]
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+@nodes_or_number([0, 1])
+def complete_bipartite_graph(n1, n2, create_using=None):
+    """Returns the complete bipartite graph `K_{n_1,n_2}`.
+
+    The graph is composed of two partitions with nodes 0 to (n1 - 1)
+    in the first and nodes n1 to (n1 + n2 - 1) in the second.
+    Each node in the first is connected to each node in the second.
+
+    Parameters
+    ----------
+    n1, n2 : integer or iterable container of nodes
+        If integers, nodes are from `range(n1)` and `range(n1, n1 + n2)`.
+        If a container, the elements are the nodes.
+    create_using : NetworkX graph instance, (default: nx.Graph)
+       Return graph of this type.
+
+    Notes
+    -----
+    Nodes are the integers 0 to `n1 + n2 - 1` unless either n1 or n2 are
+    containers of nodes. If only one of n1 or n2 are integers, that
+    integer is replaced by `range` of that integer.
+
+    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
+    to indicate which bipartite set the node belongs to.
+
+    This function is not imported in the main namespace.
+    To use it use nx.bipartite.complete_bipartite_graph
+    """
+    G = nx.empty_graph(0, create_using)
+    if G.is_directed():
+        raise nx.NetworkXError("Directed Graph not supported")
+
+    n1, top = n1
+    n2, bottom = n2
+    if isinstance(n1, numbers.Integral) and isinstance(n2, numbers.Integral):
+        bottom = [n1 + i for i in bottom]
+    G.add_nodes_from(top, bipartite=0)
+    G.add_nodes_from(bottom, bipartite=1)
+    if len(G) != len(top) + len(bottom):
+        raise nx.NetworkXError("Inputs n1 and n2 must contain distinct nodes")
+    G.add_edges_from((u, v) for u in top for v in bottom)
+    G.graph["name"] = f"complete_bipartite_graph({len(top)}, {len(bottom)})"
+    return G
+
+
+@py_random_state(3)
+@nx._dispatchable(name="bipartite_configuration_model", graphs=None, returns_graph=True)
+def configuration_model(aseq, bseq, create_using=None, seed=None):
+    """Returns a random bipartite graph from two given degree sequences.
+
+    Parameters
+    ----------
+    aseq : list
+       Degree sequence for node set A.
+    bseq : list
+       Degree sequence for node set B.
+    create_using : NetworkX graph instance, optional
+       Return graph of this type.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    The graph is composed of two partitions. Set A has nodes 0 to
+    (len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1).
+    Nodes from set A are connected to nodes in set B by choosing
+    randomly from the possible free stubs, one in A and one in B.
+
+    Notes
+    -----
+    The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
+    If no graph type is specified use MultiGraph with parallel edges.
+    If you want a graph with no parallel edges use create_using=Graph()
+    but then the resulting degree sequences might not be exact.
+
+    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
+    to indicate which bipartite set the node belongs to.
+
+    This function is not imported in the main namespace.
+    To use it use nx.bipartite.configuration_model
+    """
+    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
+    if G.is_directed():
+        raise nx.NetworkXError("Directed Graph not supported")
+
+    # length and sum of each sequence
+    lena = len(aseq)
+    lenb = len(bseq)
+    suma = sum(aseq)
+    sumb = sum(bseq)
+
+    if not suma == sumb:
+        raise nx.NetworkXError(
+            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
+        )
+
+    G = _add_nodes_with_bipartite_label(G, lena, lenb)
+
+    if len(aseq) == 0 or max(aseq) == 0:
+        return G  # done if no edges
+
+    # build lists of degree-repeated vertex numbers
+    stubs = [[v] * aseq[v] for v in range(lena)]
+    astubs = [x for subseq in stubs for x in subseq]
+
+    stubs = [[v] * bseq[v - lena] for v in range(lena, lena + lenb)]
+    bstubs = [x for subseq in stubs for x in subseq]
+
+    # shuffle lists
+    seed.shuffle(astubs)
+    seed.shuffle(bstubs)
+
+    G.add_edges_from([astubs[i], bstubs[i]] for i in range(suma))
+
+    G.name = "bipartite_configuration_model"
+    return G
+
+
+@nx._dispatchable(name="bipartite_havel_hakimi_graph", graphs=None, returns_graph=True)
+def havel_hakimi_graph(aseq, bseq, create_using=None):
+    """Returns a bipartite graph from two given degree sequences using a
+    Havel-Hakimi style construction.
+
+    The graph is composed of two partitions. Set A has nodes 0 to
+    (len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1).
+    Nodes from the set A are connected to nodes in the set B by
+    connecting the highest degree nodes in set A to the highest degree
+    nodes in set B until all stubs are connected.
+
+    Parameters
+    ----------
+    aseq : list
+       Degree sequence for node set A.
+    bseq : list
+       Degree sequence for node set B.
+    create_using : NetworkX graph instance, optional
+       Return graph of this type.
+
+    Notes
+    -----
+    The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
+    If no graph type is specified use MultiGraph with parallel edges.
+    If you want a graph with no parallel edges use create_using=Graph()
+    but then the resulting degree sequences might not be exact.
+
+    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
+    to indicate which bipartite set the node belongs to.
+
+    This function is not imported in the main namespace.
+    To use it use nx.bipartite.havel_hakimi_graph
+    """
+    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
+    if G.is_directed():
+        raise nx.NetworkXError("Directed Graph not supported")
+
+    # length of the each sequence
+    naseq = len(aseq)
+    nbseq = len(bseq)
+
+    suma = sum(aseq)
+    sumb = sum(bseq)
+
+    if not suma == sumb:
+        raise nx.NetworkXError(
+            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
+        )
+
+    G = _add_nodes_with_bipartite_label(G, naseq, nbseq)
+
+    if len(aseq) == 0 or max(aseq) == 0:
+        return G  # done if no edges
+
+    # build list of degree-repeated vertex numbers
+    astubs = [[aseq[v], v] for v in range(naseq)]
+    bstubs = [[bseq[v - naseq], v] for v in range(naseq, naseq + nbseq)]
+    astubs.sort()
+    while astubs:
+        (degree, u) = astubs.pop()  # take of largest degree node in the a set
+        if degree == 0:
+            break  # done, all are zero
+        # connect the source to largest degree nodes in the b set
+        bstubs.sort()
+        for target in bstubs[-degree:]:
+            v = target[1]
+            G.add_edge(u, v)
+            target[0] -= 1  # note this updates bstubs too.
+            if target[0] == 0:
+                bstubs.remove(target)
+
+    G.name = "bipartite_havel_hakimi_graph"
+    return G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def reverse_havel_hakimi_graph(aseq, bseq, create_using=None):
+    """Returns a bipartite graph from two given degree sequences using a
+    Havel-Hakimi style construction.
+
+    The graph is composed of two partitions. Set A has nodes 0 to
+    (len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1).
+    Nodes from set A are connected to nodes in the set B by connecting
+    the highest degree nodes in set A to the lowest degree nodes in
+    set B until all stubs are connected.
+
+    Parameters
+    ----------
+    aseq : list
+       Degree sequence for node set A.
+    bseq : list
+       Degree sequence for node set B.
+    create_using : NetworkX graph instance, optional
+       Return graph of this type.
+
+    Notes
+    -----
+    The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
+    If no graph type is specified use MultiGraph with parallel edges.
+    If you want a graph with no parallel edges use create_using=Graph()
+    but then the resulting degree sequences might not be exact.
+
+    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
+    to indicate which bipartite set the node belongs to.
+
+    This function is not imported in the main namespace.
+    To use it use nx.bipartite.reverse_havel_hakimi_graph
+    """
+    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
+    if G.is_directed():
+        raise nx.NetworkXError("Directed Graph not supported")
+
+    # length of the each sequence
+    lena = len(aseq)
+    lenb = len(bseq)
+    suma = sum(aseq)
+    sumb = sum(bseq)
+
+    if not suma == sumb:
+        raise nx.NetworkXError(
+            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
+        )
+
+    G = _add_nodes_with_bipartite_label(G, lena, lenb)
+
+    if len(aseq) == 0 or max(aseq) == 0:
+        return G  # done if no edges
+
+    # build list of degree-repeated vertex numbers
+    astubs = [[aseq[v], v] for v in range(lena)]
+    bstubs = [[bseq[v - lena], v] for v in range(lena, lena + lenb)]
+    astubs.sort()
+    bstubs.sort()
+    while astubs:
+        (degree, u) = astubs.pop()  # take of largest degree node in the a set
+        if degree == 0:
+            break  # done, all are zero
+        # connect the source to the smallest degree nodes in the b set
+        for target in bstubs[0:degree]:
+            v = target[1]
+            G.add_edge(u, v)
+            target[0] -= 1  # note this updates bstubs too.
+            if target[0] == 0:
+                bstubs.remove(target)
+
+    G.name = "bipartite_reverse_havel_hakimi_graph"
+    return G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def alternating_havel_hakimi_graph(aseq, bseq, create_using=None):
+    """Returns a bipartite graph from two given degree sequences using
+    an alternating Havel-Hakimi style construction.
+
+    The graph is composed of two partitions. Set A has nodes 0 to
+    (len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1).
+    Nodes from the set A are connected to nodes in the set B by
+    connecting the highest degree nodes in set A to alternatively the
+    highest and the lowest degree nodes in set B until all stubs are
+    connected.
+
+    Parameters
+    ----------
+    aseq : list
+       Degree sequence for node set A.
+    bseq : list
+       Degree sequence for node set B.
+    create_using : NetworkX graph instance, optional
+       Return graph of this type.
+
+    Notes
+    -----
+    The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
+    If no graph type is specified use MultiGraph with parallel edges.
+    If you want a graph with no parallel edges use create_using=Graph()
+    but then the resulting degree sequences might not be exact.
+
+    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
+    to indicate which bipartite set the node belongs to.
+
+    This function is not imported in the main namespace.
+    To use it use nx.bipartite.alternating_havel_hakimi_graph
+    """
+    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
+    if G.is_directed():
+        raise nx.NetworkXError("Directed Graph not supported")
+
+    # length of the each sequence
+    naseq = len(aseq)
+    nbseq = len(bseq)
+    suma = sum(aseq)
+    sumb = sum(bseq)
+
+    if not suma == sumb:
+        raise nx.NetworkXError(
+            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
+        )
+
+    G = _add_nodes_with_bipartite_label(G, naseq, nbseq)
+
+    if len(aseq) == 0 or max(aseq) == 0:
+        return G  # done if no edges
+    # build list of degree-repeated vertex numbers
+    astubs = [[aseq[v], v] for v in range(naseq)]
+    bstubs = [[bseq[v - naseq], v] for v in range(naseq, naseq + nbseq)]
+    while astubs:
+        astubs.sort()
+        (degree, u) = astubs.pop()  # take of largest degree node in the a set
+        if degree == 0:
+            break  # done, all are zero
+        bstubs.sort()
+        small = bstubs[0 : degree // 2]  # add these low degree targets
+        large = bstubs[(-degree + degree // 2) :]  # now high degree targets
+        stubs = [x for z in zip(large, small) for x in z]  # combine, sorry
+        if len(stubs) < len(small) + len(large):  # check for zip truncation
+            stubs.append(large.pop())
+        for target in stubs:
+            v = target[1]
+            G.add_edge(u, v)
+            target[0] -= 1  # note this updates bstubs too.
+            if target[0] == 0:
+                bstubs.remove(target)
+
+    G.name = "bipartite_alternating_havel_hakimi_graph"
+    return G
+
+
+@py_random_state(3)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def preferential_attachment_graph(aseq, p, create_using=None, seed=None):
+    """Create a bipartite graph with a preferential attachment model from
+    a given single degree sequence.
+
+    The graph is composed of two partitions. Set A has nodes 0 to
+    (len(aseq) - 1) and set B has nodes starting with node len(aseq).
+    The number of nodes in set B is random.
+
+    Parameters
+    ----------
+    aseq : list
+       Degree sequence for node set A.
+    p :  float
+       Probability that a new bottom node is added.
+    create_using : NetworkX graph instance, optional
+       Return graph of this type.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    References
+    ----------
+    .. [1] Guillaume, J.L. and Latapy, M.,
+       Bipartite graphs as models of complex networks.
+       Physica A: Statistical Mechanics and its Applications,
+       2006, 371(2), pp.795-813.
+    .. [2] Jean-Loup Guillaume and Matthieu Latapy,
+       Bipartite structure of all complex networks,
+       Inf. Process. Lett. 90, 2004, pg. 215-221
+       https://doi.org/10.1016/j.ipl.2004.03.007
+
+    Notes
+    -----
+    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
+    to indicate which bipartite set the node belongs to.
+
+    This function is not imported in the main namespace.
+    To use it use nx.bipartite.preferential_attachment_graph
+    """
+    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
+    if G.is_directed():
+        raise nx.NetworkXError("Directed Graph not supported")
+
+    if p > 1:
+        raise nx.NetworkXError(f"probability {p} > 1")
+
+    naseq = len(aseq)
+    G = _add_nodes_with_bipartite_label(G, naseq, 0)
+    vv = [[v] * aseq[v] for v in range(naseq)]
+    while vv:
+        while vv[0]:
+            source = vv[0][0]
+            vv[0].remove(source)
+            if seed.random() < p or len(G) == naseq:
+                target = len(G)
+                G.add_node(target, bipartite=1)
+                G.add_edge(source, target)
+            else:
+                bb = [[b] * G.degree(b) for b in range(naseq, len(G))]
+                # flatten the list of lists into a list.
+                bbstubs = reduce(lambda x, y: x + y, bb)
+                # choose preferentially a bottom node.
+                target = seed.choice(bbstubs)
+                G.add_node(target, bipartite=1)
+                G.add_edge(source, target)
+        vv.remove(vv[0])
+    G.name = "bipartite_preferential_attachment_model"
+    return G
+
+
+@py_random_state(3)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def random_graph(n, m, p, seed=None, directed=False):
+    """Returns a bipartite random graph.
+
+    This is a bipartite version of the binomial (Erdős-Rényi) graph.
+    The graph is composed of two partitions. Set A has nodes 0 to
+    (n - 1) and set B has nodes n to (n + m - 1).
+
+    Parameters
+    ----------
+    n : int
+        The number of nodes in the first bipartite set.
+    m : int
+        The number of nodes in the second bipartite set.
+    p : float
+        Probability for edge creation.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+    directed : bool, optional (default=False)
+        If True return a directed graph
+
+    Notes
+    -----
+    The bipartite random graph algorithm chooses each of the n*m (undirected)
+    or 2*n*m (directed) possible edges with probability p.
+
+    This algorithm is $O(n+m)$ where $m$ is the expected number of edges.
+
+    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
+    to indicate which bipartite set the node belongs to.
+
+    This function is not imported in the main namespace.
+    To use it use nx.bipartite.random_graph
+
+    See Also
+    --------
+    gnp_random_graph, configuration_model
+
+    References
+    ----------
+    .. [1] Vladimir Batagelj and Ulrik Brandes,
+       "Efficient generation of large random networks",
+       Phys. Rev. E, 71, 036113, 2005.
+    """
+    G = nx.Graph()
+    G = _add_nodes_with_bipartite_label(G, n, m)
+    if directed:
+        G = nx.DiGraph(G)
+    G.name = f"fast_gnp_random_graph({n},{m},{p})"
+
+    if p <= 0:
+        return G
+    if p >= 1:
+        return nx.complete_bipartite_graph(n, m)
+
+    lp = math.log(1.0 - p)
+
+    v = 0
+    w = -1
+    while v < n:
+        lr = math.log(1.0 - seed.random())
+        w = w + 1 + int(lr / lp)
+        while w >= m and v < n:
+            w = w - m
+            v = v + 1
+        if v < n:
+            G.add_edge(v, n + w)
+
+    if directed:
+        # use the same algorithm to
+        # add edges from the "m" to "n" set
+        v = 0
+        w = -1
+        while v < n:
+            lr = math.log(1.0 - seed.random())
+            w = w + 1 + int(lr / lp)
+            while w >= m and v < n:
+                w = w - m
+                v = v + 1
+            if v < n:
+                G.add_edge(n + w, v)
+
+    return G
+
+
+@py_random_state(3)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def gnmk_random_graph(n, m, k, seed=None, directed=False):
+    """Returns a random bipartite graph G_{n,m,k}.
+
+    Produces a bipartite graph chosen randomly out of the set of all graphs
+    with n top nodes, m bottom nodes, and k edges.
+    The graph is composed of two sets of nodes.
+    Set A has nodes 0 to (n - 1) and set B has nodes n to (n + m - 1).
+
+    Parameters
+    ----------
+    n : int
+        The number of nodes in the first bipartite set.
+    m : int
+        The number of nodes in the second bipartite set.
+    k : int
+        The number of edges
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+    directed : bool, optional (default=False)
+        If True return a directed graph
+
+    Examples
+    --------
+    from networkx.algorithms import bipartite
+    G = bipartite.gnmk_random_graph(10,20,50)
+
+    See Also
+    --------
+    gnm_random_graph
+
+    Notes
+    -----
+    If k > m * n then a complete bipartite graph is returned.
+
+    This graph is a bipartite version of the `G_{nm}` random graph model.
+
+    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
+    to indicate which bipartite set the node belongs to.
+
+    This function is not imported in the main namespace.
+    To use it use nx.bipartite.gnmk_random_graph
+    """
+    G = nx.Graph()
+    G = _add_nodes_with_bipartite_label(G, n, m)
+    if directed:
+        G = nx.DiGraph(G)
+    G.name = f"bipartite_gnm_random_graph({n},{m},{k})"
+    if n == 1 or m == 1:
+        return G
+    max_edges = n * m  # max_edges for bipartite networks
+    if k >= max_edges:  # Maybe we should raise an exception here
+        return nx.complete_bipartite_graph(n, m, create_using=G)
+
+    top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]
+    bottom = list(set(G) - set(top))
+    edge_count = 0
+    while edge_count < k:
+        # generate random edge,u,v
+        u = seed.choice(top)
+        v = seed.choice(bottom)
+        if v in G[u]:
+            continue
+        else:
+            G.add_edge(u, v)
+            edge_count += 1
+    return G
+
+
+def _add_nodes_with_bipartite_label(G, lena, lenb):
+    G.add_nodes_from(range(lena + lenb))
+    b = dict(zip(range(lena), [0] * lena))
+    b.update(dict(zip(range(lena, lena + lenb), [1] * lenb)))
+    nx.set_node_attributes(G, b, "bipartite")
+    return G
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matching.py
new file mode 100644
index 00000000..38a17478
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matching.py
@@ -0,0 +1,590 @@
+# This module uses material from the Wikipedia article Hopcroft--Karp algorithm
+# <https://en.wikipedia.org/wiki/Hopcroft%E2%80%93Karp_algorithm>, accessed on
+# January 3, 2015, which is released under the Creative Commons
+# Attribution-Share-Alike License 3.0
+# <http://creativecommons.org/licenses/by-sa/3.0/>. That article includes
+# pseudocode, which has been translated into the corresponding Python code.
+#
+# Portions of this module use code from David Eppstein's Python Algorithms and
+# Data Structures (PADS) library, which is dedicated to the public domain (for
+# proof, see <http://www.ics.uci.edu/~eppstein/PADS/ABOUT-PADS.txt>).
+"""Provides functions for computing maximum cardinality matchings and minimum
+weight full matchings in a bipartite graph.
+
+If you don't care about the particular implementation of the maximum matching
+algorithm, simply use the :func:`maximum_matching`. If you do care, you can
+import one of the named maximum matching algorithms directly.
+
+For example, to find a maximum matching in the complete bipartite graph with
+two vertices on the left and three vertices on the right:
+
+>>> G = nx.complete_bipartite_graph(2, 3)
+>>> left, right = nx.bipartite.sets(G)
+>>> list(left)
+[0, 1]
+>>> list(right)
+[2, 3, 4]
+>>> nx.bipartite.maximum_matching(G)
+{0: 2, 1: 3, 2: 0, 3: 1}
+
+The dictionary returned by :func:`maximum_matching` includes a mapping for
+vertices in both the left and right vertex sets.
+
+Similarly, :func:`minimum_weight_full_matching` produces, for a complete
+weighted bipartite graph, a matching whose cardinality is the cardinality of
+the smaller of the two partitions, and for which the sum of the weights of the
+edges included in the matching is minimal.
+
+"""
+
+import collections
+import itertools
+
+import networkx as nx
+from networkx.algorithms.bipartite import sets as bipartite_sets
+from networkx.algorithms.bipartite.matrix import biadjacency_matrix
+
+__all__ = [
+    "maximum_matching",
+    "hopcroft_karp_matching",
+    "eppstein_matching",
+    "to_vertex_cover",
+    "minimum_weight_full_matching",
+]
+
+INFINITY = float("inf")
+
+
@nx._dispatchable
def hopcroft_karp_matching(G, top_nodes=None):
    """Returns the maximum cardinality matching of the bipartite graph `G`.

    A matching is a set of edges that do not share any nodes. A maximum
    cardinality matching is a matching with the most edges possible. It
    is not always unique. Finding a matching in a bipartite graph can be
    treated as a networkx flow problem.

    The functions ``hopcroft_karp_matching`` and ``maximum_matching``
    are aliases of the same function.

    Parameters
    ----------
    G : NetworkX graph

      Undirected bipartite graph

    top_nodes : container of nodes

      Container with all nodes in one bipartite node set. If not supplied
      it will be computed. But if more than one solution exists an exception
      will be raised.

    Returns
    -------
    matches : dictionary

      The matching is returned as a dictionary, `matches`, such that
      ``matches[v] == w`` if node `v` is matched to node `w`. Unmatched
      nodes do not occur as a key in `matches`.

    Raises
    ------
    AmbiguousSolution
      Raised if the input bipartite graph is disconnected and no container
      with all nodes in one bipartite set is provided. When determining
      the nodes in each bipartite set more than one valid solution is
      possible if the input graph is disconnected.

    Notes
    -----
    This function is implemented with the `Hopcroft--Karp matching algorithm
    <https://en.wikipedia.org/wiki/Hopcroft%E2%80%93Karp_algorithm>`_ for
    bipartite graphs.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    maximum_matching
    hopcroft_karp_matching
    eppstein_matching

    References
    ----------
    .. [1] John E. Hopcroft and Richard M. Karp. "An n^{5 / 2} Algorithm for
       Maximum Matchings in Bipartite Graphs" In: **SIAM Journal of Computing**
       2.4 (1973), pp. 225--231. <https://doi.org/10.1137/0202019>.

    """

    # First we define some auxiliary search functions.
    #
    # If you are a human reading these auxiliary search functions, the "global"
    # variables `leftmatches`, `rightmatches`, `distances`, etc. are defined
    # below the functions, so that they are initialized close to the initial
    # invocation of the search functions.
    def breadth_first_search():
        # BFS phase of Hopcroft--Karp: layer the graph by shortest
        # alternating-path distance from the free (unmatched) left vertices.
        # The `None` key stands for "some free right vertex"; returning True
        # means one was reached, i.e. at least one augmenting path exists.
        for v in left:
            if leftmatches[v] is None:
                distances[v] = 0
                queue.append(v)
            else:
                distances[v] = INFINITY
        distances[None] = INFINITY
        while queue:
            v = queue.popleft()
            if distances[v] < distances[None]:
                for u in G[v]:
                    # The `is` identity test is safe here: every infinite
                    # value stored in `distances` is this exact INFINITY
                    # object, while all finite distances are ints.
                    if distances[rightmatches[u]] is INFINITY:
                        distances[rightmatches[u]] = distances[v] + 1
                        queue.append(rightmatches[u])
        return distances[None] is not INFINITY

    def depth_first_search(v):
        # DFS phase: follow the BFS layering (distances must increase by
        # exactly one along the path) to find an augmenting path starting at
        # left vertex `v`, flipping matched edges along the way. Reaching
        # `v is None` means a free right vertex was found, completing the
        # augmenting path.
        if v is not None:
            for u in G[v]:
                if distances[rightmatches[u]] == distances[v] + 1:
                    if depth_first_search(rightmatches[u]):
                        rightmatches[u] = v
                        leftmatches[v] = u
                        return True
            # Mark `v` as dead for this phase so later DFS calls skip it.
            distances[v] = INFINITY
            return False
        return True

    # Initialize the "global" variables that maintain state during the search.
    left, right = bipartite_sets(G, top_nodes)
    leftmatches = {v: None for v in left}
    rightmatches = {v: None for v in right}
    distances = {}
    queue = collections.deque()

    # Implementation note: this counter is incremented as pairs are matched but
    # it is currently not used elsewhere in the computation.
    num_matched_pairs = 0
    while breadth_first_search():
        for v in left:
            if leftmatches[v] is None:
                if depth_first_search(v):
                    num_matched_pairs += 1

    # Strip the entries matched to `None`.
    leftmatches = {k: v for k, v in leftmatches.items() if v is not None}
    rightmatches = {k: v for k, v in rightmatches.items() if v is not None}

    # At this point, the left matches and the right matches are inverses of one
    # another. In other words,
    #
    #     leftmatches == {v: k for k, v in rightmatches.items()}
    #
    # Finally, we combine both the left matches and right matches.
    return dict(itertools.chain(leftmatches.items(), rightmatches.items()))
+
+
@nx._dispatchable
def eppstein_matching(G, top_nodes=None):
    """Returns the maximum cardinality matching of the bipartite graph `G`.

    Parameters
    ----------
    G : NetworkX graph

      Undirected bipartite graph

    top_nodes : container

      Container with all nodes in one bipartite node set. If not supplied
      it will be computed. But if more than one solution exists an exception
      will be raised.

    Returns
    -------
    matches : dictionary

      The matching is returned as a dictionary, `matching`, such that
      ``matching[v] == w`` if node `v` is matched to node `w`. Unmatched
      nodes do not occur as a key in `matching`.

    Raises
    ------
    AmbiguousSolution
      Raised if the input bipartite graph is disconnected and no container
      with all nodes in one bipartite set is provided. When determining
      the nodes in each bipartite set more than one valid solution is
      possible if the input graph is disconnected.

    Notes
    -----
    This function is implemented with David Eppstein's version of the algorithm
    Hopcroft--Karp algorithm (see :func:`hopcroft_karp_matching`), which
    originally appeared in the `Python Algorithms and Data Structures library
    (PADS) <http://www.ics.uci.edu/~eppstein/PADS/ABOUT-PADS.txt>`_.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------

    hopcroft_karp_matching

    """
    # Due to its original implementation, a directed graph is needed
    # so that the two sets of bipartite nodes can be distinguished
    left, right = bipartite_sets(G, top_nodes)
    # Orient every edge from the left node set toward the right node set.
    G = nx.DiGraph(G.edges(left))
    # initialize greedy matching (redundant, but faster than full search)
    # NOTE: during the search `matching` maps right vertices to their matched
    # left vertex; it is symmetrized just before returning.
    matching = {}
    for u in G:
        for v in G[u]:
            if v not in matching:
                matching[v] = u
                break
    while True:
        # structure residual graph into layers
        # pred[u] gives the neighbor in the previous layer for u in U
        # preds[v] gives a list of neighbors in the previous layer for v in V
        # unmatched gives a list of unmatched vertices in final layer of V,
        # and is also used as a flag value for pred[u] when u is in the first
        # layer
        preds = {}
        unmatched = []
        pred = {u: unmatched for u in G}
        for v in matching:
            del pred[matching[v]]
        layer = list(pred)

        # repeatedly extend layering structure by another pair of layers
        while layer and not unmatched:
            newLayer = {}
            for u in layer:
                for v in G[u]:
                    if v not in preds:
                        newLayer.setdefault(v, []).append(u)
            layer = []
            for v in newLayer:
                preds[v] = newLayer[v]
                if v in matching:
                    layer.append(matching[v])
                    pred[matching[v]] = v
                else:
                    unmatched.append(v)

        # did we finish layering without finding any alternating paths?
        if not unmatched:
            # TODO - The lines between --- were unused and were thus commented
            # out. This whole commented chunk should be reviewed to determine
            # whether it should be built upon or completely removed.
            # ---
            # unlayered = {}
            # for u in G:
            #     # TODO Why is extra inner loop necessary?
            #     for v in G[u]:
            #         if v not in preds:
            #             unlayered[v] = None
            # ---
            # TODO Originally, this function returned a three-tuple:
            #
            #     return (matching, list(pred), list(unlayered))
            #
            # For some reason, the documentation for this function
            # indicated that the second and third elements of the returned
            # three-tuple would be the vertices in the left and right vertex
            # sets, respectively, that are also in the maximum independent set.
            # However, what I think the author meant was that the second
            # element is the list of vertices that were unmatched and the third
            # element was the list of vertices that were matched. Since that
            # seems to be the case, they don't really need to be returned,
            # since that information can be inferred from the matching
            # dictionary.

            # All the matched nodes must be a key in the dictionary
            for key in matching.copy():
                matching[matching[key]] = key
            return matching

        # recursively search backward through layers to find alternating paths
        # recursion returns true if found path, false otherwise
        def recurse(v):
            # `pu is unmatched` exploits the shared list object as a sentinel
            # marking first-layer vertices (see the construction of `pred`).
            if v in preds:
                L = preds.pop(v)
                for u in L:
                    if u in pred:
                        pu = pred.pop(u)
                        if pu is unmatched or recurse(pu):
                            matching[v] = u
                            return True
            return False

        for v in unmatched:
            recurse(v)
+
+
+def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, targets):
+    """Returns True if and only if the vertex `v` is connected to one of
+    the target vertices by an alternating path in `G`.
+
+    An *alternating path* is a path in which every other edge is in the
+    specified maximum matching (and the remaining edges in the path are not in
+    the matching). An alternating path may have matched edges in the even
+    positions or in the odd positions, as long as the edges alternate between
+    'matched' and 'unmatched'.
+
+    `G` is an undirected bipartite NetworkX graph.
+
+    `v` is a vertex in `G`.
+
+    `matched_edges` is a set of edges present in a maximum matching in `G`.
+
+    `unmatched_edges` is a set of edges not present in a maximum
+    matching in `G`.
+
+    `targets` is a set of vertices.
+
+    """
+
+    def _alternating_dfs(u, along_matched=True):
+        """Returns True if and only if `u` is connected to one of the
+        targets by an alternating path.
+
+        `u` is a vertex in the graph `G`.
+
+        If `along_matched` is True, this step of the depth-first search
+        will continue only through edges in the given matching. Otherwise, it
+        will continue only through edges *not* in the given matching.
+
+        """
+        visited = set()
+        # Follow matched edges when depth is even,
+        # and follow unmatched edges when depth is odd.
+        initial_depth = 0 if along_matched else 1
+        stack = [(u, iter(G[u]), initial_depth)]
+        while stack:
+            parent, children, depth = stack[-1]
+            valid_edges = matched_edges if depth % 2 else unmatched_edges
+            try:
+                child = next(children)
+                if child not in visited:
+                    if (parent, child) in valid_edges or (child, parent) in valid_edges:
+                        if child in targets:
+                            return True
+                        visited.add(child)
+                        stack.append((child, iter(G[child]), depth + 1))
+            except StopIteration:
+                stack.pop()
+        return False
+
+    # Check for alternating paths starting with edges in the matching, then
+    # check for alternating paths starting with edges not in the
+    # matching.
+    return _alternating_dfs(v, along_matched=True) or _alternating_dfs(
+        v, along_matched=False
+    )
+
+
def _connected_by_alternating_paths(G, matching, targets):
    """Return the set of vertices of `G` that either belong to `targets`
    or are joined to some target vertex by an alternating path.

    An *alternating path* alternates between edges that belong to the given
    maximum matching and edges that do not; the matched edges may occupy
    either the even or the odd positions of the path.

    `G` is an undirected bipartite NetworkX graph, `matching` a dictionary
    representing a maximum matching of `G` (as produced by, for example,
    :func:`maximum_matching`), and `targets` a set of vertices.
    """
    # Collect each matched edge exactly once, regardless of direction.
    # Frozensets deduplicate (u, v) / (v, u) without requiring the nodes
    # to be orderable.
    matched_pairs = {frozenset((u, v)) for u, v in matching.items()}
    matched_edges = {tuple(pair) for pair in matched_pairs}
    unmatched_edges = set()
    for u, v in G.edges():
        if frozenset((u, v)) not in matched_pairs:
            unmatched_edges.add((u, v))

    result = set()
    for node in G:
        if node in targets or _is_connected_by_alternating_path(
            G, node, matched_edges, unmatched_edges, targets
        ):
            result.add(node)
    return result
+
+
@nx._dispatchable
def to_vertex_cover(G, matching, top_nodes=None):
    """Returns the minimum vertex cover corresponding to the given maximum
    matching of the bipartite graph `G`.

    Parameters
    ----------
    G : NetworkX graph

      Undirected bipartite graph

    matching : dictionary

      A dictionary whose keys are vertices in `G` and whose values are the
      distinct neighbors comprising the maximum matching for `G`, as
      returned by, for example, :func:`maximum_matching`. The dictionary
      *must* represent the maximum matching.

    top_nodes : container

      Container with all nodes in one bipartite node set. If not supplied
      it will be computed. But if more than one solution exists an exception
      will be raised.

    Returns
    -------
    vertex_cover : :class:`set`

      The minimum vertex cover in `G`.

    Raises
    ------
    AmbiguousSolution
      Raised if the input bipartite graph is disconnected and no container
      with all nodes in one bipartite set is provided. When determining
      the nodes in each bipartite set more than one valid solution is
      possible if the input graph is disconnected.

    Notes
    -----
    This function relies on the constructive procedure behind `Konig's
    theorem
    <https://en.wikipedia.org/wiki/K%C3%B6nig%27s_theorem_%28graph_theory%29>`_,
    which establishes the equivalence between maximum matchings and minimum
    vertex covers in bipartite graphs.

    Because a minimum vertex cover is the complement of a maximum independent
    set for any graph, the latter can be obtained for a bipartite graph like
    so:

    >>> G = nx.complete_bipartite_graph(2, 3)
    >>> matching = nx.bipartite.maximum_matching(G)
    >>> vertex_cover = nx.bipartite.to_vertex_cover(G, matching)
    >>> independent_set = set(G) - vertex_cover
    >>> print(list(independent_set))
    [2, 3, 4]

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    """
    # Python rendering of the constructive proof of Konig's theorem:
    # <https://en.wikipedia.org/wiki/K%C3%B6nig%27s_theorem_%28graph_theory%29#Proof>.
    left, right = bipartite_sets(G, top_nodes)
    # U: the unmatched vertices belonging to the left set.
    free_left = (set(G) - set(matching)) & left
    # Z: vertices in U together with everything reachable from U via
    # alternating paths.
    reachable = _connected_by_alternating_paths(G, matching, free_left)
    # Every edge has its left endpoint in (left - Z) or its right endpoint
    # in (right & Z), so their union covers all edges.
    return (left - reachable) | (right & reachable)
+
+
#: Returns the maximum cardinality matching in the given bipartite graph.
#:
#: This function is simply an alias for :func:`hopcroft_karp_matching`.
#: Being a plain assignment, the alias shares the original function's
#: signature, docstring and dispatching behavior.
maximum_matching = hopcroft_karp_matching
+
+
@nx._dispatchable(edge_attrs="weight")
def minimum_weight_full_matching(G, top_nodes=None, weight="weight"):
    r"""Returns a minimum weight full matching of the bipartite graph `G`.

    Let :math:`G = ((U, V), E)` be a weighted bipartite graph with real weights
    :math:`w : E \to \mathbb{R}`. This function produces a matching
    :math:`M \subseteq E` of cardinality

    .. math::
       \lvert M \rvert = \min(\lvert U \rvert, \lvert V \rvert),

    minimizing the total weight of the edges included in the matching,
    :math:`\sum_{e \in M} w(e)`, or raises an error if no such matching
    exists.

    When :math:`\lvert U \rvert = \lvert V \rvert` such a matching is usually
    called *perfect*; since the two sets may differ in size here, we follow
    Karp [1]_ and call the matching *full*.

    Parameters
    ----------
    G : NetworkX graph

      Undirected bipartite graph

    top_nodes : container

      Container with all nodes in one bipartite node set. If not supplied
      it will be computed.

    weight : string, optional (default='weight')

       The edge data key used to provide each value in the matrix.
       If None, then each edge has weight 1.

    Returns
    -------
    matches : dictionary

      The matching is returned as a dictionary, `matches`, such that
      ``matches[v] == w`` if node `v` is matched to node `w`. Unmatched
      nodes do not occur as a key in `matches`.

    Raises
    ------
    ValueError
      Raised if no full matching exists.

    ImportError
      Raised if SciPy is not available.

    Notes
    -----
    Determining a minimum weight full matching is also known as the
    rectangular linear assignment problem. This implementation delegates
    the actual assignment computation to SciPy.

    References
    ----------
    .. [1] Richard Manning Karp:
       An algorithm to Solve the m x n Assignment Problem in Expected Time
       O(mn log n).
       Networks, 10(2):143–152, 1980.

    """
    import numpy as np
    import scipy as sp

    left, right = nx.bipartite.sets(G, top_nodes)
    row_nodes = list(left)
    col_nodes = list(right)
    # Build a dense cost matrix with +inf wherever an edge is absent; calling
    # toarray() on the sparse matrix would instead fill those positions with
    # zeros, wrongly making missing edges look cheap.
    sparse = biadjacency_matrix(
        G, row_order=row_nodes, column_order=col_nodes, weight=weight, format="coo"
    )
    cost = np.full(sparse.shape, np.inf)
    cost[sparse.row, sparse.col] = sparse.data
    row_ind, col_ind = sp.optimize.linear_sum_assignment(cost)
    matches = {row_nodes[r]: col_nodes[c] for r, c in zip(row_ind, col_ind)}
    # Mirror the assignment so nodes from both sets appear as keys.
    matches.update({v: u for u, v in matches.items()})
    return matches
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matrix.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matrix.py
new file mode 100644
index 00000000..bbfa47c7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matrix.py
@@ -0,0 +1,168 @@
+"""
+====================
+Biadjacency matrices
+====================
+"""
+
+import itertools
+
+import networkx as nx
+from networkx.convert_matrix import _generate_weighted_edges
+
+__all__ = ["biadjacency_matrix", "from_biadjacency_matrix"]
+
+
@nx._dispatchable(edge_attrs="weight")
def biadjacency_matrix(
    G, row_order, column_order=None, dtype=None, weight="weight", format="csr"
):
    r"""Returns the biadjacency matrix of the bipartite graph G.

    Let `G = (U, V, E)` be a bipartite graph with node sets
    `U = u_{1},...,u_{r}` and `V = v_{1},...,v_{s}`. The biadjacency
    matrix [1]_ is the `r` x `s` matrix `B` where `b_{i,j} = 1` exactly
    when `(u_i, v_j) \in E`. When the parameter `weight` is not `None` and
    names an existing edge attribute, that attribute's value is used in
    place of 1.

    Parameters
    ----------
    G : graph
       A NetworkX graph

    row_order : list of nodes
       The rows of the matrix are ordered according to the list of nodes.

    column_order : list, optional
       The columns of the matrix are ordered according to the list of nodes.
       If column_order is None, then the ordering of columns is arbitrary.

    dtype : NumPy data-type, optional
        A valid NumPy dtype used to initialize the array. If None, then the
        NumPy default is used.

    weight : string or None, optional (default='weight')
       The edge data key used to provide each value in the matrix.
       If None, then each edge has weight 1.

    format : str in {'bsr', 'csr', 'csc', 'coo', 'lil', 'dia', 'dok'}
        The type of the matrix to be returned (default 'csr').  For
        some algorithms different implementations of sparse matrices
        can perform better.  See [2]_ for details.

    Returns
    -------
    M : SciPy sparse array
        Biadjacency matrix representation of the bipartite graph G.

    Notes
    -----
    No attempt is made to check that the input graph is bipartite.

    For directed bipartite graphs only successors are considered as neighbors.
    To obtain an adjacency matrix with ones (or weight values) for both
    predecessors and successors you have to generate two biadjacency matrices
    where the rows of one of them are the columns of the other, and then add
    one to the transpose of the other.

    See Also
    --------
    adjacency_matrix
    from_biadjacency_matrix

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
    .. [2] Scipy Dev. References, "Sparse Matrices",
       https://docs.scipy.org/doc/scipy/reference/sparse.html
    """
    import scipy as sp

    nlen = len(row_order)
    if nlen == 0:
        raise nx.NetworkXError("row_order is empty list")
    if nlen != len(set(row_order)):
        raise nx.NetworkXError(
            "Ambiguous ordering: `row_order` contained duplicates."
        )
    if column_order is None:
        # Any node outside the row set is treated as a column node.
        column_order = list(set(G) - set(row_order))
    mlen = len(column_order)
    if mlen != len(set(column_order)):
        raise nx.NetworkXError(
            "Ambiguous ordering: `column_order` contained duplicates."
        )

    # Map each node to its row / column position in the matrix.
    row_index = {node: i for i, node in enumerate(row_order)}
    col_index = {node: j for j, node in enumerate(column_order)}

    if G.number_of_edges() == 0:
        row, col, data = [], [], []
    else:
        # Keep only edges whose endpoints fall within the requested orderings.
        entries = [
            (row_index[u], col_index[v], d.get(weight, 1))
            for u, v, d in G.edges(row_order, data=True)
            if u in row_index and v in col_index
        ]
        row, col, data = zip(*entries)
    A = sp.sparse.coo_array((data, (row, col)), shape=(nlen, mlen), dtype=dtype)
    try:
        return A.asformat(format)
    except ValueError as err:
        raise nx.NetworkXError(f"Unknown sparse array format: {format}") from err
+
+
@nx._dispatchable(graphs=None, returns_graph=True)
def from_biadjacency_matrix(A, create_using=None, edge_attribute="weight"):
    r"""Creates a new bipartite graph from a biadjacency matrix given as a
    SciPy sparse array.

    Parameters
    ----------
    A: scipy sparse array
      A biadjacency matrix representation of a graph

    create_using: NetworkX graph
       Use specified graph for result.  The default is Graph()

    edge_attribute: string
       Name of edge attribute to store matrix numeric value. The data will
       have the same type as the matrix entry (int, float, (real,imag)).

    Notes
    -----
    Each node receives a `bipartite` attribute of 0 or 1 marking which part
    of the bipartite graph it belongs to.

    If `create_using` is an instance of :class:`networkx.MultiGraph` or
    :class:`networkx.MultiDiGraph` and the entries of `A` are of type
    :class:`int`, the result is a multigraph (of the same type as
    `create_using`) with parallel edges; `edge_attribute` is then ignored.

    See Also
    --------
    biadjacency_matrix
    from_numpy_array

    References
    ----------
    [1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
    """
    G = nx.empty_graph(0, create_using)
    n_top, n_bottom = A.shape
    # Add every node up front so isolated vertices are preserved.
    G.add_nodes_from(range(n_top), bipartite=0)
    G.add_nodes_from(range(n_top, n_top + n_bottom), bipartite=1)
    # Column indices are shifted by n_top so the second node set follows the
    # first in the node numbering.
    weighted = ((u, n_top + v, w) for (u, v, w) in _generate_weighted_edges(A))
    # For multigraphs with integer entries, interpret each matrix entry as an
    # edge multiplicity: expand an entry w into w parallel edges of weight 1.
    # Otherwise a single edge carries the matrix entry as its weight.
    if A.dtype.kind in ("i", "u") and G.is_multigraph():
        weighted = itertools.chain.from_iterable(
            ((u, v, 1) for _ in range(w)) for (u, v, w) in weighted
        )
    G.add_weighted_edges_from(weighted, weight=edge_attribute)
    return G
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/projection.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/projection.py
new file mode 100644
index 00000000..7c2a26cf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/projection.py
@@ -0,0 +1,526 @@
+"""One-mode (unipartite) projections of bipartite graphs."""
+
+import networkx as nx
+from networkx.exception import NetworkXAlgorithmError
+from networkx.utils import not_implemented_for
+
+__all__ = [
+    "projected_graph",
+    "weighted_projected_graph",
+    "collaboration_weighted_projected_graph",
+    "overlap_weighted_projected_graph",
+    "generic_weighted_projected_graph",
+]
+
+
@nx._dispatchable(
    graphs="B", preserve_node_attrs=True, preserve_graph_attrs=True, returns_graph=True
)
def projected_graph(B, nodes, multigraph=False):
    r"""Returns the projection of B onto one of its node sets.

    The projection contains the given nodes, with their attributes, and
    connects two of them whenever they have a common neighbor in the
    bipartite graph B.

    Parameters
    ----------
    B : NetworkX graph
      The input graph should be bipartite.

    nodes : list or iterable
      Nodes to project onto (the "bottom" nodes).

    multigraph: bool (default=False)
       If True, return a multigraph in which parallel edges record the
       distinct shared neighbors.  The edge key in the multigraph is the
       label of the shared neighbor that produced it.

    Returns
    -------
    Graph : NetworkX graph or multigraph
       A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(4)
    >>> G = bipartite.projected_graph(B, [1, 3])
    >>> list(G)
    [1, 3]
    >>> list(G.edges())
    [(1, 3)]

    If nodes `a`, and `b` are connected through both nodes 1 and 2 then
    building a multigraph results in two edges in the projection onto
    [`a`, `b`]:

    >>> B = nx.Graph()
    >>> B.add_edges_from([("a", 1), ("b", 1), ("a", 2), ("b", 2)])
    >>> G = bipartite.projected_graph(B, ["a", "b"], multigraph=True)
    >>> print([sorted((u, v)) for u, v in G.edges()])
    [['a', 'b'], ['a', 'b']]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    Returns a simple graph that is the projection of the bipartite graph B
    onto the set of nodes given in list nodes.  If multigraph=True then
    a multigraph is returned with an edge for every shared neighbor.

    Directed graphs are allowed as input.  The output will also then
    be a directed graph with edges if there is a directed path between
    the nodes.

    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    weighted_projected_graph,
    collaboration_weighted_projected_graph,
    overlap_weighted_projected_graph,
    generic_weighted_projected_graph
    """
    if B.is_multigraph():
        raise nx.NetworkXError("not defined for multigraphs")
    directed = B.is_directed()
    if directed:
        G = nx.MultiDiGraph() if multigraph else nx.DiGraph()
    else:
        G = nx.MultiGraph() if multigraph else nx.Graph()
    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    for u in nodes:
        # Distance-two neighbors of u (other projected candidates).
        second_nbrs = {w for mid in B[u] for w in B[mid] if w != u}
        if not multigraph:
            G.add_edges_from((u, w) for w in second_nbrs)
            continue
        for w in second_nbrs:
            # For directed input a shared neighbor must be a successor of u
            # and a predecessor of w, giving a directed path u -> key -> w.
            if directed:
                shared = set(B[u]) & set(B.pred[w])
            else:
                shared = set(B[u]) & set(B[w])
            for key in shared:
                if not G.has_edge(u, w, key):
                    G.add_edge(u, w, key=key)
    return G
+
+
@not_implemented_for("multigraph")
@nx._dispatchable(graphs="B", returns_graph=True)
def weighted_projected_graph(B, nodes, ratio=False):
    r"""Returns a weighted projection of B onto one of its node sets.

    The weighted projected graph is the projection of the bipartite
    network B onto the specified nodes, where each edge weight is either
    the number of shared neighbors, or — when ``ratio is True`` — the
    ratio of actual to possible shared neighbors [1]_.  The nodes retain
    their attributes and are connected in the resulting graph if they
    have an edge to a common node in the original graph.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Distinct nodes to project onto (the "bottom" nodes).

    ratio: Bool (default=False)
        If True, edge weight is the ratio between actual shared neighbors
        and maximum possible shared neighbors (i.e., the size of the other
        node set). If False, edge weight is the number of shared neighbors.

    Returns
    -------
    Graph : NetworkX graph
       A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(4)
    >>> G = bipartite.weighted_projected_graph(B, [1, 3])
    >>> list(G)
    [1, 3]
    >>> list(G.edges(data=True))
    [(1, 3, {'weight': 1})]
    >>> G = bipartite.weighted_projected_graph(B, [1, 3], ratio=True)
    >>> list(G.edges(data=True))
    [(1, 3, {'weight': 0.5})]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite, or that
    the input nodes are distinct. However, if the length of the input nodes is
    greater than or equal to the number of nodes in B, an exception is raised.
    If the nodes are not distinct but that check does not trigger, the output
    weights will be incorrect.
    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    collaboration_weighted_projected_graph,
    overlap_weighted_projected_graph,
    generic_weighted_projected_graph
    projected_graph

    References
    ----------
    .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
        Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
        of Social Network Analysis. Sage Publications.
    """
    directed = B.is_directed()
    # For directed graphs, shared neighbors of v are its predecessors.
    pred = B.pred if directed else B.adj
    G = nx.DiGraph() if directed else nx.Graph()
    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    n_top = len(B) - len(nodes)

    if n_top < 1:
        raise NetworkXAlgorithmError(
            f"the size of the nodes to project onto ({len(nodes)}) is >= the graph size ({len(B)}).\n"
            "They are either not a valid bipartite partition or contain duplicates"
        )

    for u in nodes:
        u_nbrs = set(B[u])
        candidates = {w for mid in u_nbrs for w in B[mid]} - {u}
        for v in candidates:
            shared = u_nbrs & set(pred[v])
            weight = len(shared) / n_top if ratio else len(shared)
            G.add_edge(u, v, weight=weight)
    return G
+
+
@not_implemented_for("multigraph")
@nx._dispatchable(graphs="B", returns_graph=True)
def collaboration_weighted_projected_graph(B, nodes):
    r"""Newman's weighted projection of B onto one of its node sets.

    The collaboration weighted projection is the projection of the
    bipartite network B onto the specified nodes with weights assigned
    using Newman's collaboration model [1]_:

    .. math::

        w_{u, v} = \sum_k \frac{\delta_{u}^{k} \delta_{v}^{k}}{d_k - 1}

    where `u` and `v` are nodes from the bottom bipartite node set,
    and `k` is a node of the top node set.
    The value `d_k` is the degree of node `k` in the bipartite
    network and `\delta_{u}^{k}` is 1 if node `u` is
    linked to node `k` in the original bipartite graph or 0 otherwise.

    The nodes retain their attributes and are connected in the resulting
    graph if they have an edge to a common node in the original bipartite
    graph.

    Parameters
    ----------
    B : NetworkX graph
      The input graph should be bipartite.

    nodes : list or iterable
      Nodes to project onto (the "bottom" nodes).

    Returns
    -------
    Graph : NetworkX graph
       A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(5)
    >>> B.add_edge(1, 5)
    >>> G = bipartite.collaboration_weighted_projected_graph(B, [0, 2, 4, 5])
    >>> list(G)
    [0, 2, 4, 5]
    >>> for edge in sorted(G.edges(data=True)):
    ...     print(edge)
    (0, 2, {'weight': 0.5})
    (0, 5, {'weight': 0.5})
    (2, 4, {'weight': 1.0})
    (2, 5, {'weight': 0.5})

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    weighted_projected_graph,
    overlap_weighted_projected_graph,
    generic_weighted_projected_graph,
    projected_graph

    References
    ----------
    .. [1] Scientific collaboration networks: II.
        Shortest paths, weighted networks, and centrality,
        M. E. J. Newman, Phys. Rev. E 64, 016132 (2001).
    """
    directed = B.is_directed()
    # For directed graphs, shared neighbors of v are its predecessors.
    pred = B.pred if directed else B.adj
    G = nx.DiGraph() if directed else nx.Graph()
    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    for u in nodes:
        u_nbrs = set(B[u])
        candidates = {w for mid in u_nbrs for w in B[mid] if w != u}
        for v in candidates:
            shared = u_nbrs & set(pred[v])
            # Each shared top node k of degree d_k > 1 contributes 1/(d_k - 1);
            # degree-one top nodes cannot link u and v and are skipped.
            weight = sum(1.0 / (len(B[k]) - 1) for k in shared if len(B[k]) > 1)
            G.add_edge(u, v, weight=weight)
    return G
+
+
@not_implemented_for("multigraph")
@nx._dispatchable(graphs="B", returns_graph=True)
def overlap_weighted_projected_graph(B, nodes, jaccard=True):
    r"""Overlap weighted projection of B onto one of its node sets.

    The overlap weighted projection is the projection of the bipartite
    network B onto the specified nodes with weights representing
    the Jaccard index between the neighborhoods of the two nodes in the
    original bipartite network [1]_:

    .. math::

        w_{v, u} = \frac{|N(u) \cap N(v)|}{|N(u) \cup N(v)|}

    or if the parameter 'jaccard' is False, the fraction of common
    neighbors by minimum of both nodes degree in the original
    bipartite graph [1]_:

    .. math::

        w_{v, u} = \frac{|N(u) \cap N(v)|}{min(|N(u)|, |N(v)|)}

    The nodes retain their attributes and are connected in the resulting
    graph if they have an edge to a common node in the original bipartite
    graph.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Nodes to project onto (the "bottom" nodes).

    jaccard: Bool (default=True)
        If True, weight edges by the Jaccard index of the two
        neighborhoods; if False, normalize by the smaller neighborhood
        size instead (overlap coefficient).

    Returns
    -------
    Graph : NetworkX graph
       A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(5)
    >>> nodes = [0, 2, 4]
    >>> G = bipartite.overlap_weighted_projected_graph(B, nodes)
    >>> list(G)
    [0, 2, 4]
    >>> list(G.edges(data=True))
    [(0, 2, {'weight': 0.5}), (2, 4, {'weight': 0.5})]
    >>> G = bipartite.overlap_weighted_projected_graph(B, nodes, jaccard=False)
    >>> list(G.edges(data=True))
    [(0, 2, {'weight': 1.0}), (2, 4, {'weight': 1.0})]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    weighted_projected_graph,
    collaboration_weighted_projected_graph,
    generic_weighted_projected_graph,
    projected_graph

    References
    ----------
    .. [1] Borgatti, S.P. and Halgin, D. In press. Analyzing Affiliation
        Networks. In Carrington, P. and Scott, J. (eds) The Sage Handbook
        of Social Network Analysis. Sage Publications.

    """
    directed = B.is_directed()
    # For directed graphs, shared neighbors of v are its predecessors.
    pred = B.pred if directed else B.adj
    G = nx.DiGraph() if directed else nx.Graph()
    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    for u in nodes:
        u_nbrs = set(B[u])
        candidates = {w for mid in u_nbrs for w in B[mid]} - {u}
        for v in candidates:
            v_nbrs = set(pred[v])
            shared = len(u_nbrs & v_nbrs)
            if jaccard:
                denom = len(u_nbrs | v_nbrs)
            else:
                denom = min(len(u_nbrs), len(v_nbrs))
            G.add_edge(u, v, weight=shared / denom)
    return G
+
+
@not_implemented_for("multigraph")
@nx._dispatchable(graphs="B", preserve_all_attrs=True, returns_graph=True)
def generic_weighted_projected_graph(B, nodes, weight_function=None):
    r"""Weighted projection of B with a user-specified weight function.

    The bipartite network B is projected on to the specified nodes
    with weights computed by a user-specified function.  This function
    must accept as a parameter the neighborhood sets of two nodes and
    return an integer or a float.

    The nodes retain their attributes and are connected in the resulting graph
    if they have an edge to a common node in the original graph.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Nodes to project onto (the "bottom" nodes).

    weight_function : function
        This function must accept as parameters the same input graph
        that this function, and two nodes; and return an integer or a float.
        The default function computes the number of shared neighbors.

    Returns
    -------
    Graph : NetworkX graph
       A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> # Define some custom weight functions
    >>> def jaccard(G, u, v):
    ...     unbrs = set(G[u])
    ...     vnbrs = set(G[v])
    ...     return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs)
    >>> def my_weight(G, u, v, weight="weight"):
    ...     w = 0
    ...     for nbr in set(G[u]) & set(G[v]):
    ...         w += G[u][nbr].get(weight, 1) + G[v][nbr].get(weight, 1)
    ...     return w
    >>> # A complete bipartite graph with 4 nodes and 4 edges
    >>> B = nx.complete_bipartite_graph(2, 2)
    >>> # Add some arbitrary weight to the edges
    >>> for i, (u, v) in enumerate(B.edges()):
    ...     B.edges[u, v]["weight"] = i + 1
    >>> for edge in B.edges(data=True):
    ...     print(edge)
    (0, 2, {'weight': 1})
    (0, 3, {'weight': 2})
    (1, 2, {'weight': 3})
    (1, 3, {'weight': 4})
    >>> # By default, the weight is the number of shared neighbors
    >>> G = bipartite.generic_weighted_projected_graph(B, [0, 1])
    >>> print(list(G.edges(data=True)))
    [(0, 1, {'weight': 2})]
    >>> # To specify a custom weight function use the weight_function parameter
    >>> G = bipartite.generic_weighted_projected_graph(
    ...     B, [0, 1], weight_function=jaccard
    ... )
    >>> print(list(G.edges(data=True)))
    [(0, 1, {'weight': 1.0})]
    >>> G = bipartite.generic_weighted_projected_graph(
    ...     B, [0, 1], weight_function=my_weight
    ... )
    >>> print(list(G.edges(data=True)))
    [(0, 1, {'weight': 10})]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    weighted_projected_graph,
    collaboration_weighted_projected_graph,
    overlap_weighted_projected_graph,
    projected_graph

    """
    directed = B.is_directed()
    # For directed graphs, shared neighbors of v are its predecessors.
    pred = B.pred if directed else B.adj
    G = nx.DiGraph() if directed else nx.Graph()
    if weight_function is None:

        def weight_function(G, u, v):
            # set(pred[v]) handles both the directed and undirected cases.
            return len(set(G[u]) & set(pred[v]))

    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    for u in nodes:
        candidates = {w for mid in B[u] for w in B[mid]} - {u}
        for v in candidates:
            G.add_edge(u, v, weight=weight_function(B, u, v))
    return G
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/redundancy.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/redundancy.py
new file mode 100644
index 00000000..b622b975
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/redundancy.py
@@ -0,0 +1,112 @@
+"""Node redundancy for bipartite graphs."""
+
+from itertools import combinations
+
+import networkx as nx
+from networkx import NetworkXError
+
+__all__ = ["node_redundancy"]
+
+
@nx._dispatchable
def node_redundancy(G, nodes=None):
    r"""Computes the node redundancy coefficients for the nodes in the bipartite
    graph `G`.

    The redundancy coefficient of a node `v` is the fraction of pairs of
    neighbors of `v` that are both linked to other nodes. In a one-mode
    projection these nodes would be linked together even if `v` were
    not there.

    More formally, for any vertex `v`, the *redundancy coefficient of `v`* is
    defined by

    .. math::

        rc(v) = \frac{|\{\{u, w\} \subseteq N(v),
        \: \exists v' \neq  v,\: (v',u) \in E\:
        \mathrm{and}\: (v',w) \in E\}|}{ \frac{|N(v)|(|N(v)|-1)}{2}},

    where `N(v)` is the set of neighbors of `v` in `G`.

    Parameters
    ----------
    G : graph
        A bipartite graph

    nodes : list or iterable (optional)
        Compute redundancy for these nodes. The default is all nodes in G.

    Returns
    -------
    redundancy : dictionary
        A dictionary keyed by node with the node redundancy value.

    Examples
    --------
    Compute the redundancy coefficient of each node in a graph::

        >>> from networkx.algorithms import bipartite
        >>> G = nx.cycle_graph(4)
        >>> rc = bipartite.node_redundancy(G)
        >>> rc[0]
        1.0

    Compute the average redundancy for the graph::

        >>> from networkx.algorithms import bipartite
        >>> G = nx.cycle_graph(4)
        >>> rc = bipartite.node_redundancy(G)
        >>> sum(rc.values()) / len(G)
        1.0

    Compute the average redundancy for a set of nodes::

        >>> from networkx.algorithms import bipartite
        >>> G = nx.cycle_graph(4)
        >>> rc = bipartite.node_redundancy(G)
        >>> nodes = [0, 2]
        >>> sum(rc[n] for n in nodes) / len(nodes)
        1.0

    Raises
    ------
    NetworkXError
        If any of the nodes in the graph (or in `nodes`, if specified) has
        (out-)degree less than two (which would result in division by zero,
        according to the definition of the redundancy coefficient).

    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
       Basic notions for the analysis of large two-mode networks.
       Social Networks 30(1), 31--48.

    """
    if nodes is None:
        nodes = G
    else:
        # Materialize `nodes` so one-shot iterables (e.g. generators) survive
        # both passes below.  Previously a generator was exhausted by the
        # degree check, and the result dict came back silently empty.
        nodes = list(nodes)
    if any(len(G[v]) < 2 for v in nodes):
        raise NetworkXError(
            "Cannot compute redundancy coefficient for a node"
            " that has fewer than two neighbors."
        )
    # TODO This can be trivially parallelized.
    return {v: _node_redundancy(G, v) for v in nodes}
+
+
+def _node_redundancy(G, v):
+    """Returns the redundancy of the node `v` in the bipartite graph `G`.
+
+    If `G` is a graph with `n` nodes, the redundancy of a node is the ratio
+    of the "overlap" of `v` to the maximum possible overlap of `v`
+    according to its degree. The overlap of `v` is the number of pairs of
+    neighbors that have mutual neighbors themselves, other than `v`.
+
+    `v` must have at least two neighbors in `G`.
+
+    """
+    n = len(G[v])
+    overlap = sum(
+        1 for (u, w) in combinations(G[v], 2) if (set(G[u]) & set(G[w])) - {v}
+    )
+    return (2 * overlap) / (n * (n - 1))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/spectral.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/spectral.py
new file mode 100644
index 00000000..cb9388f6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/spectral.py
@@ -0,0 +1,69 @@
+"""
+Spectral bipartivity measure.
+"""
+
+import networkx as nx
+
+__all__ = ["spectral_bipartivity"]
+
+
@nx._dispatchable(edge_attrs="weight")
def spectral_bipartivity(G, nodes=None, weight="weight"):
    """Returns the spectral bipartivity.

    Parameters
    ----------
    G : NetworkX graph

    nodes : list or container  optional(default is all nodes)
      Nodes to return value of spectral bipartivity contribution.

    weight : string or None  optional (default = 'weight')
      Edge data key to use for edge weights. If None, weights set to 1.

    Returns
    -------
    sb : float or dict
       A single number if the keyword nodes is not specified, or
       a dictionary keyed by node with the spectral bipartivity contribution
       of that node as the value.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> bipartite.spectral_bipartivity(G)
    1.0

    Notes
    -----
    This implementation uses Numpy (dense) matrices which are not efficient
    for storing large sparse graphs.

    See Also
    --------
    color

    References
    ----------
    .. [1] E. Estrada and J. A. Rodríguez-Velázquez, "Spectral measures of
       bipartivity in complex networks", PhysRev E 72, 046105 (2005)
    """
    import scipy as sp

    # Fix a node ordering so matrix indices map back to graph nodes.
    nodelist = list(G)
    A = nx.to_numpy_array(G, nodelist, weight=weight)
    exp_pos = sp.linalg.expm(A)
    exp_neg = sp.linalg.expm(-A)
    # cosh(A) = (e^A + e^-A) / 2 captures the even (closed-walk) terms.
    cosh_A = 0.5 * (exp_pos + exp_neg)
    if nodes is None:
        # Single scalar for the whole graph.
        return float(cosh_A.diagonal().sum() / exp_pos.diagonal().sum())
    # Per-node contributions for the requested nodes only.
    position = {node: idx for idx, node in enumerate(nodelist)}
    result = {}
    for node in nodes:
        idx = position[node]
        result[node] = cosh_A.item(idx, idx) / exp_pos.item(idx, idx)
    return result
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_basic.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_basic.py
new file mode 100644
index 00000000..655506b4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_basic.py
@@ -0,0 +1,125 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms import bipartite
+
+
class TestBipartiteBasic:
    """Unit tests for basic bipartite predicates, partitions, and matrices."""

    def test_is_bipartite(self):
        # Path graphs and single-edge digraphs are bipartite; odd cycles are not.
        assert bipartite.is_bipartite(nx.path_graph(4))
        assert bipartite.is_bipartite(nx.DiGraph([(1, 0)]))
        assert not bipartite.is_bipartite(nx.complete_graph(3))

    def test_bipartite_color(self):
        # A 2-coloring of P4 must alternate along the path.
        G = nx.path_graph(4)
        c = bipartite.color(G)
        assert c == {0: 1, 1: 0, 2: 1, 3: 0}

    def test_not_bipartite_color(self):
        # K4 contains odd cycles, so 2-coloring must fail.
        with pytest.raises(nx.NetworkXError):
            c = bipartite.color(nx.complete_graph(4))

    def test_bipartite_directed(self):
        # Random bipartite generators must produce bipartite digraphs.
        G = bipartite.random_graph(10, 10, 0.1, directed=True)
        assert bipartite.is_bipartite(G)

    def test_bipartite_sets(self):
        G = nx.path_graph(4)
        X, Y = bipartite.sets(G)
        assert X == {0, 2}
        assert Y == {1, 3}

    def test_bipartite_sets_directed(self):
        # Partitioning must ignore edge direction.
        G = nx.path_graph(4)
        D = G.to_directed()
        X, Y = bipartite.sets(D)
        assert X == {0, 2}
        assert Y == {1, 3}

    def test_bipartite_sets_given_top_nodes(self):
        # Explicit top nodes pin which side is returned first.
        G = nx.path_graph(4)
        top_nodes = [0, 2]
        X, Y = bipartite.sets(G, top_nodes)
        assert X == {0, 2}
        assert Y == {1, 3}

    def test_bipartite_sets_disconnected(self):
        # Without top_nodes a disconnected graph has ambiguous partitions.
        with pytest.raises(nx.AmbiguousSolution):
            G = nx.path_graph(4)
            G.add_edges_from([(5, 6), (6, 7)])
            X, Y = bipartite.sets(G)

    def test_is_bipartite_node_set(self):
        G = nx.path_graph(4)

        # Duplicate entries in the candidate node set are rejected.
        with pytest.raises(nx.AmbiguousSolution):
            bipartite.is_bipartite_node_set(G, [1, 1, 2, 3])

        assert bipartite.is_bipartite_node_set(G, [0, 2])
        assert bipartite.is_bipartite_node_set(G, [1, 3])
        assert not bipartite.is_bipartite_node_set(G, [1, 2])
        # A disconnected component can join either side of the partition.
        G.add_edge(10, 20)
        assert bipartite.is_bipartite_node_set(G, [0, 2, 10])
        assert bipartite.is_bipartite_node_set(G, [0, 2, 20])
        assert bipartite.is_bipartite_node_set(G, [1, 3, 10])
        assert bipartite.is_bipartite_node_set(G, [1, 3, 20])

    def test_bipartite_density(self):
        G = nx.path_graph(5)
        X, Y = bipartite.sets(G)
        # Bipartite density normalizes by |X| * |Y|, not n*(n-1)/2.
        density = len(list(G.edges())) / (len(X) * len(Y))
        assert bipartite.density(G, X) == density
        # Directed version counts each possible arc, halving the density.
        D = nx.DiGraph(G.edges())
        assert bipartite.density(D, X) == density / 2.0
        assert bipartite.density(nx.Graph(), {}) == 0.0

    def test_bipartite_degrees(self):
        G = nx.path_graph(5)
        X = {1, 3}
        Y = {0, 2, 4}
        u, d = bipartite.degrees(G, Y)
        assert dict(u) == {1: 2, 3: 2}
        assert dict(d) == {0: 1, 2: 2, 4: 1}

    def test_bipartite_weighted_degrees(self):
        G = nx.path_graph(5)
        # Override edge (0, 1) with two distinct weight attributes.
        G.add_edge(0, 1, weight=0.1, other=0.2)
        X = {1, 3}
        Y = {0, 2, 4}
        u, d = bipartite.degrees(G, Y, weight="weight")
        assert dict(u) == {1: 1.1, 3: 2}
        assert dict(d) == {0: 0.1, 2: 2, 4: 1}
        u, d = bipartite.degrees(G, Y, weight="other")
        assert dict(u) == {1: 1.2, 3: 2}
        assert dict(d) == {0: 0.2, 2: 2, 4: 1}

    def test_biadjacency_matrix_weight(self):
        # biadjacency_matrix returns a scipy sparse matrix.
        pytest.importorskip("scipy")
        G = nx.path_graph(5)
        G.add_edge(0, 1, weight=2, other=4)
        X = [1, 3]
        Y = [0, 2, 4]
        M = bipartite.biadjacency_matrix(G, X, weight="weight")
        assert M[0, 0] == 2
        M = bipartite.biadjacency_matrix(G, X, weight="other")
        assert M[0, 0] == 4

    def test_biadjacency_matrix(self):
        pytest.importorskip("scipy")
        # Matrix shape must be (|top|, |bottom|) for several graph sizes.
        tops = [2, 5, 10]
        bots = [5, 10, 15]
        for i in range(len(tops)):
            G = bipartite.random_graph(tops[i], bots[i], 0.2)
            top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]
            M = bipartite.biadjacency_matrix(G, top)
            assert M.shape[0] == tops[i]
            assert M.shape[1] == bots[i]

    def test_biadjacency_matrix_order(self):
        pytest.importorskip("scipy")
        # Row/column order must follow the given X and Y node orderings.
        G = nx.path_graph(5)
        G.add_edge(0, 1, weight=2)
        X = [3, 1]
        Y = [4, 2, 0]
        M = bipartite.biadjacency_matrix(G, X, Y, weight="weight")
        assert M[1, 2] == 2
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_centrality.py
new file mode 100644
index 00000000..19fb5d11
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_centrality.py
@@ -0,0 +1,192 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms import bipartite
+
+
+class TestBipartiteCentrality:
+    @classmethod
+    def setup_class(cls):
+        cls.P4 = nx.path_graph(4)
+        cls.K3 = nx.complete_bipartite_graph(3, 3)
+        cls.C4 = nx.cycle_graph(4)
+        cls.davis = nx.davis_southern_women_graph()
+        cls.top_nodes = [
+            n for n, d in cls.davis.nodes(data=True) if d["bipartite"] == 0
+        ]
+
+    def test_degree_centrality(self):
+        d = bipartite.degree_centrality(self.P4, [1, 3])
+        answer = {0: 0.5, 1: 1.0, 2: 1.0, 3: 0.5}
+        assert d == answer
+        d = bipartite.degree_centrality(self.K3, [0, 1, 2])
+        answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}
+        assert d == answer
+        d = bipartite.degree_centrality(self.C4, [0, 2])
+        answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}
+        assert d == answer
+
+    def test_betweenness_centrality(self):
+        c = bipartite.betweenness_centrality(self.P4, [1, 3])
+        answer = {0: 0.0, 1: 1.0, 2: 1.0, 3: 0.0}
+        assert c == answer
+        c = bipartite.betweenness_centrality(self.K3, [0, 1, 2])
+        answer = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125, 4: 0.125, 5: 0.125}
+        assert c == answer
+        c = bipartite.betweenness_centrality(self.C4, [0, 2])
+        answer = {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}
+        assert c == answer
+
+    def test_closeness_centrality(self):
+        c = bipartite.closeness_centrality(self.P4, [1, 3])
+        answer = {0: 2.0 / 3, 1: 1.0, 2: 1.0, 3: 2.0 / 3}
+        assert c == answer
+        c = bipartite.closeness_centrality(self.K3, [0, 1, 2])
+        answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}
+        assert c == answer
+        c = bipartite.closeness_centrality(self.C4, [0, 2])
+        answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}
+        assert c == answer
+        G = nx.Graph()
+        G.add_node(0)
+        G.add_node(1)
+        c = bipartite.closeness_centrality(G, [0])
+        assert c == {0: 0.0, 1: 0.0}
+        c = bipartite.closeness_centrality(G, [1])
+        assert c == {0: 0.0, 1: 0.0}
+
+    def test_bipartite_closeness_centrality_unconnected(self):
+        G = nx.complete_bipartite_graph(3, 3)
+        G.add_edge(6, 7)
+        c = bipartite.closeness_centrality(G, [0, 2, 4, 6], normalized=False)
+        answer = {
+            0: 10.0 / 7,
+            2: 10.0 / 7,
+            4: 10.0 / 7,
+            6: 10.0,
+            1: 10.0 / 7,
+            3: 10.0 / 7,
+            5: 10.0 / 7,
+            7: 10.0,
+        }
+        assert c == answer
+
+    def test_davis_degree_centrality(self):
+        G = self.davis
+        deg = bipartite.degree_centrality(G, self.top_nodes)
+        answer = {
+            "E8": 0.78,
+            "E9": 0.67,
+            "E7": 0.56,
+            "Nora Fayette": 0.57,
+            "Evelyn Jefferson": 0.57,
+            "Theresa Anderson": 0.57,
+            "E6": 0.44,
+            "Sylvia Avondale": 0.50,
+            "Laura Mandeville": 0.50,
+            "Brenda Rogers": 0.50,
+            "Katherina Rogers": 0.43,
+            "E5": 0.44,
+            "Helen Lloyd": 0.36,
+            "E3": 0.33,
+            "Ruth DeSand": 0.29,
+            "Verne Sanderson": 0.29,
+            "E12": 0.33,
+            "Myra Liddel": 0.29,
+            "E11": 0.22,
+            "Eleanor Nye": 0.29,
+            "Frances Anderson": 0.29,
+            "Pearl Oglethorpe": 0.21,
+            "E4": 0.22,
+            "Charlotte McDowd": 0.29,
+            "E10": 0.28,
+            "Olivia Carleton": 0.14,
+            "Flora Price": 0.14,
+            "E2": 0.17,
+            "E1": 0.17,
+            "Dorothy Murchison": 0.14,
+            "E13": 0.17,
+            "E14": 0.17,
+        }
+        for node, value in answer.items():
+            assert value == pytest.approx(deg[node], abs=1e-2)
+
+    def test_davis_betweenness_centrality(self):
+        G = self.davis
+        bet = bipartite.betweenness_centrality(G, self.top_nodes)
+        answer = {
+            "E8": 0.24,
+            "E9": 0.23,
+            "E7": 0.13,
+            "Nora Fayette": 0.11,
+            "Evelyn Jefferson": 0.10,
+            "Theresa Anderson": 0.09,
+            "E6": 0.07,
+            "Sylvia Avondale": 0.07,
+            "Laura Mandeville": 0.05,
+            "Brenda Rogers": 0.05,
+            "Katherina Rogers": 0.05,
+            "E5": 0.04,
+            "Helen Lloyd": 0.04,
+            "E3": 0.02,
+            "Ruth DeSand": 0.02,
+            "Verne Sanderson": 0.02,
+            "E12": 0.02,
+            "Myra Liddel": 0.02,
+            "E11": 0.02,
+            "Eleanor Nye": 0.01,
+            "Frances Anderson": 0.01,
+            "Pearl Oglethorpe": 0.01,
+            "E4": 0.01,
+            "Charlotte McDowd": 0.01,
+            "E10": 0.01,
+            "Olivia Carleton": 0.01,
+            "Flora Price": 0.01,
+            "E2": 0.00,
+            "E1": 0.00,
+            "Dorothy Murchison": 0.00,
+            "E13": 0.00,
+            "E14": 0.00,
+        }
+        for node, value in answer.items():
+            assert value == pytest.approx(bet[node], abs=1e-2)
+
+    def test_davis_closeness_centrality(self):
+        G = self.davis
+        clos = bipartite.closeness_centrality(G, self.top_nodes)
+        answer = {
+            "E8": 0.85,
+            "E9": 0.79,
+            "E7": 0.73,
+            "Nora Fayette": 0.80,
+            "Evelyn Jefferson": 0.80,
+            "Theresa Anderson": 0.80,
+            "E6": 0.69,
+            "Sylvia Avondale": 0.77,
+            "Laura Mandeville": 0.73,
+            "Brenda Rogers": 0.73,
+            "Katherina Rogers": 0.73,
+            "E5": 0.59,
+            "Helen Lloyd": 0.73,
+            "E3": 0.56,
+            "Ruth DeSand": 0.71,
+            "Verne Sanderson": 0.71,
+            "E12": 0.56,
+            "Myra Liddel": 0.69,
+            "E11": 0.54,
+            "Eleanor Nye": 0.67,
+            "Frances Anderson": 0.67,
+            "Pearl Oglethorpe": 0.67,
+            "E4": 0.54,
+            "Charlotte McDowd": 0.60,
+            "E10": 0.55,
+            "Olivia Carleton": 0.59,
+            "Flora Price": 0.59,
+            "E2": 0.52,
+            "E1": 0.52,
+            "Dorothy Murchison": 0.65,
+            "E13": 0.52,
+            "E14": 0.52,
+        }
+        for node, value in answer.items():
+            assert value == pytest.approx(clos[node], abs=1e-2)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_cluster.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_cluster.py
new file mode 100644
index 00000000..72e2dbad
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_cluster.py
@@ -0,0 +1,84 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms import bipartite
+from networkx.algorithms.bipartite.cluster import cc_dot, cc_max, cc_min
+
+
+def test_pairwise_bipartite_cc_functions():
+    # Test functions for different kinds of bipartite clustering coefficients
+    # between pairs of nodes using 3 example graphs from figure 5 p. 40
+    # Latapy et al (2008)
+    G1 = nx.Graph([(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7)])
+    G2 = nx.Graph([(0, 2), (0, 3), (0, 4), (1, 3), (1, 4), (1, 5)])
+    G3 = nx.Graph(
+        [(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7), (1, 8), (1, 9)]
+    )
+    result = {
+        0: [1 / 3.0, 2 / 3.0, 2 / 5.0],
+        1: [1 / 2.0, 2 / 3.0, 2 / 3.0],
+        2: [2 / 8.0, 2 / 5.0, 2 / 5.0],
+    }
+    for i, G in enumerate([G1, G2, G3]):
+        assert bipartite.is_bipartite(G)
+        assert cc_dot(set(G[0]), set(G[1])) == result[i][0]
+        assert cc_min(set(G[0]), set(G[1])) == result[i][1]
+        assert cc_max(set(G[0]), set(G[1])) == result[i][2]
+
+
+def test_star_graph():
+    G = nx.star_graph(3)
+    # all modes are the same
+    answer = {0: 0, 1: 1, 2: 1, 3: 1}
+    assert bipartite.clustering(G, mode="dot") == answer
+    assert bipartite.clustering(G, mode="min") == answer
+    assert bipartite.clustering(G, mode="max") == answer
+
+
+def test_not_bipartite():
+    with pytest.raises(nx.NetworkXError):
+        bipartite.clustering(nx.complete_graph(4))
+
+
+def test_bad_mode():
+    with pytest.raises(nx.NetworkXError):
+        bipartite.clustering(nx.path_graph(4), mode="foo")
+
+
+def test_path_graph():
+    G = nx.path_graph(4)
+    answer = {0: 0.5, 1: 0.5, 2: 0.5, 3: 0.5}
+    assert bipartite.clustering(G, mode="dot") == answer
+    assert bipartite.clustering(G, mode="max") == answer
+    answer = {0: 1, 1: 1, 2: 1, 3: 1}
+    assert bipartite.clustering(G, mode="min") == answer
+
+
+def test_average_path_graph():
+    G = nx.path_graph(4)
+    assert bipartite.average_clustering(G, mode="dot") == 0.5
+    assert bipartite.average_clustering(G, mode="max") == 0.5
+    assert bipartite.average_clustering(G, mode="min") == 1
+
+
+def test_ra_clustering_davis():
+    G = nx.davis_southern_women_graph()
+    cc4 = round(bipartite.robins_alexander_clustering(G), 3)
+    assert cc4 == 0.468
+
+
+def test_ra_clustering_square():
+    G = nx.path_graph(4)
+    G.add_edge(0, 3)
+    assert bipartite.robins_alexander_clustering(G) == 1.0
+
+
+def test_ra_clustering_zero():
+    G = nx.Graph()
+    assert bipartite.robins_alexander_clustering(G) == 0
+    G.add_nodes_from(range(4))
+    assert bipartite.robins_alexander_clustering(G) == 0
+    G.add_edges_from([(0, 1), (2, 3), (3, 4)])
+    assert bipartite.robins_alexander_clustering(G) == 0
+    G.add_edge(1, 2)
+    assert bipartite.robins_alexander_clustering(G) == 0
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_covering.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_covering.py
new file mode 100644
index 00000000..9507e134
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_covering.py
@@ -0,0 +1,33 @@
+import networkx as nx
+from networkx.algorithms import bipartite
+
+
+class TestMinEdgeCover:
+    """Tests for :func:`networkx.algorithms.bipartite.min_edge_cover`"""
+
+    def test_empty_graph(self):
+        G = nx.Graph()
+        assert bipartite.min_edge_cover(G) == set()
+
+    def test_graph_single_edge(self):
+        G = nx.Graph()
+        G.add_edge(0, 1)
+        assert bipartite.min_edge_cover(G) == {(0, 1), (1, 0)}
+
+    def test_bipartite_default(self):
+        G = nx.Graph()
+        G.add_nodes_from([1, 2, 3, 4], bipartite=0)
+        G.add_nodes_from(["a", "b", "c"], bipartite=1)
+        G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
+        min_cover = bipartite.min_edge_cover(G)
+        assert nx.is_edge_cover(G, min_cover)
+        assert len(min_cover) == 8
+
+    def test_bipartite_explicit(self):
+        G = nx.Graph()
+        G.add_nodes_from([1, 2, 3, 4], bipartite=0)
+        G.add_nodes_from(["a", "b", "c"], bipartite=1)
+        G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
+        min_cover = bipartite.min_edge_cover(G, bipartite.eppstein_matching)
+        assert nx.is_edge_cover(G, min_cover)
+        assert len(min_cover) == 8
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_edgelist.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_edgelist.py
new file mode 100644
index 00000000..66be8a2f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_edgelist.py
@@ -0,0 +1,240 @@
+"""
+Unit tests for bipartite edgelists.
+"""
+
+import io
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms import bipartite
+from networkx.utils import edges_equal, graphs_equal, nodes_equal
+
+
+class TestEdgelist:
+    @classmethod
+    def setup_class(cls):
+        cls.G = nx.Graph(name="test")
+        e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")]
+        cls.G.add_edges_from(e)
+        cls.G.add_nodes_from(["a", "c", "e"], bipartite=0)
+        cls.G.add_nodes_from(["b", "d", "f"], bipartite=1)
+        cls.G.add_node("g", bipartite=0)
+        cls.DG = nx.DiGraph(cls.G)
+        cls.MG = nx.MultiGraph()
+        cls.MG.add_edges_from([(1, 2), (1, 2), (1, 2)])
+        cls.MG.add_node(1, bipartite=0)
+        cls.MG.add_node(2, bipartite=1)
+
+    def test_read_edgelist_1(self):
+        s = b"""\
+# comment line
+1 2
+# comment line
+2 3
+"""
+        bytesIO = io.BytesIO(s)
+        G = bipartite.read_edgelist(bytesIO, nodetype=int)
+        assert edges_equal(G.edges(), [(1, 2), (2, 3)])
+
+    def test_read_edgelist_3(self):
+        s = b"""\
+# comment line
+1 2 {'weight':2.0}
+# comment line
+2 3 {'weight':3.0}
+"""
+        bytesIO = io.BytesIO(s)
+        G = bipartite.read_edgelist(bytesIO, nodetype=int, data=False)
+        assert edges_equal(G.edges(), [(1, 2), (2, 3)])
+
+        bytesIO = io.BytesIO(s)
+        G = bipartite.read_edgelist(bytesIO, nodetype=int, data=True)
+        assert edges_equal(
+            G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})]
+        )
+
+    def test_write_edgelist_1(self):
+        fh = io.BytesIO()
+        G = nx.Graph()
+        G.add_edges_from([(1, 2), (2, 3)])
+        G.add_node(1, bipartite=0)
+        G.add_node(2, bipartite=1)
+        G.add_node(3, bipartite=0)
+        bipartite.write_edgelist(G, fh, data=False)
+        fh.seek(0)
+        assert fh.read() == b"1 2\n3 2\n"
+
+    def test_write_edgelist_2(self):
+        fh = io.BytesIO()
+        G = nx.Graph()
+        G.add_edges_from([(1, 2), (2, 3)])
+        G.add_node(1, bipartite=0)
+        G.add_node(2, bipartite=1)
+        G.add_node(3, bipartite=0)
+        bipartite.write_edgelist(G, fh, data=True)
+        fh.seek(0)
+        assert fh.read() == b"1 2 {}\n3 2 {}\n"
+
+    def test_write_edgelist_3(self):
+        fh = io.BytesIO()
+        G = nx.Graph()
+        G.add_edge(1, 2, weight=2.0)
+        G.add_edge(2, 3, weight=3.0)
+        G.add_node(1, bipartite=0)
+        G.add_node(2, bipartite=1)
+        G.add_node(3, bipartite=0)
+        bipartite.write_edgelist(G, fh, data=True)
+        fh.seek(0)
+        assert fh.read() == b"1 2 {'weight': 2.0}\n3 2 {'weight': 3.0}\n"
+
+    def test_write_edgelist_4(self):
+        fh = io.BytesIO()
+        G = nx.Graph()
+        G.add_edge(1, 2, weight=2.0)
+        G.add_edge(2, 3, weight=3.0)
+        G.add_node(1, bipartite=0)
+        G.add_node(2, bipartite=1)
+        G.add_node(3, bipartite=0)
+        bipartite.write_edgelist(G, fh, data=[("weight")])
+        fh.seek(0)
+        assert fh.read() == b"1 2 2.0\n3 2 3.0\n"
+
+    def test_unicode(self, tmp_path):
+        G = nx.Graph()
+        name1 = chr(2344) + chr(123) + chr(6543)
+        name2 = chr(5543) + chr(1543) + chr(324)
+        G.add_edge(name1, "Radiohead", **{name2: 3})
+        G.add_node(name1, bipartite=0)
+        G.add_node("Radiohead", bipartite=1)
+
+        fname = tmp_path / "edgelist.txt"
+        bipartite.write_edgelist(G, fname)
+        H = bipartite.read_edgelist(fname)
+        assert graphs_equal(G, H)
+
+    def test_latin1_issue(self, tmp_path):
+        G = nx.Graph()
+        name1 = chr(2344) + chr(123) + chr(6543)
+        name2 = chr(5543) + chr(1543) + chr(324)
+        G.add_edge(name1, "Radiohead", **{name2: 3})
+        G.add_node(name1, bipartite=0)
+        G.add_node("Radiohead", bipartite=1)
+
+        fname = tmp_path / "edgelist.txt"
+        with pytest.raises(UnicodeEncodeError):
+            bipartite.write_edgelist(G, fname, encoding="latin-1")
+
+    def test_latin1(self, tmp_path):
+        G = nx.Graph()
+        name1 = "Bj" + chr(246) + "rk"
+        name2 = chr(220) + "ber"
+        G.add_edge(name1, "Radiohead", **{name2: 3})
+        G.add_node(name1, bipartite=0)
+        G.add_node("Radiohead", bipartite=1)
+
+        fname = tmp_path / "edgelist.txt"
+        bipartite.write_edgelist(G, fname, encoding="latin-1")
+        H = bipartite.read_edgelist(fname, encoding="latin-1")
+        assert graphs_equal(G, H)
+
+    def test_edgelist_graph(self, tmp_path):
+        G = self.G
+        fname = tmp_path / "edgelist.txt"
+        bipartite.write_edgelist(G, fname)
+        H = bipartite.read_edgelist(fname)
+        H2 = bipartite.read_edgelist(fname)
+        assert H is not H2  # they should be different graphs
+        G.remove_node("g")  # isolated nodes are not written in edgelist
+        assert nodes_equal(list(H), list(G))
+        assert edges_equal(list(H.edges()), list(G.edges()))
+
+    def test_edgelist_integers(self, tmp_path):
+        G = nx.convert_node_labels_to_integers(self.G)
+        fname = tmp_path / "edgelist.txt"
+        bipartite.write_edgelist(G, fname)
+        H = bipartite.read_edgelist(fname, nodetype=int)
+        # isolated nodes are not written in edgelist
+        G.remove_nodes_from(list(nx.isolates(G)))
+        assert nodes_equal(list(H), list(G))
+        assert edges_equal(list(H.edges()), list(G.edges()))
+
+    def test_edgelist_multigraph(self, tmp_path):
+        G = self.MG
+        fname = tmp_path / "edgelist.txt"
+        bipartite.write_edgelist(G, fname)
+        H = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph())
+        H2 = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph())
+        assert H is not H2  # they should be different graphs
+        assert nodes_equal(list(H), list(G))
+        assert edges_equal(list(H.edges()), list(G.edges()))
+
+    def test_empty_digraph(self):
+        with pytest.raises(nx.NetworkXNotImplemented):
+            bytesIO = io.BytesIO()
+            bipartite.write_edgelist(nx.DiGraph(), bytesIO)
+
+    def test_raise_attribute(self):
+        with pytest.raises(AttributeError):
+            G = nx.path_graph(4)
+            bytesIO = io.BytesIO()
+            bipartite.write_edgelist(G, bytesIO)
+
+    def test_parse_edgelist(self):
+        """Tests for conditions specific to
+        parse_edge_list method"""
+
+        # ignore strings of length less than 2
+        lines = ["1 2", "2 3", "3 1", "4", " "]
+        G = bipartite.parse_edgelist(lines, nodetype=int)
+        assert list(G.nodes) == [1, 2, 3]
+
+        # Exception raised when node is not convertible
+        # to specified data type
+        with pytest.raises(TypeError, match=".*Failed to convert nodes"):
+            lines = ["a b", "b c", "c a"]
+            G = bipartite.parse_edgelist(lines, nodetype=int)
+
+        # Exception raised when format of data is not
+        # convertible to dictionary object
+        with pytest.raises(TypeError, match=".*Failed to convert edge data"):
+            lines = ["1 2 3", "2 3 4", "3 1 2"]
+            G = bipartite.parse_edgelist(lines, nodetype=int)
+
+        # Exception raised when edge data and data
+        # keys are not of same length
+        with pytest.raises(IndexError):
+            lines = ["1 2 3 4", "2 3 4"]
+            G = bipartite.parse_edgelist(
+                lines, nodetype=int, data=[("weight", int), ("key", int)]
+            )
+
+        # Exception raised when edge data is not
+        # convertible to specified data type
+        with pytest.raises(TypeError, match=".*Failed to convert key data"):
+            lines = ["1 2 3 a", "2 3 4 b"]
+            G = bipartite.parse_edgelist(
+                lines, nodetype=int, data=[("weight", int), ("key", int)]
+            )
+
+
+def test_bipartite_edgelist_consistent_strip_handling():
+    """See gh-7462
+
+    Input when printed looks like:
+
+        A       B       interaction     2
+        B       C       interaction     4
+        C       A       interaction
+
+    Note the trailing \\t in the last line, which indicates the existence of
+    an empty data field.
+    """
+    lines = io.StringIO(
+        "A\tB\tinteraction\t2\nB\tC\tinteraction\t4\nC\tA\tinteraction\t"
+    )
+    descr = [("type", str), ("weight", str)]
+    # Should not raise
+    G = nx.bipartite.parse_edgelist(lines, delimiter="\t", data=descr)
+    expected = [("A", "B", "2"), ("A", "C", ""), ("B", "C", "4")]
+    assert sorted(G.edges(data="weight")) == expected
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_extendability.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_extendability.py
new file mode 100644
index 00000000..17b71243
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_extendability.py
@@ -0,0 +1,334 @@
+import pytest
+
+import networkx as nx
+
+
+def test_selfloops_raises():
+    G = nx.ladder_graph(3)
+    G.add_edge(0, 0)
+    with pytest.raises(nx.NetworkXError, match=".*not bipartite"):
+        nx.bipartite.maximal_extendability(G)
+
+
+def test_disconnected_raises():
+    G = nx.ladder_graph(3)
+    G.add_node("a")
+    with pytest.raises(nx.NetworkXError, match=".*not connected"):
+        nx.bipartite.maximal_extendability(G)
+
+
+def test_not_bipartite_raises():
+    G = nx.complete_graph(5)
+    with pytest.raises(nx.NetworkXError, match=".*not bipartite"):
+        nx.bipartite.maximal_extendability(G)
+
+
+def test_no_perfect_matching_raises():
+    G = nx.Graph([(0, 1), (0, 2)])
+    with pytest.raises(nx.NetworkXError, match=".*not contain a perfect matching"):
+        nx.bipartite.maximal_extendability(G)
+
+
+def test_residual_graph_not_strongly_connected_raises():
+    G = nx.Graph([(1, 2), (2, 3), (3, 4)])
+    with pytest.raises(
+        nx.NetworkXError, match="The residual graph of G is not strongly connected"
+    ):
+        nx.bipartite.maximal_extendability(G)
+
+
+def test_ladder_graph_is_1():
+    G = nx.ladder_graph(3)
+    assert nx.bipartite.maximal_extendability(G) == 1
+
+
+def test_cubical_graph_is_2():
+    G = nx.cubical_graph()
+    assert nx.bipartite.maximal_extendability(G) == 2
+
+
+def test_k_is_3():
+    G = nx.Graph(
+        [
+            (1, 6),
+            (1, 7),
+            (1, 8),
+            (1, 9),
+            (2, 6),
+            (2, 7),
+            (2, 8),
+            (2, 10),
+            (3, 6),
+            (3, 8),
+            (3, 9),
+            (3, 10),
+            (4, 7),
+            (4, 8),
+            (4, 9),
+            (4, 10),
+            (5, 6),
+            (5, 7),
+            (5, 9),
+            (5, 10),
+        ]
+    )
+    assert nx.bipartite.maximal_extendability(G) == 3
+
+
+def test_k_is_4():
+    G = nx.Graph(
+        [
+            (8, 1),
+            (8, 2),
+            (8, 3),
+            (8, 4),
+            (8, 5),
+            (9, 1),
+            (9, 2),
+            (9, 3),
+            (9, 4),
+            (9, 7),
+            (10, 1),
+            (10, 2),
+            (10, 3),
+            (10, 4),
+            (10, 6),
+            (11, 1),
+            (11, 2),
+            (11, 5),
+            (11, 6),
+            (11, 7),
+            (12, 1),
+            (12, 3),
+            (12, 5),
+            (12, 6),
+            (12, 7),
+            (13, 2),
+            (13, 4),
+            (13, 5),
+            (13, 6),
+            (13, 7),
+            (14, 3),
+            (14, 4),
+            (14, 5),
+            (14, 6),
+            (14, 7),
+        ]
+    )
+    assert nx.bipartite.maximal_extendability(G) == 4
+
+
+def test_k_is_5():
+    G = nx.Graph(
+        [
+            (8, 1),
+            (8, 2),
+            (8, 3),
+            (8, 4),
+            (8, 5),
+            (8, 6),
+            (9, 1),
+            (9, 2),
+            (9, 3),
+            (9, 4),
+            (9, 5),
+            (9, 7),
+            (10, 1),
+            (10, 2),
+            (10, 3),
+            (10, 4),
+            (10, 6),
+            (10, 7),
+            (11, 1),
+            (11, 2),
+            (11, 3),
+            (11, 5),
+            (11, 6),
+            (11, 7),
+            (12, 1),
+            (12, 2),
+            (12, 4),
+            (12, 5),
+            (12, 6),
+            (12, 7),
+            (13, 1),
+            (13, 3),
+            (13, 4),
+            (13, 5),
+            (13, 6),
+            (13, 7),
+            (14, 2),
+            (14, 3),
+            (14, 4),
+            (14, 5),
+            (14, 6),
+            (14, 7),
+        ]
+    )
+    assert nx.bipartite.maximal_extendability(G) == 5
+
+
+def test_k_is_6():
+    G = nx.Graph(
+        [
+            (9, 1),
+            (9, 2),
+            (9, 3),
+            (9, 4),
+            (9, 5),
+            (9, 6),
+            (9, 7),
+            (10, 1),
+            (10, 2),
+            (10, 3),
+            (10, 4),
+            (10, 5),
+            (10, 6),
+            (10, 8),
+            (11, 1),
+            (11, 2),
+            (11, 3),
+            (11, 4),
+            (11, 5),
+            (11, 7),
+            (11, 8),
+            (12, 1),
+            (12, 2),
+            (12, 3),
+            (12, 4),
+            (12, 6),
+            (12, 7),
+            (12, 8),
+            (13, 1),
+            (13, 2),
+            (13, 3),
+            (13, 5),
+            (13, 6),
+            (13, 7),
+            (13, 8),
+            (14, 1),
+            (14, 2),
+            (14, 4),
+            (14, 5),
+            (14, 6),
+            (14, 7),
+            (14, 8),
+            (15, 1),
+            (15, 3),
+            (15, 4),
+            (15, 5),
+            (15, 6),
+            (15, 7),
+            (15, 8),
+            (16, 2),
+            (16, 3),
+            (16, 4),
+            (16, 5),
+            (16, 6),
+            (16, 7),
+            (16, 8),
+        ]
+    )
+    assert nx.bipartite.maximal_extendability(G) == 6
+
+
+def test_k_is_7():
+    G = nx.Graph(
+        [
+            (1, 11),
+            (1, 12),
+            (1, 13),
+            (1, 14),
+            (1, 15),
+            (1, 16),
+            (1, 17),
+            (1, 18),
+            (2, 11),
+            (2, 12),
+            (2, 13),
+            (2, 14),
+            (2, 15),
+            (2, 16),
+            (2, 17),
+            (2, 19),
+            (3, 11),
+            (3, 12),
+            (3, 13),
+            (3, 14),
+            (3, 15),
+            (3, 16),
+            (3, 17),
+            (3, 20),
+            (4, 11),
+            (4, 12),
+            (4, 13),
+            (4, 14),
+            (4, 15),
+            (4, 16),
+            (4, 17),
+            (4, 18),
+            (4, 19),
+            (4, 20),
+            (5, 11),
+            (5, 12),
+            (5, 13),
+            (5, 14),
+            (5, 15),
+            (5, 16),
+            (5, 17),
+            (5, 18),
+            (5, 19),
+            (5, 20),
+            (6, 11),
+            (6, 12),
+            (6, 13),
+            (6, 14),
+            (6, 15),
+            (6, 16),
+            (6, 17),
+            (6, 18),
+            (6, 19),
+            (6, 20),
+            (7, 11),
+            (7, 12),
+            (7, 13),
+            (7, 14),
+            (7, 15),
+            (7, 16),
+            (7, 17),
+            (7, 18),
+            (7, 19),
+            (7, 20),
+            (8, 11),
+            (8, 12),
+            (8, 13),
+            (8, 14),
+            (8, 15),
+            (8, 16),
+            (8, 17),
+            (8, 18),
+            (8, 19),
+            (8, 20),
+            (9, 11),
+            (9, 12),
+            (9, 13),
+            (9, 14),
+            (9, 15),
+            (9, 16),
+            (9, 17),
+            (9, 18),
+            (9, 19),
+            (9, 20),
+            (10, 11),
+            (10, 12),
+            (10, 13),
+            (10, 14),
+            (10, 15),
+            (10, 16),
+            (10, 17),
+            (10, 18),
+            (10, 19),
+            (10, 20),
+        ]
+    )
+    assert nx.bipartite.maximal_extendability(G) == 7
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_generators.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_generators.py
new file mode 100644
index 00000000..8b1e7793
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_generators.py
@@ -0,0 +1,409 @@
+import numbers
+
+import pytest
+
+import networkx as nx
+
+from ..generators import (
+    alternating_havel_hakimi_graph,
+    complete_bipartite_graph,
+    configuration_model,
+    gnmk_random_graph,
+    havel_hakimi_graph,
+    preferential_attachment_graph,
+    random_graph,
+    reverse_havel_hakimi_graph,
+)
+
+"""
+Generators - Bipartite
+----------------------
+"""
+
+
+class TestGeneratorsBipartite:
+    def test_complete_bipartite_graph(self):
+        G = complete_bipartite_graph(0, 0)
+        assert nx.is_isomorphic(G, nx.null_graph())
+
+        for i in [1, 5]:
+            G = complete_bipartite_graph(i, 0)
+            assert nx.is_isomorphic(G, nx.empty_graph(i))
+            G = complete_bipartite_graph(0, i)
+            assert nx.is_isomorphic(G, nx.empty_graph(i))
+
+        G = complete_bipartite_graph(2, 2)
+        assert nx.is_isomorphic(G, nx.cycle_graph(4))
+
+        G = complete_bipartite_graph(1, 5)
+        assert nx.is_isomorphic(G, nx.star_graph(5))
+
+        G = complete_bipartite_graph(5, 1)
+        assert nx.is_isomorphic(G, nx.star_graph(5))
+
+        # complete_bipartite_graph(m1,m2) is a connected graph with
+        # m1+m2 nodes and  m1*m2 edges
+        for m1, m2 in [(5, 11), (7, 3)]:
+            G = complete_bipartite_graph(m1, m2)
+            assert nx.number_of_nodes(G) == m1 + m2
+            assert nx.number_of_edges(G) == m1 * m2
+
+        with pytest.raises(nx.NetworkXError):
+            complete_bipartite_graph(7, 3, create_using=nx.DiGraph)
+        with pytest.raises(nx.NetworkXError):
+            complete_bipartite_graph(7, 3, create_using=nx.MultiDiGraph)
+
+        mG = complete_bipartite_graph(7, 3, create_using=nx.MultiGraph)
+        assert mG.is_multigraph()
+        assert sorted(mG.edges()) == sorted(G.edges())
+
+        mG = complete_bipartite_graph(7, 3, create_using=nx.MultiGraph)
+        assert mG.is_multigraph()
+        assert sorted(mG.edges()) == sorted(G.edges())
+
+        mG = complete_bipartite_graph(7, 3)  # default to Graph
+        assert sorted(mG.edges()) == sorted(G.edges())
+        assert not mG.is_multigraph()
+        assert not mG.is_directed()
+
+        # specify nodes rather than number of nodes
+        for n1, n2 in [([1, 2], "ab"), (3, 2), (3, "ab"), ("ab", 3)]:
+            G = complete_bipartite_graph(n1, n2)
+            if isinstance(n1, numbers.Integral):
+                if isinstance(n2, numbers.Integral):
+                    n2 = range(n1, n1 + n2)
+                n1 = range(n1)
+            elif isinstance(n2, numbers.Integral):
+                n2 = range(n2)
+            edges = {(u, v) for u in n1 for v in n2}
+            assert edges == set(G.edges)
+            assert G.size() == len(edges)
+
+        # raise when node sets are not distinct
+        for n1, n2 in [([1, 2], 3), (3, [1, 2]), ("abc", "bcd")]:
+            pytest.raises(nx.NetworkXError, complete_bipartite_graph, n1, n2)
+
+    def test_configuration_model(self):
+        aseq = []
+        bseq = []
+        G = configuration_model(aseq, bseq)
+        assert len(G) == 0
+
+        aseq = [0, 0]
+        bseq = [0, 0]
+        G = configuration_model(aseq, bseq)
+        assert len(G) == 4
+        assert G.number_of_edges() == 0
+
+        aseq = [3, 3, 3, 3]
+        bseq = [2, 2, 2, 2, 2]
+        pytest.raises(nx.NetworkXError, configuration_model, aseq, bseq)
+
+        aseq = [3, 3, 3, 3]
+        bseq = [2, 2, 2, 2, 2, 2]
+        G = configuration_model(aseq, bseq)
+        assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
+
+        aseq = [2, 2, 2, 2, 2, 2]
+        bseq = [3, 3, 3, 3]
+        G = configuration_model(aseq, bseq)
+        assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
+
+        aseq = [2, 2, 2, 1, 1, 1]
+        bseq = [3, 3, 3]
+        G = configuration_model(aseq, bseq)
+        assert G.is_multigraph()
+        assert not G.is_directed()
+        assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
+
+        GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
+        assert GU.number_of_nodes() == 6
+
+        GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
+        assert GD.number_of_nodes() == 3
+
+        G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
+        assert not G.is_multigraph()
+        assert not G.is_directed()
+
+        pytest.raises(
+            nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph()
+        )
+        pytest.raises(
+            nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph
+        )
+        pytest.raises(
+            nx.NetworkXError,
+            configuration_model,
+            aseq,
+            bseq,
+            create_using=nx.MultiDiGraph,
+        )
+
+    def test_havel_hakimi_graph(self):
+        aseq = []
+        bseq = []
+        G = havel_hakimi_graph(aseq, bseq)
+        assert len(G) == 0
+
+        aseq = [0, 0]
+        bseq = [0, 0]
+        G = havel_hakimi_graph(aseq, bseq)
+        assert len(G) == 4
+        assert G.number_of_edges() == 0
+
+        aseq = [3, 3, 3, 3]
+        bseq = [2, 2, 2, 2, 2]
+        pytest.raises(nx.NetworkXError, havel_hakimi_graph, aseq, bseq)
+
+        bseq = [2, 2, 2, 2, 2, 2]
+        G = havel_hakimi_graph(aseq, bseq)
+        assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
+
+        aseq = [2, 2, 2, 2, 2, 2]
+        bseq = [3, 3, 3, 3]
+        G = havel_hakimi_graph(aseq, bseq)
+        assert G.is_multigraph()
+        assert not G.is_directed()
+        assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
+
+        GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
+        assert GU.number_of_nodes() == 6
+
+        GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
+        assert GD.number_of_nodes() == 4
+
+        G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
+        assert not G.is_multigraph()
+        assert not G.is_directed()
+
+        pytest.raises(
+            nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph
+        )
+        pytest.raises(
+            nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph
+        )
+        pytest.raises(
+            nx.NetworkXError,
+            havel_hakimi_graph,
+            aseq,
+            bseq,
+            create_using=nx.MultiDiGraph,
+        )
+
+    def test_reverse_havel_hakimi_graph(self):
+        aseq = []
+        bseq = []
+        G = reverse_havel_hakimi_graph(aseq, bseq)
+        assert len(G) == 0
+
+        aseq = [0, 0]
+        bseq = [0, 0]
+        G = reverse_havel_hakimi_graph(aseq, bseq)
+        assert len(G) == 4
+        assert G.number_of_edges() == 0
+
+        aseq = [3, 3, 3, 3]
+        bseq = [2, 2, 2, 2, 2]
+        pytest.raises(nx.NetworkXError, reverse_havel_hakimi_graph, aseq, bseq)
+
+        bseq = [2, 2, 2, 2, 2, 2]
+        G = reverse_havel_hakimi_graph(aseq, bseq)
+        assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
+
+        aseq = [2, 2, 2, 2, 2, 2]
+        bseq = [3, 3, 3, 3]
+        G = reverse_havel_hakimi_graph(aseq, bseq)
+        assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
+
+        aseq = [2, 2, 2, 1, 1, 1]
+        bseq = [3, 3, 3]
+        G = reverse_havel_hakimi_graph(aseq, bseq)
+        assert G.is_multigraph()
+        assert not G.is_directed()
+        assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
+
+        GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
+        assert GU.number_of_nodes() == 6
+
+        GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
+        assert GD.number_of_nodes() == 3
+
+        G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
+        assert not G.is_multigraph()
+        assert not G.is_directed()
+
+        pytest.raises(
+            nx.NetworkXError,
+            reverse_havel_hakimi_graph,
+            aseq,
+            bseq,
+            create_using=nx.DiGraph,
+        )
+        pytest.raises(
+            nx.NetworkXError,
+            reverse_havel_hakimi_graph,
+            aseq,
+            bseq,
+            create_using=nx.DiGraph,
+        )
+        pytest.raises(
+            nx.NetworkXError,
+            reverse_havel_hakimi_graph,
+            aseq,
+            bseq,
+            create_using=nx.MultiDiGraph,
+        )
+
+    def test_alternating_havel_hakimi_graph(self):
+        aseq = []
+        bseq = []
+        G = alternating_havel_hakimi_graph(aseq, bseq)
+        assert len(G) == 0
+
+        aseq = [0, 0]
+        bseq = [0, 0]
+        G = alternating_havel_hakimi_graph(aseq, bseq)
+        assert len(G) == 4
+        assert G.number_of_edges() == 0
+
+        aseq = [3, 3, 3, 3]
+        bseq = [2, 2, 2, 2, 2]
+        pytest.raises(nx.NetworkXError, alternating_havel_hakimi_graph, aseq, bseq)
+
+        bseq = [2, 2, 2, 2, 2, 2]
+        G = alternating_havel_hakimi_graph(aseq, bseq)
+        assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
+
+        aseq = [2, 2, 2, 2, 2, 2]
+        bseq = [3, 3, 3, 3]
+        G = alternating_havel_hakimi_graph(aseq, bseq)
+        assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
+
+        aseq = [2, 2, 2, 1, 1, 1]
+        bseq = [3, 3, 3]
+        G = alternating_havel_hakimi_graph(aseq, bseq)
+        assert G.is_multigraph()
+        assert not G.is_directed()
+        assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
+
+        GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
+        assert GU.number_of_nodes() == 6
+
+        GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
+        assert GD.number_of_nodes() == 3
+
+        G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
+        assert not G.is_multigraph()
+        assert not G.is_directed()
+
+        pytest.raises(
+            nx.NetworkXError,
+            alternating_havel_hakimi_graph,
+            aseq,
+            bseq,
+            create_using=nx.DiGraph,
+        )
+        pytest.raises(
+            nx.NetworkXError,
+            alternating_havel_hakimi_graph,
+            aseq,
+            bseq,
+            create_using=nx.DiGraph,
+        )
+        pytest.raises(
+            nx.NetworkXError,
+            alternating_havel_hakimi_graph,
+            aseq,
+            bseq,
+            create_using=nx.MultiDiGraph,
+        )
+
+    def test_preferential_attachment(self):
+        aseq = [3, 2, 1, 1]
+        G = preferential_attachment_graph(aseq, 0.5)
+        assert G.is_multigraph()
+        assert not G.is_directed()
+
+        G = preferential_attachment_graph(aseq, 0.5, create_using=nx.Graph)
+        assert not G.is_multigraph()
+        assert not G.is_directed()
+
+        pytest.raises(
+            nx.NetworkXError,
+            preferential_attachment_graph,
+            aseq,
+            0.5,
+            create_using=nx.DiGraph(),
+        )
+        pytest.raises(
+            nx.NetworkXError,
+            preferential_attachment_graph,
+            aseq,
+            0.5,
+            create_using=nx.DiGraph(),
+        )
+        pytest.raises(
+            nx.NetworkXError,
+            preferential_attachment_graph,
+            aseq,
+            0.5,
+            create_using=nx.DiGraph(),
+        )
+
+    def test_random_graph(self):
+        n = 10
+        m = 20
+        G = random_graph(n, m, 0.9)
+        assert len(G) == 30
+        assert nx.is_bipartite(G)
+        X, Y = nx.algorithms.bipartite.sets(G)
+        assert set(range(n)) == X
+        assert set(range(n, n + m)) == Y
+
+    def test_random_digraph(self):
+        n = 10
+        m = 20
+        G = random_graph(n, m, 0.9, directed=True)
+        assert len(G) == 30
+        assert nx.is_bipartite(G)
+        X, Y = nx.algorithms.bipartite.sets(G)
+        assert set(range(n)) == X
+        assert set(range(n, n + m)) == Y
+
+    def test_gnmk_random_graph(self):
+        n = 10
+        m = 20
+        edges = 100
+        # set seed because sometimes it is not connected
+        # which raises an error in bipartite.sets(G) below.
+        G = gnmk_random_graph(n, m, edges, seed=1234)
+        assert len(G) == n + m
+        assert nx.is_bipartite(G)
+        X, Y = nx.algorithms.bipartite.sets(G)
+        # print(X)
+        assert set(range(n)) == X
+        assert set(range(n, n + m)) == Y
+        assert edges == len(list(G.edges()))
+
+    def test_gnmk_random_graph_complete(self):
+        n = 10
+        m = 20
+        edges = 200
+        G = gnmk_random_graph(n, m, edges)
+        assert len(G) == n + m
+        assert nx.is_bipartite(G)
+        X, Y = nx.algorithms.bipartite.sets(G)
+        # print(X)
+        assert set(range(n)) == X
+        assert set(range(n, n + m)) == Y
+        assert edges == len(list(G.edges()))
+
+    @pytest.mark.parametrize("n", (4, range(4), {0, 1, 2, 3}))
+    @pytest.mark.parametrize("m", (range(4, 7), {4, 5, 6}))
+    def test_complete_bipartite_graph_str(self, n, m):
+        """Ensure G.name is consistent for all inputs accepted by nodes_or_number.
+        See gh-7396"""
+        G = nx.complete_bipartite_graph(n, m)
+        ans = "Graph named 'complete_bipartite_graph(4, 3)' with 7 nodes and 12 edges"
+        assert str(G) == ans
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matching.py
new file mode 100644
index 00000000..c24659ea
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matching.py
@@ -0,0 +1,327 @@
+"""Unit tests for the :mod:`networkx.algorithms.bipartite.matching` module."""
+
+import itertools
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms.bipartite.matching import (
+    eppstein_matching,
+    hopcroft_karp_matching,
+    maximum_matching,
+    minimum_weight_full_matching,
+    to_vertex_cover,
+)
+
+
+class TestMatching:
+    """Tests for bipartite matching algorithms."""
+
+    def setup_method(self):
+        """Creates a bipartite graph for use in testing matching algorithms.
+
+        The bipartite graph has a maximum cardinality matching that leaves
+        vertex 1 and vertex 10 unmatched. The first six numbers are the left
+        vertices and the next six numbers are the right vertices.
+
+        """
+        self.simple_graph = nx.complete_bipartite_graph(2, 3)
+        self.simple_solution = {0: 2, 1: 3, 2: 0, 3: 1}
+
+        edges = [(0, 7), (0, 8), (2, 6), (2, 9), (3, 8), (4, 8), (4, 9), (5, 11)]
+        self.top_nodes = set(range(6))
+        self.graph = nx.Graph()
+        self.graph.add_nodes_from(range(12))
+        self.graph.add_edges_from(edges)
+
+        # Example bipartite graph from issue 2127
+        G = nx.Graph()
+        G.add_nodes_from(
+            [
+                (1, "C"),
+                (1, "B"),
+                (0, "G"),
+                (1, "F"),
+                (1, "E"),
+                (0, "C"),
+                (1, "D"),
+                (1, "I"),
+                (0, "A"),
+                (0, "D"),
+                (0, "F"),
+                (0, "E"),
+                (0, "H"),
+                (1, "G"),
+                (1, "A"),
+                (0, "I"),
+                (0, "B"),
+                (1, "H"),
+            ]
+        )
+        G.add_edge((1, "C"), (0, "A"))
+        G.add_edge((1, "B"), (0, "A"))
+        G.add_edge((0, "G"), (1, "I"))
+        G.add_edge((0, "G"), (1, "H"))
+        G.add_edge((1, "F"), (0, "A"))
+        G.add_edge((1, "F"), (0, "C"))
+        G.add_edge((1, "F"), (0, "E"))
+        G.add_edge((1, "E"), (0, "A"))
+        G.add_edge((1, "E"), (0, "C"))
+        G.add_edge((0, "C"), (1, "D"))
+        G.add_edge((0, "C"), (1, "I"))
+        G.add_edge((0, "C"), (1, "G"))
+        G.add_edge((0, "C"), (1, "H"))
+        G.add_edge((1, "D"), (0, "A"))
+        G.add_edge((1, "I"), (0, "A"))
+        G.add_edge((1, "I"), (0, "E"))
+        G.add_edge((0, "A"), (1, "G"))
+        G.add_edge((0, "A"), (1, "H"))
+        G.add_edge((0, "E"), (1, "G"))
+        G.add_edge((0, "E"), (1, "H"))
+        self.disconnected_graph = G
+
+    def check_match(self, matching):
+        """Asserts that the matching is what we expect from the bipartite graph
+        constructed in the :meth:`setup` fixture.
+
+        """
+        # For the sake of brevity, rename `matching` to `M`.
+        M = matching
+        matched_vertices = frozenset(itertools.chain(*M.items()))
+        # Assert that the maximum number of vertices (10) is matched.
+        assert matched_vertices == frozenset(range(12)) - {1, 10}
+        # Assert that no vertex appears in two edges, or in other words, that
+        # the matching (u, v) and (v, u) both appear in the matching
+        # dictionary.
+        assert all(u == M[M[u]] for u in range(12) if u in M)
+
+    def check_vertex_cover(self, vertices):
+        """Asserts that the given set of vertices is the vertex cover we
+        expected from the bipartite graph constructed in the :meth:`setup`
+        fixture.
+
+        """
+        # By Konig's theorem, the number of edges in a maximum matching equals
+        # the number of vertices in a minimum vertex cover.
+        assert len(vertices) == 5
+        # Assert that the set is truly a vertex cover.
+        for u, v in self.graph.edges():
+            assert u in vertices or v in vertices
+        # TODO Assert that the vertices are the correct ones.
+
+    def test_eppstein_matching(self):
+        """Tests that David Eppstein's implementation of the Hopcroft--Karp
+        algorithm produces a maximum cardinality matching.
+
+        """
+        self.check_match(eppstein_matching(self.graph, self.top_nodes))
+
+    def test_hopcroft_karp_matching(self):
+        """Tests that the Hopcroft--Karp algorithm produces a maximum
+        cardinality matching in a bipartite graph.
+
+        """
+        self.check_match(hopcroft_karp_matching(self.graph, self.top_nodes))
+
+    def test_to_vertex_cover(self):
+        """Test for converting a maximum matching to a minimum vertex cover."""
+        matching = maximum_matching(self.graph, self.top_nodes)
+        vertex_cover = to_vertex_cover(self.graph, matching, self.top_nodes)
+        self.check_vertex_cover(vertex_cover)
+
+    def test_eppstein_matching_simple(self):
+        match = eppstein_matching(self.simple_graph)
+        assert match == self.simple_solution
+
+    def test_hopcroft_karp_matching_simple(self):
+        match = hopcroft_karp_matching(self.simple_graph)
+        assert match == self.simple_solution
+
+    def test_eppstein_matching_disconnected(self):
+        with pytest.raises(nx.AmbiguousSolution):
+            match = eppstein_matching(self.disconnected_graph)
+
+    def test_hopcroft_karp_matching_disconnected(self):
+        with pytest.raises(nx.AmbiguousSolution):
+            match = hopcroft_karp_matching(self.disconnected_graph)
+
+    def test_issue_2127(self):
+        """Test from issue 2127"""
+        # Build the example DAG
+        G = nx.DiGraph()
+        G.add_edge("A", "C")
+        G.add_edge("A", "B")
+        G.add_edge("C", "E")
+        G.add_edge("C", "D")
+        G.add_edge("E", "G")
+        G.add_edge("E", "F")
+        G.add_edge("G", "I")
+        G.add_edge("G", "H")
+
+        tc = nx.transitive_closure(G)
+        btc = nx.Graph()
+
+        # Create a bipartite graph based on the transitive closure of G
+        for v in tc.nodes():
+            btc.add_node((0, v))
+            btc.add_node((1, v))
+
+        for u, v in tc.edges():
+            btc.add_edge((0, u), (1, v))
+
+        top_nodes = {n for n in btc if n[0] == 0}
+        matching = hopcroft_karp_matching(btc, top_nodes)
+        vertex_cover = to_vertex_cover(btc, matching, top_nodes)
+        independent_set = set(G) - {v for _, v in vertex_cover}
+        assert {"B", "D", "F", "I", "H"} == independent_set
+
+    def test_vertex_cover_issue_2384(self):
+        G = nx.Graph([(0, 3), (1, 3), (1, 4), (2, 3)])
+        matching = maximum_matching(G)
+        vertex_cover = to_vertex_cover(G, matching)
+        for u, v in G.edges():
+            assert u in vertex_cover or v in vertex_cover
+
+    def test_vertex_cover_issue_3306(self):
+        G = nx.Graph()
+        edges = [(0, 2), (1, 0), (1, 1), (1, 2), (2, 2)]
+        G.add_edges_from([((i, "L"), (j, "R")) for i, j in edges])
+
+        matching = maximum_matching(G)
+        vertex_cover = to_vertex_cover(G, matching)
+        for u, v in G.edges():
+            assert u in vertex_cover or v in vertex_cover
+
+    def test_unorderable_nodes(self):
+        a = object()
+        b = object()
+        c = object()
+        d = object()
+        e = object()
+        G = nx.Graph([(a, d), (b, d), (b, e), (c, d)])
+        matching = maximum_matching(G)
+        vertex_cover = to_vertex_cover(G, matching)
+        for u, v in G.edges():
+            assert u in vertex_cover or v in vertex_cover
+
+
+def test_eppstein_matching():
+    """Test in accordance to issue #1927"""
+    G = nx.Graph()
+    G.add_nodes_from(["a", 2, 3, 4], bipartite=0)
+    G.add_nodes_from([1, "b", "c"], bipartite=1)
+    G.add_edges_from([("a", 1), ("a", "b"), (2, "b"), (2, "c"), (3, "c"), (4, 1)])
+    matching = eppstein_matching(G)
+    assert len(matching) == len(maximum_matching(G))
+    assert all(x in set(matching.keys()) for x in set(matching.values()))
+
+
+class TestMinimumWeightFullMatching:
+    @classmethod
+    def setup_class(cls):
+        pytest.importorskip("scipy")
+
+    def test_minimum_weight_full_matching_incomplete_graph(self):
+        B = nx.Graph()
+        B.add_nodes_from([1, 2], bipartite=0)
+        B.add_nodes_from([3, 4], bipartite=1)
+        B.add_edge(1, 4, weight=100)
+        B.add_edge(2, 3, weight=100)
+        B.add_edge(2, 4, weight=50)
+        matching = minimum_weight_full_matching(B)
+        assert matching == {1: 4, 2: 3, 4: 1, 3: 2}
+
+    def test_minimum_weight_full_matching_with_no_full_matching(self):
+        B = nx.Graph()
+        B.add_nodes_from([1, 2, 3], bipartite=0)
+        B.add_nodes_from([4, 5, 6], bipartite=1)
+        B.add_edge(1, 4, weight=100)
+        B.add_edge(2, 4, weight=100)
+        B.add_edge(3, 4, weight=50)
+        B.add_edge(3, 5, weight=50)
+        B.add_edge(3, 6, weight=50)
+        with pytest.raises(ValueError):
+            minimum_weight_full_matching(B)
+
+    def test_minimum_weight_full_matching_square(self):
+        G = nx.complete_bipartite_graph(3, 3)
+        G.add_edge(0, 3, weight=400)
+        G.add_edge(0, 4, weight=150)
+        G.add_edge(0, 5, weight=400)
+        G.add_edge(1, 3, weight=400)
+        G.add_edge(1, 4, weight=450)
+        G.add_edge(1, 5, weight=600)
+        G.add_edge(2, 3, weight=300)
+        G.add_edge(2, 4, weight=225)
+        G.add_edge(2, 5, weight=300)
+        matching = minimum_weight_full_matching(G)
+        assert matching == {0: 4, 1: 3, 2: 5, 4: 0, 3: 1, 5: 2}
+
+    def test_minimum_weight_full_matching_smaller_left(self):
+        G = nx.complete_bipartite_graph(3, 4)
+        G.add_edge(0, 3, weight=400)
+        G.add_edge(0, 4, weight=150)
+        G.add_edge(0, 5, weight=400)
+        G.add_edge(0, 6, weight=1)
+        G.add_edge(1, 3, weight=400)
+        G.add_edge(1, 4, weight=450)
+        G.add_edge(1, 5, weight=600)
+        G.add_edge(1, 6, weight=2)
+        G.add_edge(2, 3, weight=300)
+        G.add_edge(2, 4, weight=225)
+        G.add_edge(2, 5, weight=290)
+        G.add_edge(2, 6, weight=3)
+        matching = minimum_weight_full_matching(G)
+        assert matching == {0: 4, 1: 6, 2: 5, 4: 0, 5: 2, 6: 1}
+
+    def test_minimum_weight_full_matching_smaller_top_nodes_right(self):
+        G = nx.complete_bipartite_graph(3, 4)
+        G.add_edge(0, 3, weight=400)
+        G.add_edge(0, 4, weight=150)
+        G.add_edge(0, 5, weight=400)
+        G.add_edge(0, 6, weight=1)
+        G.add_edge(1, 3, weight=400)
+        G.add_edge(1, 4, weight=450)
+        G.add_edge(1, 5, weight=600)
+        G.add_edge(1, 6, weight=2)
+        G.add_edge(2, 3, weight=300)
+        G.add_edge(2, 4, weight=225)
+        G.add_edge(2, 5, weight=290)
+        G.add_edge(2, 6, weight=3)
+        matching = minimum_weight_full_matching(G, top_nodes=[3, 4, 5, 6])
+        assert matching == {0: 4, 1: 6, 2: 5, 4: 0, 5: 2, 6: 1}
+
+    def test_minimum_weight_full_matching_smaller_right(self):
+        G = nx.complete_bipartite_graph(4, 3)
+        G.add_edge(0, 4, weight=400)
+        G.add_edge(0, 5, weight=400)
+        G.add_edge(0, 6, weight=300)
+        G.add_edge(1, 4, weight=150)
+        G.add_edge(1, 5, weight=450)
+        G.add_edge(1, 6, weight=225)
+        G.add_edge(2, 4, weight=400)
+        G.add_edge(2, 5, weight=600)
+        G.add_edge(2, 6, weight=290)
+        G.add_edge(3, 4, weight=1)
+        G.add_edge(3, 5, weight=2)
+        G.add_edge(3, 6, weight=3)
+        matching = minimum_weight_full_matching(G)
+        assert matching == {1: 4, 2: 6, 3: 5, 4: 1, 5: 3, 6: 2}
+
+    def test_minimum_weight_full_matching_negative_weights(self):
+        G = nx.complete_bipartite_graph(2, 2)
+        G.add_edge(0, 2, weight=-2)
+        G.add_edge(0, 3, weight=0.2)
+        G.add_edge(1, 2, weight=-2)
+        G.add_edge(1, 3, weight=0.3)
+        matching = minimum_weight_full_matching(G)
+        assert matching == {0: 3, 1: 2, 2: 1, 3: 0}
+
+    def test_minimum_weight_full_matching_different_weight_key(self):
+        G = nx.complete_bipartite_graph(2, 2)
+        G.add_edge(0, 2, mass=2)
+        G.add_edge(0, 3, mass=0.2)
+        G.add_edge(1, 2, mass=1)
+        G.add_edge(1, 3, mass=2)
+        matching = minimum_weight_full_matching(G, weight="mass")
+        assert matching == {0: 3, 1: 2, 2: 1, 3: 0}
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matrix.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matrix.py
new file mode 100644
index 00000000..53d83115
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matrix.py
@@ -0,0 +1,84 @@
+import pytest
+
+np = pytest.importorskip("numpy")
+sp = pytest.importorskip("scipy")
+sparse = pytest.importorskip("scipy.sparse")
+
+
+import networkx as nx
+from networkx.algorithms import bipartite
+from networkx.utils import edges_equal
+
+
+class TestBiadjacencyMatrix:
+    def test_biadjacency_matrix_weight(self):
+        G = nx.path_graph(5)
+        G.add_edge(0, 1, weight=2, other=4)
+        X = [1, 3]
+        Y = [0, 2, 4]
+        M = bipartite.biadjacency_matrix(G, X, weight="weight")
+        assert M[0, 0] == 2
+        M = bipartite.biadjacency_matrix(G, X, weight="other")
+        assert M[0, 0] == 4
+
+    def test_biadjacency_matrix(self):
+        tops = [2, 5, 10]
+        bots = [5, 10, 15]
+        for i in range(len(tops)):
+            G = bipartite.random_graph(tops[i], bots[i], 0.2)
+            top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]
+            M = bipartite.biadjacency_matrix(G, top)
+            assert M.shape[0] == tops[i]
+            assert M.shape[1] == bots[i]
+
+    def test_biadjacency_matrix_order(self):
+        G = nx.path_graph(5)
+        G.add_edge(0, 1, weight=2)
+        X = [3, 1]
+        Y = [4, 2, 0]
+        M = bipartite.biadjacency_matrix(G, X, Y, weight="weight")
+        assert M[1, 2] == 2
+
+    def test_biadjacency_matrix_empty_graph(self):
+        G = nx.empty_graph(2)
+        M = nx.bipartite.biadjacency_matrix(G, [0])
+        assert np.array_equal(M.toarray(), np.array([[0]]))
+
+    def test_null_graph(self):
+        with pytest.raises(nx.NetworkXError):
+            bipartite.biadjacency_matrix(nx.Graph(), [])
+
+    def test_empty_graph(self):
+        with pytest.raises(nx.NetworkXError):
+            bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [])
+
+    def test_duplicate_row(self):
+        with pytest.raises(nx.NetworkXError):
+            bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [1, 1])
+
+    def test_duplicate_col(self):
+        with pytest.raises(nx.NetworkXError):
+            bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], [1, 1])
+
+    def test_format_keyword(self):
+        with pytest.raises(nx.NetworkXError):
+            bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], format="foo")
+
+    def test_from_biadjacency_roundtrip(self):
+        B1 = nx.path_graph(5)
+        M = bipartite.biadjacency_matrix(B1, [0, 2, 4])
+        B2 = bipartite.from_biadjacency_matrix(M)
+        assert nx.is_isomorphic(B1, B2)
+
+    def test_from_biadjacency_weight(self):
+        M = sparse.csc_matrix([[1, 2], [0, 3]])
+        B = bipartite.from_biadjacency_matrix(M)
+        assert edges_equal(B.edges(), [(0, 2), (0, 3), (1, 3)])
+        B = bipartite.from_biadjacency_matrix(M, edge_attribute="weight")
+        e = [(0, 2, {"weight": 1}), (0, 3, {"weight": 2}), (1, 3, {"weight": 3})]
+        assert edges_equal(B.edges(data=True), e)
+
+    def test_from_biadjacency_multigraph(self):
+        M = sparse.csc_matrix([[1, 2], [0, 3]])
+        B = bipartite.from_biadjacency_matrix(M, create_using=nx.MultiGraph())
+        assert edges_equal(B.edges(), [(0, 2), (0, 3), (0, 3), (1, 3), (1, 3), (1, 3)])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_project.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_project.py
new file mode 100644
index 00000000..076bb42b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_project.py
@@ -0,0 +1,407 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms import bipartite
+from networkx.utils import edges_equal, nodes_equal
+
+
class TestBipartiteProject:
    """Tests for the bipartite projection functions.

    Each test projects a small bipartite graph onto one of its node
    sets and checks the resulting nodes and edges (and, for the
    weighted variants, edge weights).
    """

    def test_path_projected_graph(self):
        # Projecting a 4-path onto its alternating node sets yields a
        # single edge between the two same-side nodes.
        G = nx.path_graph(4)
        P = bipartite.projected_graph(G, [1, 3])
        assert nodes_equal(list(P), [1, 3])
        assert edges_equal(list(P.edges()), [(1, 3)])
        P = bipartite.projected_graph(G, [0, 2])
        assert nodes_equal(list(P), [0, 2])
        assert edges_equal(list(P.edges()), [(0, 2)])
        # Multigraph input is rejected.
        G = nx.MultiGraph([(0, 1)])
        with pytest.raises(nx.NetworkXError, match="not defined for multigraphs"):
            bipartite.projected_graph(G, [0])

    def test_path_projected_properties_graph(self):
        # Node attributes must survive the projection.
        G = nx.path_graph(4)
        G.add_node(1, name="one")
        G.add_node(2, name="two")
        P = bipartite.projected_graph(G, [1, 3])
        assert nodes_equal(list(P), [1, 3])
        assert edges_equal(list(P.edges()), [(1, 3)])
        assert P.nodes[1]["name"] == G.nodes[1]["name"]
        P = bipartite.projected_graph(G, [0, 2])
        assert nodes_equal(list(P), [0, 2])
        assert edges_equal(list(P.edges()), [(0, 2)])
        assert P.nodes[2]["name"] == G.nodes[2]["name"]

    def test_path_collaboration_projected_graph(self):
        G = nx.path_graph(4)
        P = bipartite.collaboration_weighted_projected_graph(G, [1, 3])
        assert nodes_equal(list(P), [1, 3])
        assert edges_equal(list(P.edges()), [(1, 3)])
        # NOTE(review): this is an assignment, not an assertion -- was
        # ``assert P[1][3]["weight"] == 1`` intended? (Same below.)
        P[1][3]["weight"] = 1
        P = bipartite.collaboration_weighted_projected_graph(G, [0, 2])
        assert nodes_equal(list(P), [0, 2])
        assert edges_equal(list(P.edges()), [(0, 2)])
        P[0][2]["weight"] = 1

    def test_directed_path_collaboration_projected_graph(self):
        # Same shape as the undirected case, but on a directed path.
        G = nx.DiGraph()
        nx.add_path(G, range(4))
        P = bipartite.collaboration_weighted_projected_graph(G, [1, 3])
        assert nodes_equal(list(P), [1, 3])
        assert edges_equal(list(P.edges()), [(1, 3)])
        # NOTE(review): assignment, not assertion -- verify intent.
        P[1][3]["weight"] = 1
        P = bipartite.collaboration_weighted_projected_graph(G, [0, 2])
        assert nodes_equal(list(P), [0, 2])
        assert edges_equal(list(P.edges()), [(0, 2)])
        P[0][2]["weight"] = 1

    def test_path_weighted_projected_graph(self):
        G = nx.path_graph(4)

        # Duplicate nodes in the projection set are rejected.
        with pytest.raises(nx.NetworkXAlgorithmError):
            bipartite.weighted_projected_graph(G, [1, 2, 3, 3])

        P = bipartite.weighted_projected_graph(G, [1, 3])
        assert nodes_equal(list(P), [1, 3])
        assert edges_equal(list(P.edges()), [(1, 3)])
        # NOTE(review): assignment, not assertion -- verify intent.
        P[1][3]["weight"] = 1
        P = bipartite.weighted_projected_graph(G, [0, 2])
        assert nodes_equal(list(P), [0, 2])
        assert edges_equal(list(P.edges()), [(0, 2)])
        P[0][2]["weight"] = 1

    def test_digraph_weighted_projection(self):
        # Overlap-weighted projection of a directed path.
        G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4)])
        P = bipartite.overlap_weighted_projected_graph(G, [1, 3])
        assert nx.get_edge_attributes(P, "weight") == {(1, 3): 1.0}
        assert len(P) == 2

    def test_path_weighted_projected_directed_graph(self):
        G = nx.DiGraph()
        nx.add_path(G, range(4))
        P = bipartite.weighted_projected_graph(G, [1, 3])
        assert nodes_equal(list(P), [1, 3])
        assert edges_equal(list(P.edges()), [(1, 3)])
        # NOTE(review): assignment, not assertion -- verify intent.
        P[1][3]["weight"] = 1
        P = bipartite.weighted_projected_graph(G, [0, 2])
        assert nodes_equal(list(P), [0, 2])
        assert edges_equal(list(P.edges()), [(0, 2)])
        P[0][2]["weight"] = 1

    def test_star_projected_graph(self):
        # Projecting a star onto its leaves gives a complete graph on
        # the leaves; projecting onto the hub gives a single node.
        G = nx.star_graph(3)
        P = bipartite.projected_graph(G, [1, 2, 3])
        assert nodes_equal(list(P), [1, 2, 3])
        assert edges_equal(list(P.edges()), [(1, 2), (1, 3), (2, 3)])
        P = bipartite.weighted_projected_graph(G, [1, 2, 3])
        assert nodes_equal(list(P), [1, 2, 3])
        assert edges_equal(list(P.edges()), [(1, 2), (1, 3), (2, 3)])

        P = bipartite.projected_graph(G, [0])
        assert nodes_equal(list(P), [0])
        assert edges_equal(list(P.edges()), [])

    def test_project_multigraph(self):
        # multigraph=True keeps one projected edge per shared neighbor.
        G = nx.Graph()
        G.add_edge("a", 1)
        G.add_edge("b", 1)
        G.add_edge("a", 2)
        G.add_edge("b", 2)
        P = bipartite.projected_graph(G, "ab")
        assert edges_equal(list(P.edges()), [("a", "b")])
        P = bipartite.weighted_projected_graph(G, "ab")
        assert edges_equal(list(P.edges()), [("a", "b")])
        P = bipartite.projected_graph(G, "ab", multigraph=True)
        assert edges_equal(list(P.edges()), [("a", "b"), ("a", "b")])

    def test_project_collaboration(self):
        # Collaboration weights for a hand-built two-mode graph.
        G = nx.Graph()
        G.add_edge("a", 1)
        G.add_edge("b", 1)
        G.add_edge("b", 2)
        G.add_edge("c", 2)
        G.add_edge("c", 3)
        G.add_edge("c", 4)
        G.add_edge("b", 4)
        P = bipartite.collaboration_weighted_projected_graph(G, "abc")
        assert P["a"]["b"]["weight"] == 1
        assert P["b"]["c"]["weight"] == 2

    def test_directed_projection(self):
        # First case: A and B share node 2 via opposite edge directions.
        G = nx.DiGraph()
        G.add_edge("A", 1)
        G.add_edge(1, "B")
        G.add_edge("A", 2)
        G.add_edge("B", 2)
        P = bipartite.projected_graph(G, "AB")
        assert edges_equal(list(P.edges()), [("A", "B")])
        P = bipartite.weighted_projected_graph(G, "AB")
        assert edges_equal(list(P.edges()), [("A", "B")])
        assert P["A"]["B"]["weight"] == 1

        P = bipartite.projected_graph(G, "AB", multigraph=True)
        assert edges_equal(list(P.edges()), [("A", "B")])

        # Second case: two directed A->x->B paths give weight 2 and two
        # parallel edges in the multigraph projection.
        G = nx.DiGraph()
        G.add_edge("A", 1)
        G.add_edge(1, "B")
        G.add_edge("A", 2)
        G.add_edge(2, "B")
        P = bipartite.projected_graph(G, "AB")
        assert edges_equal(list(P.edges()), [("A", "B")])
        P = bipartite.weighted_projected_graph(G, "AB")
        assert edges_equal(list(P.edges()), [("A", "B")])
        assert P["A"]["B"]["weight"] == 2

        P = bipartite.projected_graph(G, "AB", multigraph=True)
        assert edges_equal(list(P.edges()), [("A", "B"), ("A", "B")])
+
+
class TestBipartiteWeightedProjection:
    """Tests for the weighted bipartite projection variants.

    Two fixture graphs are used: ``G`` (Tore Opsahl's worked example)
    and ``N`` (figure 6 from Newman, 2001). Each test compares the
    projected edge set and weights against hand-computed answers.
    """

    @classmethod
    def setup_class(cls):
        # Tore Opsahl's example
        # http://toreopsahl.com/2009/05/01/projecting-two-mode-networks-onto-weighted-one-mode-networks/
        cls.G = nx.Graph()
        cls.G.add_edge("A", 1)
        cls.G.add_edge("A", 2)
        cls.G.add_edge("B", 1)
        cls.G.add_edge("B", 2)
        cls.G.add_edge("B", 3)
        cls.G.add_edge("B", 4)
        cls.G.add_edge("B", 5)
        cls.G.add_edge("C", 1)
        cls.G.add_edge("D", 3)
        cls.G.add_edge("E", 4)
        cls.G.add_edge("E", 5)
        cls.G.add_edge("E", 6)
        cls.G.add_edge("F", 6)
        # Graph based on figure 6 from Newman (2001)
        cls.N = nx.Graph()
        cls.N.add_edge("A", 1)
        cls.N.add_edge("A", 2)
        cls.N.add_edge("A", 3)
        cls.N.add_edge("B", 1)
        cls.N.add_edge("B", 2)
        cls.N.add_edge("B", 3)
        cls.N.add_edge("C", 1)
        cls.N.add_edge("D", 1)
        cls.N.add_edge("E", 3)

    def test_project_weighted_shared(self):
        # Weight = number of shared neighbors.
        edges = [
            ("A", "B", 2),
            ("A", "C", 1),
            ("B", "C", 1),
            ("B", "D", 1),
            ("B", "E", 2),
            ("E", "F", 1),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.weighted_projected_graph(self.G, "ABCDEF")
        assert edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

        edges = [
            ("A", "B", 3),
            ("A", "E", 1),
            ("A", "C", 1),
            ("A", "D", 1),
            ("B", "E", 1),
            ("B", "C", 1),
            ("B", "D", 1),
            ("C", "D", 1),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.weighted_projected_graph(self.N, "ABCDE")
        assert edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

    def test_project_weighted_newman(self):
        # Newman's collaboration weighting: each shared neighbor n
        # contributes 1/(deg(n) - 1).
        edges = [
            ("A", "B", 1.5),
            ("A", "C", 0.5),
            ("B", "C", 0.5),
            ("B", "D", 1),
            ("B", "E", 2),
            ("E", "F", 1),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.collaboration_weighted_projected_graph(self.G, "ABCDEF")
        assert edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

        edges = [
            ("A", "B", 11 / 6.0),
            ("A", "E", 1 / 2.0),
            ("A", "C", 1 / 3.0),
            ("A", "D", 1 / 3.0),
            ("B", "E", 1 / 2.0),
            ("B", "C", 1 / 3.0),
            ("B", "D", 1 / 3.0),
            ("C", "D", 1 / 3.0),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.collaboration_weighted_projected_graph(self.N, "ABCDE")
        assert edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

    def test_project_weighted_ratio(self):
        # ratio=True divides the shared-neighbor count by the size of
        # the opposite node set.
        edges = [
            ("A", "B", 2 / 6.0),
            ("A", "C", 1 / 6.0),
            ("B", "C", 1 / 6.0),
            ("B", "D", 1 / 6.0),
            ("B", "E", 2 / 6.0),
            ("E", "F", 1 / 6.0),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.weighted_projected_graph(self.G, "ABCDEF", ratio=True)
        assert edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

        edges = [
            ("A", "B", 3 / 3.0),
            ("A", "E", 1 / 3.0),
            ("A", "C", 1 / 3.0),
            ("A", "D", 1 / 3.0),
            ("B", "E", 1 / 3.0),
            ("B", "C", 1 / 3.0),
            ("B", "D", 1 / 3.0),
            ("C", "D", 1 / 3.0),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.weighted_projected_graph(self.N, "ABCDE", ratio=True)
        assert edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

    def test_project_weighted_overlap(self):
        # jaccard=False: shared neighbors divided by the smaller
        # neighborhood size.
        edges = [
            ("A", "B", 2 / 2.0),
            ("A", "C", 1 / 1.0),
            ("B", "C", 1 / 1.0),
            ("B", "D", 1 / 1.0),
            ("B", "E", 2 / 3.0),
            ("E", "F", 1 / 1.0),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF", jaccard=False)
        assert edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

        edges = [
            ("A", "B", 3 / 3.0),
            ("A", "E", 1 / 1.0),
            ("A", "C", 1 / 1.0),
            ("A", "D", 1 / 1.0),
            ("B", "E", 1 / 1.0),
            ("B", "C", 1 / 1.0),
            ("B", "D", 1 / 1.0),
            ("C", "D", 1 / 1.0),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE", jaccard=False)
        assert edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

    def test_project_weighted_jaccard(self):
        # Default overlap weighting: Jaccard index of the two
        # neighborhoods.
        edges = [
            ("A", "B", 2 / 5.0),
            ("A", "C", 1 / 2.0),
            ("B", "C", 1 / 5.0),
            ("B", "D", 1 / 5.0),
            ("B", "E", 2 / 6.0),
            ("E", "F", 1 / 3.0),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF")
        assert edges_equal(list(P.edges()), Panswer.edges())
        for u, v in list(P.edges()):
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

        edges = [
            ("A", "B", 3 / 3.0),
            ("A", "E", 1 / 3.0),
            ("A", "C", 1 / 3.0),
            ("A", "D", 1 / 3.0),
            ("B", "E", 1 / 3.0),
            ("B", "C", 1 / 3.0),
            ("B", "D", 1 / 3.0),
            ("C", "D", 1 / 1.0),
        ]
        Panswer = nx.Graph()
        Panswer.add_weighted_edges_from(edges)
        P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE")
        assert edges_equal(list(P.edges()), Panswer.edges())
        for u, v in P.edges():
            assert P[u][v]["weight"] == Panswer[u][v]["weight"]

    def test_generic_weighted_projected_graph_simple(self):
        # A custom weight function counting shared neighbors must agree
        # with the default behavior.
        def shared(G, u, v):
            return len(set(G[u]) & set(G[v]))

        B = nx.path_graph(5)
        G = bipartite.generic_weighted_projected_graph(
            B, [0, 2, 4], weight_function=shared
        )
        assert nodes_equal(list(G), [0, 2, 4])
        assert edges_equal(
            list(G.edges(data=True)),
            [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})],
        )

        G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4])
        assert nodes_equal(list(G), [0, 2, 4])
        assert edges_equal(
            list(G.edges(data=True)),
            [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})],
        )
        B = nx.DiGraph()
        nx.add_path(B, range(5))
        G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4])
        assert nodes_equal(list(G), [0, 2, 4])
        assert edges_equal(
            list(G.edges(data=True)), [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})]
        )

    def test_generic_weighted_projected_graph_custom(self):
        # Two custom weight functions (Jaccard, summed edge weights)
        # against the default shared-neighbor count.
        def jaccard(G, u, v):
            unbrs = set(G[u])
            vnbrs = set(G[v])
            return len(unbrs & vnbrs) / len(unbrs | vnbrs)

        def my_weight(G, u, v, weight="weight"):
            w = 0
            for nbr in set(G[u]) & set(G[v]):
                w += G.edges[u, nbr].get(weight, 1) + G.edges[v, nbr].get(weight, 1)
            return w

        B = nx.bipartite.complete_bipartite_graph(2, 2)
        for i, (u, v) in enumerate(B.edges()):
            B.edges[u, v]["weight"] = i + 1
        G = bipartite.generic_weighted_projected_graph(
            B, [0, 1], weight_function=jaccard
        )
        assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 1.0})])
        G = bipartite.generic_weighted_projected_graph(
            B, [0, 1], weight_function=my_weight
        )
        assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 10})])
        G = bipartite.generic_weighted_projected_graph(B, [0, 1])
        assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 2})])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_redundancy.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_redundancy.py
new file mode 100644
index 00000000..8d979db8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_redundancy.py
@@ -0,0 +1,35 @@
+"""Unit tests for the :mod:`networkx.algorithms.bipartite.redundancy` module."""
+
+import pytest
+
+from networkx import NetworkXError, cycle_graph
+from networkx.algorithms.bipartite import complete_bipartite_graph, node_redundancy
+
+
def test_no_redundant_nodes():
    """Every node of K_{2,2} has redundancy exactly 1."""
    G = complete_bipartite_graph(2, 2)

    # Default: redundancy is computed for every node.
    assert all(r == 1 for r in node_redundancy(G).values())

    # Restricting the computation to an explicit subset of nodes.
    assert node_redundancy(G, (2, 3)) == {2: 1.0, 3: 1.0}
+
+
def test_redundant_nodes():
    """Adding the chord (0, 3) to C_6 lowers its endpoints' redundancy to 2/3."""
    G = cycle_graph(6)
    chord = {0, 3}
    G.add_edge(*chord)
    redundancy = node_redundancy(G)
    for node in G:
        expected = 2 / 3 if node in chord else 1
        assert redundancy[node] == expected
+
+
def test_not_enough_neighbors():
    """Redundancy is undefined when some node has fewer than two neighbors."""
    G = complete_bipartite_graph(1, 2)
    with pytest.raises(NetworkXError):
        node_redundancy(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py
new file mode 100644
index 00000000..b9406497
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py
@@ -0,0 +1,80 @@
+import pytest
+
+pytest.importorskip("scipy")
+
+import networkx as nx
+from networkx.algorithms.bipartite import spectral_bipartivity as sb
+
+# Examples from Figure 1
+# E. Estrada and J. A. Rodríguez-Velázquez, "Spectral measures of
+# bipartivity in complex networks", PhysRev E 72, 046105 (2005)
+
+
class TestSpectralBipartivity:
    """Tests for :func:`spectral_bipartivity`.

    Expected values are the constants reported for the example graphs
    of Figure 1 in Estrada and Rodríguez-Velázquez (2005); each case
    perturbs a bipartite graph with extra edges and checks the measure
    to three (or two) decimal places.
    """

    def test_star_like(self):
        # star-like

        G = nx.star_graph(2)
        G.add_edge(1, 2)
        assert sb(G) == pytest.approx(0.843, abs=1e-3)

        G = nx.star_graph(3)
        G.add_edge(1, 2)
        assert sb(G) == pytest.approx(0.871, abs=1e-3)

        G = nx.star_graph(4)
        G.add_edge(1, 2)
        assert sb(G) == pytest.approx(0.890, abs=1e-3)

    def test_k23_like(self):
        # K2,3-like: progressively more within-side edges lower the
        # bipartivity measure.
        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(0, 1)
        assert sb(G) == pytest.approx(0.769, abs=1e-3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        assert sb(G) == pytest.approx(0.829, abs=1e-3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        G.add_edge(3, 4)
        assert sb(G) == pytest.approx(0.731, abs=1e-3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(0, 1)
        G.add_edge(2, 4)
        assert sb(G) == pytest.approx(0.692, abs=1e-3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        G.add_edge(3, 4)
        G.add_edge(0, 1)
        assert sb(G) == pytest.approx(0.645, abs=1e-3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        G.add_edge(3, 4)
        G.add_edge(2, 3)
        assert sb(G) == pytest.approx(0.645, abs=1e-3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        G.add_edge(3, 4)
        G.add_edge(2, 3)
        G.add_edge(0, 1)
        assert sb(G) == pytest.approx(0.597, abs=1e-3)

    def test_single_nodes(self):
        # single nodes: the ``nodes`` keyword returns per-node
        # contributions instead of the global measure.
        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        sbn = sb(G, nodes=[1, 2])
        assert sbn[1] == pytest.approx(0.85, abs=1e-2)
        assert sbn[2] == pytest.approx(0.77, abs=1e-2)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(0, 1)
        sbn = sb(G, nodes=[1, 2])
        assert sbn[1] == pytest.approx(0.73, abs=1e-2)
        assert sbn[2] == pytest.approx(0.82, abs=1e-2)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/boundary.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/boundary.py
new file mode 100644
index 00000000..ba05d803
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/boundary.py
@@ -0,0 +1,168 @@
+"""Routines to find the boundary of a set of nodes.
+
+An edge boundary is a set of edges, each of which has exactly one
+endpoint in a given set of nodes (or, in the case of directed graphs,
+the set of edges whose source node is in the set).
+
+A node boundary of a set *S* of nodes is the set of (out-)neighbors of
+nodes in *S* that are outside *S*.
+
+"""
+
+from itertools import chain
+
+import networkx as nx
+
+__all__ = ["edge_boundary", "node_boundary"]
+
+
@nx._dispatchable(edge_attrs={"data": "default"}, preserve_edge_attrs="data")
def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None):
    """Return an iterator over the edge boundary of `nbunch1`.

    The *edge boundary* of a set *S* with respect to a set *T* is the
    set of edges (*u*, *v*) with *u* in *S* and *v* in *T*. When *T*
    is omitted, it defaults to the set of all nodes not in *S*.

    Parameters
    ----------
    G : NetworkX graph

    nbunch1 : iterable
        Nodes of the graph forming the "interior" set *S* whose edge
        boundary is returned.

    nbunch2 : iterable
        Nodes forming the target (or "exterior") set *T*. If not
        given, this defaults to all nodes of `G` not in `nbunch1`.

    keys : bool
        This parameter has the same meaning as in
        :meth:`MultiGraph.edges`.

    data : bool or object
        This parameter has the same meaning as in
        :meth:`MultiGraph.edges`.

    default : object
        This parameter has the same meaning as in
        :meth:`MultiGraph.edges`.

    Returns
    -------
    iterator
        Iterator over the boundary edges. If `keys`, `data`, or
        `default` are specified and `G` is a multigraph, edges are
        returned with keys and/or data, as in :meth:`MultiGraph.edges`.

    Examples
    --------
    >>> G = nx.wheel_graph(6)

    When nbunch2=None:

    >>> list(nx.edge_boundary(G, (1, 3)))
    [(1, 0), (1, 2), (1, 5), (3, 0), (3, 2), (3, 4)]

    When nbunch2 is given:

    >>> list(nx.edge_boundary(G, (1, 3), (2, 0)))
    [(1, 0), (1, 2), (3, 0), (3, 2)]

    Notes
    -----
    Elements of `nbunch1` or `nbunch2` that are not in `G` are
    silently ignored.

    `nbunch1` and `nbunch2` are usually meant to be disjoint, but in
    the interest of speed and generality, that is not required here.

    """
    nset1 = {n for n in nbunch1 if n in G}
    # Edges incident to the interior set. `Graph.edges()` gives no
    # guarantee which orientation, (u, v) or (v, u), is reported, so
    # both endpoint orders are checked below.
    if G.is_multigraph():
        incident = G.edges(nset1, data=data, keys=keys, default=default)
    else:
        incident = G.edges(nset1, data=data, default=default)
    if nbunch2 is None:
        # Exterior defaults to the complement of the interior, tested
        # lazily with `in`/`not in` instead of building a second set:
        # a boundary edge has exactly one endpoint inside.
        return (e for e in incident if (e[0] in nset1) ^ (e[1] in nset1))
    nset2 = set(nbunch2)

    def _crosses(e):
        u, v = e[0], e[1]
        return (u in nset1 and v in nset2) or (v in nset1 and u in nset2)

    return filter(_crosses, incident)
+
+
@nx._dispatchable
def node_boundary(G, nbunch1, nbunch2=None):
    """Return the node boundary of `nbunch1`.

    The *node boundary* of a set *S* with respect to a set *T* is the
    set of nodes *v* in *T* such that some *u* in *S* is joined to *v*
    by an edge. When *T* is omitted, it defaults to the set of all
    nodes not in *S*.

    Parameters
    ----------
    G : NetworkX graph

    nbunch1 : iterable
        Nodes of the graph forming the "interior" set *S* whose node
        boundary is returned.

    nbunch2 : iterable
        Nodes forming the target (or "exterior") set *T*. If not
        given, this defaults to all nodes of `G` not in `nbunch1`.

    Returns
    -------
    set
        The node boundary of `nbunch1` with respect to `nbunch2`.

    Examples
    --------
    >>> G = nx.wheel_graph(6)

    When nbunch2=None:

    >>> list(nx.node_boundary(G, (3, 4)))
    [0, 2, 5]

    When nbunch2 is given:

    >>> list(nx.node_boundary(G, (3, 4), (0, 1, 5)))
    [0, 5]

    Notes
    -----
    Elements of `nbunch1` or `nbunch2` that are not in `G` are
    silently ignored.

    `nbunch1` and `nbunch2` are usually meant to be disjoint, but in
    the interest of speed and generality, that is not required here.

    """
    interior = {n for n in nbunch1 if n in G}
    # Union of the (out-)neighborhoods of the interior, minus the
    # interior itself.
    boundary = set()
    for node in interior:
        boundary.update(G[node])
    boundary -= interior
    # When `nbunch2` is given, keep only boundary nodes inside it.
    if nbunch2 is not None:
        boundary &= set(nbunch2)
    return boundary
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bridges.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bridges.py
new file mode 100644
index 00000000..eaa6fd3b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bridges.py
@@ -0,0 +1,205 @@
+"""Bridge-finding algorithms."""
+
+from itertools import chain
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["bridges", "has_bridges", "local_bridges"]
+
+
@not_implemented_for("directed")
@nx._dispatchable
def bridges(G, root=None):
    """Generate all bridges in a graph.

    A *bridge* is an edge whose removal increases the number of
    connected components of the graph; equivalently, an edge belonging
    to no cycle. Bridges are also known as cut-edges, isthmuses, or
    cut arcs.

    Parameters
    ----------
    G : undirected graph

    root : node (optional)
       A node in the graph `G`. If specified, only the bridges in the
       connected component containing this node will be returned.

    Yields
    ------
    e : edge
       An edge of the graph whose removal disconnects the graph (or
       increases the number of connected components).

    Raises
    ------
    NodeNotFound
       If `root` is not in the graph `G`.

    NetworkXNotImplemented
        If `G` is a directed graph.

    Examples
    --------
    The barbell graph with parameter zero has a single bridge:

    >>> G = nx.barbell_graph(10, 0)
    >>> list(nx.bridges(G))
    [(9, 10)]

    Notes
    -----
    Implements the chain-decomposition characterization of [1]_: an
    edge is a bridge if and only if it lies in no chain produced by
    :func:`networkx.chain_decomposition`.

    That algorithm requires a simple graph, so a multigraph input is
    first collapsed to a simple graph; candidate bridges that are in
    fact parallel edges are then filtered out, since a parallel pair
    forms a cycle.

    Ignoring polylogarithmic factors, the worst-case time complexity
    matches :func:`networkx.chain_decomposition`: $O(m + n)$ for $n$
    nodes and $m$ edges.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Bridge_%28graph_theory%29#Bridge-Finding_with_Chain_Decompositions
    """
    is_multi = G.is_multigraph()
    H = nx.Graph(G) if is_multi else G
    chain_edges = {e for c in nx.chain_decomposition(H, root=root) for e in c}
    if root is not None:
        # Restrict candidates to root's connected component.
        H = H.subgraph(nx.node_connected_component(H, root)).copy()
    for u, v in H.edges():
        if (u, v) in chain_edges or (v, u) in chain_edges:
            continue
        # A parallel edge pair in the original multigraph is a cycle,
        # so such an edge is not a bridge.
        if is_multi and len(G[u][v]) > 1:
            continue
        yield u, v
+
+
@not_implemented_for("directed")
@nx._dispatchable
def has_bridges(G, root=None):
    """Decide whether a graph has any bridges.

    A *bridge* is an edge whose removal increases the number of
    connected components of the graph.

    Parameters
    ----------
    G : undirected graph

    root : node (optional)
       A node in the graph `G`. If specified, only the bridges in the
       connected component containing this node will be considered.

    Returns
    -------
    bool
       Whether the graph (or the connected component containing
       `root`) has any bridges.

    Raises
    ------
    NodeNotFound
       If `root` is not in the graph `G`.

    NetworkXNotImplemented
        If `G` is a directed graph.

    Examples
    --------
    The barbell graph with parameter zero has a single bridge::

        >>> G = nx.barbell_graph(10, 0)
        >>> nx.has_bridges(G)
        True

    On the other hand, the cycle graph has no bridges::

        >>> G = nx.cycle_graph(5)
        >>> nx.has_bridges(G)
        False

    Notes
    -----
    Delegates to :func:`networkx.bridges`, stopping at the first bridge
    found, so it shares that function's worst-case time complexity,
    $O(m + n)$ (ignoring polylogarithmic factors) for $n$ nodes and
    $m$ edges.

    """
    # Pull a single item from the generator; a sentinel distinguishes
    # "no bridges" from any yielded edge.
    _missing = object()
    return next(bridges(G, root=root), _missing) is not _missing
+
+
@not_implemented_for("multigraph")
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def local_bridges(G, with_span=True, weight=None):
    """Iterate over the local bridges of `G`, optionally with their span.

    A *local bridge* is an edge whose endpoints have no common
    neighbors — that is, an edge belonging to no triangle.

    The *span* of a local bridge is the shortest path length between
    its endpoints when the bridge itself is removed.

    Parameters
    ----------
    G : undirected graph

    with_span : bool
        If True, yield a 3-tuple `(u, v, span)`

    weight : function, string or None (default: None)
        If function, used to compute edge weights for the span.
        If string, the edge data attribute used in calculating span.
        If None, all edges have weight 1.

    Yields
    ------
    e : edge
        The local bridges as an edge 2-tuple of nodes `(u, v)` or
        as a 3-tuple `(u, v, span)` when `with_span is True`.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a directed graph or multigraph.

    Examples
    --------
    A cycle graph has every edge a local bridge with span N-1.

       >>> G = nx.cycle_graph(9)
       >>> (0, 8, 8) in set(nx.local_bridges(G))
       True
    """
    if with_span is not True:
        # Spans not requested: just the triangle-free edges.
        yield from (
            (u, v) for u, v in G.edges if not set(G[u]).intersection(G[v])
        )
        return

    get_weight = nx.weighted._weight_function(G, weight)
    for u, v in G.edges:
        if set(G[u]) & set(G[v]):
            continue
        endpoints = {u, v}

        def _masked_weight(a, b, d, _ends=endpoints):
            # Hide the bridge itself from the path search; every other
            # edge keeps its usual weight.
            return None if a in _ends and b in _ends else get_weight(a, b, d)

        try:
            yield u, v, nx.shortest_path_length(G, u, v, weight=_masked_weight)
        except nx.NetworkXNoPath:
            # Removing the bridge disconnects the endpoints entirely.
            yield u, v, float("inf")
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/broadcasting.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/broadcasting.py
new file mode 100644
index 00000000..9b362a0e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/broadcasting.py
@@ -0,0 +1,155 @@
+"""Routines to calculate the broadcast time of certain graphs.
+
+Broadcasting is an information dissemination problem in which a node in a graph,
+called the originator, must distribute a message to all other nodes by placing
+a series of calls along the edges of the graph. Once informed, other nodes aid
+the originator in distributing the message.
+
+The broadcasting must be completed as quickly as possible subject to the
+following constraints:
+- Each call requires one unit of time.
+- A node can only participate in one call per unit of time.
+- Each call only involves two adjacent nodes: a sender and a receiver.
+"""
+
+import networkx as nx
+from networkx import NetworkXError
+from networkx.utils import not_implemented_for
+
+__all__ = [
+    "tree_broadcast_center",
+    "tree_broadcast_time",
+]
+
+
+def _get_max_broadcast_value(G, U, v, values):
+    adj = sorted(set(G.neighbors(v)) & U, key=values.get, reverse=True)
+    return max(values[u] + i for i, u in enumerate(adj, start=1))
+
+
+def _get_broadcast_centers(G, v, values, target):
+    adj = sorted(G.neighbors(v), key=values.get, reverse=True)
+    j = next(i for i, u in enumerate(adj, start=1) if values[u] + i == target)
+    return set([v] + adj[:j])
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def tree_broadcast_center(G):
    """Return the Broadcast Center of the tree `G`.

    The broadcast center of a graph G denotes the set of nodes having
    minimum broadcast time [1]_. This is a linear algorithm for determining
    the broadcast center of a tree with ``N`` nodes, as a by-product it also
    determines the broadcast time from the broadcast center.

    Parameters
    ----------
    G : undirected graph
        The graph should be an undirected tree

    Returns
    -------
    BC : (int, set) tuple
        minimum broadcast number of the tree, set of broadcast centers

    Raises
    ------
    NetworkXError
        If the graph is not a tree.
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.

    References
    ----------
    .. [1] Slater, P.J., Cockayne, E.J., Hedetniemi, S.T,
       Information dissemination in trees. SIAM J.Comput. 10(4), 692–701 (1981)
    """
    # Bug fix: the error was previously constructed but never raised, so
    # non-tree input was silently accepted and produced meaningless results.
    if not nx.is_tree(G):
        raise NetworkXError("Input graph is not a tree")
    # step 0: trivial trees -- one call suffices for 2 nodes, none for 1.
    if G.number_of_nodes() == 2:
        return 1, set(G.nodes())
    if G.number_of_nodes() == 1:
        return 0, set(G.nodes())

    # step 1: leaves broadcast in time 0 once informed.
    U = {node for node, deg in G.degree if deg == 1}
    values = {n: 0 for n in U}
    T = G.copy()
    T.remove_nodes_from(U)

    # step 2: nodes that became leaves of T are adjacent only to leaves of G;
    # informing their G.degree - 1 leaf children takes that many calls.
    W = {node for node, deg in T.degree if deg == 1}
    values.update((w, G.degree[w] - 1) for w in W)

    # step 3: repeatedly strip the leaf of T with the smallest value,
    # propagating broadcast values toward the center of the tree.
    while T.number_of_nodes() >= 2:
        # step 4: cheapest current leaf and its unique remaining neighbor.
        w = min(W, key=lambda n: values[n])
        v = next(T.neighbors(w))

        # step 5: w's subtree is now fully evaluated.
        U.add(w)
        W.remove(w)
        T.remove_node(w)

        # step 6: if v became a leaf of T, its broadcast value is determined.
        if T.degree(v) == 1:
            values[v] = _get_max_broadcast_value(G, U, v, values)
            W.add(v)

    # step 7: the single remaining node determines the broadcast number.
    v = nx.utils.arbitrary_element(T)
    b_T = _get_max_broadcast_value(G, U, v, values)
    return b_T, _get_broadcast_centers(G, v, values, b_T)
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def tree_broadcast_time(G, node=None):
    """Return the Broadcast Time of the tree `G`.

    The minimum broadcast time of a node is the least number of time units
    needed to inform every other node starting from that originator. The
    broadcast time of the tree is the maximum of this quantity over all
    nodes [1]_. With ``node`` given, the minimum broadcast time of that
    node is returned; with ``node=None`` the tree's broadcast time is
    returned.

    Parameters
    ----------
    G : undirected graph
        The graph should be an undirected tree
    node: int, optional
        index of starting node. If `None`, the algorithm returns the broadcast
        time of the tree.

    Returns
    -------
    BT : int
        Broadcast Time of a node in a tree

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.

    References
    ----------
    .. [1] Harutyunyan, H. A. and Li, Z.
        "A Simple Construction of Broadcast Graphs."
        In Computing and Combinatorics. COCOON 2019
        (Ed. D. Z. Du and C. Tian.) Springer, pp. 240-253, 2019.
    """
    b_T, b_C = tree_broadcast_center(G)
    if node is not None:
        # Broadcast time of a node: reach the nearest center, then broadcast.
        return b_T + min(nx.shortest_path_length(G, node, u) for u in b_C)
    # Broadcast time of the tree: broadcast number plus the largest distance
    # from any node to its nearest broadcast center.
    nearest = dict.fromkeys(G, len(G))
    for center in b_C:
        for target, d in nx.shortest_path_length(G, center).items():
            if d < nearest[target]:
                nearest[target] = d
    return b_T + max(nearest.values())
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__init__.py
new file mode 100644
index 00000000..c91a904a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__init__.py
@@ -0,0 +1,20 @@
+from .betweenness import *
+from .betweenness_subset import *
+from .closeness import *
+from .current_flow_betweenness import *
+from .current_flow_betweenness_subset import *
+from .current_flow_closeness import *
+from .degree_alg import *
+from .dispersion import *
+from .eigenvector import *
+from .group import *
+from .harmonic import *
+from .katz import *
+from .load import *
+from .percolation import *
+from .reaching import *
+from .second_order import *
+from .subgraph_alg import *
+from .trophic import *
+from .voterank_alg import *
+from .laplacian import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness.py
new file mode 100644
index 00000000..42e09771
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness.py
@@ -0,0 +1,436 @@
+"""Betweenness centrality measures."""
+
+from collections import deque
+from heapq import heappop, heappush
+from itertools import count
+
+import networkx as nx
+from networkx.algorithms.shortest_paths.weighted import _weight_function
+from networkx.utils import py_random_state
+from networkx.utils.decorators import not_implemented_for
+
+__all__ = ["betweenness_centrality", "edge_betweenness_centrality"]
+
+
@py_random_state(5)
@nx._dispatchable(edge_attrs="weight")
def betweenness_centrality(
    G, k=None, normalized=True, weight=None, endpoints=False, seed=None
):
    r"""Compute shortest-path betweenness centrality for nodes.

    The betweenness centrality of a node $v$ is

    .. math::

       c_B(v) =\sum_{s,t \in V} \frac{\sigma(s, t|v)}{\sigma(s, t)}

    where $V$ is the node set, $\sigma(s, t)$ counts shortest
    $(s, t)$-paths, and $\sigma(s, t|v)$ counts those passing through a
    node $v$ other than $s, t$; $\sigma(s, t) = 1$ when $s = t$ and
    $\sigma(s, t|v) = 0$ when $v \in \{s, t\}$ [2]_.

    Parameters
    ----------
    G : graph
        A NetworkX graph.
    k : int, optional (default=None)
        Use ``k`` sampled source nodes ("pivots") to estimate betweenness
        instead of all nodes; requires ``k <= n``. Higher ``k`` improves
        the approximation (see [3]_ for guidance).
    normalized : bool, optional
        If True, rescale by ``2/((n-1)(n-2))`` for undirected graphs and
        ``1/((n-1)(n-2))`` for directed ones, where ``n = len(G)``.
    weight : None or string, optional (default=None)
        Name of the edge attribute used as distance; ``None`` treats every
        edge as length 1.  Weights must be positive -- zero weights allow
        infinitely many equal-length paths.
    endpoints : bool, optional
        If True, include path endpoints in the shortest-path counts.
    seed : integer, random_state, or None (default)
        Random-number state; only consulted when ``k`` is not None.

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    edge_betweenness_centrality
    load_centrality

    Notes
    -----
    Implements Brandes' algorithm [1]_ (first published by Freeman [4]_).
    For undirected graphs each path is counted once (not once per
    direction).  Results with float edge weights can be affected by
    rounding; scaling weights to integers is a workaround.

    References
    ----------
    .. [1] Ulrik Brandes:
       A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    .. [2] Ulrik Brandes:
       On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       https://doi.org/10.1016/j.socnet.2007.11.001
    .. [3] Ulrik Brandes and Christian Pich:
       Centrality Estimation in Large Networks.
       International Journal of Bifurcation and Chaos 17(7):2303-2318, 2007.
       https://dx.doi.org/10.1142/S0218127407018403
    .. [4] Linton C. Freeman:
       A set of measures of centrality based on betweenness.
       Sociometry 40: 35–41, 1977
       https://doi.org/10.2307/3033543
    """
    node_bc = dict.fromkeys(G, 0.0)
    # Either every node, or a random sample of k pivots, acts as a source.
    sources = G if k is None else seed.sample(list(G.nodes()), k)
    accumulate = _accumulate_endpoints if endpoints else _accumulate_basic
    for s in sources:
        # Stage 1: single-source shortest paths (BFS, or Dijkstra if weighted).
        if weight is None:
            S, P, sigma, _ = _single_source_shortest_path_basic(G, s)
        else:
            S, P, sigma, _ = _single_source_dijkstra_path_basic(G, s, weight)
        # Stage 2: back-propagate dependencies onto nodes.
        node_bc, _ = accumulate(node_bc, S, P, sigma, s)
    # Stage 3: normalization and sampling correction.
    return _rescale(
        node_bc,
        len(G),
        normalized=normalized,
        directed=G.is_directed(),
        k=k,
        endpoints=endpoints,
    )
+
+
@py_random_state(4)
@nx._dispatchable(edge_attrs="weight")
def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, seed=None):
    r"""Compute shortest-path betweenness centrality for edges.

    The betweenness centrality of an edge $e$ is

    .. math::

       c_B(e) =\sum_{s,t \in V} \frac{\sigma(s, t|e)}{\sigma(s, t)}

    where $V$ is the node set, $\sigma(s, t)$ counts shortest
    $(s, t)$-paths, and $\sigma(s, t|e)$ counts those passing through
    edge $e$ [2]_.

    Parameters
    ----------
    G : graph
        A NetworkX graph.
    k : int, optional (default=None)
        Use ``k`` sampled source nodes to estimate betweenness instead of
        all nodes; requires ``k <= n``.
    normalized : bool, optional
        If True, rescale by $2/(n(n-1))$ for undirected graphs and
        $1/(n(n-1))$ for directed ones, where $n$ is the number of nodes.
    weight : None or string, optional (default=None)
        Name of the edge attribute used as distance; ``None`` treats every
        edge as length 1.  Weights must be positive.
    seed : integer, random_state, or None (default)
        Random-number state; only consulted when ``k`` is not None.

    Returns
    -------
    edges : dictionary
        Dictionary of edges with betweenness centrality as the value.

    See Also
    --------
    betweenness_centrality
    edge_load

    Notes
    -----
    Implements Brandes' algorithm [1]_.  Zero edge weights can produce an
    infinite number of equal-length paths between pairs of nodes.

    References
    ----------
    .. [1]  A Faster Algorithm for Betweenness Centrality. Ulrik Brandes,
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       https://doi.org/10.1016/j.socnet.2007.11.001
    """
    # Node entries are needed during accumulation and removed afterwards.
    edge_bc = dict.fromkeys(G, 0.0)
    edge_bc.update(dict.fromkeys(G.edges(), 0.0))
    sources = G if k is None else seed.sample(list(G.nodes()), k)
    for s in sources:
        # Stage 1: single-source shortest paths (BFS, or Dijkstra if weighted).
        if weight is None:
            S, P, sigma, _ = _single_source_shortest_path_basic(G, s)
        else:
            S, P, sigma, _ = _single_source_dijkstra_path_basic(G, s, weight)
        # Stage 2: back-propagate dependencies onto edges.
        edge_bc = _accumulate_edges(edge_bc, S, P, sigma, s)
    for node in G:  # only edge entries are returned
        del edge_bc[node]
    edge_bc = _rescale_e(
        edge_bc, len(G), normalized=normalized, directed=G.is_directed()
    )
    if G.is_multigraph():
        # Split each (u, v) value across the parallel edges of minimal weight.
        edge_bc = _add_edge_keys(G, edge_bc, weight=weight)
    return edge_bc
+
+
+# helpers for betweenness centrality
+
+
+def _single_source_shortest_path_basic(G, s):
+    S = []
+    P = {}
+    for v in G:
+        P[v] = []
+    sigma = dict.fromkeys(G, 0.0)  # sigma[v]=0 for v in G
+    D = {}
+    sigma[s] = 1.0
+    D[s] = 0
+    Q = deque([s])
+    while Q:  # use BFS to find shortest paths
+        v = Q.popleft()
+        S.append(v)
+        Dv = D[v]
+        sigmav = sigma[v]
+        for w in G[v]:
+            if w not in D:
+                Q.append(w)
+                D[w] = Dv + 1
+            if D[w] == Dv + 1:  # this is a shortest path, count paths
+                sigma[w] += sigmav
+                P[w].append(v)  # predecessors
+    return S, P, sigma, D
+
+
def _single_source_dijkstra_path_basic(G, s, weight):
    """Brandes stage 1 (weighted): Dijkstra from ``s`` (modified from Eppstein).

    Returns ``(S, P, sigma, D)``: settled nodes in nondecreasing distance
    order, shortest-path predecessors, path counts, and distances.
    """
    weight = _weight_function(G, weight)
    order = []
    pred = {v: [] for v in G}
    sigma = dict.fromkeys(G, 0.0)
    sigma[s] = 1.0
    settled = {}
    seen = {s: 0}
    tiebreak = count()  # keeps heap entries comparable for equal distances
    heap = [(0, next(tiebreak), s, s)]
    while heap:
        dist, _, parent, v = heappop(heap)
        if v in settled:
            continue  # stale heap entry; v already finalized
        sigma[v] += sigma[parent]  # count paths arriving via parent
        order.append(v)
        settled[v] = dist
        for w, attrs in G[v].items():
            cand = dist + weight(v, w, attrs)
            if w not in settled and (w not in seen or cand < seen[w]):
                seen[w] = cand
                heappush(heap, (cand, next(tiebreak), v, w))
                sigma[w] = 0.0  # shorter path found: restart its count
                pred[w] = [v]
            elif cand == seen[w]:  # equally short path through v
                sigma[w] += sigma[v]
                pred[w].append(v)
    return order, pred, sigma, settled
+
+
+def _accumulate_basic(betweenness, S, P, sigma, s):
+    delta = dict.fromkeys(S, 0)
+    while S:
+        w = S.pop()
+        coeff = (1 + delta[w]) / sigma[w]
+        for v in P[w]:
+            delta[v] += sigma[v] * coeff
+        if w != s:
+            betweenness[w] += delta[w]
+    return betweenness, delta
+
+
+def _accumulate_endpoints(betweenness, S, P, sigma, s):
+    betweenness[s] += len(S) - 1
+    delta = dict.fromkeys(S, 0)
+    while S:
+        w = S.pop()
+        coeff = (1 + delta[w]) / sigma[w]
+        for v in P[w]:
+            delta[v] += sigma[v] * coeff
+        if w != s:
+            betweenness[w] += delta[w] + 1
+    return betweenness, delta
+
+
+def _accumulate_edges(betweenness, S, P, sigma, s):
+    delta = dict.fromkeys(S, 0)
+    while S:
+        w = S.pop()
+        coeff = (1 + delta[w]) / sigma[w]
+        for v in P[w]:
+            c = sigma[v] * coeff
+            if (v, w) not in betweenness:
+                betweenness[(w, v)] += c
+            else:
+                betweenness[(v, w)] += c
+            delta[v] += c
+        if w != s:
+            betweenness[w] += delta[w]
+    return betweenness
+
+
+def _rescale(betweenness, n, normalized, directed=False, k=None, endpoints=False):
+    if normalized:
+        if endpoints:
+            if n < 2:
+                scale = None  # no normalization
+            else:
+                # Scale factor should include endpoint nodes
+                scale = 1 / (n * (n - 1))
+        elif n <= 2:
+            scale = None  # no normalization b=0 for all nodes
+        else:
+            scale = 1 / ((n - 1) * (n - 2))
+    else:  # rescale by 2 for undirected graphs
+        if not directed:
+            scale = 0.5
+        else:
+            scale = None
+    if scale is not None:
+        if k is not None:
+            scale = scale * n / k
+        for v in betweenness:
+            betweenness[v] *= scale
+    return betweenness
+
+
+def _rescale_e(betweenness, n, normalized, directed=False, k=None):
+    if normalized:
+        if n <= 1:
+            scale = None  # no normalization b=0 for all nodes
+        else:
+            scale = 1 / (n * (n - 1))
+    else:  # rescale by 2 for undirected graphs
+        if not directed:
+            scale = 0.5
+        else:
+            scale = None
+    if scale is not None:
+        if k is not None:
+            scale = scale * n / k
+        for v in betweenness:
+            betweenness[v] *= scale
+    return betweenness
+
+
@not_implemented_for("graph")
def _add_edge_keys(G, betweenness, weight=None):
    r"""Expand (u, v) betweenness values to keyed multigraph edges.

    Parameters
    ----------
    G : NetworkX multigraph.

    betweenness : dictionary
        Mapping of (u, v) node pairs to betweenness centrality values.

    weight : string or function
        See `_weight_function` for details. Defaults to `None`.

    Returns
    -------
    edges : dictionary
        Mapping of (u, v, key) edges to betweenness values; the value for
        each pair is divided evenly among its minimum-weight parallel edges.
    """
    weight_of = _weight_function(G, weight)

    edge_bc = dict.fromkeys(G.edges, 0.0)
    for u, v in betweenness:
        data = G[u][v]
        min_wt = weight_of(u, v, data)
        # Parallel edges tying for the minimum weight share the value.
        tied = [key for key in data if weight_of(u, v, {key: data[key]}) == min_wt]
        share = betweenness[(u, v)] / len(tied)
        for key in tied:
            edge_bc[(u, v, key)] = share

    return edge_bc
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness_subset.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness_subset.py
new file mode 100644
index 00000000..b9e99365
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness_subset.py
@@ -0,0 +1,275 @@
+"""Betweenness centrality measures for subsets of nodes."""
+
+import networkx as nx
+from networkx.algorithms.centrality.betweenness import (
+    _add_edge_keys,
+)
+from networkx.algorithms.centrality.betweenness import (
+    _single_source_dijkstra_path_basic as dijkstra,
+)
+from networkx.algorithms.centrality.betweenness import (
+    _single_source_shortest_path_basic as shortest_path,
+)
+
+__all__ = [
+    "betweenness_centrality_subset",
+    "edge_betweenness_centrality_subset",
+]
+
+
@nx._dispatchable(edge_attrs="weight")
def betweenness_centrality_subset(G, sources, targets, normalized=False, weight=None):
    r"""Compute betweenness centrality for a subset of nodes.

    .. math::

       c_B(v) =\sum_{s\in S, t \in T} \frac{\sigma(s, t|v)}{\sigma(s, t)}

    where $S$ is the set of sources, $T$ the set of targets,
    $\sigma(s, t)$ counts shortest $(s, t)$-paths, and $\sigma(s, t|v)$
    counts those passing through a node $v$ other than $s, t$;
    $\sigma(s, t) = 1$ when $s = t$ and $\sigma(s, t|v) = 0$ when
    $v \in \{s, t\}$ [2]_.

    Parameters
    ----------
    G : graph
        A NetworkX graph.
    sources: list of nodes
        Nodes to use as sources for shortest paths in betweenness
    targets: list of nodes
        Nodes to use as targets for shortest paths in betweenness
    normalized : bool, optional
        If True, rescale by $2/((n-1)(n-2))$ for undirected graphs and
        $1/((n-1)(n-2))$ for directed ones, so that with
        ``sources=targets=G.nodes()`` the result matches
        ``betweenness_centrality(G)``.
    weight : None or string, optional (default=None)
        Name of the edge attribute used as distance; ``None`` treats every
        edge as length 1.  Weights must be positive.

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    edge_betweenness_centrality
    load_centrality

    Notes
    -----
    Based on Brandes' algorithm [1]_.  When ``sources`` and ``targets``
    differ on an undirected graph, paths are counted in one direction
    only (each then contributes half an undirected path).

    References
    ----------
    .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       https://doi.org/10.1016/j.socnet.2007.11.001
    """
    node_bc = dict.fromkeys(G, 0.0)
    for s in sources:
        # Stage 1: single-source shortest paths (BFS, or Dijkstra if weighted).
        if weight is None:
            S, P, sigma, _ = shortest_path(G, s)
        else:
            S, P, sigma, _ = dijkstra(G, s, weight)
        # Stage 2: accumulate dependencies toward the target subset only.
        node_bc = _accumulate_subset(node_bc, S, P, sigma, s, targets)
    return _rescale(node_bc, len(G), normalized=normalized, directed=G.is_directed())
+
+
@nx._dispatchable(edge_attrs="weight")
def edge_betweenness_centrality_subset(
    G, sources, targets, normalized=False, weight=None
):
    r"""Compute betweenness centrality for edges for a subset of nodes.

    .. math::

       c_B(v) =\sum_{s\in S,t \in T} \frac{\sigma(s, t|e)}{\sigma(s, t)}

    where $S$ is the set of sources, $T$ the set of targets,
    $\sigma(s, t)$ counts shortest $(s, t)$-paths, and $\sigma(s, t|e)$
    counts those passing through edge $e$ [2]_.

    Parameters
    ----------
    G : graph
        A networkx graph.
    sources: list of nodes
        Nodes to use as sources for shortest paths in betweenness
    targets: list of nodes
        Nodes to use as targets for shortest paths in betweenness
    normalized : bool, optional
        If True, rescale by `2/(n(n-1))` for undirected graphs and
        `1/(n(n-1))` for directed ones, so that with
        ``sources=targets=G.nodes()`` the result matches
        ``edge_betweenness_centrality(G)``.
    weight : None or string, optional (default=None)
        Name of the edge attribute used as distance; ``None`` treats every
        edge as length 1.  Weights must be positive.

    Returns
    -------
    edges : dictionary
        Dictionary of edges with Betweenness centrality as the value.

    See Also
    --------
    betweenness_centrality
    edge_load

    Notes
    -----
    Based on Brandes' algorithm [1]_.  Zero edge weights can produce an
    infinite number of equal-length paths between pairs of nodes.

    References
    ----------
    .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       https://doi.org/10.1016/j.socnet.2007.11.001
    """
    # Node entries are needed during accumulation and removed afterwards.
    edge_bc = dict.fromkeys(G, 0.0)
    edge_bc.update(dict.fromkeys(G.edges(), 0.0))
    for s in sources:
        # Stage 1: single-source shortest paths (BFS, or Dijkstra if weighted).
        if weight is None:
            S, P, sigma, _ = shortest_path(G, s)
        else:
            S, P, sigma, _ = dijkstra(G, s, weight)
        # Stage 2: accumulate dependencies onto edges toward the targets.
        edge_bc = _accumulate_edges_subset(edge_bc, S, P, sigma, s, targets)
    for node in G:  # only edge entries are returned
        del edge_bc[node]
    edge_bc = _rescale_e(
        edge_bc, len(G), normalized=normalized, directed=G.is_directed()
    )
    if G.is_multigraph():
        edge_bc = _add_edge_keys(G, edge_bc, weight=weight)
    return edge_bc
+
+
+def _accumulate_subset(betweenness, S, P, sigma, s, targets):
+    delta = dict.fromkeys(S, 0.0)
+    target_set = set(targets) - {s}
+    while S:
+        w = S.pop()
+        if w in target_set:
+            coeff = (delta[w] + 1.0) / sigma[w]
+        else:
+            coeff = delta[w] / sigma[w]
+        for v in P[w]:
+            delta[v] += sigma[v] * coeff
+        if w != s:
+            betweenness[w] += delta[w]
+    return betweenness
+
+
+def _accumulate_edges_subset(betweenness, S, P, sigma, s, targets):
+    """edge_betweenness_centrality_subset helper."""
+    delta = dict.fromkeys(S, 0)
+    target_set = set(targets)
+    while S:
+        w = S.pop()
+        for v in P[w]:
+            if w in target_set:
+                c = (sigma[v] / sigma[w]) * (1.0 + delta[w])
+            else:
+                c = delta[w] / len(P[w])
+            if (v, w) not in betweenness:
+                betweenness[(w, v)] += c
+            else:
+                betweenness[(v, w)] += c
+            delta[v] += c
+        if w != s:
+            betweenness[w] += delta[w]
+    return betweenness
+
+
+def _rescale(betweenness, n, normalized, directed=False):
+    """betweenness_centrality_subset helper."""
+    if normalized:
+        if n <= 2:
+            scale = None  # no normalization b=0 for all nodes
+        else:
+            scale = 1.0 / ((n - 1) * (n - 2))
+    else:  # rescale by 2 for undirected graphs
+        if not directed:
+            scale = 0.5
+        else:
+            scale = None
+    if scale is not None:
+        for v in betweenness:
+            betweenness[v] *= scale
+    return betweenness
+
+
+def _rescale_e(betweenness, n, normalized, directed=False):
+    """edge_betweenness_centrality_subset helper."""
+    if normalized:
+        if n <= 1:
+            scale = None  # no normalization b=0 for all nodes
+        else:
+            scale = 1.0 / (n * (n - 1))
+    else:  # rescale by 2 for undirected graphs
+        if not directed:
+            scale = 0.5
+        else:
+            scale = None
+    if scale is not None:
+        for v in betweenness:
+            betweenness[v] *= scale
+    return betweenness
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/closeness.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/closeness.py
new file mode 100644
index 00000000..1cc2f959
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/closeness.py
@@ -0,0 +1,282 @@
+"""
+Closeness centrality measures.
+"""
+
+import functools
+
+import networkx as nx
+from networkx.exception import NetworkXError
+from networkx.utils.decorators import not_implemented_for
+
+__all__ = ["closeness_centrality", "incremental_closeness_centrality"]
+
+
@nx._dispatchable(edge_attrs="distance")
def closeness_centrality(G, u=None, distance=None, wf_improved=True):
    r"""Compute closeness centrality for nodes.

    Closeness centrality [1]_ of a node `u` is the reciprocal of the
    average shortest path distance to `u` over all `n-1` reachable nodes:

    .. math::

        C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},

    where `d(v, u)` is the shortest-path distance between `v` and `u`,
    and `n-1` is the number of nodes reachable from `u`.  For directed
    graphs the *incoming* distance to `u` is used; to use outward
    distance, apply the function to ``G.reverse()``.

    Higher values of closeness indicate higher centrality.

    For graphs with more than one connected component, Wasserman and
    Faust [2]_ scale the value by the fraction of actors reachable, so
    nodes in small components receive a smaller closeness.  Letting `N`
    denote the number of nodes in the graph,

    .. math::

        C_{WF}(u) = \frac{n-1}{N-1} \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},

    Parameters
    ----------
    G : graph
      A NetworkX graph

    u : node, optional
      Return only the value for node u

    distance : edge attribute key, optional (default=None)
      Use the specified edge attribute as the edge distance in shortest
      path calculations (Dijkstra's algorithm).  If `None` (the default)
      all edges have a distance of 1.  Absent edge attributes are assigned
      a distance of 1.  Note that no check is performed to ensure that
      edges have the provided attribute.

    wf_improved : bool, optional (default=True)
      If True, scale by the fraction of nodes reachable.  This gives the
      Wasserman and Faust improved formula.  For single component graphs
      it is the same as the original formula.

    Returns
    -------
    nodes : dictionary
      Dictionary of nodes with closeness centrality as the value
      (a single float if `u` was given).

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.closeness_centrality(G)
    {0: 1.0, 1: 1.0, 2: 0.75, 3: 0.75}

    See Also
    --------
    betweenness_centrality, load_centrality, eigenvector_centrality,
    degree_centrality, incremental_closeness_centrality

    Notes
    -----
    The closeness centrality is normalized to `(n-1)/(|G|-1)` where
    `n` is the number of nodes in the connected part of graph
    containing the node.  If the graph is not completely connected,
    this algorithm computes the closeness centrality for each
    connected part separately scaled by that part's size.

    In NetworkX 2.2 and earlier a bug caused Dijkstra's algorithm to use
    the outward distance rather than the inward distance.  If you use a
    'distance' keyword and a DiGraph, your results will change between
    v2.2 and v2.3.

    References
    ----------
    .. [1] Linton C. Freeman: Centrality in networks: I.
       Conceptual clarification. Social Networks 1:215-239, 1979.
       https://doi.org/10.1016/0378-8733(78)90021-7
    .. [2] pg. 201 of Wasserman, S. and Faust, K.,
       Social Network Analysis: Methods and Applications, 1994,
       Cambridge University Press.
    """
    if G.is_directed():
        # Closeness uses inward distance; a reversed view provides it
        # through ordinary single-source searches.
        G = G.reverse()

    if distance is None:
        path_length = nx.single_source_shortest_path_length
    else:
        # Dijkstra's algorithm with the given edge attribute as weight.
        path_length = functools.partial(
            nx.single_source_dijkstra_path_length, weight=distance
        )

    nodes = [u] if u is not None else G.nodes
    n_total = len(G)
    closeness = {}
    for node in nodes:
        dists = path_length(G, node)
        total_dist = sum(dists.values())
        score = 0.0
        if total_dist > 0.0 and n_total > 1:
            reachable = len(dists) - 1.0
            score = reachable / total_dist
            if wf_improved:
                # Scale by the fraction of the graph this node can reach.
                score *= reachable / (n_total - 1)
        closeness[node] = score
    return closeness if u is None else closeness[u]
+
+
@not_implemented_for("directed")
@nx._dispatchable(mutates_input=True)
def incremental_closeness_centrality(
    G, edge, prev_cc=None, insertion=True, wf_improved=True
):
    r"""Incremental closeness centrality for nodes.

    Compute closeness centrality for nodes using level-based work filtering
    as described in Incremental Algorithms for Closeness Centrality by Sariyuce et al.

    Level-based work filtering detects unnecessary updates to the closeness
    centrality and filters them out.

    ---
    From "Incremental Algorithms for Closeness Centrality":

    Theorem 1: Let :math:`G = (V, E)` be a graph and u and v be two vertices in V
    such that there is no edge (u, v) in E. Let :math:`G' = (V, E \cup uv)`
    Then :math:`cc[s] = cc'[s]` if and only if :math:`\left|dG(s, u) - dG(s, v)\right| \leq 1`.

    Where :math:`dG(u, v)` denotes the length of the shortest path between
    two vertices u, v in a graph G, cc[s] is the closeness centrality for a
    vertex s in V, and cc'[s] is the closeness centrality for a
    vertex s in V, with the (u, v) edge added.
    ---

    We use Theorem 1 to filter out updates when adding or removing an edge.
    When adding an edge (u, v), we compute the shortest path lengths from all
    other nodes to u and to v before the node is added. When removing an edge,
    we compute the shortest path lengths after the edge is removed. Then we
    apply Theorem 1 to use previously computed closeness centrality for nodes
    where :math:`\left|dG(s, u) - dG(s, v)\right| \leq 1`. This works only for
    undirected, unweighted graphs; the distance argument is not supported.

    Closeness centrality [1]_ of a node `u` is the reciprocal of the
    sum of the shortest path distances from `u` to all `n-1` other nodes.
    Since the sum of distances depends on the number of nodes in the
    graph, closeness is normalized by the sum of minimum possible
    distances `n-1`.

    .. math::

        C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},

    where `d(v, u)` is the shortest-path distance between `v` and `u`,
    and `n` is the number of nodes in the graph.

    Notice that higher values of closeness indicate higher centrality.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    edge : tuple
      The modified edge (u, v) in the graph.

    prev_cc : dictionary
      The previous closeness centrality for all nodes in the graph.

    insertion : bool, optional
      If True (default) the edge was inserted, otherwise it was deleted from the graph.

    wf_improved : bool, optional (default=True)
      If True, scale by the fraction of nodes reachable. This gives the
      Wasserman and Faust improved formula. For single component graphs
      it is the same as the original formula.

    Returns
    -------
    nodes : dictionary
      Dictionary of nodes with closeness centrality as the value.

    See Also
    --------
    betweenness_centrality, load_centrality, eigenvector_centrality,
    degree_centrality, closeness_centrality

    Notes
    -----
    The closeness centrality is normalized to `(n-1)/(|G|-1)` where
    `n` is the number of nodes in the connected part of graph
    containing the node.  If the graph is not completely connected,
    this algorithm computes the closeness centrality for each
    connected part separately.

    References
    ----------
    .. [1] Freeman, L.C., 1979. Centrality in networks: I.
       Conceptual clarification.  Social Networks 1, 215--239.
       https://doi.org/10.1016/0378-8733(78)90021-7
    .. [2] Sariyuce, A.E. ; Kaya, K. ; Saule, E. ; Catalyiirek, U.V. Incremental
       Algorithms for Closeness Centrality. 2013 IEEE International Conference on Big Data
       http://sariyuce.com/papers/bigdata13.pdf
    """
    # The Theorem 1 filter is only meaningful if prev_cc covers exactly the
    # current node set; reject stale/mismatched dictionaries up front.
    if prev_cc is not None and set(prev_cc.keys()) != set(G.nodes()):
        raise NetworkXError("prev_cc and G do not have the same nodes")

    # Unpack edge
    (u, v) = edge
    # Unweighted BFS distances; the level-based filter assumes unit weights.
    path_length = nx.single_source_shortest_path_length

    if insertion:
        # For edge insertion, we want shortest paths before the edge is inserted
        du = path_length(G, u)
        dv = path_length(G, v)

        G.add_edge(u, v)
    else:
        G.remove_edge(u, v)

        # For edge removal, we want shortest paths after the edge is removed
        du = path_length(G, u)
        dv = path_length(G, v)

    # Without previous values there is nothing to filter against; fall back
    # to a full recomputation on the modified graph.
    if prev_cc is None:
        return nx.closeness_centrality(G)

    nodes = G.nodes()
    closeness_dict = {}
    for n in nodes:
        # Theorem 1: nodes whose distances to u and v differ by at most 1
        # keep their previous centrality.  Nodes unreachable from u or v
        # (missing from du/dv) must be recomputed.
        if n in du and n in dv and abs(du[n] - dv[n]) <= 1:
            closeness_dict[n] = prev_cc[n]
        else:
            sp = path_length(G, n)
            totsp = sum(sp.values())
            len_G = len(G)
            _closeness_centrality = 0.0
            if totsp > 0.0 and len_G > 1:
                _closeness_centrality = (len(sp) - 1.0) / totsp
                # normalize to number of nodes-1 in connected part
                if wf_improved:
                    s = (len(sp) - 1.0) / (len_G - 1)
                    _closeness_centrality *= s
            closeness_dict[n] = _closeness_centrality

    # Leave the graph as we found it
    if insertion:
        G.remove_edge(u, v)
    else:
        G.add_edge(u, v)

    return closeness_dict
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness.py
new file mode 100644
index 00000000..bfde279a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness.py
@@ -0,0 +1,342 @@
+"""Current-flow betweenness centrality measures."""
+
+import networkx as nx
+from networkx.algorithms.centrality.flow_matrix import (
+    CGInverseLaplacian,
+    FullInverseLaplacian,
+    SuperLUInverseLaplacian,
+    flow_matrix_row,
+)
+from networkx.utils import (
+    not_implemented_for,
+    py_random_state,
+    reverse_cuthill_mckee_ordering,
+)
+
+__all__ = [
+    "current_flow_betweenness_centrality",
+    "approximate_current_flow_betweenness_centrality",
+    "edge_current_flow_betweenness_centrality",
+]
+
+
@not_implemented_for("directed")
@py_random_state(7)
@nx._dispatchable(edge_attrs="weight")
def approximate_current_flow_betweenness_centrality(
    G,
    normalized=True,
    weight=None,
    dtype=float,
    solver="full",
    epsilon=0.5,
    kmax=10000,
    seed=None,
):
    r"""Compute the approximate current-flow betweenness centrality for nodes.

    Approximates the current-flow betweenness centrality within absolute
    error of epsilon with high probability [1]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.

    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.
      The weight reflects the capacity or the strength of the
      edge.

    dtype : data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver : string (default='full')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    epsilon: float
        Absolute error tolerance.

    kmax: int
       Maximum number of sample node pairs to use for approximation.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    current_flow_betweenness_centrality

    Notes
    -----
    The running time is $O((1/\epsilon^2)m{\sqrt k} \log n)$
    and the space required is $O(m)$ for $n$ nodes and $m$ edges.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Ulrik Brandes and Daniel Fleischer:
       Centrality Measures Based on Current Flow.
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       https://doi.org/10.1007/978-3-540-31856-9_44
    """
    import numpy as np

    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    solvers = {
        "full": FullInverseLaplacian,
        "lu": SuperLUInverseLaplacian,
        "cg": CGInverseLaplacian,
    }
    n = G.number_of_nodes()
    # Relabel nodes 0..n-1 along a reverse Cuthill-McKee ordering so the
    # Laplacian stays narrow-banded for the sparse solvers.
    ordering = list(reverse_cuthill_mckee_ordering(G))
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    L = nx.laplacian_matrix(H, nodelist=range(n), weight=weight).asformat("csc")
    L = L.astype(dtype)
    C = solvers[solver](L, dtype=dtype)  # initialize inverse-Laplacian solver
    betweenness = dict.fromkeys(H, 0.0)
    nb = (n - 1.0) * (n - 2.0)  # normalization factor
    cstar = n * (n - 1) / nb
    # Sample-count multiplier from the approximation scheme (adjustable).
    sample_factor = 1
    k = sample_factor * int(np.ceil((cstar / epsilon) ** 2 * np.log(n)))
    if k > kmax:
        msg = f"Number random pairs k>kmax ({k}>{kmax}) "
        raise nx.NetworkXError(msg, "Increase kmax or epsilon")
    cstar2k = cstar / (2 * k)
    for _ in range(k):
        # Draw a random source/sink pair and solve for node potentials.
        pair = seed.sample(range(n), 2)
        s, t = pair
        b = np.zeros(n, dtype=dtype)
        b[s] = 1
        b[t] = -1
        p = C.solve(b)
        for v in H:
            if v in pair:
                continue
            # Current through v is the weighted potential drop to neighbors.
            for nbr in H[v]:
                w = H[v][nbr].get(weight, 1.0)
                betweenness[v] += float(w * np.abs(p[v] - p[nbr]) * cstar2k)
    factor = 1.0 if normalized else nb / 2.0
    # remap to original node names and "unnormalize" if required
    return {ordering[node]: val * factor for node, val in betweenness.items()}
+
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def current_flow_betweenness_centrality(
    G, normalized=True, weight=None, dtype=float, solver="full"
):
    r"""Compute current-flow betweenness centrality for nodes.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.  It is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.

    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.
      The weight reflects the capacity or the strength of the
      edge.

    dtype : data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver : string (default='full')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    approximate_current_flow_betweenness_centrality
    betweenness_centrality
    edge_betweenness_centrality
    edge_current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
    time [1]_, where $I(n-1)$ is the time needed to compute the inverse
    Laplacian.  The space required is $O(nw)$ where $w$ is the width of
    the sparse Laplacian matrix; worst case is $w=n$ for $O(n^2)$.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       https://doi.org/10.1007/978-3-540-31856-9_44

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    N = G.number_of_nodes()
    # Relabel nodes 0..N-1 along a reverse Cuthill-McKee ordering so the
    # Laplacian stays narrow-banded for the sparse solvers.
    ordering = list(reverse_cuthill_mckee_ordering(G))
    H = nx.relabel_nodes(G, dict(zip(ordering, range(N))))
    betweenness = dict.fromkeys(H, 0.0)
    for row, (source, sink) in flow_matrix_row(
        H, weight=weight, dtype=dtype, solver=solver
    ):
        # rank[i] = position of node i when current values are sorted in
        # decreasing order.
        rank = dict(zip(row.argsort()[::-1], range(N)))
        for i in range(N):
            val = row.item(i)
            betweenness[source] += (i - rank[i]) * val
            betweenness[sink] += (N - i - 1 - rank[i]) * val
    nb = (N - 1.0) * (N - 2.0) if normalized else 2.0
    return {
        ordering[node]: (score - node) * 2.0 / nb
        for node, score in betweenness.items()
    }
+
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def edge_current_flow_betweenness_centrality(
    G, normalized=True, weight=None, dtype=float, solver="full"
):
    r"""Compute current-flow betweenness centrality for edges.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.  It is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.

    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.
      The weight reflects the capacity or the strength of the
      edge.

    dtype : data type (default=float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver : string (default='full')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of edge tuples with betweenness centrality as the value.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraphs.
        If the input graph is an instance of DiGraph class, NetworkXError
        is raised.

    See Also
    --------
    betweenness_centrality
    edge_betweenness_centrality
    current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
    time [1]_, where $I(n-1)$ is the time needed to compute the inverse
    Laplacian.  The space required is $O(nw)$ where $w$ is the width of
    the sparse Laplacian matrix; worst case is $w=n$ for $O(n^2)$.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       https://doi.org/10.1007/978-3-540-31856-9_44

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    N = G.number_of_nodes()
    # Relabel nodes 0..N-1 along a reverse Cuthill-McKee ordering so the
    # Laplacian stays narrow-banded for the sparse solvers.
    ordering = list(reverse_cuthill_mckee_ordering(G))
    H = nx.relabel_nodes(G, dict(zip(ordering, range(N))))
    # Canonicalize edge keys so (u, v) and (v, u) map to one entry.
    betweenness = dict.fromkeys(
        (tuple(sorted((u, v))) for u, v in H.edges()), 0.0
    )
    nb = (N - 1.0) * (N - 2.0) if normalized else 2.0
    for row, e in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
        # rank[i] = 1-based position of node i when current values are
        # sorted in decreasing order.
        rank = dict(zip(row.argsort()[::-1], range(1, N + 1)))
        for i in range(N):
            val = row.item(i)
            betweenness[e] += (i + 1 - rank[i]) * val
            betweenness[e] += (N - i - rank[i]) * val
        betweenness[e] /= nb
    return {(ordering[s], ordering[t]): b for (s, t), b in betweenness.items()}
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness_subset.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness_subset.py
new file mode 100644
index 00000000..911718c8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness_subset.py
@@ -0,0 +1,227 @@
+"""Current-flow betweenness centrality measures for subsets of nodes."""
+
+import networkx as nx
+from networkx.algorithms.centrality.flow_matrix import flow_matrix_row
+from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering
+
+__all__ = [
+    "current_flow_betweenness_centrality_subset",
+    "edge_current_flow_betweenness_centrality_subset",
+]
+
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def current_flow_betweenness_centrality_subset(
    G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu"
):
    r"""Compute current-flow betweenness centrality for subsets of nodes.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.

    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    sources: list of nodes
      Nodes to use as sources for current

    targets: list of nodes
      Nodes to use as sinks for current

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by b=b/(n-1)(n-2) where
      n is the number of nodes in G.

    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.
      The weight reflects the capacity or the strength of the
      edge.

    dtype: data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver: string (default='lu')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    approximate_current_flow_betweenness_centrality
    betweenness_centrality
    edge_betweenness_centrality
    edge_current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
    time [1]_, where $I(n-1)$ is the time needed to compute the
    inverse Laplacian.  For a full matrix this is $O(n^3)$ but using
    sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
    Laplacian matrix condition number.

    The space required is $O(nw)$ where $w$ is the width of the sparse
    Laplacian matrix.  Worse case is $w=n$ for $O(n^2)$.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       https://doi.org/10.1007/978-3-540-31856-9_44

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    # NOTE: the previous version re-imported numpy (unused) and
    # reverse_cuthill_mckee_ordering (already imported at module level);
    # both redundant imports have been removed.
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    N = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    mapping = dict(zip(ordering, range(N)))
    H = nx.relabel_nodes(G, mapping)
    betweenness = dict.fromkeys(H, 0.0)  # b[n]=0 for n in H
    for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
        # Accumulate the absolute current through edge (s, t) induced by
        # each requested source/target pair.
        for ss in sources:
            i = mapping[ss]
            for tt in targets:
                j = mapping[tt]
                betweenness[s] += 0.5 * abs(row.item(i) - row.item(j))
                betweenness[t] += 0.5 * abs(row.item(i) - row.item(j))
    if normalized:
        nb = (N - 1.0) * (N - 2.0)  # normalization factor
    else:
        nb = 2.0
    for node in H:
        betweenness[node] = betweenness[node] / nb + 1.0 / (2 - N)
    # Map integer labels back to the original node names.
    return {ordering[node]: value for node, value in betweenness.items()}
+
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def edge_current_flow_betweenness_centrality_subset(
    G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu"
):
    r"""Compute current-flow betweenness centrality for edges using subsets
    of nodes.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.

    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    sources: list of nodes
      Nodes to use as sources for current

    targets: list of nodes
      Nodes to use as sinks for current

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by b=b/(n-1)(n-2) where
      n is the number of nodes in G.

    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.
      The weight reflects the capacity or the strength of the
      edge.

    dtype: data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver: string (default='lu')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    Returns
    -------
    nodes : dict
       Dictionary of edge tuples with betweenness centrality as the value.

    See Also
    --------
    betweenness_centrality
    edge_betweenness_centrality
    current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
    time [1]_, where $I(n-1)$ is the time needed to compute the
    inverse Laplacian.  For a full matrix this is $O(n^3)$ but using
    sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
    Laplacian matrix condition number.

    The space required is $O(nw)$ where $w$ is the width of the sparse
    Laplacian matrix.  Worse case is $w=n$ for $O(n^2)$.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       https://doi.org/10.1007/978-3-540-31856-9_44

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    # NOTE: the previous version imported numpy here without using it; the
    # unused import has been removed.
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    N = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    mapping = dict(zip(ordering, range(N)))
    H = nx.relabel_nodes(G, mapping)
    # Canonicalize edge keys so (u, v) and (v, u) map to one entry.
    edges = (tuple(sorted((u, v))) for u, v in H.edges())
    betweenness = dict.fromkeys(edges, 0.0)
    if normalized:
        nb = (N - 1.0) * (N - 2.0)  # normalization factor
    else:
        nb = 2.0
    for row, e in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
        # Accumulate the absolute current through edge e induced by each
        # requested source/target pair.
        for ss in sources:
            i = mapping[ss]
            for tt in targets:
                j = mapping[tt]
                betweenness[e] += 0.5 * abs(row.item(i) - row.item(j))
        betweenness[e] /= nb
    # Map integer labels back to the original node names.
    return {(ordering[s], ordering[t]): value for (s, t), value in betweenness.items()}
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_closeness.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_closeness.py
new file mode 100644
index 00000000..67f86397
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_closeness.py
@@ -0,0 +1,96 @@
+"""Current-flow closeness centrality measures."""
+
+import networkx as nx
+from networkx.algorithms.centrality.flow_matrix import (
+    CGInverseLaplacian,
+    FullInverseLaplacian,
+    SuperLUInverseLaplacian,
+)
+from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering
+
+__all__ = ["current_flow_closeness_centrality", "information_centrality"]
+
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def current_flow_closeness_centrality(G, weight=None, dtype=float, solver="lu"):
    """Compute current-flow closeness centrality for nodes.

    Current-flow closeness centrality is a variant of closeness
    centrality based on effective resistance between nodes in
    a network. This metric is also known as information centrality.

    Parameters
    ----------
    G : graph
      A NetworkX graph.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.
      The weight reflects the capacity or the strength of the
      edge.

    dtype: data type (default=float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver: string (default='lu')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with current flow closeness centrality as the value.

    Raises
    ------
    NetworkXError
       If the graph is not connected.

    See Also
    --------
    closeness_centrality

    Notes
    -----
    The algorithm is from Brandes [1]_.

    See also [2]_ for the original definition of information centrality.

    References
    ----------
    .. [1] Ulrik Brandes and Daniel Fleischer,
       Centrality Measures Based on Current Flow.
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       https://doi.org/10.1007/978-3-540-31856-9_44

    .. [2] Karen Stephenson and Marvin Zelen:
       Rethinking centrality: Methods and examples.
       Social Networks 11(1):1-37, 1989.
       https://doi.org/10.1016/0378-8733(89)90016-6
    """
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    # Map the `solver` keyword to the inverse-Laplacian backend class.
    solvername = {
        "full": FullInverseLaplacian,
        "lu": SuperLUInverseLaplacian,
        "cg": CGInverseLaplacian,
    }
    N = G.number_of_nodes()
    # RCM ordering reduces the bandwidth of the Laplacian, which helps the
    # banded row cache used by the solver classes.
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(N))))
    betweenness = dict.fromkeys(H, 0.0)  # b[n]=0 for n in H
    N = H.number_of_nodes()
    L = nx.laplacian_matrix(H, nodelist=range(N), weight=weight).asformat("csc")
    L = L.astype(dtype)
    C2 = solvername[solver](L, width=1, dtype=dtype)  # initialize solver
    for v in H:
        # Row v of the (pseudo-)inverse Laplacian, computed lazily.
        col = C2.get_row(v)
        for w in H:
            # Accumulate the effective-resistance terms of [1]:
            # v collects C[v,v] - 2*C[v,w]; every w collects C[v,v].
            betweenness[v] += col.item(v) - 2 * col.item(w)
            betweenness[w] += col.item(v)
    # Centrality is the reciprocal of the accumulated resistance distance,
    # reported under the original node labels.
    return {ordering[node]: 1 / value for node, value in betweenness.items()}
+
+
# Alias: "information centrality" (Stephenson & Zelen [2]) is the same
# quantity as current-flow closeness centrality.
information_centrality = current_flow_closeness_centrality
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/degree_alg.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/degree_alg.py
new file mode 100644
index 00000000..b3c1e321
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/degree_alg.py
@@ -0,0 +1,150 @@
+"""Degree centrality measures."""
+
+import networkx as nx
+from networkx.utils.decorators import not_implemented_for
+
+__all__ = ["degree_centrality", "in_degree_centrality", "out_degree_centrality"]
+
+
@nx._dispatchable
def degree_centrality(G):
    """Compute the degree centrality for nodes.

    The degree centrality of a node v is the fraction of nodes it is
    connected to, i.e. its degree divided by ``n - 1`` where ``n`` is the
    number of nodes in `G`.

    Parameters
    ----------
    G : graph
      A networkx graph

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with degree centrality as the value.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.degree_centrality(G)
    {0: 1.0, 1: 1.0, 2: 0.6666666666666666, 3: 0.6666666666666666}

    See Also
    --------
    betweenness_centrality, load_centrality, eigenvector_centrality

    Notes
    -----
    Values are normalized by ``n - 1``, the maximum possible degree in a
    simple graph.  For multigraphs or graphs with self loops the degree can
    exceed ``n - 1``, so centrality values greater than 1 are possible.
    """
    node_count = len(G)
    # With zero or one node there are no possible connections; by
    # convention every node gets centrality 1.
    if node_count <= 1:
        return {node: 1 for node in G}

    inv_max_degree = 1 / (node_count - 1)
    return {node: deg * inv_max_degree for node, deg in G.degree()}
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def in_degree_centrality(G):
    """Compute the in-degree centrality for nodes.

    The in-degree centrality of a node v is the fraction of nodes its
    incoming edges are connected to: its in-degree divided by ``n - 1``.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with in-degree centrality as values.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.in_degree_centrality(G)
    {0: 0.0, 1: 0.3333333333333333, 2: 0.6666666666666666, 3: 0.6666666666666666}

    See Also
    --------
    degree_centrality, out_degree_centrality

    Notes
    -----
    Values are normalized by ``n - 1``, the maximum possible degree in a
    simple graph.  For multigraphs or graphs with self loops the degree can
    exceed ``n - 1``, so centrality values greater than 1 are possible.
    """
    # Trivial graphs: every node gets centrality 1 by convention.
    if len(G) <= 1:
        return dict.fromkeys(G, 1)

    factor = 1 / (len(G) - 1)
    centrality = {}
    for node, deg in G.in_degree():
        centrality[node] = deg * factor
    return centrality
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def out_degree_centrality(G):
    """Compute the out-degree centrality for nodes.

    The out-degree centrality of a node v is the fraction of nodes its
    outgoing edges are connected to: its out-degree divided by ``n - 1``.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with out-degree centrality as values.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.out_degree_centrality(G)
    {0: 1.0, 1: 0.6666666666666666, 2: 0.0, 3: 0.0}

    See Also
    --------
    degree_centrality, in_degree_centrality

    Notes
    -----
    Values are normalized by ``n - 1``, the maximum possible degree in a
    simple graph.  For multigraphs or graphs with self loops the degree can
    exceed ``n - 1``, so centrality values greater than 1 are possible.
    """
    # Trivial graphs: every node gets centrality 1 by convention.
    if len(G) <= 1:
        return {node: 1 for node in G}

    scale = 1 / (len(G) - 1)
    return dict((node, deg * scale) for node, deg in G.out_degree())
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/dispersion.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/dispersion.py
new file mode 100644
index 00000000..a3fa6858
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/dispersion.py
@@ -0,0 +1,107 @@
+from itertools import combinations
+
+import networkx as nx
+
+__all__ = ["dispersion"]
+
+
@nx._dispatchable
def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0):
    r"""Calculate dispersion between `u` and `v` in `G`.

    A link between two actors (`u` and `v`) has a high dispersion when their
    mutual ties (`s` and `t`) are not well connected with each other.

    Parameters
    ----------
    G : graph
        A NetworkX graph.
    u : node, optional
        The source for the dispersion score (e.g. ego node of the network).
    v : node, optional
        The target of the dispersion score if specified.
    normalized : bool
        If True (default) normalize by the embeddedness of the nodes (u and v).
    alpha, b, c : float
        Parameters for the normalization procedure. When `normalized` is True,
        the dispersion value is normalized by::

            result = ((dispersion + b) ** alpha) / (embeddedness + c)

        as long as the denominator is nonzero.

    Returns
    -------
    nodes : dictionary
        If u (v) is specified, returns a dictionary of nodes with dispersion
        score for all "target" ("source") nodes. If neither u nor v is
        specified, returns a dictionary of dictionaries for all nodes 'u' in the
        graph with a dispersion score for each node 'v'.

    Notes
    -----
    This implementation follows Lars Backstrom and Jon Kleinberg [1]_. Typical
    usage would be to run dispersion on the ego network $G_u$ if $u$ were
    specified.  Running :func:`dispersion` with neither $u$ nor $v$ specified
    can take some time to complete.

    References
    ----------
    .. [1] Romantic Partnerships and the Dispersion of Social Ties:
        A Network Analysis of Relationship Status on Facebook.
        Lars Backstrom, Jon Kleinberg.
        https://arxiv.org/pdf/1310.6753v1.pdf

    """

    def _dispersion(G_u, u, v):
        """Dispersion score of the ordered pair (u, v) within network G_u."""
        u_nbrs = set(G_u[u])
        # mutual neighbors of u and v (the candidate 's' and 't' actors)
        ST = {n for n in G_u[v] if n in u_nbrs}
        set_uv = {u, v}
        total = 0
        # examine all possible ties among the connections u and v share
        for s, t in combinations(ST, 2):
            # neighbors of s inside G_u, excluding u and v themselves
            nbrs_s = u_nbrs.intersection(G_u[s]) - set_uv
            # count the pair when s and t are neither directly connected
            # nor share any other common neighbor in the network
            if t not in nbrs_s and nbrs_s.isdisjoint(G_u[t]):
                total += 1
        embeddedness = len(ST)  # number of neighbors u and v share

        if not normalized:
            return total
        # Normalized score: ((disp + b) ** alpha) / (emb + c).  When the
        # denominator is zero the unscaled numerator is returned, matching
        # the documented behavior.
        dispersion_val = (total + b) ** alpha
        if embeddedness + c != 0:
            dispersion_val /= embeddedness + c
        return dispersion_val

    # Dispatch on which endpoints were supplied.  NOTE: the previous version
    # seeded the result dicts with ``dict.fromkeys(..., {})``, giving every
    # key the SAME mutable dict object.  The values were always overwritten,
    # so no bug manifested, but the aliasing hazard is removed here by
    # building the results with comprehensions instead.
    if u is None:
        if v is None:
            # neither endpoint given: score every adjacent ordered pair
            return {src: {tgt: _dispersion(G, src, tgt) for tgt in G[src]} for src in G}
        # only v given: score each neighbor of v as the "source" end
        return {src: _dispersion(G, v, src) for src in G[v]}
    if v is None:
        # only u given: score each neighbor of u as the "target" end
        return {tgt: _dispersion(G, u, tgt) for tgt in G[u]}
    # both endpoints given: a single score
    return _dispersion(G, u, v)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/eigenvector.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/eigenvector.py
new file mode 100644
index 00000000..b8cf63e8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/eigenvector.py
@@ -0,0 +1,357 @@
+"""Functions for computing eigenvector centrality."""
+
+import math
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["eigenvector_centrality", "eigenvector_centrality_numpy"]
+
+
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, weight=None):
    r"""Compute the eigenvector centrality for the graph G.

    Eigenvector centrality computes the centrality for a node by adding
    the centrality of its predecessors. The centrality for node $i$ is the
    $i$-th element of a left eigenvector associated with the eigenvalue $\lambda$
    of maximum modulus that is positive. Such an eigenvector $x$ is
    defined up to a multiplicative constant by the equation

    .. math::

         \lambda x^T = x^T A,

    where $A$ is the adjacency matrix of the graph G. By definition of
    row-column product, the equation above is equivalent to

    .. math::

        \lambda x_i = \sum_{j\to i}x_j.

    That is, adding the eigenvector centralities of the predecessors of
    $i$ one obtains the eigenvector centrality of $i$ multiplied by
    $\lambda$. In the case of undirected graphs, $x$ also solves the familiar
    right-eigenvector equation $Ax = \lambda x$.

    By virtue of the Perron–Frobenius theorem [1]_, if G is strongly
    connected there is a unique eigenvector $x$, and all its entries
    are strictly positive.

    If G is not strongly connected there might be several left
    eigenvectors associated with $\lambda$, and some of their elements
    might be zero.

    Parameters
    ----------
    G : graph
      A networkx graph.

    max_iter : integer, optional (default=100)
      Maximum number of power iterations.

    tol : float, optional (default=1.0e-6)
      Error tolerance (in Euclidean norm) used to check convergence in
      power iteration.

    nstart : dictionary, optional (default=None)
      Starting value of power iteration for each node. Must have a nonzero
      projection on the desired eigenvector for the power method to converge.
      If None, this implementation uses an all-ones vector, which is a safe
      choice.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal. Otherwise holds the
      name of the edge attribute used as weight. In this measure the
      weight is interpreted as the connection strength.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with eigenvector centrality as the value. The
       associated vector has unit Euclidean norm and the values are
       nonnegative.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> centrality = nx.eigenvector_centrality(G)
    >>> sorted((v, f"{c:0.2f}") for v, c in centrality.items())
    [(0, '0.37'), (1, '0.60'), (2, '0.60'), (3, '0.37')]

    Raises
    ------
    NetworkXPointlessConcept
        If the graph G is the null graph.

    NetworkXError
        If each value in `nstart` is zero.

    PowerIterationFailedConvergence
        If the algorithm fails to converge to the specified tolerance
        within the specified number of iterations of the power iteration
        method.

    See Also
    --------
    eigenvector_centrality_numpy
    :func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
    :func:`~networkx.algorithms.link_analysis.hits_alg.hits`

    Notes
    -----
    Eigenvector centrality was introduced by Landau [2]_ for chess
    tournaments. It was later rediscovered by Wei [3]_ and then
    popularized by Kendall [4]_ in the context of sport ranking. Berge
    introduced a general definition for graphs based on social connections
    [5]_. Bonacich [6]_ reintroduced again eigenvector centrality and made
    it popular in link analysis.

    This function computes the left dominant eigenvector, which corresponds
    to adding the centrality of predecessors: this is the usual approach.
    To add the centrality of successors first reverse the graph with
    ``G.reverse()``.

    The implementation uses power iteration [7]_ to compute a dominant
    eigenvector starting from the provided vector `nstart`. Convergence is
    guaranteed as long as `nstart` has a nonzero projection on a dominant
    eigenvector, which certainly happens using the default value.

    The method stops when the change in the computed vector between two
    iterations is smaller than an error tolerance of ``G.number_of_nodes()
    * tol`` or after ``max_iter`` iterations, but in the second case it
    raises an exception.

    This implementation uses $(A + I)$ rather than the adjacency matrix
    $A$ because the change preserves eigenvectors, but it shifts the
    spectrum, thus guaranteeing convergence even for networks with
    negative eigenvalues of maximum modulus.

    References
    ----------
    .. [1] Abraham Berman and Robert J. Plemmons.
       "Nonnegative Matrices in the Mathematical Sciences."
       Classics in Applied Mathematics. SIAM, 1994.

    .. [2] Edmund Landau.
       "Zur relativen Wertbemessung der Turnierresultate."
       Deutsches Wochenschach, 11:366–369, 1895.

    .. [3] Teh-Hsing Wei.
       "The Algebraic Foundations of Ranking Theory."
       PhD thesis, University of Cambridge, 1952.

    .. [4] Maurice G. Kendall.
       "Further contributions to the theory of paired comparisons."
       Biometrics, 11(1):43–62, 1955.
       https://www.jstor.org/stable/3001479

    .. [5] Claude Berge
       "Théorie des graphes et ses applications."
       Dunod, Paris, France, 1958.

    .. [6] Phillip Bonacich.
       "Technique for analyzing overlapping memberships."
       Sociological Methodology, 4:176–185, 1972.
       https://www.jstor.org/stable/270732

    .. [7] Power iteration: https://en.wikipedia.org/wiki/Power_iteration

    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept(
            "cannot compute centrality for the null graph"
        )
    # If no initial vector is provided, start with the all-ones vector.
    if nstart is None:
        nstart = {v: 1 for v in G}
    if all(v == 0 for v in nstart.values()):
        raise nx.NetworkXError("initial vector cannot have all zero values")
    # Normalize the initial vector so that each entry is in [0, 1]. This is
    # guaranteed to never have a divide-by-zero error by the previous line.
    nstart_sum = sum(nstart.values())
    x = {k: v / nstart_sum for k, v in nstart.items()}
    nnodes = G.number_of_nodes()
    # make up to max_iter iterations
    for _ in range(max_iter):
        xlast = x
        x = xlast.copy()  # Start with xlast times I to iterate with (A+I)
        # do the multiplication y^T = x^T A (left eigenvector)
        for n in x:
            for nbr in G[n]:
                # Unweighted runs, and edges missing the weight attribute,
                # contribute 1.
                w = G[n][nbr].get(weight, 1) if weight else 1
                x[nbr] += xlast[n] * w
        # Normalize the vector. The normalization denominator `norm`
        # should never be zero by the Perron--Frobenius
        # theorem. However, in case it is due to numerical error, we
        # assume the norm to be one instead.
        norm = math.hypot(*x.values()) or 1
        x = {k: v / norm for k, v in x.items()}
        # Check for convergence (in the L_1 norm).
        if sum(abs(x[n] - xlast[n]) for n in x) < nnodes * tol:
            return x
    raise nx.PowerIterationFailedConvergence(max_iter)
+
+
@nx._dispatchable(edge_attrs="weight")
def eigenvector_centrality_numpy(G, weight=None, max_iter=50, tol=0):
    r"""Compute the eigenvector centrality for the graph `G`.

    Eigenvector centrality computes the centrality for a node by adding
    the centrality of its predecessors. The centrality for node $i$ is the
    $i$-th element of a left eigenvector associated with the eigenvalue $\lambda$
    of maximum modulus that is positive. Such an eigenvector $x$ is
    defined up to a multiplicative constant by the equation

    .. math::

         \lambda x^T = x^T A,

    where $A$ is the adjacency matrix of the graph `G`. By definition of
    row-column product, the equation above is equivalent to

    .. math::

        \lambda x_i = \sum_{j\to i}x_j.

    That is, adding the eigenvector centralities of the predecessors of
    $i$ one obtains the eigenvector centrality of $i$ multiplied by
    $\lambda$. In the case of undirected graphs, $x$ also solves the familiar
    right-eigenvector equation $Ax = \lambda x$.

    By virtue of the Perron--Frobenius theorem [1]_, if `G` is (strongly)
    connected, there is a unique eigenvector $x$, and all its entries
    are strictly positive.

    However, if `G` is not (strongly) connected, there might be several left
    eigenvectors associated with $\lambda$, and some of their elements
    might be zero.
    Depending on the method used to choose eigenvectors, round-off error can affect
    which of the infinitely many eigenvectors is reported.
    This can lead to inconsistent results for the same graph,
    which the underlying implementation is not robust to.
    For this reason, only (strongly) connected graphs are accepted.

    Parameters
    ----------
    G : graph
        A connected NetworkX graph.

    weight : None or string, optional (default=None)
        If ``None``, all edge weights are considered equal. Otherwise holds the
        name of the edge attribute used as weight. In this measure the
        weight is interpreted as the connection strength.

    max_iter : integer, optional (default=50)
        Maximum number of Arnoldi update iterations allowed.

    tol : float, optional (default=0)
        Relative accuracy for eigenvalues (stopping criterion).
        The default value of 0 implies machine precision.

    Returns
    -------
    nodes : dict of nodes
        Dictionary of nodes with eigenvector centrality as the value. The
        associated vector has unit Euclidean norm and the values are
        nonnegative.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> centrality = nx.eigenvector_centrality_numpy(G)
    >>> print([f"{node} {centrality[node]:0.2f}" for node in centrality])
    ['0 0.37', '1 0.60', '2 0.60', '3 0.37']

    Raises
    ------
    NetworkXPointlessConcept
        If the graph `G` is the null graph.

    ArpackNoConvergence
        When the requested convergence is not obtained. The currently
        converged eigenvalues and eigenvectors can be found as
        eigenvalues and eigenvectors attributes of the exception object.

    AmbiguousSolution
        If `G` is not connected.

    See Also
    --------
    :func:`scipy.sparse.linalg.eigs`
    eigenvector_centrality
    :func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
    :func:`~networkx.algorithms.link_analysis.hits_alg.hits`

    Notes
    -----
    Eigenvector centrality was introduced by Landau [2]_ for chess
    tournaments. It was later rediscovered by Wei [3]_ and then
    popularized by Kendall [4]_ in the context of sport ranking. Berge
    introduced a general definition for graphs based on social connections
    [5]_. Bonacich [6]_ reintroduced again eigenvector centrality and made
    it popular in link analysis.

    This function computes the left dominant eigenvector, which corresponds
    to adding the centrality of predecessors: this is the usual approach.
    To add the centrality of successors first reverse the graph with
    ``G.reverse()``.

    This implementation uses the
    :func:`SciPy sparse eigenvalue solver<scipy.sparse.linalg.eigs>` (ARPACK)
    to find the largest eigenvalue/eigenvector pair using Arnoldi iterations
    [7]_.

    References
    ----------
    .. [1] Abraham Berman and Robert J. Plemmons.
       "Nonnegative Matrices in the Mathematical Sciences".
       Classics in Applied Mathematics. SIAM, 1994.

    .. [2] Edmund Landau.
       "Zur relativen Wertbemessung der Turnierresultate".
       Deutsches Wochenschach, 11:366--369, 1895.

    .. [3] Teh-Hsing Wei.
       "The Algebraic Foundations of Ranking Theory".
       PhD thesis, University of Cambridge, 1952.

    .. [4] Maurice G. Kendall.
       "Further contributions to the theory of paired comparisons".
       Biometrics, 11(1):43--62, 1955.
       https://www.jstor.org/stable/3001479

    .. [5] Claude Berge.
       "Théorie des graphes et ses applications".
       Dunod, Paris, France, 1958.

    .. [6] Phillip Bonacich.
       "Technique for analyzing overlapping memberships".
       Sociological Methodology, 4:176--185, 1972.
       https://www.jstor.org/stable/270732

    .. [7] Arnoldi, W. E. (1951).
       "The principle of minimized iterations in the solution of the matrix eigenvalue problem".
       Quarterly of Applied Mathematics. 9 (1): 17--29.
       https://doi.org/10.1090/qam/42792
    """
    import numpy as np
    import scipy as sp

    if len(G) == 0:
        raise nx.NetworkXPointlessConcept(
            "cannot compute centrality for the null graph"
        )
    connected = nx.is_strongly_connected(G) if G.is_directed() else nx.is_connected(G)
    if not connected:  # See gh-6888.
        raise nx.AmbiguousSolution(
            "`eigenvector_centrality_numpy` does not give consistent results for disconnected graphs"
        )
    M = nx.to_scipy_sparse_array(G, nodelist=list(G), weight=weight, dtype=float)
    # ARPACK on the transpose: the dominant right eigenvector of M.T is the
    # left dominant eigenvector of M, i.e. predecessor-based centrality.
    _, eigenvector = sp.sparse.linalg.eigs(
        M.T, k=1, which="LR", maxiter=max_iter, tol=tol
    )
    largest = eigenvector.flatten().real
    # Fix the overall sign so the entries sum to a positive value, then
    # scale to unit Euclidean norm.
    norm = np.sign(largest.sum()) * sp.linalg.norm(largest)
    return dict(zip(G, (largest / norm).tolist()))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/flow_matrix.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/flow_matrix.py
new file mode 100644
index 00000000..e72b5e97
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/flow_matrix.py
@@ -0,0 +1,130 @@
+# Helpers for current-flow betweenness and current-flow closeness
+# Lazy computations for inverse Laplacian and flow-matrix rows.
+import networkx as nx
+
+
@nx._dispatchable(edge_attrs="weight")
def flow_matrix_row(G, weight=None, dtype=float, solver="lu"):
    """Yield rows of the current-flow matrix of `G`, one per edge.

    Yields ``(row, (u, v))`` pairs in sorted edge order.  Nodes are assumed
    to be labeled ``0 .. n-1`` (callers relabel with an RCM ordering first,
    see the current-flow centrality functions).
    """
    import numpy as np

    # Map the `solver` keyword to the lazy inverse-Laplacian backend.
    solvername = {
        "full": FullInverseLaplacian,
        "lu": SuperLUInverseLaplacian,
        "cg": CGInverseLaplacian,
    }
    n = G.number_of_nodes()
    L = nx.laplacian_matrix(G, nodelist=range(n), weight=weight).asformat("csc")
    L = L.astype(dtype)
    C = solvername[solver](L, dtype=dtype)  # initialize solver
    w = C.w  # w is the Laplacian matrix width
    # row-by-row flow matrix; edges visited with endpoints sorted u < v
    for u, v in sorted(sorted((u, v)) for u, v in G.edges()):
        B = np.zeros(w, dtype=dtype)
        c = G[u][v].get(weight, 1.0)  # edges missing the attribute count as 1
        # Indices are taken modulo w to match the solver's circular row cache.
        B[u % w] = c
        B[v % w] = -c
        # get only the rows needed in the inverse laplacian
        # and multiply to get the flow matrix row
        row = B @ C.get_rows(u, v)
        yield row, (u, v)
+
+
+# Class to compute the inverse laplacian only for specified rows
+# Allows computation of the current-flow matrix without storing entire
+# inverse laplacian matrix
class InverseLaplacian:
    """Lazily compute selected rows of the inverse of a graph Laplacian.

    Only ``self.w`` rows are kept at a time, in a circular buffer indexed
    by ``row % self.w``, so the full inverse never needs to be stored.
    Subclasses supply the linear-algebra backend by implementing
    ``init_solver``, ``solve`` and ``solve_inverse``.
    """

    def __init__(self, L, width=None, dtype=None):
        # NOTE: numpy is bound as a module global so subclasses can use
        # ``np`` without a module-level import.
        global np
        import numpy as np

        (n, n) = L.shape
        self.dtype = dtype
        self.n = n  # Laplacian is n x n
        if width is None:
            # Derive the cache width from the matrix bandwidth.
            self.w = self.width(L)
        else:
            self.w = width
        # Circular row cache; row r lives at C[r % w].
        self.C = np.zeros((self.w, n), dtype=dtype)
        # Reduced Laplacian with row/column 0 removed, which makes the
        # linear systems solvable (the full Laplacian is singular).
        self.L1 = L[1:, 1:]
        self.init_solver(L)

    def init_solver(self, L):
        # Hook for subclasses to set up their factorization / solver.
        pass

    def solve(self, r):
        raise nx.NetworkXError("Implement solver")

    def solve_inverse(self, r):
        raise nx.NetworkXError("Implement solver")

    def get_rows(self, r1, r2):
        # Fill the cache with rows r1..r2 (inclusive) of the inverse.
        for r in range(r1, r2 + 1):
            self.C[r % self.w, 1:] = self.solve_inverse(r)
        return self.C

    def get_row(self, r):
        # Compute (and cache) a single row r of the inverse.
        self.C[r % self.w, 1:] = self.solve_inverse(r)
        return self.C[r % self.w]

    def width(self, L):
        # Bandwidth of L: maximum column span of the nonzeros in any row,
        # plus one.  This bounds how many rows must be cached at once.
        m = 0
        for i, row in enumerate(L):
            w = 0
            y = np.nonzero(row)[-1]
            if len(y) > 0:
                v = y - i
                w = v.max() - v.min() + 1
                m = max(w, m)
        return m
+
+
class FullInverseLaplacian(InverseLaplacian):
    """Inverse-Laplacian backend that materializes the full dense inverse.

    This is the "full" solver: it uses the most memory, but after setup
    each query is a plain dense product or row lookup.
    """

    def init_solver(self, L):
        # Dense inverse of the reduced Laplacian L[1:, 1:]; row and
        # column 0 (the removed node) stay zero.
        self.IL = np.zeros(L.shape, dtype=self.dtype)
        self.IL[1:, 1:] = np.linalg.inv(self.L1.todense())

    def solve(self, rhs):
        # Fix: the previous version allocated a zero array and immediately
        # rebound the name, so the allocation was dead code.
        return self.IL @ rhs

    def solve_inverse(self, r):
        # Row r of the inverse, omitting the removed node's column.
        return self.IL[r, 1:]
+
+
class SuperLUInverseLaplacian(InverseLaplacian):
    """Inverse-Laplacian backend using a sparse LU factorization ("lu")."""

    def init_solver(self, L):
        import scipy as sp

        # Factorize the reduced Laplacian once; ``lusolve(b)`` then solves
        # L1 x = b using the cached factorization.
        self.lusolve = sp.sparse.linalg.factorized(self.L1.tocsc())

    def solve_inverse(self, r):
        # Solving against the r-th unit vector yields row r of the inverse
        # (the matrix is symmetric, so row and column coincide).
        rhs = np.zeros(self.n, dtype=self.dtype)
        rhs[r] = 1
        return self.lusolve(rhs[1:])

    def solve(self, rhs):
        # Entry 0 stays zero: it corresponds to the removed node.
        s = np.zeros(rhs.shape, dtype=self.dtype)
        s[1:] = self.lusolve(rhs[1:])
        return s
+
+
class CGInverseLaplacian(InverseLaplacian):
    """Inverse-Laplacian backend using conjugate gradient ("cg").

    Uses the least memory of the available solvers; each query runs an
    iterative CG solve preconditioned with an incomplete LU factorization.
    """

    def init_solver(self, L):
        # NOTE: scipy is bound as a module global for use in the solve
        # methods below.
        global sp
        import scipy as sp

        # Incomplete-LU preconditioner exposed to CG as a LinearOperator.
        ilu = sp.sparse.linalg.spilu(self.L1.tocsc())
        n = self.n - 1
        self.M = sp.sparse.linalg.LinearOperator(shape=(n, n), matvec=ilu.solve)

    def solve(self, rhs):
        # Entry 0 stays zero: it corresponds to the removed node.
        s = np.zeros(rhs.shape, dtype=self.dtype)
        s[1:] = sp.sparse.linalg.cg(self.L1, rhs[1:], M=self.M, atol=0)[0]
        return s

    def solve_inverse(self, r):
        # Row r of the inverse via a CG solve against the r-th unit vector.
        rhs = np.zeros(self.n, self.dtype)
        rhs[r] = 1
        return sp.sparse.linalg.cg(self.L1, rhs[1:], M=self.M, atol=0)[0]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/group.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/group.py
new file mode 100644
index 00000000..7c48742a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/group.py
@@ -0,0 +1,787 @@
+"""Group centrality measures."""
+
+from copy import deepcopy
+
+import networkx as nx
+from networkx.algorithms.centrality.betweenness import (
+    _accumulate_endpoints,
+    _single_source_dijkstra_path_basic,
+    _single_source_shortest_path_basic,
+)
+from networkx.utils.decorators import not_implemented_for
+
+__all__ = [
+    "group_betweenness_centrality",
+    "group_closeness_centrality",
+    "group_degree_centrality",
+    "group_in_degree_centrality",
+    "group_out_degree_centrality",
+    "prominent_group",
+]
+
+
@nx._dispatchable(edge_attrs="weight")
def group_betweenness_centrality(G, C, normalized=True, weight=None, endpoints=False):
    r"""Compute the group betweenness centrality for a group of nodes.

    Group betweenness centrality of a group of nodes $C$ is the sum of the
    fraction of all-pairs shortest paths that pass through any vertex in $C$

    .. math::

       c_B(C) =\sum_{s,t \in V} \frac{\sigma(s, t|C)}{\sigma(s, t)}

    where $V$ is the set of nodes, $\sigma(s, t)$ is the number of
    shortest $(s, t)$-paths, and $\sigma(s, t|C)$ is the number of
    those paths passing through some node in group $C$. Note that
    $(s, t)$ are not members of the group ($V-C$ is the set of nodes
    in $V$ that are not in $C$).

    Parameters
    ----------
    G : graph
      A NetworkX graph.

    C : list or set or list of lists or list of sets
      A group or a list of groups containing nodes which belong to G, for which group betweenness
      centrality is to be calculated.

    normalized : bool, optional (default=True)
      If True, group betweenness is normalized by `1/((|V|-|C|)(|V|-|C|-1))`
      where `|V|` is the number of nodes in G and `|C|` is the number of nodes in C.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.
      The weight of an edge is treated as the length or distance between the two sides.

    endpoints : bool, optional (default=False)
      If True include the endpoints in the shortest path counts.

    Raises
    ------
    NodeNotFound
       If node(s) in C are not present in G.

    Returns
    -------
    betweenness : list of floats or float
       If C is a single group then return a float. If C is a list with
       several groups then return a list of group betweenness centralities.

    See Also
    --------
    betweenness_centrality

    Notes
    -----
    Group betweenness centrality is described in [1]_ and its importance discussed in [3]_.
    The initial implementation of the algorithm is mentioned in [2]_. This function uses
    an improved algorithm presented in [4]_.

    The number of nodes in the group must be a maximum of n - 2 where `n`
    is the total number of nodes in the graph.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    The total number of paths between source and target is counted
    differently for directed and undirected graphs. Directed paths
    between "u" and "v" are counted as two possible paths (one each
    direction) while undirected paths between "u" and "v" are counted
    as one path. Said another way, the sum in the expression above is
    over all ``s != t`` for directed graphs and for ``s < t`` for undirected graphs.


    References
    ----------
    .. [1] M G Everett and S P Borgatti:
       The Centrality of Groups and Classes.
       Journal of Mathematical Sociology. 23(3): 181-201. 1999.
       http://www.analytictech.com/borgatti/group_centrality.htm
    .. [2] Ulrik Brandes:
       On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.72.9610&rep=rep1&type=pdf
    .. [3] Sourav Medya et. al.:
       Group Centrality Maximization via Network Design.
       SIAM International Conference on Data Mining, SDM 2018, 126–134.
       https://sites.cs.ucsb.edu/~arlei/pubs/sdm18.pdf
    .. [4] Rami Puzis, Yuval Elovici, and Shlomi Dolev.
       "Fast algorithm for successive computation of group betweenness centrality."
       https://journals.aps.org/pre/pdf/10.1103/PhysRevE.76.056709

    """
    GBC = []  # initialize betweenness
    list_of_groups = True
    #  check whether C contains one or many groups
    if any(el in G for el in C):
        C = [C]
        list_of_groups = False
    set_v = {node for group in C for node in group}
    if set_v - G.nodes:  # element(s) of C not in G
        raise nx.NodeNotFound(f"The node(s) {set_v - G.nodes} are in C but not in G.")

    # pre-processing: all-pairs path counts (sigma), distances (D) and the
    # path-betweenness matrix (PB) restricted to nodes that appear in C
    PB, sigma, D = _group_preprocessing(G, set_v, weight)

    # the algorithm for each group
    for group in C:
        group = set(group)  # set of nodes in group
        # initialize the matrices of the sigma and the PB
        GBC_group = 0
        sigma_m = deepcopy(sigma)
        PB_m = deepcopy(PB)
        sigma_m_v = deepcopy(sigma_m)
        PB_m_v = deepcopy(PB_m)
        # successively "absorb" each group member v, discounting paths that
        # were already counted through previously absorbed members (cf. [4])
        for v in group:
            GBC_group += PB_m[v][v]
            for x in group:
                for y in group:
                    dxvy = 0
                    dxyv = 0
                    dvxy = 0
                    if not (
                        sigma_m[x][y] == 0 or sigma_m[x][v] == 0 or sigma_m[v][y] == 0
                    ):
                        if D[x][v] == D[x][y] + D[y][v]:
                            dxyv = sigma_m[x][y] * sigma_m[y][v] / sigma_m[x][v]
                        if D[x][y] == D[x][v] + D[v][y]:
                            dxvy = sigma_m[x][v] * sigma_m[v][y] / sigma_m[x][y]
                        if D[v][y] == D[v][x] + D[x][y]:
                            # NOTE(review): this branch mixes the updated
                            # sigma_m with the original sigma, unlike the two
                            # branches above -- confirm against [4] that the
                            # asymmetry is intended.
                            dvxy = sigma_m[v][x] * sigma[x][y] / sigma[v][y]
                    sigma_m_v[x][y] = sigma_m[x][y] * (1 - dxvy)
                    PB_m_v[x][y] = PB_m[x][y] - PB_m[x][y] * dxvy
                    if y != v:
                        PB_m_v[x][y] -= PB_m[x][v] * dxyv
                    if x != v:
                        PB_m_v[x][y] -= PB_m[v][y] * dvxy
            # swap the working and scratch matrices before the next member
            sigma_m, sigma_m_v = sigma_m_v, sigma_m
            PB_m, PB_m_v = PB_m_v, PB_m

        # endpoints
        v, c = len(G), len(group)
        if not endpoints:
            scale = 0
            # if the graph is connected then subtract the endpoints from
            # the count for all the nodes in the graph. else count how many
            # nodes are connected to the group's nodes and subtract that.
            if nx.is_directed(G):
                if nx.is_strongly_connected(G):
                    scale = c * (2 * v - c - 1)
            elif nx.is_connected(G):
                scale = c * (2 * v - c - 1)
            if scale == 0:
                for group_node1 in group:
                    for node in D[group_node1]:
                        if node != group_node1:
                            if node in group:
                                scale += 1
                            else:
                                scale += 2
            GBC_group -= scale

        # normalized
        if normalized:
            scale = 1 / ((v - c) * (v - c - 1))
            GBC_group *= scale

        # If undirected then count only the undirected edges
        elif not G.is_directed():
            GBC_group /= 2

        GBC.append(GBC_group)
    if list_of_groups:
        return GBC
    return GBC[0]
+
+
def _group_preprocessing(G, set_v, weight):
    """Precompute all-pairs path data used by the group-betweenness routines.

    Runs a single-source shortest-path computation (BFS when ``weight`` is
    None, otherwise Dijkstra) from every node of ``G`` and returns a triple
    ``(PB, sigma, D)``:

    - ``PB``: path-betweenness matrix, filled only for rows in ``set_v``;
    - ``sigma``: per-source shortest-path counts, ``sigma[s][t]``;
    - ``D``: per-source shortest-path distances, ``D[s][t]``.
    """
    sigma = {}
    delta = {}
    D = {}
    betweenness = dict.fromkeys(G, 0)
    for s in G:
        if weight is None:  # use BFS
            S, P, sigma[s], D[s] = _single_source_shortest_path_basic(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma[s], D[s] = _single_source_dijkstra_path_basic(G, s, weight)
        betweenness, delta[s] = _accumulate_endpoints(betweenness, S, P, sigma[s], s)
        for i in delta[s]:  # add the paths from s to i and rescale sigma
            if s != i:
                delta[s][i] += 1
            if weight is not None:
                # NOTE(review): halving presumably compensates for
                # double-counting in the weighted case -- confirm.
                sigma[s][i] = sigma[s][i] / 2
    # building the path betweenness matrix only for nodes that appear in the group
    PB = dict.fromkeys(G)
    for group_node1 in set_v:
        PB[group_node1] = dict.fromkeys(G, 0.0)
        for group_node2 in set_v:
            if group_node2 not in D[group_node1]:
                continue
            for node in G:
                # if node is connected to the two group nodes then continue
                if group_node2 in D[node] and group_node1 in D[node]:
                    if (
                        D[node][group_node2]
                        == D[node][group_node1] + D[group_node1][group_node2]
                    ):
                        PB[group_node1][group_node2] += (
                            delta[node][group_node2]
                            * sigma[node][group_node1]
                            * sigma[group_node1][group_node2]
                            / sigma[node][group_node2]
                        )
    return PB, sigma, D
+
+
@nx._dispatchable(edge_attrs="weight")
def prominent_group(
    G, k, weight=None, C=None, endpoints=False, normalized=True, greedy=False
):
    r"""Find the prominent group of size $k$ in graph $G$. The prominence of the
    group is evaluated by the group betweenness centrality.

    Group betweenness centrality of a group of nodes $C$ is the sum of the
    fraction of all-pairs shortest paths that pass through any vertex in $C$

    .. math::

       c_B(C) =\sum_{s,t \in V} \frac{\sigma(s, t|C)}{\sigma(s, t)}

    where $V$ is the set of nodes, $\sigma(s, t)$ is the number of
    shortest $(s, t)$-paths, and $\sigma(s, t|C)$ is the number of
    those paths passing through some node in group $C$. Note that
    $(s, t)$ are not members of the group ($V-C$ is the set of nodes
    in $V$ that are not in $C$).

    Parameters
    ----------
    G : graph
       A NetworkX graph.

    k : int
       The number of nodes in the group.

    normalized : bool, optional (default=True)
       If True, group betweenness is normalized by ``1/((|V|-|C|)(|V|-|C|-1))``
       where ``|V|`` is the number of nodes in G and ``|C|`` is the number of
       nodes in C.

    weight : None or string, optional (default=None)
       If None, all edge weights are considered equal.
       Otherwise holds the name of the edge attribute used as weight.
       The weight of an edge is treated as the length or distance between the two sides.

    endpoints : bool, optional (default=False)
       If True include the endpoints in the shortest path counts.

    C : list or set, optional (default=None)
       list of nodes which won't be candidates of the prominent group.

    greedy : bool, optional (default=False)
       Using a naive greedy algorithm in order to find non-optimal prominent
       group. For scale free networks the results are negligibly below the optimal
       results.

    Raises
    ------
    NodeNotFound
       If node(s) in C are not present in G.

    Returns
    -------
    max_GBC : float
       The group betweenness centrality of the prominent group.

    max_group : list
        The list of nodes in the prominent group.

    See Also
    --------
    betweenness_centrality, group_betweenness_centrality

    Notes
    -----
    Group betweenness centrality is described in [1]_ and its importance discussed in [3]_.
    The algorithm is described in [2]_ and is based on techniques mentioned in [4]_.

    The number of nodes in the group must be a maximum of ``n - 2`` where ``n``
    is the total number of nodes in the graph.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    The total number of paths between source and target is counted
    differently for directed and undirected graphs. Directed paths
    between "u" and "v" are counted as two possible paths (one each
    direction) while undirected paths between "u" and "v" are counted
    as one path. Said another way, the sum in the expression above is
    over all ``s != t`` for directed graphs and for ``s < t`` for undirected graphs.

    References
    ----------
    .. [1] M G Everett and S P Borgatti:
       The Centrality of Groups and Classes.
       Journal of Mathematical Sociology. 23(3): 181-201. 1999.
       http://www.analytictech.com/borgatti/group_centrality.htm
    .. [2] Rami Puzis, Yuval Elovici, and Shlomi Dolev:
       "Finding the Most Prominent Group in Complex Networks"
       AI communications 20(4): 287-296, 2007.
       https://www.researchgate.net/profile/Rami_Puzis2/publication/220308855
    .. [3] Sourav Medya et. al.:
       Group Centrality Maximization via Network Design.
       SIAM International Conference on Data Mining, SDM 2018, 126–134.
       https://sites.cs.ucsb.edu/~arlei/pubs/sdm18.pdf
    .. [4] Rami Puzis, Yuval Elovici, and Shlomi Dolev.
       "Fast algorithm for successive computation of group betweenness centrality."
       https://journals.aps.org/pre/pdf/10.1103/PhysRevE.76.056709
    """
    import numpy as np
    import pandas as pd

    if C is not None:
        C = set(C)
        if C - G.nodes:  # element(s) of C not in G
            raise nx.NodeNotFound(f"The node(s) {C - G.nodes} are in C but not in G.")
        nodes = list(G.nodes - C)
    else:
        nodes = list(G.nodes)
    DF_tree = nx.Graph()
    DF_tree.__networkx_cache__ = None  # Disable caching
    PB, sigma, D = _group_preprocessing(G, nodes, weight)
    betweenness = pd.DataFrame.from_dict(PB)
    if C is not None:
        for node in C:
            # remove from the betweenness all the nodes not part of the group
            betweenness.drop(index=node, inplace=True)
            betweenness.drop(columns=node, inplace=True)
    # initial candidate list: nodes ordered by their individual (diagonal)
    # betweenness contribution, best first
    CL = [node for _, node in sorted(zip(np.diag(betweenness), nodes), reverse=True)]
    max_GBC = 0
    max_group = []
    # root of the branch-and-bound tree expanded by _dfbnb/_heuristic
    DF_tree.add_node(
        1,
        CL=CL,
        betweenness=betweenness,
        GBC=0,
        GM=[],
        sigma=sigma,
        cont=dict(zip(nodes, np.diag(betweenness))),
    )

    # the algorithm
    DF_tree.nodes[1]["heu"] = 0
    for i in range(k):
        DF_tree.nodes[1]["heu"] += DF_tree.nodes[1]["cont"][DF_tree.nodes[1]["CL"][i]]
    max_GBC, DF_tree, max_group = _dfbnb(
        G, k, DF_tree, max_GBC, 1, D, max_group, nodes, greedy
    )

    v = len(G)
    if not endpoints:
        scale = 0
        # if the graph is connected then subtract the endpoints from
        # the count for all the nodes in the graph. else count how many
        # nodes are connected to the group's nodes and subtract that.
        if nx.is_directed(G):
            if nx.is_strongly_connected(G):
                scale = k * (2 * v - k - 1)
        elif nx.is_connected(G):
            scale = k * (2 * v - k - 1)
        if scale == 0:
            for group_node1 in max_group:
                for node in D[group_node1]:
                    if node != group_node1:
                        if node in max_group:
                            scale += 1
                        else:
                            scale += 2
        max_GBC -= scale

    # normalized
    if normalized:
        scale = 1 / ((v - k) * (v - k - 1))
        max_GBC *= scale

    # If undirected then count only the undirected edges
    elif not G.is_directed():
        max_GBC /= 2
    # round the returned centrality to two decimal places
    max_GBC = float(f"{max_GBC:.2f}")
    return max_GBC, max_group
+
+
def _dfbnb(G, k, DF_tree, max_GBC, root, D, max_group, nodes, greedy):
    """Depth-first branch-and-bound search over the group tree.

    Expands ``root`` via ``_heuristic`` and recurses into its children,
    keeping the best complete group of size ``k`` found so far.
    Returns the updated ``(max_GBC, DF_tree, max_group)``.
    """
    data = DF_tree.nodes[root]
    members = data["GM"]
    candidates = data["CL"]

    # A complete group that beats the incumbent becomes the new incumbent.
    if len(members) == k and data["GBC"] > max_GBC:
        return data["GBC"], DF_tree, members

    # Prune: group already complete, not enough candidates left to fill it,
    # or the optimistic bound (GBC + heuristic) cannot beat the incumbent.
    if (
        len(members) == k
        or len(candidates) <= k - len(members)
        or data["GBC"] + data["heu"] <= max_GBC
    ):
        return max_GBC, DF_tree, max_group

    # Expand both children (plus: take the best candidate; minus: skip it).
    node_p, node_m, DF_tree = _heuristic(k, root, DF_tree, D, nodes, greedy)

    if greedy:
        # Greedy mode never explores the minus branch.
        visit_order = (node_p,)
    else:
        plus = DF_tree.nodes[node_p]
        minus = DF_tree.nodes[node_m]
        # Visit the child with the larger optimistic bound first.
        if plus["GBC"] + plus["heu"] > minus["GBC"] + minus["heu"]:
            visit_order = (node_p, node_m)
        else:
            visit_order = (node_m, node_p)

    for child in visit_order:
        max_GBC, DF_tree, max_group = _dfbnb(
            G, k, DF_tree, max_GBC, child, D, max_group, nodes, greedy
        )
    return max_GBC, DF_tree, max_group
+
+
def _heuristic(k, root, DF_tree, D, nodes, greedy):
    """Expand ``root`` in the branch-and-bound tree.

    Adds up to two children to ``DF_tree``: a "plus" node where the best
    candidate from the root's candidate list (CL) joins the group and --
    unless ``greedy`` -- a "minus" node where that candidate is discarded.
    For the plus node this recomputes the sigma matrix, the betweenness
    DataFrame, the candidate list CL, the per-node contributions ``cont``
    and the heuristic upper bound ``heu``.  Returns
    ``(node_p, node_m, DF_tree)``; ``node_m`` is ``None`` in greedy mode.
    """
    import numpy as np

    # This helper function adds two nodes to DF_tree - one left son and the
    # other right son, finds their heuristic, CL, GBC, and GM
    node_p = DF_tree.number_of_nodes() + 1
    node_m = DF_tree.number_of_nodes() + 2
    added_node = DF_tree.nodes[root]["CL"][0]

    # adding the plus node (deepcopy so the root's state stays untouched)
    DF_tree.add_nodes_from([(node_p, deepcopy(DF_tree.nodes[root]))])
    DF_tree.nodes[node_p]["GM"].append(added_node)
    DF_tree.nodes[node_p]["GBC"] += DF_tree.nodes[node_p]["cont"][added_node]
    root_node = DF_tree.nodes[root]
    for x in nodes:
        for y in nodes:
            dxvy = 0
            dxyv = 0
            dvxy = 0
            if not (
                root_node["sigma"][x][y] == 0
                or root_node["sigma"][x][added_node] == 0
                or root_node["sigma"][added_node][y] == 0
            ):
                # Fractions of x-y, x-v and v-y shortest paths passing
                # through the added node v (cf. Puzis et al., reference [4]
                # of prominent_group).
                if D[x][added_node] == D[x][y] + D[y][added_node]:
                    dxyv = (
                        root_node["sigma"][x][y]
                        * root_node["sigma"][y][added_node]
                        / root_node["sigma"][x][added_node]
                    )
                if D[x][y] == D[x][added_node] + D[added_node][y]:
                    dxvy = (
                        root_node["sigma"][x][added_node]
                        * root_node["sigma"][added_node][y]
                        / root_node["sigma"][x][y]
                    )
                if D[added_node][y] == D[added_node][x] + D[x][y]:
                    dvxy = (
                        root_node["sigma"][added_node][x]
                        * root_node["sigma"][x][y]
                        / root_node["sigma"][added_node][y]
                    )
            DF_tree.nodes[node_p]["sigma"][x][y] = root_node["sigma"][x][y] * (1 - dxvy)
            # ``betweenness[x][y]`` reads column x / row y of the DataFrame,
            # so the matching writable cell is ``.loc[y, x]``.
            DF_tree.nodes[node_p]["betweenness"].loc[y, x] = (
                root_node["betweenness"][x][y] - root_node["betweenness"][x][y] * dxvy
            )
            if y != added_node:
                DF_tree.nodes[node_p]["betweenness"].loc[y, x] -= (
                    root_node["betweenness"][x][added_node] * dxyv
                )
            if x != added_node:
                DF_tree.nodes[node_p]["betweenness"].loc[y, x] -= (
                    root_node["betweenness"][added_node][y] * dvxy
                )

    # Candidate list: remaining nodes ordered by their updated diagonal
    # contribution, best first, excluding current group members.
    DF_tree.nodes[node_p]["CL"] = [
        node
        for _, node in sorted(
            zip(np.diag(DF_tree.nodes[node_p]["betweenness"]), nodes), reverse=True
        )
        if node not in DF_tree.nodes[node_p]["GM"]
    ]
    DF_tree.nodes[node_p]["cont"] = dict(
        zip(nodes, np.diag(DF_tree.nodes[node_p]["betweenness"]))
    )
    DF_tree.nodes[node_p]["heu"] = 0
    for i in range(k - len(DF_tree.nodes[node_p]["GM"])):
        DF_tree.nodes[node_p]["heu"] += DF_tree.nodes[node_p]["cont"][
            DF_tree.nodes[node_p]["CL"][i]
        ]

    # adding the minus node - don't insert the first node in the CL to GM
    # Insert minus node only if isn't greedy type algorithm
    if not greedy:
        DF_tree.add_nodes_from([(node_m, deepcopy(DF_tree.nodes[root]))])
        DF_tree.nodes[node_m]["CL"].pop(0)
        DF_tree.nodes[node_m]["cont"].pop(added_node)
        DF_tree.nodes[node_m]["heu"] = 0
        for i in range(k - len(DF_tree.nodes[node_m]["GM"])):
            DF_tree.nodes[node_m]["heu"] += DF_tree.nodes[node_m]["cont"][
                DF_tree.nodes[node_m]["CL"][i]
            ]
    else:
        node_m = None

    return node_p, node_m, DF_tree
+
+
@nx._dispatchable(edge_attrs="weight")
def group_closeness_centrality(G, S, weight=None):
    r"""Compute the group closeness centrality for a group of nodes.

    The group closeness centrality of a group of nodes $S$ measures how
    close the group is, as a whole, to the remaining nodes of the graph.

    .. math::

       c_{close}(S) = \frac{|V-S|}{\sum_{v \in V-S} d_{S, v}}

       d_{S, v} = min_{u \in S} (d_{u, v})

    where $V$ is the set of nodes and $d_{S, v}$ is the distance of the
    group $S$ from node $v$, i.e. the smallest distance from any member of
    $S$ to $v$ ($V-S$ denotes the nodes of $V$ that are not in $S$).

    Parameters
    ----------
    G : graph
       A NetworkX graph.

    S : list or set
       Group of nodes belonging to ``G`` whose group closeness centrality
       is computed.

    weight : None or string, optional (default=None)
       If None, every edge has weight/distance 1.  Otherwise the name of
       the edge attribute holding the length or distance of the edge.

    Raises
    ------
    NodeNotFound
       If node(s) in S are not present in G.

    Returns
    -------
    closeness : float
       Group closeness centrality of the group S.

    See Also
    --------
    closeness_centrality

    Notes
    -----
    The measure was introduced in [1]_; the formula implemented here
    follows [2]_.

    Larger values indicate a more central group.

    ``1 / 0`` is taken to be ``0``, which matters for directed graphs and
    when a shortest path length is 0.

    The group may contain at most ``n - 1`` nodes, where ``n`` is the total
    number of nodes in the graph.

    For directed graphs the incoming distance is used here.  To use the
    outward distance, apply the function to ``G.reverse()``.

    For weighted graphs all edge weights must be greater than zero; zero
    weights can produce infinitely many equal-length paths between pairs
    of nodes.

    References
    ----------
    .. [1] M G Everett and S P Borgatti:
       The Centrality of Groups and Classes.
       Journal of Mathematical Sociology. 23(3): 181-201. 1999.
       http://www.analytictech.com/borgatti/group_centrality.htm
    .. [2] J. Zhao et. al.:
       Measuring and Maximizing Group Closeness Centrality over
       Disk Resident Graphs.
       WWW Conference Proceedings, 2014. 689-694.
       https://doi.org/10.1145/2567948.2579356
    """
    if G.is_directed():
        G = G.reverse()  # incoming distances: work on the reverse view
    group = set(S)
    outsiders = set(G) - group
    # Distance from the group to every reachable node (min over members);
    # unreachable nodes are simply absent from the mapping.
    dist = nx.multi_source_dijkstra_path_length(G, group, weight=weight)
    total = sum(dist[v] for v in outsiders if v in dist)
    # 1 / 0 is taken to be 0 (nothing outside the group is reachable).
    return len(outsiders) / total if total else 0
+
+
@nx._dispatchable
def group_degree_centrality(G, S):
    """Compute the group degree centrality for a group of nodes.

    The group degree centrality of a group of nodes $S$ is the fraction
    of nodes outside the group that are adjacent to at least one group
    member.

    Parameters
    ----------
    G : graph
       A NetworkX graph.

    S : list or set
       Group of nodes belonging to ``G`` whose group degree centrality is
       computed.

    Raises
    ------
    NetworkXError
       If node(s) in S are not in G.

    Returns
    -------
    centrality : float
       Group degree centrality of the group S.

    See Also
    --------
    degree_centrality
    group_in_degree_centrality
    group_out_degree_centrality

    Notes
    -----
    The measure was introduced in [1]_.

    The group may contain at most ``n - 1`` nodes, with ``n`` the total
    number of nodes in the graph.

    References
    ----------
    .. [1] M G Everett and S P Borgatti:
       The Centrality of Groups and Classes.
       Journal of Mathematical Sociology. 23(3): 181-201. 1999.
       http://www.analytictech.com/borgatti/group_centrality.htm
    """
    group = set(S)
    # Gather every neighbor of a group member, then drop the members
    # themselves so only outside nodes touched by the group remain.
    touched = set()
    for member in S:
        touched.update(G.neighbors(member))
    touched -= group
    return len(touched) / (len(G.nodes()) - len(S))
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def group_in_degree_centrality(G, S):
    """Compute the group in-degree centrality for a group of nodes.

    The group in-degree centrality of a group of nodes $S$ is the fraction
    of nodes outside the group that are connected to group members by
    incoming edges.

    Parameters
    ----------
    G : graph
       A NetworkX graph.

    S : list or set
       Group of nodes belonging to ``G`` whose group in-degree centrality
       is computed.

    Returns
    -------
    centrality : float
       Group in-degree centrality of the group S.

    Raises
    ------
    NetworkXNotImplemented
       If G is undirected.

    NodeNotFound
       If node(s) in S are not in G.

    See Also
    --------
    degree_centrality
    group_degree_centrality
    group_out_degree_centrality

    Notes
    -----
    The group may contain at most ``n - 1`` nodes, with ``n`` the total
    number of nodes in the graph.

    In a DiGraph ``G.neighbors(i)`` yields the successors of ``i``, so the
    in-degree variant is computed on the reversed graph.
    """
    # Incoming edges of the group are outgoing edges in the reversed graph.
    reversed_graph = G.reverse()
    return group_degree_centrality(reversed_graph, S)
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def group_out_degree_centrality(G, S):
    """Compute the group out-degree centrality for a group of nodes.

    Group out-degree centrality of a group of nodes $S$ is the fraction
    of non-group members connected to group members by outgoing edges.

    Parameters
    ----------
    G : graph
       A NetworkX graph.

    S : list or set
       S is a group of nodes which belong to G, for which group out-degree
       centrality is to be calculated.

    Returns
    -------
    centrality : float
       Group out-degree centrality of the group S.

    Raises
    ------
    NetworkXNotImplemented
       If G is undirected.

    NodeNotFound
       If node(s) in S are not in G.

    See Also
    --------
    degree_centrality
    group_degree_centrality
    group_in_degree_centrality

    Notes
    -----
    The number of nodes in the group must be a maximum of n - 1 where `n`
    is the total number of nodes in the graph.

    `G.neighbors(i)` gives nodes with an outward edge from i, in a DiGraph,
    so for group out-degree centrality, the graph itself is used.
    """
    return group_degree_centrality(G, S)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/harmonic.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/harmonic.py
new file mode 100644
index 00000000..236e1491
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/harmonic.py
@@ -0,0 +1,89 @@
+"""Functions for computing the harmonic centrality of a graph."""
+
+from functools import partial
+
+import networkx as nx
+
+__all__ = ["harmonic_centrality"]
+
+
@nx._dispatchable(edge_attrs="distance")
def harmonic_centrality(G, nbunch=None, distance=None, sources=None):
    r"""Compute harmonic centrality for nodes.

    The harmonic centrality [1]_ of a node `u` is the sum of reciprocal
    shortest-path distances from all other nodes to `u`

    .. math::

        C(u) = \sum_{v \neq u} \frac{1}{d(v, u)}

    where `d(v, u)` is the shortest-path distance between `v` and `u`.

    When `sources` is given, only the reciprocal distances from the nodes
    in `sources` to `u` are summed instead of those from all nodes.

    Higher values indicate higher centrality.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    nbunch : container (default: all nodes in G)
      Container of nodes for which harmonic centrality values are calculated.

    sources : container (default: all nodes in G)
      Container of nodes `v` over which reciprocal distances are computed.
      Nodes not in `G` are silently ignored.

    distance : edge attribute key, optional (default=None)
      Use the specified edge attribute as the edge distance in shortest
      path calculations.  If `None`, every edge has distance 1.

    Returns
    -------
    nodes : dictionary
      Dictionary of nodes with harmonic centrality as the value.

    See Also
    --------
    betweenness_centrality, load_centrality, eigenvector_centrality,
    degree_centrality, closeness_centrality

    Notes
    -----
    When `distance` names an edge attribute, shortest-path lengths are
    computed with Dijkstra's algorithm using that attribute as the edge
    weight.

    References
    ----------
    .. [1] Boldi, Paolo, and Sebastiano Vigna. "Axioms for centrality."
           Internet Mathematics 10.3-4 (2014): 222-262.
    """
    targets = set(G.nbunch_iter(nbunch) if nbunch is not None else G.nodes)
    origins = set(G.nbunch_iter(sources) if sources is not None else G.nodes)

    # Result keys are fixed here, before any role swap below.
    centrality = dict.fromkeys(targets, 0)

    # Run one traversal per node of the smaller side: if there are fewer
    # targets than origins, swap the roles (reversing G when directed) so
    # the number of single-source computations is minimized.
    swapped = False
    if len(targets) < len(origins):
        swapped = True
        targets, origins = origins, targets
        if nx.is_directed(G):
            G = nx.reverse(G, copy=False)

    path_length = partial(nx.shortest_path_length, G, weight=distance)
    for origin in origins:
        lengths = path_length(origin)
        for node in targets.intersection(lengths):
            d = lengths[node]
            if d == 0:  # skip u == v and zero-weight paths
                continue
            centrality[origin if swapped else node] += 1 / d

    return centrality
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/katz.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/katz.py
new file mode 100644
index 00000000..4bd087bc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/katz.py
@@ -0,0 +1,331 @@
+"""Katz centrality."""
+
+import math
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["katz_centrality", "katz_centrality_numpy"]
+
+
+@not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs="weight")
+def katz_centrality(
+    G,
+    alpha=0.1,
+    beta=1.0,
+    max_iter=1000,
+    tol=1.0e-6,
+    nstart=None,
+    normalized=True,
+    weight=None,
+):
+    r"""Compute the Katz centrality for the nodes of the graph G.
+
+    Katz centrality computes the centrality for a node based on the centrality
+    of its neighbors. It is a generalization of the eigenvector centrality. The
+    Katz centrality for node $i$ is
+
+    .. math::
+
+        x_i = \alpha \sum_{j} A_{ij} x_j + \beta,
+
+    where $A$ is the adjacency matrix of graph G with eigenvalues $\lambda$.
+
+    The parameter $\beta$ controls the initial centrality and
+
+    .. math::
+
+        \alpha < \frac{1}{\lambda_{\max}}.
+
+    Katz centrality computes the relative influence of a node within a
+    network by measuring the number of the immediate neighbors (first
+    degree nodes) and also all other nodes in the network that connect
+    to the node under consideration through these immediate neighbors.
+
+    Extra weight can be provided to immediate neighbors through the
+    parameter $\beta$.  Connections made with distant neighbors
+    are, however, penalized by an attenuation factor $\alpha$ which
+    should be strictly less than the inverse largest eigenvalue of the
+    adjacency matrix in order for the Katz centrality to be computed
+    correctly. More information is provided in [1]_.
+
+    Parameters
+    ----------
+    G : graph
+      A NetworkX graph.
+
+    alpha : float, optional (default=0.1)
+      Attenuation factor
+
+    beta : scalar or dictionary, optional (default=1.0)
+      Weight attributed to the immediate neighborhood. If not a scalar, the
+      dictionary must have a value for every node.
+
+    max_iter : integer, optional (default=1000)
+      Maximum number of iterations in power method.
+
+    tol : float, optional (default=1.0e-6)
+      Error tolerance used to check convergence in power method iteration.
+
+    nstart : dictionary, optional
+      Starting value of Katz iteration for each node.
+
+    normalized : bool, optional (default=True)
+      If True normalize the resulting values.
+
+    weight : None or string, optional (default=None)
+      If None, all edge weights are considered equal.
+      Otherwise holds the name of the edge attribute used as weight.
+      In this measure the weight is interpreted as the connection strength.
+
+    Returns
+    -------
+    nodes : dictionary
+       Dictionary of nodes with Katz centrality as the value.
+
+    Raises
+    ------
+    NetworkXError
+       If the parameter `beta` is not a scalar but lacks a value for at least
+       one node
+
+    PowerIterationFailedConvergence
+        If the algorithm fails to converge to the specified tolerance
+        within the specified number of iterations of the power iteration
+        method.
+
+    Examples
+    --------
+    >>> import math
+    >>> G = nx.path_graph(4)
+    >>> phi = (1 + math.sqrt(5)) / 2.0  # largest eigenvalue of adj matrix
+    >>> centrality = nx.katz_centrality(G, 1 / phi - 0.01)
+    >>> for n, c in sorted(centrality.items()):
+    ...     print(f"{n} {c:.2f}")
+    0 0.37
+    1 0.60
+    2 0.60
+    3 0.37
+
+    See Also
+    --------
+    katz_centrality_numpy
+    eigenvector_centrality
+    eigenvector_centrality_numpy
+    :func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
+    :func:`~networkx.algorithms.link_analysis.hits_alg.hits`
+
+    Notes
+    -----
+    Katz centrality was introduced by [2]_.
+
+    This algorithm uses the power method to find the eigenvector
+    corresponding to the largest eigenvalue of the adjacency matrix of ``G``.
+    The parameter ``alpha`` should be strictly less than the inverse of largest
+    eigenvalue of the adjacency matrix for the algorithm to converge.
+    You can use ``max(nx.adjacency_spectrum(G))`` to get $\lambda_{\max}$ the largest
+    eigenvalue of the adjacency matrix.
+    The iteration will stop after ``max_iter`` iterations or an error tolerance of
+    ``number_of_nodes(G) * tol`` has been reached.
+
+    For strongly connected graphs, as $\alpha \to 1/\lambda_{\max}$, and $\beta > 0$,
+    Katz centrality approaches the results for eigenvector centrality.
+
+    For directed graphs this finds "left" eigenvectors which corresponds
+    to the in-edges in the graph. For out-edges Katz centrality,
+    first reverse the graph with ``G.reverse()``.
+
+    References
+    ----------
+    .. [1] Mark E. J. Newman:
+       Networks: An Introduction.
+       Oxford University Press, USA, 2010, p. 720.
+    .. [2] Leo Katz:
+       A New Status Index Derived from Sociometric Index.
+       Psychometrika 18(1):39–43, 1953
+       https://link.springer.com/content/pdf/10.1007/BF02289026.pdf
+    """
+    if len(G) == 0:
+        return {}
+
+    nnodes = G.number_of_nodes()
+
+    if nstart is None:
+        # choose starting vector with entries of 0
+        x = {n: 0 for n in G}
+    else:
+        x = nstart
+
+    # beta may be a scalar (broadcast to every node) or a per-node dict;
+    # a dict that misses any node of G is rejected explicitly.
+    try:
+        b = dict.fromkeys(G, float(beta))
+    except (TypeError, ValueError, AttributeError) as err:
+        b = beta
+        if set(beta) != set(G):
+            raise nx.NetworkXError(
+                "beta dictionary must have a value for every node"
+            ) from err
+
+    # make up to max_iter iterations
+    for _ in range(max_iter):
+        xlast = x
+        x = dict.fromkeys(xlast, 0)
+        # do the multiplication y^T = Alpha * x^T A + Beta
+        for n in x:
+            for nbr in G[n]:
+                x[nbr] += xlast[n] * G[n][nbr].get(weight, 1)
+        for n in x:
+            x[n] = alpha * x[n] + b[n]
+
+        # check convergence (L1 distance between successive iterates)
+        error = sum(abs(x[n] - xlast[n]) for n in x)
+        if error < nnodes * tol:
+            if normalized:
+                # normalize vector to unit Euclidean norm
+                try:
+                    s = 1.0 / math.hypot(*x.values())
+                except ZeroDivisionError:
+                    # all-zero vector: leave it unscaled
+                    s = 1.0
+            else:
+                s = 1
+            for n in x:
+                x[n] *= s
+            return x
+    raise nx.PowerIterationFailedConvergence(max_iter)
+
+
+@not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs="weight")
+def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, weight=None):
+    r"""Compute the Katz centrality for the graph G.
+
+    Katz centrality computes the centrality for a node based on the centrality
+    of its neighbors. It is a generalization of the eigenvector centrality. The
+    Katz centrality for node $i$ is
+
+    .. math::
+
+        x_i = \alpha \sum_{j} A_{ij} x_j + \beta,
+
+    where $A$ is the adjacency matrix of graph G with eigenvalues $\lambda$.
+
+    The parameter $\beta$ controls the initial centrality and
+
+    .. math::
+
+        \alpha < \frac{1}{\lambda_{\max}}.
+
+    Katz centrality computes the relative influence of a node within a
+    network by measuring the number of the immediate neighbors (first
+    degree nodes) and also all other nodes in the network that connect
+    to the node under consideration through these immediate neighbors.
+
+    Extra weight can be provided to immediate neighbors through the
+    parameter $\beta$.  Connections made with distant neighbors
+    are, however, penalized by an attenuation factor $\alpha$ which
+    should be strictly less than the inverse largest eigenvalue of the
+    adjacency matrix in order for the Katz centrality to be computed
+    correctly. More information is provided in [1]_.
+
+    Parameters
+    ----------
+    G : graph
+      A NetworkX graph
+
+    alpha : float, optional (default=0.1)
+      Attenuation factor
+
+    beta : scalar or dictionary, optional (default=1.0)
+      Weight attributed to the immediate neighborhood. If not a scalar, the
+      dictionary must have a value for every node.
+
+    normalized : bool, optional (default=True)
+      If True normalize the resulting values.
+
+    weight : None or string, optional
+      If None, all edge weights are considered equal.
+      Otherwise holds the name of the edge attribute used as weight.
+      In this measure the weight is interpreted as the connection strength.
+
+    Returns
+    -------
+    nodes : dictionary
+       Dictionary of nodes with Katz centrality as the value.
+
+    Raises
+    ------
+    NetworkXError
+       If the parameter `beta` is not a scalar but lacks a value for at least
+       one node
+
+    Examples
+    --------
+    >>> import math
+    >>> G = nx.path_graph(4)
+    >>> phi = (1 + math.sqrt(5)) / 2.0  # largest eigenvalue of adj matrix
+    >>> centrality = nx.katz_centrality_numpy(G, 1 / phi)
+    >>> for n, c in sorted(centrality.items()):
+    ...     print(f"{n} {c:.2f}")
+    0 0.37
+    1 0.60
+    2 0.60
+    3 0.37
+
+    See Also
+    --------
+    katz_centrality
+    eigenvector_centrality_numpy
+    eigenvector_centrality
+    :func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
+    :func:`~networkx.algorithms.link_analysis.hits_alg.hits`
+
+    Notes
+    -----
+    Katz centrality was introduced by [2]_.
+
+    This algorithm uses a direct linear solver to solve the above equation.
+    The parameter ``alpha`` should be strictly less than the inverse of largest
+    eigenvalue of the adjacency matrix for there to be a solution.
+    You can use ``max(nx.adjacency_spectrum(G))`` to get $\lambda_{\max}$ the largest
+    eigenvalue of the adjacency matrix.
+
+    For strongly connected graphs, as $\alpha \to 1/\lambda_{\max}$, and $\beta > 0$,
+    Katz centrality approaches the results for eigenvector centrality.
+
+    For directed graphs this finds "left" eigenvectors which corresponds
+    to the in-edges in the graph. For out-edges Katz centrality,
+    first reverse the graph with ``G.reverse()``.
+
+    References
+    ----------
+    .. [1] Mark E. J. Newman:
+       Networks: An Introduction.
+       Oxford University Press, USA, 2010, p. 173.
+    .. [2] Leo Katz:
+       A New Status Index Derived from Sociometric Index.
+       Psychometrika 18(1):39–43, 1953
+       https://link.springer.com/content/pdf/10.1007/BF02289026.pdf
+    """
+    import numpy as np
+
+    if len(G) == 0:
+        return {}
+    # A dict-like beta exposes .keys(); otherwise fall through to the scalar case.
+    try:
+        nodelist = beta.keys()
+        if set(nodelist) != set(G):
+            raise nx.NetworkXError("beta dictionary must have a value for every node")
+        b = np.array(list(beta.values()), dtype=float)
+    except AttributeError:
+        nodelist = list(G)
+        try:
+            b = np.ones((len(nodelist), 1)) * beta
+        except (TypeError, ValueError, AttributeError) as err:
+            raise nx.NetworkXError("beta must be a number") from err
+
+    # Transpose so the solve below yields the "left" (in-edge) centrality.
+    A = nx.adjacency_matrix(G, nodelist=nodelist, weight=weight).todense().T
+    n = A.shape[0]
+    # Solve (I - alpha * A^T) x = b directly instead of iterating.
+    centrality = np.linalg.solve(np.eye(n, n) - (alpha * A), b).squeeze()
+
+    # Normalize: rely on truediv to cast to float, then tolist to make Python numbers
+    norm = np.sign(sum(centrality)) * np.linalg.norm(centrality) if normalized else 1
+    return dict(zip(nodelist, (centrality / norm).tolist()))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/laplacian.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/laplacian.py
new file mode 100644
index 00000000..efb6e8f6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/laplacian.py
@@ -0,0 +1,150 @@
+"""
+Laplacian centrality measures.
+"""
+
+import networkx as nx
+
+__all__ = ["laplacian_centrality"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def laplacian_centrality(
+    G, normalized=True, nodelist=None, weight="weight", walk_type=None, alpha=0.95
+):
+    r"""Compute the Laplacian centrality for nodes in the graph `G`.
+
+    The Laplacian Centrality of a node ``i`` is measured by the drop in the
+    Laplacian Energy after deleting node ``i`` from the graph. The Laplacian Energy
+    is the sum of the squared eigenvalues of a graph's Laplacian matrix.
+
+    .. math::
+
+        C_L(u_i,G) = \frac{(\Delta E)_i}{E_L (G)} = \frac{E_L (G)-E_L (G_i)}{E_L (G)}
+
+        E_L (G) = \sum_{i=0}^n \lambda_i^2
+
+    Where $E_L (G)$ is the Laplacian energy of graph `G`,
+    $E_L (G_i)$ is the Laplacian energy of graph `G` after deleting node ``i``
+    and $\lambda_i$ are the eigenvalues of `G`'s Laplacian matrix.
+    This formula shows the normalized value. Without normalization,
+    the numerator on the right side is returned.
+
+    Parameters
+    ----------
+    G : graph
+        A networkx graph
+
+    normalized : bool (default = True)
+        If True the centrality score is scaled so the sum over all nodes is 1.
+        If False the centrality score for each node is the drop in Laplacian
+        energy when that node is removed.
+
+    nodelist : list, optional (default = None)
+        The rows and columns are ordered according to the nodes in nodelist.
+        If nodelist is None, then the ordering is produced by G.nodes().
+
+    weight: string or None, optional (default=`weight`)
+        Optional parameter `weight` to compute the Laplacian matrix.
+        The edge data key used to compute each value in the matrix.
+        If None, then each edge has weight 1.
+
+    walk_type : string or None, optional (default=None)
+        Optional parameter `walk_type` used when calling
+        :func:`directed_laplacian_matrix <networkx.directed_laplacian_matrix>`.
+        One of ``"random"``, ``"lazy"``, or ``"pagerank"``. If ``walk_type=None``
+        (the default), then a value is selected according to the properties of `G`:
+        - ``walk_type="random"`` if `G` is strongly connected and aperiodic
+        - ``walk_type="lazy"`` if `G` is strongly connected but not aperiodic
+        - ``walk_type="pagerank"`` for all other cases.
+
+    alpha : real (default = 0.95)
+        Optional parameter `alpha` used when calling
+        :func:`directed_laplacian_matrix <networkx.directed_laplacian_matrix>`.
+        (1 - alpha) is the teleportation probability used with pagerank.
+
+    Returns
+    -------
+    nodes : dictionary
+       Dictionary of nodes with Laplacian centrality as the value.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> edges = [(0, 1, 4), (0, 2, 2), (2, 1, 1), (1, 3, 2), (1, 4, 2), (4, 5, 1)]
+    >>> G.add_weighted_edges_from(edges)
+    >>> sorted((v, f"{c:0.2f}") for v, c in laplacian_centrality(G).items())
+    [(0, '0.70'), (1, '0.90'), (2, '0.28'), (3, '0.22'), (4, '0.26'), (5, '0.04')]
+
+    Notes
+    -----
+    The algorithm is implemented based on [1]_ with an extension to directed graphs
+    using the ``directed_laplacian_matrix`` function.
+
+    Raises
+    ------
+    NetworkXPointlessConcept
+        If the graph `G` is the null graph.
+    ZeroDivisionError
+        If the graph `G` has no edges (is empty) and normalization is requested.
+
+    References
+    ----------
+    .. [1] Qi, X., Fuller, E., Wu, Q., Wu, Y., and Zhang, C.-Q. (2012).
+       Laplacian centrality: A new centrality measure for weighted networks.
+       Information Sciences, 194:240-253.
+       https://math.wvu.edu/~cqzhang/Publication-files/my-paper/INS-2012-Laplacian-W.pdf
+
+    See Also
+    --------
+    :func:`~networkx.linalg.laplacianmatrix.directed_laplacian_matrix`
+    :func:`~networkx.linalg.laplacianmatrix.laplacian_matrix`
+    """
+    import numpy as np
+    import scipy as sp
+
+    if len(G) == 0:
+        raise nx.NetworkXPointlessConcept("null graph has no centrality defined")
+    if G.size(weight=weight) == 0:
+        if normalized:
+            raise ZeroDivisionError("graph with no edges has zero full energy")
+        return {n: 0 for n in G}
+
+    if nodelist is not None:
+        # Validate nodelist, then append any remaining nodes of G so the
+        # matrix covers the whole graph while nodelist entries come first.
+        nodeset = set(G.nbunch_iter(nodelist))
+        if len(nodeset) != len(nodelist):
+            raise nx.NetworkXError("nodelist has duplicate nodes or nodes not in G")
+        nodes = nodelist + [n for n in G if n not in nodeset]
+    else:
+        nodelist = nodes = list(G)
+
+    if G.is_directed():
+        lap_matrix = nx.directed_laplacian_matrix(G, nodes, weight, walk_type, alpha)
+    else:
+        lap_matrix = nx.laplacian_matrix(G, nodes, weight).toarray()
+
+    # Laplacian energy = sum of squared eigenvalues of the full Laplacian.
+    full_energy = np.power(sp.linalg.eigh(lap_matrix, eigvals_only=True), 2).sum()
+
+    # calculate laplacian centrality
+    laplace_centralities_dict = {}
+    for i, node in enumerate(nodelist):
+        # remove row and col i from lap_matrix
+        all_but_i = list(np.arange(lap_matrix.shape[0]))
+        all_but_i.remove(i)
+        A_2 = lap_matrix[all_but_i, :][:, all_but_i]
+
+        # Adjust diagonal for removed row
+        # (off-diagonal Laplacian entries are negated weights, so abs()
+        # recovers the weight each remaining node loses from its degree)
+        new_diag = lap_matrix.diagonal() - abs(lap_matrix[:, i])
+        np.fill_diagonal(A_2, new_diag[all_but_i])
+
+        if len(all_but_i) > 0:  # catches degenerate case of single node
+            new_energy = np.power(sp.linalg.eigh(A_2, eigvals_only=True), 2).sum()
+        else:
+            new_energy = 0.0
+
+        # Centrality is the energy drop caused by deleting this node.
+        lapl_cent = full_energy - new_energy
+        if normalized:
+            lapl_cent = lapl_cent / full_energy
+
+        laplace_centralities_dict[node] = float(lapl_cent)
+
+    return laplace_centralities_dict
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/load.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/load.py
new file mode 100644
index 00000000..fc46edd6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/load.py
@@ -0,0 +1,200 @@
+"""Load centrality."""
+
+from operator import itemgetter
+
+import networkx as nx
+
+__all__ = ["load_centrality", "edge_load_centrality"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def newman_betweenness_centrality(G, v=None, cutoff=None, normalized=True, weight=None):
+    """Compute load centrality for nodes.
+
+    The load centrality of a node is the fraction of all shortest
+    paths that pass through that node.
+
+    Parameters
+    ----------
+    G : graph
+      A networkx graph.
+
+    v : node, optional (default=None)
+      If specified, return only the load centrality value for this node.
+
+    normalized : bool, optional (default=True)
+      If True the betweenness values are normalized by b=b/(n-1)(n-2) where
+      n is the number of nodes in G.
+
+    weight : None or string, optional (default=None)
+      If None, edge weights are ignored.
+      Otherwise holds the name of the edge attribute used as weight.
+      The weight of an edge is treated as the length or distance between the two sides.
+
+    cutoff : int or float, optional (default=None)
+      If specified, only consider paths of length <= cutoff.
+
+    Returns
+    -------
+    nodes : dictionary
+       Dictionary of nodes with centrality as the value.
+
+    See Also
+    --------
+    betweenness_centrality
+
+    Notes
+    -----
+    Load centrality is slightly different than betweenness. It was originally
+    introduced by [2]_. For this load algorithm see [1]_.
+
+    References
+    ----------
+    .. [1] Mark E. J. Newman:
+       Scientific collaboration networks. II.
+       Shortest paths, weighted networks, and centrality.
+       Physical Review E 64, 016132, 2001.
+       http://journals.aps.org/pre/abstract/10.1103/PhysRevE.64.016132
+    .. [2] Kwang-Il Goh, Byungnam Kahng and Doochul Kim
+       Universal behavior of Load Distribution in Scale-Free Networks.
+       Physical Review Letters 87(27):1–4, 2001.
+       https://doi.org/10.1103/PhysRevLett.87.278701
+    """
+    if v is not None:  # only one node
+        betweenness = 0.0
+        for source in G:
+            ubetween = _node_betweenness(G, source, cutoff, False, weight)
+            betweenness += ubetween[v] if v in ubetween else 0
+        if normalized:
+            order = G.order()
+            if order <= 2:
+                return betweenness  # no normalization b=0 for all nodes
+            betweenness *= 1.0 / ((order - 1) * (order - 2))
+    else:
+        # Accumulate per-source contributions over every node of G.
+        betweenness = {}.fromkeys(G, 0.0)
+        for source in betweenness:
+            ubetween = _node_betweenness(G, source, cutoff, False, weight)
+            for vk in ubetween:
+                betweenness[vk] += ubetween[vk]
+        if normalized:
+            order = G.order()
+            if order <= 2:
+                return betweenness  # no normalization b=0 for all nodes
+            scale = 1.0 / ((order - 1) * (order - 2))
+            for v in betweenness:
+                betweenness[v] *= scale
+    return betweenness  # all nodes
+
+
+def _node_betweenness(G, source, cutoff=False, normalized=True, weight=None):
+    """Node betweenness_centrality helper:
+
+    See betweenness_centrality for what you probably want.
+    This actually computes "load" and not betweenness.
+    See https://networkx.lanl.gov/ticket/103
+
+    This calculates the load of each node for paths from a single source.
+    (The fraction of number of shortests paths from source that go
+    through each node.)
+
+    To get the load for a node you need to do all-pairs shortest paths.
+
+    If weight is not None then use Dijkstra for finding shortest paths.
+    """
+    # get the predecessor and path length data
+    if weight is None:
+        (pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
+    else:
+        (pred, length) = nx.dijkstra_predecessor_and_distance(G, source, cutoff, weight)
+
+    # order the nodes by path length
+    onodes = [(l, vert) for (vert, l) in length.items()]
+    onodes.sort()
+    # keep nodes only, farthest last; l > 0 drops the source itself
+    onodes[:] = [vert for (l, vert) in onodes if l > 0]
+
+    # initialize betweenness
+    between = {}.fromkeys(length, 1.0)
+
+    # Walk nodes from farthest to nearest, pushing each node's load
+    # back onto its predecessors, split evenly among shortest paths.
+    while onodes:
+        v = onodes.pop()
+        if v in pred:
+            num_paths = len(pred[v])  # Discount betweenness if more than
+            for x in pred[v]:  # one shortest path.
+                if x == source:  # stop if hit source because all remaining v
+                    break  # also have pred[v]==[source]
+                between[x] += between[v] / num_paths
+    #  remove source
+    for v in between:
+        between[v] -= 1
+    # rescale to be between 0 and 1
+    if normalized:
+        l = len(between)
+        if l > 2:
+            # scale by 1/the number of possible paths
+            scale = 1 / ((l - 1) * (l - 2))
+            for v in between:
+                between[v] *= scale
+    return between
+
+
+# Public alias: "load centrality" is the Newman betweenness variant above.
+load_centrality = newman_betweenness_centrality
+
+
+@nx._dispatchable
+def edge_load_centrality(G, cutoff=False):
+    """Compute edge load.
+
+    WARNING: This concept of edge load has not been analysed
+    or discussed outside of NetworkX that we know of.
+    It is based loosely on load_centrality in the sense that
+    it counts the number of shortest paths which cross each edge.
+    This function is for demonstration and testing purposes.
+
+    Parameters
+    ----------
+    G : graph
+        A networkx graph
+
+    cutoff : int or float, optional (default=False)
+        If specified, only consider paths of length <= cutoff.
+
+    Returns
+    -------
+    A dict keyed by edge 2-tuple to the number of shortest paths
+    which use that edge. Where more than one path is shortest
+    the count is divided equally among paths.
+    """
+    # Track both orientations of every edge so lookups work either way.
+    betweenness = {}
+    for u, v in G.edges():
+        betweenness[(u, v)] = 0.0
+        betweenness[(v, u)] = 0.0
+
+    for source in G:
+        ubetween = _edge_betweenness(G, source, cutoff=cutoff)
+        for e, ubetweenv in ubetween.items():
+            betweenness[e] += ubetweenv  # cumulative total
+    return betweenness
+
+
+def _edge_betweenness(G, source, nodes=None, cutoff=False):
+    """Edge betweenness helper.
+
+    Counts shortest-path load per edge for paths from a single `source`;
+    both orientations of each edge are keyed and updated together.
+    """
+    # get the predecessor data
+    (pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
+    # order the nodes by path length
+    onodes = [n for n, d in sorted(length.items(), key=itemgetter(1))]
+    # initialize betweenness, doesn't account for any edge weights
+    between = {}
+    for u, v in G.edges(nodes):
+        between[(u, v)] = 1.0
+        between[(v, u)] = 1.0
+
+    while onodes:  # work through all paths
+        v = onodes.pop()
+        if v in pred:
+            # Discount betweenness if more than one shortest path.
+            num_paths = len(pred[v])
+            for w in pred[v]:
+                if w in pred:
+                    # Discount betweenness, mult path
+                    num_paths = len(pred[w])
+                    for x in pred[w]:
+                        between[(w, x)] += between[(v, w)] / num_paths
+                        between[(x, w)] += between[(w, v)] / num_paths
+    return between
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/percolation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/percolation.py
new file mode 100644
index 00000000..0d4c8713
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/percolation.py
@@ -0,0 +1,128 @@
+"""Percolation centrality measures."""
+
+import networkx as nx
+from networkx.algorithms.centrality.betweenness import (
+    _single_source_dijkstra_path_basic as dijkstra,
+)
+from networkx.algorithms.centrality.betweenness import (
+    _single_source_shortest_path_basic as shortest_path,
+)
+
+__all__ = ["percolation_centrality"]
+
+
+@nx._dispatchable(node_attrs="attribute", edge_attrs="weight")
+def percolation_centrality(G, attribute="percolation", states=None, weight=None):
+    r"""Compute the percolation centrality for nodes.
+
+    Percolation centrality of a node $v$, at a given time, is defined
+    as the proportion of ‘percolated paths’ that go through that node.
+
+    This measure quantifies relative impact of nodes based on their
+    topological connectivity, as well as their percolation states.
+
+    Percolation states of nodes are used to depict network percolation
+    scenarios (such as during infection transmission in a social network
+    of individuals, spreading of computer viruses on computer networks, or
+    transmission of disease over a network of towns) over time. In this
+    measure usually the percolation state is expressed as a decimal
+    between 0.0 and 1.0.
+
+    When all nodes are in the same percolated state this measure is
+    equivalent to betweenness centrality.
+
+    Parameters
+    ----------
+    G : graph
+      A NetworkX graph.
+
+    attribute : None or string, optional (default='percolation')
+      Name of the node attribute to use for percolation state, used
+      if `states` is None. If a node does not set the attribute the
+      state of that node will be set to the default value of 1.
+      If all nodes do not have the attribute all nodes will be set to
+      1 and the centrality measure will be equivalent to betweenness centrality.
+
+    states : None or dict, optional (default=None)
+      Specify percolation states for the nodes, nodes as keys states
+      as values.
+
+    weight : None or string, optional (default=None)
+      If None, all edge weights are considered equal.
+      Otherwise holds the name of the edge attribute used as weight.
+      The weight of an edge is treated as the length or distance between the two sides.
+
+
+    Returns
+    -------
+    nodes : dictionary
+       Dictionary of nodes with percolation centrality as the value.
+
+    See Also
+    --------
+    betweenness_centrality
+
+    Notes
+    -----
+    The algorithm is from Mahendra Piraveenan, Mikhail Prokopenko, and
+    Liaquat Hossain [1]_
+    Pair dependencies are calculated and accumulated using [2]_
+
+    For weighted graphs the edge weights must be greater than zero.
+    Zero edge weights can produce an infinite number of equal length
+    paths between pairs of nodes.
+
+    References
+    ----------
+    .. [1] Mahendra Piraveenan, Mikhail Prokopenko, Liaquat Hossain
+       Percolation Centrality: Quantifying Graph-Theoretic Impact of Nodes
+       during Percolation in Networks
+       http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0053095
+    .. [2] Ulrik Brandes:
+       A Faster Algorithm for Betweenness Centrality.
+       Journal of Mathematical Sociology 25(2):163-177, 2001.
+       https://doi.org/10.1080/0022250X.2001.9990249
+    """
+    percolation = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
+
+    nodes = G
+
+    if states is None:
+        # Missing attributes fall back to state 1 (betweenness-equivalent).
+        states = nx.get_node_attributes(nodes, attribute, default=1)
+
+    # sum of all percolation states
+    p_sigma_x_t = 0.0
+    for v in states.values():
+        p_sigma_x_t += v
+
+    for s in nodes:
+        # single source shortest paths
+        if weight is None:  # use BFS
+            S, P, sigma, _ = shortest_path(G, s)
+        else:  # use Dijkstra's algorithm
+            S, P, sigma, _ = dijkstra(G, s, weight)
+        # accumulation
+        percolation = _accumulate_percolation(
+            percolation, S, P, sigma, s, states, p_sigma_x_t
+        )
+
+    n = len(G)
+
+    # NOTE(review): graphs with n <= 2 divide by zero (or flip sign) here —
+    # confirm callers never pass such graphs.
+    for v in percolation:
+        percolation[v] *= 1 / (n - 2)
+
+    return percolation
+
+
+def _accumulate_percolation(percolation, S, P, sigma, s, states, p_sigma_x_t):
+    """Accumulate percolation contributions for source ``s`` into ``percolation``.
+
+    Follows Brandes' back-propagation of dependencies: ``S`` is popped in
+    reverse order of discovery, ``P`` maps each node to its shortest-path
+    predecessors and ``sigma`` counts shortest paths.
+    """
+    delta = dict.fromkeys(S, 0)
+    while S:
+        w = S.pop()
+        coeff = (1 + delta[w]) / sigma[w]
+        for v in P[w]:
+            delta[v] += sigma[v] * coeff
+        if w != s:
+            # percolation weight
+            pw_s_w = states[s] / (p_sigma_x_t - states[w])
+            percolation[w] += delta[w] * pw_s_w
+    return percolation
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/reaching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/reaching.py
new file mode 100644
index 00000000..378e8a05
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/reaching.py
@@ -0,0 +1,209 @@
+"""Functions for computing reaching centrality of a node or a graph."""
+
+import networkx as nx
+from networkx.utils import pairwise
+
+__all__ = ["global_reaching_centrality", "local_reaching_centrality"]
+
+
+def _average_weight(G, path, weight=None):
+    """Returns the average weight of an edge in a weighted path.
+
+    Parameters
+    ----------
+    G : graph
+      A networkx graph.
+
+    path: list
+      A list of vertices that define the path.
+
+    weight : None or string, optional (default=None)
+      If None, edge weights are ignored.  Then the average weight of an edge
+      is assumed to be the multiplicative inverse of the length of the path.
+      Otherwise holds the name of the edge attribute used as weight.
+
+    Returns
+    -------
+    float
+      The average edge weight along ``path``; ``0`` when the path has no
+      edges (empty or single-node path).
+    """
+    path_length = len(path) - 1
+    # A path with fewer than two nodes contains no edges.
+    if path_length <= 0:
+        return 0
+    if weight is None:
+        return 1 / path_length
+    total_weight = sum(G.edges[i, j][weight] for i, j in pairwise(path))
+    return total_weight / path_length
+
+
+@nx._dispatchable(edge_attrs="weight")
+def global_reaching_centrality(G, weight=None, normalized=True):
+    """Returns the global reaching centrality of a directed graph.
+
+    The *global reaching centrality* of a weighted directed graph is the
+    average over all nodes of the difference between the local reaching
+    centrality of the node and the greatest local reaching centrality of
+    any node in the graph [1]_. For more information on the local
+    reaching centrality, see :func:`local_reaching_centrality`.
+    Informally, the local reaching centrality is the proportion of the
+    graph that is reachable from the neighbors of the node.
+
+    Parameters
+    ----------
+    G : DiGraph
+        A networkx DiGraph.
+
+    weight : None or string, optional (default=None)
+        Attribute to use for edge weights. If ``None``, each edge weight
+        is assumed to be one. A higher weight implies a stronger
+        connection between nodes and a *shorter* path length.
+
+    normalized : bool, optional (default=True)
+        Whether to normalize the edge weights by the total sum of edge
+        weights.
+
+    Returns
+    -------
+    h : float
+        The global reaching centrality of the graph.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> G.add_edge(1, 2)
+    >>> G.add_edge(1, 3)
+    >>> nx.global_reaching_centrality(G)
+    1.0
+    >>> G.add_edge(3, 2)
+    >>> nx.global_reaching_centrality(G)
+    0.75
+
+    See also
+    --------
+    local_reaching_centrality
+
+    References
+    ----------
+    .. [1] Mones, Enys, Lilla Vicsek, and Tamás Vicsek.
+           "Hierarchy Measure for Complex Networks."
+           *PLoS ONE* 7.3 (2012): e33799.
+           https://doi.org/10.1371/journal.pone.0033799
+    """
+    # Reaching centrality is undefined for negative connection strengths.
+    if nx.is_negatively_weighted(G, weight=weight):
+        raise nx.NetworkXError("edge weights must be positive")
+    total_weight = G.size(weight=weight)
+    if total_weight <= 0:
+        raise nx.NetworkXError("Size of G must be positive")
+    # If provided, weights must be interpreted as connection strength
+    # (so higher weights are more likely to be chosen). However, the
+    # shortest path algorithms in NetworkX assume the provided "weight"
+    # is actually a distance (so edges with higher weight are less
+    # likely to be chosen). Therefore we need to invert the weights when
+    # computing shortest paths.
+    #
+    # If weight is None, we leave it as-is so that the shortest path
+    # algorithm can use a faster, unweighted algorithm.
+    if weight is not None:
+
+        def as_distance(u, v, d):
+            # Missing edge attributes default to strength 1.
+            return total_weight / d.get(weight, 1)
+
+        shortest_paths = nx.shortest_path(G, weight=as_distance)
+    else:
+        shortest_paths = nx.shortest_path(G)
+
+    centrality = local_reaching_centrality
+    # TODO This can be trivially parallelized.
+    # Reuse the all-pairs shortest paths computed above via paths=... so the
+    # per-node call does not recompute them.
+    lrc = [
+        centrality(G, node, paths=paths, weight=weight, normalized=normalized)
+        for node, paths in shortest_paths.items()
+    ]
+
+    max_lrc = max(lrc)
+    # Average deviation from the maximum local reaching centrality (eq. in [1]).
+    return sum(max_lrc - c for c in lrc) / (len(G) - 1)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def local_reaching_centrality(G, v, paths=None, weight=None, normalized=True):
+    """Returns the local reaching centrality of a node in a directed
+    graph.
+
+    The *local reaching centrality* of a node in a directed graph is the
+    proportion of other nodes reachable from that node [1]_.
+
+    Parameters
+    ----------
+    G : DiGraph
+        A NetworkX DiGraph.
+
+    v : node
+        A node in the directed graph `G`.
+
+    paths : dictionary (default=None)
+        If this is not `None` it must be a dictionary representation
+        of single-source shortest paths, as computed by, for example,
+        :func:`networkx.shortest_path` with source node `v`. Use this
+        keyword argument if you intend to invoke this function many
+        times but don't want the paths to be recomputed each time.
+
+    weight : None or string, optional (default=None)
+        Attribute to use for edge weights.  If `None`, each edge weight
+        is assumed to be one. A higher weight implies a stronger
+        connection between nodes and a *shorter* path length.
+
+    normalized : bool, optional (default=True)
+        Whether to normalize the edge weights by the total sum of edge
+        weights.
+
+    Returns
+    -------
+    h : float
+        The local reaching centrality of the node ``v`` in the graph
+        ``G``.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> G.add_edges_from([(1, 2), (1, 3)])
+    >>> nx.local_reaching_centrality(G, 3)
+    0.0
+    >>> G.add_edge(3, 2)
+    >>> nx.local_reaching_centrality(G, 3)
+    0.5
+
+    See also
+    --------
+    global_reaching_centrality
+
+    References
+    ----------
+    .. [1] Mones, Enys, Lilla Vicsek, and Tamás Vicsek.
+           "Hierarchy Measure for Complex Networks."
+           *PLoS ONE* 7.3 (2012): e33799.
+           https://doi.org/10.1371/journal.pone.0033799
+    """
+    # Corner case: graph with single node containing a self-loop
+    # (positive size but no "other" nodes to reach).
+    if (total_weight := G.size(weight=weight)) > 0 and len(G) == 1:
+        raise nx.NetworkXError(
+            "local_reaching_centrality of a single node with self-loop not well-defined"
+        )
+    if paths is None:
+        # Validation only happens when we compute the paths ourselves; a
+        # caller-supplied ``paths`` is trusted as-is.
+        if nx.is_negatively_weighted(G, weight=weight):
+            raise nx.NetworkXError("edge weights must be positive")
+        if total_weight <= 0:
+            raise nx.NetworkXError("Size of G must be positive")
+        if weight is not None:
+            # Interpret weights as lengths.
+            def as_distance(u, v, d):
+                return total_weight / d.get(weight, 1)
+
+            paths = nx.shortest_path(G, source=v, weight=as_distance)
+        else:
+            paths = nx.shortest_path(G, source=v)
+    # If the graph is unweighted, simply return the proportion of nodes
+    # reachable from the source node ``v``.
+    if weight is None and G.is_directed():
+        return (len(paths) - 1) / (len(G) - 1)
+    # Normalization factor: the mean edge weight of the whole graph.
+    if normalized and weight is not None:
+        norm = G.size(weight=weight) / G.size()
+    else:
+        norm = 1
+    # TODO This can be trivially parallelized.
+    avgw = (_average_weight(G, path, weight=weight) for path in paths.values())
+    sum_avg_weight = sum(avgw) / norm
+    return sum_avg_weight / (len(G) - 1)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/second_order.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/second_order.py
new file mode 100644
index 00000000..35583cd6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/second_order.py
@@ -0,0 +1,141 @@
+"""Copyright (c) 2015 – Thomson Licensing, SAS
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted (subject to the limitations in the
+disclaimer below) provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+
+* Neither the name of Thomson Licensing, or Technicolor, nor the names
+of its contributors may be used to endorse or promote products derived
+from this software without specific prior written permission.
+
+NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE
+GRANTED BY THIS LICENSE.  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
+HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+# Authors: Erwan Le Merrer (erwan.lemerrer@technicolor.com)
+
+__all__ = ["second_order_centrality"]
+
+
+@not_implemented_for("directed")
+@nx._dispatchable(edge_attrs="weight")
+def second_order_centrality(G, weight="weight"):
+    """Compute the second order centrality for nodes of G.
+
+    The second order centrality of a given node is the standard deviation of
+    the return times to that node of a perpetual random walk on G:
+
+    Parameters
+    ----------
+    G : graph
+      A NetworkX connected and undirected graph.
+
+    weight : string or None, optional (default="weight")
+        The name of an edge attribute that holds the numerical value
+        used as a weight. If None then each edge has weight 1.
+
+    Returns
+    -------
+    nodes : dictionary
+       Dictionary keyed by node with second order centrality as the value.
+
+    Examples
+    --------
+    >>> G = nx.star_graph(10)
+    >>> soc = nx.second_order_centrality(G)
+    >>> print(sorted(soc.items(), key=lambda x: x[1])[0][0])  # pick first id
+    0
+
+    Raises
+    ------
+    NetworkXException
+        If the graph G is empty, non connected or has negative weights.
+
+    See Also
+    --------
+    betweenness_centrality
+
+    Notes
+    -----
+    Lower values of second order centrality indicate higher centrality.
+
+    The algorithm is from Kermarrec, Le Merrer, Sericola and Trédan [1]_.
+
+    This code implements the analytical version of the algorithm, i.e.,
+    there is no simulation of a random walk process involved. The random walk
+    is here unbiased (corresponding to eq 6 of the paper [1]_), thus the
+    centrality values are the standard deviations for random walk return times
+    on the transformed input graph G (equal in-degree at each nodes by adding
+    self-loops).
+
+    Complexity of this implementation, made to run locally on a single machine,
+    is O(n^3), with n the size of G, which makes it viable only for small
+    graphs.
+
+    References
+    ----------
+    .. [1] Anne-Marie Kermarrec, Erwan Le Merrer, Bruno Sericola, Gilles Trédan
+       "Second order centrality: Distributed assessment of nodes criticity in
+       complex networks", Elsevier Computer Communications 34(5):619-628, 2011.
+    """
+    import numpy as np
+
+    n = len(G)
+
+    if n == 0:
+        raise nx.NetworkXException("Empty graph.")
+    if not nx.is_connected(G):
+        raise nx.NetworkXException("Non connected graph.")
+    if any(d.get(weight, 0) < 0 for u, v, d in G.edges(data=True)):
+        raise nx.NetworkXException("Graph has negative edge weights.")
+
+    # balancing G for Metropolis-Hastings random walks
+    # Every node gets a weighted self-loop so that all in-degrees equal the
+    # maximum in-degree, making the walk unbiased (eq 6 of [1]).
+    G = nx.DiGraph(G)
+    in_deg = dict(G.in_degree(weight=weight))
+    d_max = max(in_deg.values())
+    for i, deg in in_deg.items():
+        if deg < d_max:
+            G.add_edge(i, i, weight=d_max - deg)
+
+    P = nx.to_numpy_array(G)
+    P /= P.sum(axis=1)[:, np.newaxis]  # to transition probability matrix
+
+    def _Qj(P, j):
+        # Transition matrix with node j made absorbing (column zeroed).
+        P = P.copy()
+        P[:, j] = 0
+        return P
+
+    M = np.empty([n, n])
+
+    # Column i of M holds the expected hitting times to node i, obtained by
+    # solving (I - Q_i) m = 1.
+    for i in range(n):
+        M[:, i] = np.linalg.solve(
+            np.identity(n) - _Qj(P, i), np.ones([n, 1])[:, 0]
+        )  # eq 3
+
+    return dict(
+        zip(
+            G.nodes,
+            (float(np.sqrt(2 * np.sum(M[:, i]) - n * (n + 1))) for i in range(n)),
+        )
+    )  # eq 6
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/subgraph_alg.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/subgraph_alg.py
new file mode 100644
index 00000000..0a49e6f4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/subgraph_alg.py
@@ -0,0 +1,340 @@
+"""
+Subgraph centrality and communicability betweenness.
+"""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = [
+    "subgraph_centrality_exp",
+    "subgraph_centrality",
+    "communicability_betweenness_centrality",
+    "estrada_index",
+]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def subgraph_centrality_exp(G):
+    r"""Returns the subgraph centrality for each node of G.
+
+    Subgraph centrality  of a node `n` is the sum of weighted closed
+    walks of all lengths starting and ending at node `n`. The weights
+    decrease with path length. Each closed walk is associated with a
+    connected subgraph ([1]_).
+
+    Parameters
+    ----------
+    G: graph
+
+    Returns
+    -------
+    nodes:dictionary
+        Dictionary of nodes with subgraph centrality as the value.
+
+    Raises
+    ------
+    NetworkXError
+        If the graph is not undirected and simple.
+
+    See Also
+    --------
+    subgraph_centrality:
+        Alternative algorithm of the subgraph centrality for each node of G.
+
+    Notes
+    -----
+    This version of the algorithm exponentiates the adjacency matrix.
+
+    The subgraph centrality of a node `u` in G can be found using
+    the matrix exponential of the adjacency matrix of G [1]_,
+
+    .. math::
+
+        SC(u)=(e^A)_{uu} .
+
+    References
+    ----------
+    .. [1] Ernesto Estrada, Juan A. Rodriguez-Velazquez,
+       "Subgraph centrality in complex networks",
+       Physical Review E 71, 056103 (2005).
+       https://arxiv.org/abs/cond-mat/0504730
+
+    Examples
+    --------
+    (Example from [1]_)
+    >>> G = nx.Graph(
+    ...     [
+    ...         (1, 2),
+    ...         (1, 5),
+    ...         (1, 8),
+    ...         (2, 3),
+    ...         (2, 8),
+    ...         (3, 4),
+    ...         (3, 6),
+    ...         (4, 5),
+    ...         (4, 7),
+    ...         (5, 6),
+    ...         (6, 7),
+    ...         (7, 8),
+    ...     ]
+    ... )
+    >>> sc = nx.subgraph_centrality_exp(G)
+    >>> print([f"{node} {sc[node]:0.2f}" for node in sorted(sc)])
+    ['1 3.90', '2 3.90', '3 3.64', '4 3.71', '5 3.64', '6 3.71', '7 3.64', '8 3.90']
+    """
+    # alternative implementation that calculates the matrix exponential
+    import scipy as sp
+
+    nodelist = list(G)  # ordering of nodes in matrix
+    A = nx.to_numpy_array(G, nodelist)
+    # convert to 0-1 matrix (edge weights are intentionally ignored)
+    A[A != 0.0] = 1
+    expA = sp.linalg.expm(A)
+    # convert diagonal to dictionary keyed by node; (e^A)_{uu} is SC(u)
+    sc = dict(zip(nodelist, map(float, expA.diagonal())))
+    return sc
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def subgraph_centrality(G):
+    r"""Returns subgraph centrality for each node in G.
+
+    Subgraph centrality  of a node `n` is the sum of weighted closed
+    walks of all lengths starting and ending at node `n`. The weights
+    decrease with path length. Each closed walk is associated with a
+    connected subgraph ([1]_).
+
+    Parameters
+    ----------
+    G: graph
+
+    Returns
+    -------
+    nodes : dictionary
+       Dictionary of nodes with subgraph centrality as the value.
+
+    Raises
+    ------
+    NetworkXError
+       If the graph is not undirected and simple.
+
+    See Also
+    --------
+    subgraph_centrality_exp:
+        Alternative algorithm of the subgraph centrality for each node of G.
+
+    Notes
+    -----
+    This version of the algorithm computes eigenvalues and eigenvectors
+    of the adjacency matrix.
+
+    Subgraph centrality of a node `u` in G can be found using
+    a spectral decomposition of the adjacency matrix [1]_,
+
+    .. math::
+
+       SC(u)=\sum_{j=1}^{N}(v_{j}^{u})^2 e^{\lambda_{j}},
+
+    where `v_j` is an eigenvector of the adjacency matrix `A` of G
+    corresponding to the eigenvalue `\lambda_j`.
+
+    Examples
+    --------
+    (Example from [1]_)
+    >>> G = nx.Graph(
+    ...     [
+    ...         (1, 2),
+    ...         (1, 5),
+    ...         (1, 8),
+    ...         (2, 3),
+    ...         (2, 8),
+    ...         (3, 4),
+    ...         (3, 6),
+    ...         (4, 5),
+    ...         (4, 7),
+    ...         (5, 6),
+    ...         (6, 7),
+    ...         (7, 8),
+    ...     ]
+    ... )
+    >>> sc = nx.subgraph_centrality(G)
+    >>> print([f"{node} {sc[node]:0.2f}" for node in sorted(sc)])
+    ['1 3.90', '2 3.90', '3 3.64', '4 3.71', '5 3.64', '6 3.71', '7 3.64', '8 3.90']
+
+    References
+    ----------
+    .. [1] Ernesto Estrada, Juan A. Rodriguez-Velazquez,
+       "Subgraph centrality in complex networks",
+       Physical Review E 71, 056103 (2005).
+       https://arxiv.org/abs/cond-mat/0504730
+
+    """
+    import numpy as np
+
+    nodelist = list(G)  # ordering of nodes in matrix
+    A = nx.to_numpy_array(G, nodelist)
+    # convert to 0-1 matrix (edge weights are intentionally ignored)
+    A[np.nonzero(A)] = 1
+    # A is symmetric for an undirected graph, so use the Hermitian solver.
+    w, v = np.linalg.eigh(A)
+    vsquare = np.array(v) ** 2
+    expw = np.exp(w)
+    # SC(u) = sum_j (v_j^u)^2 e^{lambda_j}  -- the spectral form above.
+    xg = vsquare @ expw
+    # convert vector dictionary keyed by node
+    sc = dict(zip(nodelist, map(float, xg)))
+    return sc
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def communicability_betweenness_centrality(G):
+    r"""Returns subgraph communicability for all pairs of nodes in G.
+
+    Communicability betweenness measure makes use of the number of walks
+    connecting every pair of nodes as the basis of a betweenness centrality
+    measure.
+
+    Parameters
+    ----------
+    G: graph
+
+    Returns
+    -------
+    nodes : dictionary
+        Dictionary of nodes with communicability betweenness as the value.
+
+    Raises
+    ------
+    NetworkXError
+        If the graph is not undirected and simple.
+
+    Notes
+    -----
+    Let `G=(V,E)` be a simple undirected graph with `n` nodes and `m` edges,
+    and `A` denote the adjacency matrix of `G`.
+
+    Let `G(r)=(V,E(r))` be the graph resulting from
+    removing all edges connected to node `r` but not the node itself.
+
+    The adjacency matrix for `G(r)` is `A+E(r)`,  where `E(r)` has nonzeros
+    only in row and column `r`.
+
+    The subgraph betweenness of a node `r`  is [1]_
+
+    .. math::
+
+         \omega_{r} = \frac{1}{C}\sum_{p}\sum_{q}\frac{G_{prq}}{G_{pq}},
+         p\neq q, q\neq r,
+
+    where
+    `G_{prq}=(e^{A}_{pq} - (e^{A+E(r)})_{pq}`  is the number of walks
+    involving node r,
+    `G_{pq}=(e^{A})_{pq}` is the number of closed walks starting
+    at node `p` and ending at node `q`,
+    and `C=(n-1)^{2}-(n-1)` is a normalization factor equal to the
+    number of terms in the sum.
+
+    The resulting `\omega_{r}` takes values between zero and one.
+    The lower bound cannot be attained for a connected
+    graph, and the upper bound is attained in the star graph.
+
+    References
+    ----------
+    .. [1] Ernesto Estrada, Desmond J. Higham, Naomichi Hatano,
+       "Communicability Betweenness in Complex Networks"
+       Physica A 388 (2009) 764-774.
+       https://arxiv.org/abs/0905.4102
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
+    >>> cbc = nx.communicability_betweenness_centrality(G)
+    >>> print([f"{node} {cbc[node]:0.2f}" for node in sorted(cbc)])
+    ['0 0.03', '1 0.45', '2 0.51', '3 0.45', '4 0.40', '5 0.19', '6 0.03']
+    """
+    import numpy as np
+    import scipy as sp
+
+    nodelist = list(G)  # ordering of nodes in matrix
+    n = len(nodelist)
+    A = nx.to_numpy_array(G, nodelist)
+    # convert to 0-1 matrix (edge weights are intentionally ignored)
+    A[np.nonzero(A)] = 1
+    expA = sp.linalg.expm(A)
+    mapping = dict(zip(nodelist, range(n)))
+    cbc = {}
+    # For each node v: zero its row/column in A (the graph G(v) above),
+    # re-exponentiate, and measure the relative drop in communicability.
+    for v in G:
+        # remove row and col of node v
+        i = mapping[v]
+        row = A[i, :].copy()
+        col = A[:, i].copy()
+        A[i, :] = 0
+        A[:, i] = 0
+        B = (expA - sp.linalg.expm(A)) / expA
+        # sum with row/col of node v and diag set to zero
+        # (exclude pairs where p == v, q == v, or p == q)
+        B[i, :] = 0
+        B[:, i] = 0
+        B -= np.diag(np.diag(B))
+        cbc[v] = float(B.sum())
+        # put row and col back (A is mutated in place to avoid copies)
+        A[i, :] = row
+        A[:, i] = col
+    # rescale when more than two nodes
+    order = len(cbc)
+    if order > 2:
+        # C = (n-1)^2 - (n-1), the number of terms in the double sum.
+        scale = 1.0 / ((order - 1.0) ** 2 - (order - 1.0))
+        cbc = {node: value * scale for node, value in cbc.items()}
+    return cbc
+
+
+@nx._dispatchable
+def estrada_index(G):
+    r"""Returns the Estrada index of the graph G.
+
+    The Estrada Index is a topological index of folding or 3D "compactness" ([1]_).
+
+    Parameters
+    ----------
+    G: graph
+
+    Returns
+    -------
+    estrada index: float
+
+    Raises
+    ------
+    NetworkXError
+        If the graph is not undirected and simple.
+
+    Notes
+    -----
+    Let `G=(V,E)` be a simple undirected graph with `n` nodes  and let
+    `\lambda_{1}\leq\lambda_{2}\leq\cdots\lambda_{n}`
+    be a non-increasing ordering of the eigenvalues of its adjacency
+    matrix `A`. The Estrada index is ([1]_, [2]_)
+
+    .. math::
+        EE(G)=\sum_{j=1}^n e^{\lambda _j}.
+
+    References
+    ----------
+    .. [1] E. Estrada, "Characterization of 3D molecular structure",
+       Chem. Phys. Lett. 319, 713 (2000).
+       https://doi.org/10.1016/S0009-2614(00)00158-5
+    .. [2] José Antonio de la Peñaa, Ivan Gutman, Juan Rada,
+       "Estimating the Estrada index",
+       Linear Algebra and its Applications. 427, 1 (2007).
+       https://doi.org/10.1016/j.laa.2007.06.020
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
+    >>> ei = nx.estrada_index(G)
+    >>> print(f"{ei:0.5}")
+    20.55
+    """
+    # EE(G) = sum_u SC(u) = trace(e^A); reuse the spectral implementation.
+    return sum(subgraph_centrality(G).values())
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality.py
new file mode 100644
index 00000000..4c059cf9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality.py
@@ -0,0 +1,780 @@
+import pytest
+
+import networkx as nx
+
+
+def weighted_G():
+    """Return a small fixed weighted undirected graph used as a test fixture.
+
+    Six nodes (0-5) with integer edge weights; shared by the weighted
+    betweenness centrality tests below.
+    """
+    G = nx.Graph()
+    G.add_edge(0, 1, weight=3)
+    G.add_edge(0, 2, weight=2)
+    G.add_edge(0, 3, weight=6)
+    G.add_edge(0, 4, weight=4)
+    G.add_edge(1, 3, weight=5)
+    G.add_edge(1, 5, weight=5)
+    G.add_edge(2, 4, weight=1)
+    G.add_edge(3, 4, weight=2)
+    G.add_edge(3, 5, weight=1)
+    G.add_edge(4, 5, weight=4)
+    return G
+
+
+class TestBetweennessCentrality:
+    def test_K5(self):
+        """Betweenness centrality: K5"""
+        G = nx.complete_graph(5)
+        b = nx.betweenness_centrality(G, weight=None, normalized=False)
+        b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0}
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_K5_endpoints(self):
+        """Betweenness centrality: K5 endpoints"""
+        G = nx.complete_graph(5)
+        b = nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True)
+        b_answer = {0: 4.0, 1: 4.0, 2: 4.0, 3: 4.0, 4: 4.0}
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        # normalized = True case
+        b = nx.betweenness_centrality(G, weight=None, normalized=True, endpoints=True)
+        b_answer = {0: 0.4, 1: 0.4, 2: 0.4, 3: 0.4, 4: 0.4}
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_P3_normalized(self):
+        """Betweenness centrality: P3 normalized"""
+        G = nx.path_graph(3)
+        b = nx.betweenness_centrality(G, weight=None, normalized=True)
+        b_answer = {0: 0.0, 1: 1.0, 2: 0.0}
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_P3(self):
+        """Betweenness centrality: P3"""
+        G = nx.path_graph(3)
+        b_answer = {0: 0.0, 1: 1.0, 2: 0.0}
+        b = nx.betweenness_centrality(G, weight=None, normalized=False)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_sample_from_P3(self):
+        """Betweenness centrality: P3 sample"""
+        G = nx.path_graph(3)
+        b_answer = {0: 0.0, 1: 1.0, 2: 0.0}
+        b = nx.betweenness_centrality(G, k=3, weight=None, normalized=False, seed=1)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        b = nx.betweenness_centrality(G, k=2, weight=None, normalized=False, seed=1)
+        # python versions give different results with same seed
+        b_approx1 = {0: 0.0, 1: 1.5, 2: 0.0}
+        b_approx2 = {0: 0.0, 1: 0.75, 2: 0.0}
+        for n in sorted(G):
+            assert b[n] in (b_approx1[n], b_approx2[n])
+
+    def test_P3_endpoints(self):
+        """Betweenness centrality: P3 endpoints"""
+        G = nx.path_graph(3)
+        b_answer = {0: 2.0, 1: 3.0, 2: 2.0}
+        b = nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        # normalized = True case
+        b_answer = {0: 2 / 3, 1: 1.0, 2: 2 / 3}
+        b = nx.betweenness_centrality(G, weight=None, normalized=True, endpoints=True)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_krackhardt_kite_graph(self):
+        """Betweenness centrality: Krackhardt kite graph"""
+        G = nx.krackhardt_kite_graph()
+        b_answer = {
+            0: 1.667,
+            1: 1.667,
+            2: 0.000,
+            3: 7.333,
+            4: 0.000,
+            5: 16.667,
+            6: 16.667,
+            7: 28.000,
+            8: 16.000,
+            9: 0.000,
+        }
+        for b in b_answer:
+            b_answer[b] /= 2
+        b = nx.betweenness_centrality(G, weight=None, normalized=False)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-3)
+
+    def test_krackhardt_kite_graph_normalized(self):
+        """Betweenness centrality: Krackhardt kite graph normalized"""
+        G = nx.krackhardt_kite_graph()
+        b_answer = {
+            0: 0.023,
+            1: 0.023,
+            2: 0.000,
+            3: 0.102,
+            4: 0.000,
+            5: 0.231,
+            6: 0.231,
+            7: 0.389,
+            8: 0.222,
+            9: 0.000,
+        }
+        b = nx.betweenness_centrality(G, weight=None, normalized=True)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-3)
+
+    def test_florentine_families_graph(self):
+        """Betweenness centrality: Florentine families graph"""
+        G = nx.florentine_families_graph()
+        b_answer = {
+            "Acciaiuoli": 0.000,
+            "Albizzi": 0.212,
+            "Barbadori": 0.093,
+            "Bischeri": 0.104,
+            "Castellani": 0.055,
+            "Ginori": 0.000,
+            "Guadagni": 0.255,
+            "Lamberteschi": 0.000,
+            "Medici": 0.522,
+            "Pazzi": 0.000,
+            "Peruzzi": 0.022,
+            "Ridolfi": 0.114,
+            "Salviati": 0.143,
+            "Strozzi": 0.103,
+            "Tornabuoni": 0.092,
+        }
+
+        b = nx.betweenness_centrality(G, weight=None, normalized=True)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-3)
+
+    def test_les_miserables_graph(self):
+        """Betweenness centrality: Les Miserables graph"""
+        G = nx.les_miserables_graph()
+        b_answer = {
+            "Napoleon": 0.000,
+            "Myriel": 0.177,
+            "MlleBaptistine": 0.000,
+            "MmeMagloire": 0.000,
+            "CountessDeLo": 0.000,
+            "Geborand": 0.000,
+            "Champtercier": 0.000,
+            "Cravatte": 0.000,
+            "Count": 0.000,
+            "OldMan": 0.000,
+            "Valjean": 0.570,
+            "Labarre": 0.000,
+            "Marguerite": 0.000,
+            "MmeDeR": 0.000,
+            "Isabeau": 0.000,
+            "Gervais": 0.000,
+            "Listolier": 0.000,
+            "Tholomyes": 0.041,
+            "Fameuil": 0.000,
+            "Blacheville": 0.000,
+            "Favourite": 0.000,
+            "Dahlia": 0.000,
+            "Zephine": 0.000,
+            "Fantine": 0.130,
+            "MmeThenardier": 0.029,
+            "Thenardier": 0.075,
+            "Cosette": 0.024,
+            "Javert": 0.054,
+            "Fauchelevent": 0.026,
+            "Bamatabois": 0.008,
+            "Perpetue": 0.000,
+            "Simplice": 0.009,
+            "Scaufflaire": 0.000,
+            "Woman1": 0.000,
+            "Judge": 0.000,
+            "Champmathieu": 0.000,
+            "Brevet": 0.000,
+            "Chenildieu": 0.000,
+            "Cochepaille": 0.000,
+            "Pontmercy": 0.007,
+            "Boulatruelle": 0.000,
+            "Eponine": 0.011,
+            "Anzelma": 0.000,
+            "Woman2": 0.000,
+            "MotherInnocent": 0.000,
+            "Gribier": 0.000,
+            "MmeBurgon": 0.026,
+            "Jondrette": 0.000,
+            "Gavroche": 0.165,
+            "Gillenormand": 0.020,
+            "Magnon": 0.000,
+            "MlleGillenormand": 0.048,
+            "MmePontmercy": 0.000,
+            "MlleVaubois": 0.000,
+            "LtGillenormand": 0.000,
+            "Marius": 0.132,
+            "BaronessT": 0.000,
+            "Mabeuf": 0.028,
+            "Enjolras": 0.043,
+            "Combeferre": 0.001,
+            "Prouvaire": 0.000,
+            "Feuilly": 0.001,
+            "Courfeyrac": 0.005,
+            "Bahorel": 0.002,
+            "Bossuet": 0.031,
+            "Joly": 0.002,
+            "Grantaire": 0.000,
+            "MotherPlutarch": 0.000,
+            "Gueulemer": 0.005,
+            "Babet": 0.005,
+            "Claquesous": 0.005,
+            "Montparnasse": 0.004,
+            "Toussaint": 0.000,
+            "Child1": 0.000,
+            "Child2": 0.000,
+            "Brujon": 0.000,
+            "MmeHucheloup": 0.000,
+        }
+
+        b = nx.betweenness_centrality(G, weight=None, normalized=True)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-3)
+
+    def test_ladder_graph(self):
+        """Betweenness centrality: Ladder graph"""
+        G = nx.Graph()  # ladder_graph(3)
+        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])
+        b_answer = {0: 1.667, 1: 1.667, 2: 6.667, 3: 6.667, 4: 1.667, 5: 1.667}
+        for b in b_answer:
+            b_answer[b] /= 2
+        b = nx.betweenness_centrality(G, weight=None, normalized=False)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-3)
+
+    def test_disconnected_path(self):
+        """Betweenness centrality: disconnected path"""
+        G = nx.Graph()
+        nx.add_path(G, [0, 1, 2])
+        nx.add_path(G, [3, 4, 5, 6])
+        b_answer = {0: 0, 1: 1, 2: 0, 3: 0, 4: 2, 5: 2, 6: 0}
+        b = nx.betweenness_centrality(G, weight=None, normalized=False)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_disconnected_path_endpoints(self):
+        """Betweenness centrality: disconnected path endpoints"""
+        G = nx.Graph()
+        nx.add_path(G, [0, 1, 2])
+        nx.add_path(G, [3, 4, 5, 6])
+        b_answer = {0: 2, 1: 3, 2: 2, 3: 3, 4: 5, 5: 5, 6: 3}
+        b = nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        # normalized = True case
+        b = nx.betweenness_centrality(G, weight=None, normalized=True, endpoints=True)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n] / 21, abs=1e-7)
+
+    def test_directed_path(self):
+        """Betweenness centrality: directed path"""
+        G = nx.DiGraph()
+        nx.add_path(G, [0, 1, 2])
+        b = nx.betweenness_centrality(G, weight=None, normalized=False)
+        b_answer = {0: 0.0, 1: 1.0, 2: 0.0}
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_directed_path_normalized(self):
+        """Betweenness centrality: directed path normalized"""
+        G = nx.DiGraph()
+        nx.add_path(G, [0, 1, 2])
+        b = nx.betweenness_centrality(G, weight=None, normalized=True)
+        b_answer = {0: 0.0, 1: 0.5, 2: 0.0}
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+
class TestWeightedBetweennessCentrality:
    """Betweenness centrality computed over weighted shortest paths."""

    def _check(self, G, expected, tol, *, normalized):
        # Shared assertion: the weighted betweenness of every node of G
        # must match *expected* to within absolute tolerance *tol*.
        computed = nx.betweenness_centrality(
            G, weight="weight", normalized=normalized
        )
        for node in sorted(G):
            assert computed[node] == pytest.approx(expected[node], abs=tol)

    def test_K5(self):
        """Weighted betweenness centrality: K5"""
        self._check(
            nx.complete_graph(5), dict.fromkeys(range(5), 0.0), 1e-7, normalized=False
        )

    def test_P3_normalized(self):
        """Weighted betweenness centrality: P3 normalized"""
        self._check(nx.path_graph(3), {0: 0.0, 1: 1.0, 2: 0.0}, 1e-7, normalized=True)

    def test_P3(self):
        """Weighted betweenness centrality: P3"""
        self._check(nx.path_graph(3), {0: 0.0, 1: 1.0, 2: 0.0}, 1e-7, normalized=False)

    def test_krackhardt_kite_graph(self):
        """Weighted betweenness centrality: Krackhardt kite graph"""
        raw = {0: 1.667, 1: 1.667, 2: 0.000, 3: 7.333, 4: 0.000,
               5: 16.667, 6: 16.667, 7: 28.000, 8: 16.000, 9: 0.000}
        # Undirected graph: halve the raw pair-path counts.
        expected = {node: value / 2 for node, value in raw.items()}
        self._check(nx.krackhardt_kite_graph(), expected, 1e-3, normalized=False)

    def test_krackhardt_kite_graph_normalized(self):
        """Weighted betweenness centrality:
        Krackhardt kite graph normalized
        """
        expected = {0: 0.023, 1: 0.023, 2: 0.000, 3: 0.102, 4: 0.000,
                    5: 0.231, 6: 0.231, 7: 0.389, 8: 0.222, 9: 0.000}
        self._check(nx.krackhardt_kite_graph(), expected, 1e-3, normalized=True)

    def test_florentine_families_graph(self):
        """Weighted betweenness centrality:
        Florentine families graph"""
        expected = {
            "Acciaiuoli": 0.000, "Albizzi": 0.212, "Barbadori": 0.093,
            "Bischeri": 0.104, "Castellani": 0.055, "Ginori": 0.000,
            "Guadagni": 0.255, "Lamberteschi": 0.000, "Medici": 0.522,
            "Pazzi": 0.000, "Peruzzi": 0.022, "Ridolfi": 0.114,
            "Salviati": 0.143, "Strozzi": 0.103, "Tornabuoni": 0.092,
        }
        self._check(nx.florentine_families_graph(), expected, 1e-3, normalized=True)

    def test_les_miserables_graph(self):
        """Weighted betweenness centrality: Les Miserables graph"""
        expected = {
            "Napoleon": 0.000, "Myriel": 0.177, "MlleBaptistine": 0.000,
            "MmeMagloire": 0.000, "CountessDeLo": 0.000, "Geborand": 0.000,
            "Champtercier": 0.000, "Cravatte": 0.000, "Count": 0.000,
            "OldMan": 0.000, "Valjean": 0.454, "Labarre": 0.000,
            "Marguerite": 0.009, "MmeDeR": 0.000, "Isabeau": 0.000,
            "Gervais": 0.000, "Listolier": 0.000, "Tholomyes": 0.066,
            "Fameuil": 0.000, "Blacheville": 0.000, "Favourite": 0.000,
            "Dahlia": 0.000, "Zephine": 0.000, "Fantine": 0.114,
            "MmeThenardier": 0.046, "Thenardier": 0.129, "Cosette": 0.075,
            "Javert": 0.193, "Fauchelevent": 0.026, "Bamatabois": 0.080,
            "Perpetue": 0.000, "Simplice": 0.001, "Scaufflaire": 0.000,
            "Woman1": 0.000, "Judge": 0.000, "Champmathieu": 0.000,
            "Brevet": 0.000, "Chenildieu": 0.000, "Cochepaille": 0.000,
            "Pontmercy": 0.023, "Boulatruelle": 0.000, "Eponine": 0.023,
            "Anzelma": 0.000, "Woman2": 0.000, "MotherInnocent": 0.000,
            "Gribier": 0.000, "MmeBurgon": 0.026, "Jondrette": 0.000,
            "Gavroche": 0.285, "Gillenormand": 0.024, "Magnon": 0.005,
            "MlleGillenormand": 0.036, "MmePontmercy": 0.005,
            "MlleVaubois": 0.000, "LtGillenormand": 0.015, "Marius": 0.072,
            "BaronessT": 0.004, "Mabeuf": 0.089, "Enjolras": 0.003,
            "Combeferre": 0.000, "Prouvaire": 0.000, "Feuilly": 0.004,
            "Courfeyrac": 0.001, "Bahorel": 0.007, "Bossuet": 0.028,
            "Joly": 0.000, "Grantaire": 0.036, "MotherPlutarch": 0.000,
            "Gueulemer": 0.025, "Babet": 0.015, "Claquesous": 0.042,
            "Montparnasse": 0.050, "Toussaint": 0.011, "Child1": 0.000,
            "Child2": 0.000, "Brujon": 0.002, "MmeHucheloup": 0.034,
        }
        self._check(nx.les_miserables_graph(), expected, 1e-3, normalized=True)

    def test_ladder_graph(self):
        """Weighted betweenness centrality: Ladder graph"""
        G = nx.Graph()  # ladder_graph(3)
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])
        raw = {0: 1.667, 1: 1.667, 2: 6.667, 3: 6.667, 4: 1.667, 5: 1.667}
        expected = {node: value / 2 for node, value in raw.items()}
        self._check(G, expected, 1e-3, normalized=False)

    def test_G(self):
        """Weighted betweenness centrality: G"""
        self._check(
            weighted_G(),
            {0: 2.0, 1: 0.0, 2: 4.0, 3: 3.0, 4: 4.0, 5: 0.0},
            1e-7,
            normalized=False,
        )

    def test_G2(self):
        """Weighted betweenness centrality: G2"""
        G = nx.DiGraph()
        G.add_weighted_edges_from([
            ("s", "u", 10), ("s", "x", 5), ("u", "v", 1), ("u", "x", 2),
            ("v", "y", 1), ("x", "u", 3), ("x", "v", 5), ("x", "y", 2),
            ("y", "s", 7), ("y", "v", 6),
        ])
        expected = {"y": 5.0, "x": 5.0, "s": 4.0, "u": 2.0, "v": 2.0}
        self._check(G, expected, 1e-7, normalized=False)

    def test_G3(self):
        """Weighted betweenness centrality: G3"""
        G = nx.MultiGraph(weighted_G())
        duplicates = list(G.edges(data=True))[::2]  # duplicate every other edge
        G.add_edges_from(duplicates)
        self._check(
            G, {0: 2.0, 1: 0.0, 2: 4.0, 3: 3.0, 4: 4.0, 5: 0.0}, 1e-7, normalized=False
        )

    def test_G4(self):
        """Weighted betweenness centrality: G4"""
        G = nx.MultiDiGraph()
        G.add_weighted_edges_from([
            ("s", "u", 10), ("s", "x", 5), ("s", "x", 6), ("u", "v", 1),
            ("u", "x", 2), ("v", "y", 1), ("v", "y", 1), ("x", "u", 3),
            ("x", "v", 5), ("x", "y", 2), ("x", "y", 3), ("y", "s", 7),
            ("y", "v", 6), ("y", "v", 6),
        ])
        expected = {"y": 5.0, "x": 5.0, "s": 4.0, "u": 2.0, "v": 2.0}
        self._check(G, expected, 1e-7, normalized=False)
+
+
class TestEdgeBetweennessCentrality:
    """Edge betweenness centrality on unweighted graphs."""

    @staticmethod
    def _compare(G, computed, expected):
        # Every edge's centrality must equal its expected value.
        for edge in sorted(G.edges()):
            assert computed[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_K5(self):
        """Edge betweenness centrality: K5"""
        G = nx.complete_graph(5)
        computed = nx.edge_betweenness_centrality(G, weight=None, normalized=False)
        self._compare(G, computed, dict.fromkeys(G.edges(), 1))

    def test_normalized_K5(self):
        """Edge betweenness centrality: K5"""
        G = nx.complete_graph(5)
        computed = nx.edge_betweenness_centrality(G, weight=None, normalized=True)
        self._compare(G, computed, dict.fromkeys(G.edges(), 1 / 10))

    def test_C4(self):
        """Edge betweenness centrality: C4"""
        G = nx.cycle_graph(4)
        computed = nx.edge_betweenness_centrality(G, weight=None, normalized=True)
        # Raw count 2 per edge, normalized by n*(n-1)/2 = 6.
        expected = {edge: 2 / 6 for edge in [(0, 1), (0, 3), (1, 2), (2, 3)]}
        self._compare(G, computed, expected)

    def test_P4(self):
        """Edge betweenness centrality: P4"""
        G = nx.path_graph(4)
        computed = nx.edge_betweenness_centrality(G, weight=None, normalized=False)
        self._compare(G, computed, {(0, 1): 3, (1, 2): 4, (2, 3): 3})

    def test_normalized_P4(self):
        """Edge betweenness centrality: P4"""
        G = nx.path_graph(4)
        computed = nx.edge_betweenness_centrality(G, weight=None, normalized=True)
        expected = {(0, 1): 3 / 6, (1, 2): 4 / 6, (2, 3): 3 / 6}
        self._compare(G, computed, expected)

    def test_balanced_tree(self):
        """Edge betweenness centrality: balanced tree"""
        G = nx.balanced_tree(r=2, h=2)
        computed = nx.edge_betweenness_centrality(G, weight=None, normalized=False)
        expected = {(0, 1): 12, (0, 2): 12, (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6}
        self._compare(G, computed, expected)
+
+
class TestWeightedEdgeBetweennessCentrality:
    """Edge betweenness centrality over weighted (Dijkstra) shortest paths."""

    # Weighted edge list shared by the simple-graph tests.
    _EDGES = [
        (0, 1, 5), (0, 2, 4), (0, 3, 3), (0, 4, 2), (1, 2, 4),
        (1, 3, 1), (1, 4, 3), (2, 4, 5), (3, 4, 4),
    ]
    # Weighted edge list shared by the multigraph tests (parallel edges).
    _MULTI_EDGES = [
        (0, 1, 5), (0, 1, 4), (0, 2, 4), (0, 3, 3), (0, 3, 3), (0, 4, 2),
        (1, 2, 4), (1, 3, 1), (1, 3, 2), (1, 4, 3), (1, 4, 4), (2, 4, 5),
        (3, 4, 4), (3, 4, 4),
    ]
    # Unnormalized expected centralities for the graphs above.
    _EXPECTED = {
        (0, 1): 0.0, (0, 2): 1.0, (0, 3): 2.0, (0, 4): 1.0, (1, 2): 2.0,
        (1, 3): 3.5, (1, 4): 1.5, (2, 4): 1.0, (3, 4): 0.5,
    }
    _MULTI_EXPECTED = {
        (0, 1, 0): 0.0, (0, 1, 1): 0.5, (0, 2, 0): 1.0, (0, 3, 0): 0.75,
        (0, 3, 1): 0.75, (0, 4, 0): 1.0, (1, 2, 0): 2.0, (1, 3, 0): 3.0,
        (1, 3, 1): 0.0, (1, 4, 0): 1.5, (1, 4, 1): 0.0, (2, 4, 0): 1.0,
        (3, 4, 0): 0.25, (3, 4, 1): 0.25,
    }

    @staticmethod
    def _compare(edges, computed, expected, scale=1):
        # Each edge centrality must equal expected[edge] / scale.
        for edge in sorted(edges):
            assert computed[edge] == pytest.approx(expected[edge] / scale, abs=1e-7)

    def test_K5(self):
        """Edge betweenness centrality: K5"""
        G = nx.complete_graph(5)
        computed = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
        self._compare(G.edges(), computed, dict.fromkeys(G.edges(), 1))

    def test_C4(self):
        """Edge betweenness centrality: C4"""
        G = nx.cycle_graph(4)
        computed = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
        self._compare(
            G.edges(), computed, {(0, 1): 2, (0, 3): 2, (1, 2): 2, (2, 3): 2}
        )

    def test_P4(self):
        """Edge betweenness centrality: P4"""
        G = nx.path_graph(4)
        computed = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
        self._compare(G.edges(), computed, {(0, 1): 3, (1, 2): 4, (2, 3): 3})

    def test_balanced_tree(self):
        """Edge betweenness centrality: balanced tree"""
        G = nx.balanced_tree(r=2, h=2)
        computed = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
        expected = {(0, 1): 12, (0, 2): 12, (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6}
        self._compare(G.edges(), computed, expected)

    def test_weighted_graph(self):
        """Edge betweenness centrality: weighted"""
        G = nx.Graph()
        G.add_weighted_edges_from(self._EDGES)
        computed = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
        self._compare(G.edges(), computed, self._EXPECTED)

    def test_normalized_weighted_graph(self):
        """Edge betweenness centrality: normalized weighted"""
        G = nx.Graph()
        G.add_weighted_edges_from(self._EDGES)
        computed = nx.edge_betweenness_centrality(G, weight="weight", normalized=True)
        # Undirected normalization constant: n*(n-1)/2.
        norm = len(G) * (len(G) - 1) / 2
        self._compare(G.edges(), computed, self._EXPECTED, scale=norm)

    def test_weighted_multigraph(self):
        """Edge betweenness centrality: weighted multigraph"""
        G = nx.MultiGraph()
        G.add_weighted_edges_from(self._MULTI_EDGES)
        computed = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
        self._compare(G.edges(keys=True), computed, self._MULTI_EXPECTED)

    def test_normalized_weighted_multigraph(self):
        """Edge betweenness centrality: normalized weighted multigraph"""
        G = nx.MultiGraph()
        G.add_weighted_edges_from(self._MULTI_EDGES)
        computed = nx.edge_betweenness_centrality(G, weight="weight", normalized=True)
        norm = len(G) * (len(G) - 1) / 2
        self._compare(G.edges(keys=True), computed, self._MULTI_EXPECTED, scale=norm)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py
new file mode 100644
index 00000000..a35a401a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py
@@ -0,0 +1,340 @@
+import pytest
+
+import networkx as nx
+
+
class TestSubsetBetweennessCentrality:
    """Betweenness centrality restricted to given ``sources``/``targets``.

    Fix: the P5 expected-value dicts previously listed a node ``5`` that
    does not exist in those graphs (``range(5)`` creates nodes 0-4); the
    entries were never read by the assertions and are removed so the
    answers match the actual node sets.
    """

    def test_K5(self):
        """Betweenness Centrality Subset: K5"""
        G = nx.complete_graph(5)
        b = nx.betweenness_centrality_subset(
            G, sources=[0], targets=[1, 3], weight=None
        )
        # Every pair in K5 is adjacent, so no node is interior to a path.
        b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0}
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_P5_directed(self):
        """Betweenness Centrality Subset: P5 directed"""
        G = nx.DiGraph()
        nx.add_path(G, range(5))
        # Nodes are 0..4; only 1 and 2 lie strictly between 0 and 3.
        b_answer = {0: 0, 1: 1, 2: 1, 3: 0, 4: 0}
        b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None)
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_P5(self):
        """Betweenness Centrality Subset: P5"""
        G = nx.Graph()
        nx.add_path(G, range(5))
        # Undirected subset variant credits half of the directed count.
        b_answer = {0: 0, 1: 0.5, 2: 0.5, 3: 0, 4: 0}
        b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None)
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_P5_multiple_target(self):
        """Betweenness Centrality Subset: P5 multiple target"""
        G = nx.Graph()
        nx.add_path(G, range(5))
        b_answer = {0: 0, 1: 1, 2: 1, 3: 0.5, 4: 0}
        b = nx.betweenness_centrality_subset(
            G, sources=[0], targets=[3, 4], weight=None
        )
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_box(self):
        """Betweenness Centrality Subset: box"""
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
        # Two equal-length 0->3 paths split the credit between 1 and 2.
        b_answer = {0: 0, 1: 0.25, 2: 0.25, 3: 0}
        b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None)
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_box_and_path(self):
        """Betweenness Centrality Subset: box and path"""
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (3, 4), (4, 5)])
        b_answer = {0: 0, 1: 0.5, 2: 0.5, 3: 0.5, 4: 0, 5: 0}
        b = nx.betweenness_centrality_subset(
            G, sources=[0], targets=[3, 4], weight=None
        )
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_box_and_path2(self):
        """Betweenness Centrality Subset: box and path multiple target"""
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2), (2, 3), (1, 20), (20, 3), (3, 4)])
        b_answer = {0: 0, 1: 1.0, 2: 0.5, 20: 0.5, 3: 0.5, 4: 0}
        b = nx.betweenness_centrality_subset(
            G, sources=[0], targets=[3, 4], weight=None
        )
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_diamond_multi_path(self):
        """Betweenness Centrality Subset: Diamond Multi Path"""
        G = nx.Graph()
        G.add_edges_from(
            [
                (1, 2),
                (1, 3),
                (1, 4),
                (1, 5),
                (1, 10),
                (10, 11),
                (11, 12),
                (12, 9),
                (2, 6),
                (3, 6),
                (4, 6),
                (5, 7),
                (7, 8),
                (6, 8),
                (8, 9),
            ]
        )
        b = nx.betweenness_centrality_subset(G, sources=[1], targets=[9], weight=None)

        # Five shortest 1->9 paths; each interior node gets its share of
        # the path count over 2*|paths| = 10.
        expected_b = {
            1: 0,
            2: 1.0 / 10,
            3: 1.0 / 10,
            4: 1.0 / 10,
            5: 1.0 / 10,
            6: 3.0 / 10,
            7: 1.0 / 10,
            8: 4.0 / 10,
            9: 0,
            10: 1.0 / 10,
            11: 1.0 / 10,
            12: 1.0 / 10,
        }

        for n in sorted(G):
            assert b[n] == pytest.approx(expected_b[n], abs=1e-7)

    def test_normalized_p2(self):
        """
        Betweenness Centrality Subset: Normalized P2
        if n <= 2:  no normalization, betweenness centrality should be 0 for all nodes.
        """
        G = nx.Graph()
        nx.add_path(G, range(2))
        b_answer = {0: 0, 1: 0.0}
        b = nx.betweenness_centrality_subset(
            G, sources=[0], targets=[1], normalized=True, weight=None
        )
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_normalized_P5_directed(self):
        """Betweenness Centrality Subset: Normalized Directed P5"""
        G = nx.DiGraph()
        nx.add_path(G, range(5))
        # Directed normalization divides by (n-1)*(n-2) = 12 for n=5.
        b_answer = {0: 0, 1: 1.0 / 12.0, 2: 1.0 / 12.0, 3: 0, 4: 0}
        b = nx.betweenness_centrality_subset(
            G, sources=[0], targets=[3], normalized=True, weight=None
        )
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_weighted_graph(self):
        """Betweenness Centrality Subset: Weighted Graph"""
        G = nx.DiGraph()
        G.add_edge(0, 1, weight=3)
        G.add_edge(0, 2, weight=2)
        G.add_edge(0, 3, weight=6)
        G.add_edge(0, 4, weight=4)
        G.add_edge(1, 3, weight=5)
        G.add_edge(1, 5, weight=5)
        G.add_edge(2, 4, weight=1)
        G.add_edge(3, 4, weight=2)
        G.add_edge(3, 5, weight=1)
        G.add_edge(4, 5, weight=4)
        b_answer = {0: 0.0, 1: 0.0, 2: 0.5, 3: 0.5, 4: 0.5, 5: 0.0}
        b = nx.betweenness_centrality_subset(
            G, sources=[0], targets=[5], normalized=False, weight="weight"
        )
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+
class TestEdgeSubsetBetweennessCentrality:
    """Edge betweenness centrality restricted to source/target subsets."""

    def test_K5(self):
        """Edge betweenness subset centrality: K5"""
        G = nx.complete_graph(5)
        computed = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[1, 3], weight=None
        )
        expected = {**dict.fromkeys(G.edges(), 0), (0, 1): 0.5, (0, 3): 0.5}
        for edge in sorted(G.edges()):
            assert computed[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_P5_directed(self):
        """Edge betweenness subset centrality: P5 directed"""
        G = nx.DiGraph()
        nx.add_path(G, range(5))
        expected = dict.fromkeys(G.edges(), 0)
        expected.update(dict.fromkeys([(0, 1), (1, 2), (2, 3)], 1))
        computed = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[3], weight=None
        )
        for edge in sorted(G.edges()):
            assert computed[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_P5(self):
        """Edge betweenness subset centrality: P5"""
        G = nx.Graph()
        nx.add_path(G, range(5))
        expected = dict.fromkeys(G.edges(), 0)
        expected.update(dict.fromkeys([(0, 1), (1, 2), (2, 3)], 0.5))
        computed = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[3], weight=None
        )
        for edge in sorted(G.edges()):
            assert computed[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_P5_multiple_target(self):
        """Edge betweenness subset centrality: P5 multiple target"""
        G = nx.Graph()
        nx.add_path(G, range(5))
        expected = dict.fromkeys(G.edges(), 0)
        expected.update(dict.fromkeys([(0, 1), (1, 2), (2, 3)], 1))
        expected[(3, 4)] = 0.5
        computed = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[3, 4], weight=None
        )
        for edge in sorted(G.edges()):
            assert computed[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_box(self):
        """Edge betweenness subset centrality: box"""
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
        expected = dict.fromkeys(G.edges(), 0)
        expected.update(dict.fromkeys([(0, 1), (0, 2), (1, 3), (2, 3)], 0.25))
        computed = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[3], weight=None
        )
        for edge in sorted(G.edges()):
            assert computed[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_box_and_path(self):
        """Edge betweenness subset centrality: box and path"""
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (3, 4), (4, 5)])
        expected = dict.fromkeys(G.edges(), 0)
        expected.update(
            dict.fromkeys([(0, 1), (0, 2), (1, 3), (2, 3), (3, 4)], 0.5)
        )
        computed = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[3, 4], weight=None
        )
        for edge in sorted(G.edges()):
            assert computed[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_box_and_path2(self):
        """Edge betweenness subset centrality: box and path multiple target"""
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2), (2, 3), (1, 20), (20, 3), (3, 4)])
        expected = dict.fromkeys(G.edges(), 0)
        expected[(0, 1)] = 1.0
        expected.update(
            dict.fromkeys([(1, 20), (3, 20), (1, 2), (2, 3), (3, 4)], 0.5)
        )
        computed = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[3, 4], weight=None
        )
        for edge in sorted(G.edges()):
            assert computed[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_diamond_multi_path(self):
        """Edge betweenness subset centrality: Diamond Multi Path"""
        G = nx.Graph()
        G.add_edges_from(
            [
                (1, 2), (1, 3), (1, 4), (1, 5), (1, 10),
                (10, 11), (11, 12), (12, 9),
                (2, 6), (3, 6), (4, 6),
                (5, 7), (7, 8), (6, 8), (8, 9),
            ]
        )
        # Key the expected table by sorted node pairs, since Graph edge
        # orientation depends on insertion order.
        expected = {tuple(sorted(edge)): 0 for edge in G.edges()}
        expected.update({(8, 9): 0.4, (6, 8): 0.2, (7, 8): 0.2,
                         (5, 7): 0.2, (1, 5): 0.2})
        third = 0.2 / 3.0
        expected.update(dict.fromkeys(
            [(2, 6), (3, 6), (4, 6), (1, 2), (1, 3), (1, 4)], third
        ))
        expected.update(dict.fromkeys(
            [(9, 12), (11, 12), (10, 11), (1, 10)], 0.1
        ))
        computed = nx.edge_betweenness_centrality_subset(
            G, sources=[1], targets=[9], weight=None
        )
        for edge in G.edges():
            assert computed[edge] == pytest.approx(
                expected[tuple(sorted(edge))], abs=1e-7
            )

    def test_normalized_p1(self):
        """
        Edge betweenness subset centrality: P1
        if n <= 1: no normalization b=0 for all nodes
        """
        G = nx.Graph()
        nx.add_path(G, range(1))  # single node, no edges
        expected = dict.fromkeys(G.edges(), 0)
        computed = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[0], normalized=True, weight=None
        )
        # No edges exist, so the loop is vacuous; the call must not raise.
        for edge in G.edges():
            assert computed[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_normalized_P5_directed(self):
        """Edge betweenness subset centrality: Normalized Directed P5"""
        G = nx.DiGraph()
        nx.add_path(G, range(5))
        expected = dict.fromkeys(G.edges(), 0)
        expected.update(dict.fromkeys([(0, 1), (1, 2), (2, 3)], 0.05))
        computed = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[3], normalized=True, weight=None
        )
        for edge in G.edges():
            assert computed[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_weighted_graph(self):
        """Edge betweenness subset centrality: Weighted Graph"""
        G = nx.DiGraph()
        G.add_weighted_edges_from([
            (0, 1, 3), (0, 2, 2), (0, 3, 6), (0, 4, 4), (1, 3, 5),
            (1, 5, 5), (2, 4, 1), (3, 4, 2), (3, 5, 1), (4, 5, 4),
        ])
        expected = dict.fromkeys(G.edges(), 0)
        expected.update(
            dict.fromkeys([(0, 2), (2, 4), (4, 5), (0, 3), (3, 5)], 0.5)
        )
        computed = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[5], normalized=False, weight="weight"
        )
        for edge in G.edges():
            assert computed[edge] == pytest.approx(expected[edge], abs=1e-7)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_closeness_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_closeness_centrality.py
new file mode 100644
index 00000000..7bdb7e7c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_closeness_centrality.py
@@ -0,0 +1,307 @@
+"""
+Tests for closeness centrality.
+"""
+
+import pytest
+
+import networkx as nx
+
+
+class TestClosenessCentrality:
+    @classmethod
+    def setup_class(cls):
+        cls.K = nx.krackhardt_kite_graph()
+        cls.P3 = nx.path_graph(3)
+        cls.P4 = nx.path_graph(4)
+        cls.K5 = nx.complete_graph(5)
+
+        cls.C4 = nx.cycle_graph(4)
+        cls.T = nx.balanced_tree(r=2, h=2)
+        cls.Gb = nx.Graph()
+        cls.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])
+
+        F = nx.florentine_families_graph()
+        cls.F = F
+
+        cls.LM = nx.les_miserables_graph()
+
+        # Create random undirected, unweighted graph for testing incremental version
+        cls.undirected_G = nx.fast_gnp_random_graph(n=100, p=0.6, seed=123)
+        cls.undirected_G_cc = nx.closeness_centrality(cls.undirected_G)
+
+    def test_wf_improved(self):
+        G = nx.union(self.P4, nx.path_graph([4, 5, 6]))
+        c = nx.closeness_centrality(G)
+        cwf = nx.closeness_centrality(G, wf_improved=False)
+        res = {0: 0.25, 1: 0.375, 2: 0.375, 3: 0.25, 4: 0.222, 5: 0.333, 6: 0.222}
+        wf_res = {0: 0.5, 1: 0.75, 2: 0.75, 3: 0.5, 4: 0.667, 5: 1.0, 6: 0.667}
+        for n in G:
+            assert c[n] == pytest.approx(res[n], abs=1e-3)
+            assert cwf[n] == pytest.approx(wf_res[n], abs=1e-3)
+
+    def test_digraph(self):
+        G = nx.path_graph(3, create_using=nx.DiGraph())
+        c = nx.closeness_centrality(G)
+        cr = nx.closeness_centrality(G.reverse())
+        d = {0: 0.0, 1: 0.500, 2: 0.667}
+        dr = {0: 0.667, 1: 0.500, 2: 0.0}
+        for n in sorted(self.P3):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+            assert cr[n] == pytest.approx(dr[n], abs=1e-3)
+
+    def test_k5_closeness(self):
+        c = nx.closeness_centrality(self.K5)
+        d = {0: 1.000, 1: 1.000, 2: 1.000, 3: 1.000, 4: 1.000}
+        for n in sorted(self.K5):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_p3_closeness(self):
+        c = nx.closeness_centrality(self.P3)
+        d = {0: 0.667, 1: 1.000, 2: 0.667}
+        for n in sorted(self.P3):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_krackhardt_closeness(self):
+        c = nx.closeness_centrality(self.K)
+        d = {
+            0: 0.529,
+            1: 0.529,
+            2: 0.500,
+            3: 0.600,
+            4: 0.500,
+            5: 0.643,
+            6: 0.643,
+            7: 0.600,
+            8: 0.429,
+            9: 0.310,
+        }
+        for n in sorted(self.K):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_florentine_families_closeness(self):
+        c = nx.closeness_centrality(self.F)
+        d = {
+            "Acciaiuoli": 0.368,
+            "Albizzi": 0.483,
+            "Barbadori": 0.4375,
+            "Bischeri": 0.400,
+            "Castellani": 0.389,
+            "Ginori": 0.333,
+            "Guadagni": 0.467,
+            "Lamberteschi": 0.326,
+            "Medici": 0.560,
+            "Pazzi": 0.286,
+            "Peruzzi": 0.368,
+            "Ridolfi": 0.500,
+            "Salviati": 0.389,
+            "Strozzi": 0.4375,
+            "Tornabuoni": 0.483,
+        }
+        for n in sorted(self.F):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_les_miserables_closeness(self):
+        c = nx.closeness_centrality(self.LM)
+        d = {
+            "Napoleon": 0.302,
+            "Myriel": 0.429,
+            "MlleBaptistine": 0.413,
+            "MmeMagloire": 0.413,
+            "CountessDeLo": 0.302,
+            "Geborand": 0.302,
+            "Champtercier": 0.302,
+            "Cravatte": 0.302,
+            "Count": 0.302,
+            "OldMan": 0.302,
+            "Valjean": 0.644,
+            "Labarre": 0.394,
+            "Marguerite": 0.413,
+            "MmeDeR": 0.394,
+            "Isabeau": 0.394,
+            "Gervais": 0.394,
+            "Listolier": 0.341,
+            "Tholomyes": 0.392,
+            "Fameuil": 0.341,
+            "Blacheville": 0.341,
+            "Favourite": 0.341,
+            "Dahlia": 0.341,
+            "Zephine": 0.341,
+            "Fantine": 0.461,
+            "MmeThenardier": 0.461,
+            "Thenardier": 0.517,
+            "Cosette": 0.478,
+            "Javert": 0.517,
+            "Fauchelevent": 0.402,
+            "Bamatabois": 0.427,
+            "Perpetue": 0.318,
+            "Simplice": 0.418,
+            "Scaufflaire": 0.394,
+            "Woman1": 0.396,
+            "Judge": 0.404,
+            "Champmathieu": 0.404,
+            "Brevet": 0.404,
+            "Chenildieu": 0.404,
+            "Cochepaille": 0.404,
+            "Pontmercy": 0.373,
+            "Boulatruelle": 0.342,
+            "Eponine": 0.396,
+            "Anzelma": 0.352,
+            "Woman2": 0.402,
+            "MotherInnocent": 0.398,
+            "Gribier": 0.288,
+            "MmeBurgon": 0.344,
+            "Jondrette": 0.257,
+            "Gavroche": 0.514,
+            "Gillenormand": 0.442,
+            "Magnon": 0.335,
+            "MlleGillenormand": 0.442,
+            "MmePontmercy": 0.315,
+            "MlleVaubois": 0.308,
+            "LtGillenormand": 0.365,
+            "Marius": 0.531,
+            "BaronessT": 0.352,
+            "Mabeuf": 0.396,
+            "Enjolras": 0.481,
+            "Combeferre": 0.392,
+            "Prouvaire": 0.357,
+            "Feuilly": 0.392,
+            "Courfeyrac": 0.400,
+            "Bahorel": 0.394,
+            "Bossuet": 0.475,
+            "Joly": 0.394,
+            "Grantaire": 0.358,
+            "MotherPlutarch": 0.285,
+            "Gueulemer": 0.463,
+            "Babet": 0.463,
+            "Claquesous": 0.452,
+            "Montparnasse": 0.458,
+            "Toussaint": 0.402,
+            "Child1": 0.342,
+            "Child2": 0.342,
+            "Brujon": 0.380,
+            "MmeHucheloup": 0.353,
+        }
+        for n in sorted(self.LM):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_weighted_closeness(self):
+        edges = [
+            ("s", "u", 10),
+            ("s", "x", 5),
+            ("u", "v", 1),
+            ("u", "x", 2),
+            ("v", "y", 1),
+            ("x", "u", 3),
+            ("x", "v", 5),
+            ("x", "y", 2),
+            ("y", "s", 7),
+            ("y", "v", 6),
+        ]
+        XG = nx.Graph()
+        XG.add_weighted_edges_from(edges)
+        c = nx.closeness_centrality(XG, distance="weight")
+        d = {"y": 0.200, "x": 0.286, "s": 0.138, "u": 0.235, "v": 0.200}
+        for n in sorted(XG):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    #
+    # Tests for incremental closeness centrality.
+    #
+    @staticmethod
+    def pick_add_edge(g):
+        u = nx.utils.arbitrary_element(g)
+        possible_nodes = set(g.nodes())
+        neighbors = list(g.neighbors(u)) + [u]
+        possible_nodes.difference_update(neighbors)
+        v = nx.utils.arbitrary_element(possible_nodes)
+        return (u, v)
+
+    @staticmethod
+    def pick_remove_edge(g):
+        u = nx.utils.arbitrary_element(g)
+        possible_nodes = list(g.neighbors(u))
+        v = nx.utils.arbitrary_element(possible_nodes)
+        return (u, v)
+
+    def test_directed_raises(self):
+        with pytest.raises(nx.NetworkXNotImplemented):
+            dir_G = nx.gn_graph(n=5)
+            prev_cc = None
+            edge = self.pick_add_edge(dir_G)
+            insert = True
+            nx.incremental_closeness_centrality(dir_G, edge, prev_cc, insert)
+
+    def test_wrong_size_prev_cc_raises(self):
+        with pytest.raises(nx.NetworkXError):
+            G = self.undirected_G.copy()
+            edge = self.pick_add_edge(G)
+            insert = True
+            prev_cc = self.undirected_G_cc.copy()
+            prev_cc.pop(0)
+            nx.incremental_closeness_centrality(G, edge, prev_cc, insert)
+
+    def test_wrong_nodes_prev_cc_raises(self):
+        with pytest.raises(nx.NetworkXError):
+            G = self.undirected_G.copy()
+            edge = self.pick_add_edge(G)
+            insert = True
+            prev_cc = self.undirected_G_cc.copy()
+            num_nodes = len(prev_cc)
+            prev_cc.pop(0)
+            prev_cc[num_nodes] = 0.5
+            nx.incremental_closeness_centrality(G, edge, prev_cc, insert)
+
+    def test_zero_centrality(self):
+        G = nx.path_graph(3)
+        prev_cc = nx.closeness_centrality(G)
+        edge = self.pick_remove_edge(G)
+        test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insertion=False)
+        G.remove_edges_from([edge])
+        real_cc = nx.closeness_centrality(G)
+        shared_items = set(test_cc.items()) & set(real_cc.items())
+        assert len(shared_items) == len(real_cc)
+        assert 0 in test_cc.values()
+
+    def test_incremental(self):
+        # Check that incremental and regular give same output
+        G = self.undirected_G.copy()
+        prev_cc = None
+        for i in range(5):
+            if i % 2 == 0:
+                # Remove an edge
+                insert = False
+                edge = self.pick_remove_edge(G)
+            else:
+                # Add an edge
+                insert = True
+                edge = self.pick_add_edge(G)
+
+            # start = timeit.default_timer()
+            test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insert)
+            # inc_elapsed = (timeit.default_timer() - start)
+            # print(f"incremental time: {inc_elapsed}")
+
+            if insert:
+                G.add_edges_from([edge])
+            else:
+                G.remove_edges_from([edge])
+
+            # start = timeit.default_timer()
+            real_cc = nx.closeness_centrality(G)
+            # reg_elapsed = (timeit.default_timer() - start)
+            # print(f"regular time: {reg_elapsed}")
+            # Example output:
+            # incremental time: 0.208
+            # regular time: 0.276
+            # incremental time: 0.00683
+            # regular time: 0.260
+            # incremental time: 0.0224
+            # regular time: 0.278
+            # incremental time: 0.00804
+            # regular time: 0.208
+            # incremental time: 0.00947
+            # regular time: 0.188
+
+            assert set(test_cc.items()) == set(real_cc.items())
+
+            prev_cc = test_cc
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py
new file mode 100644
index 00000000..4e3d4385
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py
@@ -0,0 +1,197 @@
+import pytest
+
+import networkx as nx
+from networkx import approximate_current_flow_betweenness_centrality as approximate_cfbc
+from networkx import edge_current_flow_betweenness_centrality as edge_current_flow
+
+np = pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+
+class TestFlowBetweennessCentrality:
+    def test_K4_normalized(self):
+        """Betweenness centrality: K4"""
+        G = nx.complete_graph(4)
+        b = nx.current_flow_betweenness_centrality(G, normalized=True)
+        b_answer = {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        G.add_edge(0, 1, weight=0.5, other=0.3)
+        b = nx.current_flow_betweenness_centrality(G, normalized=True, weight=None)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        wb_answer = {0: 0.2222222, 1: 0.2222222, 2: 0.30555555, 3: 0.30555555}
+        b = nx.current_flow_betweenness_centrality(G, normalized=True, weight="weight")
+        for n in sorted(G):
+            assert b[n] == pytest.approx(wb_answer[n], abs=1e-7)
+        wb_answer = {0: 0.2051282, 1: 0.2051282, 2: 0.33974358, 3: 0.33974358}
+        b = nx.current_flow_betweenness_centrality(G, normalized=True, weight="other")
+        for n in sorted(G):
+            assert b[n] == pytest.approx(wb_answer[n], abs=1e-7)
+
+    def test_K4(self):
+        """Betweenness centrality: K4"""
+        G = nx.complete_graph(4)
+        for solver in ["full", "lu", "cg"]:
+            b = nx.current_flow_betweenness_centrality(
+                G, normalized=False, solver=solver
+            )
+            b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
+            for n in sorted(G):
+                assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_P4_normalized(self):
+        """Betweenness centrality: P4 normalized"""
+        G = nx.path_graph(4)
+        b = nx.current_flow_betweenness_centrality(G, normalized=True)
+        b_answer = {0: 0, 1: 2.0 / 3, 2: 2.0 / 3, 3: 0}
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_P4(self):
+        """Betweenness centrality: P4"""
+        G = nx.path_graph(4)
+        b = nx.current_flow_betweenness_centrality(G, normalized=False)
+        b_answer = {0: 0, 1: 2, 2: 2, 3: 0}
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_star(self):
+        """Betweenness centrality: star"""
+        G = nx.Graph()
+        nx.add_star(G, ["a", "b", "c", "d"])
+        b = nx.current_flow_betweenness_centrality(G, normalized=True)
+        b_answer = {"a": 1.0, "b": 0.0, "c": 0.0, "d": 0.0}
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_solvers2(self):
+        """Betweenness centrality: alternate solvers"""
+        G = nx.complete_graph(4)
+        for solver in ["full", "lu", "cg"]:
+            b = nx.current_flow_betweenness_centrality(
+                G, normalized=False, solver=solver
+            )
+            b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
+            for n in sorted(G):
+                assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+
+class TestApproximateFlowBetweennessCentrality:
+    def test_K4_normalized(self):
+        "Approximate current-flow betweenness centrality: K4 normalized"
+        G = nx.complete_graph(4)
+        b = nx.current_flow_betweenness_centrality(G, normalized=True)
+        epsilon = 0.1
+        ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
+        for n in sorted(G):
+            np.testing.assert_allclose(b[n], ba[n], atol=epsilon)
+
+    def test_K4(self):
+        "Approximate current-flow betweenness centrality: K4"
+        G = nx.complete_graph(4)
+        b = nx.current_flow_betweenness_centrality(G, normalized=False)
+        epsilon = 0.1
+        ba = approximate_cfbc(G, normalized=False, epsilon=0.5 * epsilon)
+        for n in sorted(G):
+            np.testing.assert_allclose(b[n], ba[n], atol=epsilon * len(G) ** 2)
+
+    def test_star(self):
+        "Approximate current-flow betweenness centrality: star"
+        G = nx.Graph()
+        nx.add_star(G, ["a", "b", "c", "d"])
+        b = nx.current_flow_betweenness_centrality(G, normalized=True)
+        epsilon = 0.1
+        ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
+        for n in sorted(G):
+            np.testing.assert_allclose(b[n], ba[n], atol=epsilon)
+
+    def test_grid(self):
+        "Approximate current-flow betweenness centrality: 2d grid"
+        G = nx.grid_2d_graph(4, 4)
+        b = nx.current_flow_betweenness_centrality(G, normalized=True)
+        epsilon = 0.1
+        ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
+        for n in sorted(G):
+            np.testing.assert_allclose(b[n], ba[n], atol=epsilon)
+
+    def test_seed(self):
+        G = nx.complete_graph(4)
+        b = approximate_cfbc(G, normalized=False, epsilon=0.05, seed=1)
+        b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
+        for n in sorted(G):
+            np.testing.assert_allclose(b[n], b_answer[n], atol=0.1)
+
+    def test_solvers(self):
+        "Approximate current-flow betweenness centrality: solvers"
+        G = nx.complete_graph(4)
+        epsilon = 0.1
+        for solver in ["full", "lu", "cg"]:
+            b = approximate_cfbc(
+                G, normalized=False, solver=solver, epsilon=0.5 * epsilon
+            )
+            b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
+            for n in sorted(G):
+                np.testing.assert_allclose(b[n], b_answer[n], atol=epsilon)
+
+    def test_lower_kmax(self):
+        G = nx.complete_graph(4)
+        with pytest.raises(nx.NetworkXError, match="Increase kmax or epsilon"):
+            nx.approximate_current_flow_betweenness_centrality(G, kmax=4)
+
+
+class TestWeightedFlowBetweennessCentrality:
+    pass
+
+
+class TestEdgeFlowBetweennessCentrality:
+    def test_K4(self):
+        """Edge flow betweenness centrality: K4"""
+        G = nx.complete_graph(4)
+        b = edge_current_flow(G, normalized=True)
+        b_answer = dict.fromkeys(G.edges(), 0.25)
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
+
+    def test_K4_normalized(self):
+        """Edge flow betweenness centrality: K4"""
+        G = nx.complete_graph(4)
+        b = edge_current_flow(G, normalized=False)
+        b_answer = dict.fromkeys(G.edges(), 0.75)
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
+
+    def test_C4(self):
+        """Edge flow betweenness centrality: C4"""
+        G = nx.cycle_graph(4)
+        b = edge_current_flow(G, normalized=False)
+        b_answer = {(0, 1): 1.25, (0, 3): 1.25, (1, 2): 1.25, (2, 3): 1.25}
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
+
+    def test_P4(self):
+        """Edge betweenness centrality: P4"""
+        G = nx.path_graph(4)
+        b = edge_current_flow(G, normalized=False)
+        b_answer = {(0, 1): 1.5, (1, 2): 2.0, (2, 3): 1.5}
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
+
+
+@pytest.mark.parametrize(
+    "centrality_func",
+    (
+        nx.current_flow_betweenness_centrality,
+        nx.edge_current_flow_betweenness_centrality,
+        nx.approximate_current_flow_betweenness_centrality,
+    ),
+)
+def test_unconnected_graphs_betweenness_centrality(centrality_func):
+    G = nx.Graph([(1, 2), (3, 4)])
+    G.add_node(5)
+    with pytest.raises(nx.NetworkXError, match="Graph not connected"):
+        centrality_func(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py
new file mode 100644
index 00000000..7b1611b0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py
@@ -0,0 +1,147 @@
+import pytest
+
+pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+import networkx as nx
+from networkx import edge_current_flow_betweenness_centrality as edge_current_flow
+from networkx import (
+    edge_current_flow_betweenness_centrality_subset as edge_current_flow_subset,
+)
+
+
+class TestFlowBetweennessCentrality:
+    def test_K4_normalized(self):
+        """Betweenness centrality: K4"""
+        G = nx.complete_graph(4)
+        b = nx.current_flow_betweenness_centrality_subset(
+            G, list(G), list(G), normalized=True
+        )
+        b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_K4(self):
+        """Betweenness centrality: K4"""
+        G = nx.complete_graph(4)
+        b = nx.current_flow_betweenness_centrality_subset(
+            G, list(G), list(G), normalized=True
+        )
+        b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        # test weighted network
+        G.add_edge(0, 1, weight=0.5, other=0.3)
+        b = nx.current_flow_betweenness_centrality_subset(
+            G, list(G), list(G), normalized=True, weight=None
+        )
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        b = nx.current_flow_betweenness_centrality_subset(
+            G, list(G), list(G), normalized=True
+        )
+        b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        b = nx.current_flow_betweenness_centrality_subset(
+            G, list(G), list(G), normalized=True, weight="other"
+        )
+        b_answer = nx.current_flow_betweenness_centrality(
+            G, normalized=True, weight="other"
+        )
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_P4_normalized(self):
+        """Betweenness centrality: P4 normalized"""
+        G = nx.path_graph(4)
+        b = nx.current_flow_betweenness_centrality_subset(
+            G, list(G), list(G), normalized=True
+        )
+        b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_P4(self):
+        """Betweenness centrality: P4"""
+        G = nx.path_graph(4)
+        b = nx.current_flow_betweenness_centrality_subset(
+            G, list(G), list(G), normalized=True
+        )
+        b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_star(self):
+        """Betweenness centrality: star"""
+        G = nx.Graph()
+        nx.add_star(G, ["a", "b", "c", "d"])
+        b = nx.current_flow_betweenness_centrality_subset(
+            G, list(G), list(G), normalized=True
+        )
+        b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+
+# class TestWeightedFlowBetweennessCentrality():
+#     pass
+
+
+class TestEdgeFlowBetweennessCentrality:
+    def test_K4_normalized(self):
+        """Betweenness centrality: K4"""
+        G = nx.complete_graph(4)
+        b = edge_current_flow_subset(G, list(G), list(G), normalized=True)
+        b_answer = edge_current_flow(G, normalized=True)
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
+
+    def test_K4(self):
+        """Betweenness centrality: K4"""
+        G = nx.complete_graph(4)
+        b = edge_current_flow_subset(G, list(G), list(G), normalized=False)
+        b_answer = edge_current_flow(G, normalized=False)
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
+        # test weighted network
+        G.add_edge(0, 1, weight=0.5, other=0.3)
+        b = edge_current_flow_subset(G, list(G), list(G), normalized=False, weight=None)
+        # weight is None => same as unweighted network
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
+
+        b = edge_current_flow_subset(G, list(G), list(G), normalized=False)
+        b_answer = edge_current_flow(G, normalized=False)
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
+
+        b = edge_current_flow_subset(
+            G, list(G), list(G), normalized=False, weight="other"
+        )
+        b_answer = edge_current_flow(G, normalized=False, weight="other")
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
+
+    def test_C4(self):
+        """Edge betweenness centrality: C4"""
+        G = nx.cycle_graph(4)
+        b = edge_current_flow_subset(G, list(G), list(G), normalized=True)
+        b_answer = edge_current_flow(G, normalized=True)
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
+
+    def test_P4(self):
+        """Edge betweenness centrality: P4"""
+        G = nx.path_graph(4)
+        b = edge_current_flow_subset(G, list(G), list(G), normalized=True)
+        b_answer = edge_current_flow(G, normalized=True)
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_closeness.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_closeness.py
new file mode 100644
index 00000000..2528d622
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_closeness.py
@@ -0,0 +1,43 @@
+import pytest
+
+pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+import networkx as nx
+
+
+class TestFlowClosenessCentrality:
+    def test_K4(self):
+        """Closeness centrality: K4"""
+        G = nx.complete_graph(4)
+        b = nx.current_flow_closeness_centrality(G)
+        b_answer = {0: 2.0 / 3, 1: 2.0 / 3, 2: 2.0 / 3, 3: 2.0 / 3}
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_P4(self):
+        """Closeness centrality: P4"""
+        G = nx.path_graph(4)
+        b = nx.current_flow_closeness_centrality(G)
+        b_answer = {0: 1.0 / 6, 1: 1.0 / 4, 2: 1.0 / 4, 3: 1.0 / 6}
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_star(self):
+        """Closeness centrality: star"""
+        G = nx.Graph()
+        nx.add_star(G, ["a", "b", "c", "d"])
+        b = nx.current_flow_closeness_centrality(G)
+        b_answer = {"a": 1.0 / 3, "b": 0.6 / 3, "c": 0.6 / 3, "d": 0.6 / 3}
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_current_flow_closeness_centrality_not_connected(self):
+        G = nx.Graph()
+        G.add_nodes_from([1, 2, 3])
+        with pytest.raises(nx.NetworkXError):
+            nx.current_flow_closeness_centrality(G)
+
+
+class TestWeightedFlowClosenessCentrality:
+    pass
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_degree_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_degree_centrality.py
new file mode 100644
index 00000000..e39aa3b1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_degree_centrality.py
@@ -0,0 +1,144 @@
+"""
+Unit tests for degree centrality.
+"""
+
+import pytest
+
+import networkx as nx
+
+
+class TestDegreeCentrality:
+    def setup_method(self):
+        self.K = nx.krackhardt_kite_graph()
+        self.P3 = nx.path_graph(3)
+        self.K5 = nx.complete_graph(5)
+
+        F = nx.Graph()  # Florentine families
+        F.add_edge("Acciaiuoli", "Medici")
+        F.add_edge("Castellani", "Peruzzi")
+        F.add_edge("Castellani", "Strozzi")
+        F.add_edge("Castellani", "Barbadori")
+        F.add_edge("Medici", "Barbadori")
+        F.add_edge("Medici", "Ridolfi")
+        F.add_edge("Medici", "Tornabuoni")
+        F.add_edge("Medici", "Albizzi")
+        F.add_edge("Medici", "Salviati")
+        F.add_edge("Salviati", "Pazzi")
+        F.add_edge("Peruzzi", "Strozzi")
+        F.add_edge("Peruzzi", "Bischeri")
+        F.add_edge("Strozzi", "Ridolfi")
+        F.add_edge("Strozzi", "Bischeri")
+        F.add_edge("Ridolfi", "Tornabuoni")
+        F.add_edge("Tornabuoni", "Guadagni")
+        F.add_edge("Albizzi", "Ginori")
+        F.add_edge("Albizzi", "Guadagni")
+        F.add_edge("Bischeri", "Guadagni")
+        F.add_edge("Guadagni", "Lamberteschi")
+        self.F = F
+
+        G = nx.DiGraph()
+        G.add_edge(0, 5)
+        G.add_edge(1, 5)
+        G.add_edge(2, 5)
+        G.add_edge(3, 5)
+        G.add_edge(4, 5)
+        G.add_edge(5, 6)
+        G.add_edge(5, 7)
+        G.add_edge(5, 8)
+        self.G = G
+
+    def test_degree_centrality_1(self):
+        d = nx.degree_centrality(self.K5)
+        exact = dict(zip(range(5), [1] * 5))
+        for n, dc in d.items():
+            assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    def test_degree_centrality_2(self):
+        d = nx.degree_centrality(self.P3)
+        exact = {0: 0.5, 1: 1, 2: 0.5}
+        for n, dc in d.items():
+            assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    def test_degree_centrality_3(self):
+        d = nx.degree_centrality(self.K)
+        exact = {
+            0: 0.444,
+            1: 0.444,
+            2: 0.333,
+            3: 0.667,
+            4: 0.333,
+            5: 0.556,
+            6: 0.556,
+            7: 0.333,
+            8: 0.222,
+            9: 0.111,
+        }
+        for n, dc in d.items():
+            assert exact[n] == pytest.approx(float(f"{dc:.3f}"), abs=1e-7)
+
+    def test_degree_centrality_4(self):
+        d = nx.degree_centrality(self.F)
+        names = sorted(self.F.nodes())
+        dcs = [
+            0.071,
+            0.214,
+            0.143,
+            0.214,
+            0.214,
+            0.071,
+            0.286,
+            0.071,
+            0.429,
+            0.071,
+            0.214,
+            0.214,
+            0.143,
+            0.286,
+            0.214,
+        ]
+        exact = dict(zip(names, dcs))
+        for n, dc in d.items():
+            assert exact[n] == pytest.approx(float(f"{dc:.3f}"), abs=1e-7)
+
+    def test_indegree_centrality(self):
+        d = nx.in_degree_centrality(self.G)
+        exact = {
+            0: 0.0,
+            1: 0.0,
+            2: 0.0,
+            3: 0.0,
+            4: 0.0,
+            5: 0.625,
+            6: 0.125,
+            7: 0.125,
+            8: 0.125,
+        }
+        for n, dc in d.items():
+            assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    def test_outdegree_centrality(self):
+        d = nx.out_degree_centrality(self.G)
+        exact = {
+            0: 0.125,
+            1: 0.125,
+            2: 0.125,
+            3: 0.125,
+            4: 0.125,
+            5: 0.375,
+            6: 0.0,
+            7: 0.0,
+            8: 0.0,
+        }
+        for n, dc in d.items():
+            assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    def test_small_graph_centrality(self):
+        G = nx.empty_graph(create_using=nx.DiGraph)
+        assert {} == nx.degree_centrality(G)
+        assert {} == nx.out_degree_centrality(G)
+        assert {} == nx.in_degree_centrality(G)
+
+        G = nx.empty_graph(1, create_using=nx.DiGraph)
+        assert {0: 1} == nx.degree_centrality(G)
+        assert {0: 1} == nx.out_degree_centrality(G)
+        assert {0: 1} == nx.in_degree_centrality(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_dispersion.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_dispersion.py
new file mode 100644
index 00000000..05de1c43
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_dispersion.py
@@ -0,0 +1,73 @@
+import networkx as nx
+
+
+def small_ego_G():
+    """The sample network from https://arxiv.org/pdf/1310.6753v1.pdf"""
+    edges = [
+        ("a", "b"),
+        ("a", "c"),
+        ("b", "c"),
+        ("b", "d"),
+        ("b", "e"),
+        ("b", "f"),
+        ("c", "d"),
+        ("c", "f"),
+        ("c", "h"),
+        ("d", "f"),
+        ("e", "f"),
+        ("f", "h"),
+        ("h", "j"),
+        ("h", "k"),
+        ("i", "j"),
+        ("i", "k"),
+        ("j", "k"),
+        ("u", "a"),
+        ("u", "b"),
+        ("u", "c"),
+        ("u", "d"),
+        ("u", "e"),
+        ("u", "f"),
+        ("u", "g"),
+        ("u", "h"),
+        ("u", "i"),
+        ("u", "j"),
+        ("u", "k"),
+    ]
+    G = nx.Graph()
+    G.add_edges_from(edges)
+
+    return G
+
+
+class TestDispersion:
+    def test_article(self):
+        """Our dispersion values match those reported in the article."""
+        G = small_ego_G()
+        disp_uh = nx.dispersion(G, "u", "h", normalized=False)
+        disp_ub = nx.dispersion(G, "u", "b", normalized=False)
+        assert disp_uh == 4
+        assert disp_ub == 1
+
+    def test_results_length(self):
+        """there is a result for every node"""
+        G = small_ego_G()
+        disp = nx.dispersion(G)
+        disp_Gu = nx.dispersion(G, "u")
+        disp_uv = nx.dispersion(G, "u", "h")
+        assert len(disp) == len(G)
+        assert len(disp_Gu) == len(G) - 1
+        assert isinstance(disp_uv, float)
+
+    def test_dispersion_v_only(self):
+        G = small_ego_G()
+        disp_G_h = nx.dispersion(G, v="h", normalized=False)
+        disp_G_h_normalized = nx.dispersion(G, v="h", normalized=True)
+        assert disp_G_h == {"c": 0, "f": 0, "j": 0, "k": 0, "u": 4}
+        assert disp_G_h_normalized == {"c": 0.0, "f": 0.0, "j": 0.0, "k": 0.0, "u": 1.0}
+
+    def test_impossible_things(self):
+        G = nx.karate_club_graph()
+        disp = nx.dispersion(G)
+        for u in disp:
+            for v in disp[u]:
+                assert disp[u][v] >= 0
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py
new file mode 100644
index 00000000..cfc9ee79
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py
@@ -0,0 +1,187 @@
+import math
+
+import pytest
+
+np = pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+
+import networkx as nx
+
+
+class TestEigenvectorCentrality:
+    def test_K5(self):
+        """Eigenvector centrality: K5"""
+        G = nx.complete_graph(5)
+        b = nx.eigenvector_centrality(G)
+        v = math.sqrt(1 / 5.0)
+        b_answer = dict.fromkeys(G, v)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        nstart = {n: 1 for n in G}
+        b = nx.eigenvector_centrality(G, nstart=nstart)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+        b = nx.eigenvector_centrality_numpy(G)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-3)
+
+    def test_P3(self):
+        """Eigenvector centrality: P3"""
+        G = nx.path_graph(3)
+        b_answer = {0: 0.5, 1: 0.7071, 2: 0.5}
+        b = nx.eigenvector_centrality_numpy(G)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+        b = nx.eigenvector_centrality(G)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+
+    def test_P3_unweighted(self):
+        """Eigenvector centrality: P3"""
+        G = nx.path_graph(3)
+        b_answer = {0: 0.5, 1: 0.7071, 2: 0.5}
+        b = nx.eigenvector_centrality_numpy(G, weight=None)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+
+    def test_maxiter(self):
+        with pytest.raises(nx.PowerIterationFailedConvergence):
+            G = nx.path_graph(3)
+            nx.eigenvector_centrality(G, max_iter=0)
+
+
+class TestEigenvectorCentralityDirected:
+    @classmethod
+    def setup_class(cls):
+        G = nx.DiGraph()
+
+        edges = [
+            (1, 2),
+            (1, 3),
+            (2, 4),
+            (3, 2),
+            (3, 5),
+            (4, 2),
+            (4, 5),
+            (4, 6),
+            (5, 6),
+            (5, 7),
+            (5, 8),
+            (6, 8),
+            (7, 1),
+            (7, 5),
+            (7, 8),
+            (8, 6),
+            (8, 7),
+        ]
+
+        G.add_edges_from(edges, weight=2.0)
+        cls.G = G.reverse()
+        cls.G.evc = [
+            0.25368793,
+            0.19576478,
+            0.32817092,
+            0.40430835,
+            0.48199885,
+            0.15724483,
+            0.51346196,
+            0.32475403,
+        ]
+
+        H = nx.DiGraph()
+
+        edges = [
+            (1, 2),
+            (1, 3),
+            (2, 4),
+            (3, 2),
+            (3, 5),
+            (4, 2),
+            (4, 5),
+            (4, 6),
+            (5, 6),
+            (5, 7),
+            (5, 8),
+            (6, 8),
+            (7, 1),
+            (7, 5),
+            (7, 8),
+            (8, 6),
+            (8, 7),
+        ]
+
+        G.add_edges_from(edges)
+        cls.H = G.reverse()
+        cls.H.evc = [
+            0.25368793,
+            0.19576478,
+            0.32817092,
+            0.40430835,
+            0.48199885,
+            0.15724483,
+            0.51346196,
+            0.32475403,
+        ]
+
+    def test_eigenvector_centrality_weighted(self):
+        G = self.G
+        p = nx.eigenvector_centrality(G)
+        for a, b in zip(list(p.values()), self.G.evc):
+            assert a == pytest.approx(b, abs=1e-4)
+
+    def test_eigenvector_centrality_weighted_numpy(self):
+        G = self.G
+        p = nx.eigenvector_centrality_numpy(G)
+        for a, b in zip(list(p.values()), self.G.evc):
+            assert a == pytest.approx(b, abs=1e-7)
+
+    def test_eigenvector_centrality_unweighted(self):
+        G = self.H
+        p = nx.eigenvector_centrality(G)
+        for a, b in zip(list(p.values()), self.G.evc):
+            assert a == pytest.approx(b, abs=1e-4)
+
+    def test_eigenvector_centrality_unweighted_numpy(self):
+        G = self.H
+        p = nx.eigenvector_centrality_numpy(G)
+        for a, b in zip(list(p.values()), self.G.evc):
+            assert a == pytest.approx(b, abs=1e-7)
+
+
+class TestEigenvectorCentralityExceptions:
+    def test_multigraph(self):
+        with pytest.raises(nx.NetworkXException):
+            nx.eigenvector_centrality(nx.MultiGraph())
+
+    def test_multigraph_numpy(self):
+        with pytest.raises(nx.NetworkXException):
+            nx.eigenvector_centrality_numpy(nx.MultiGraph())
+
+    def test_null(self):
+        with pytest.raises(nx.NetworkXException):
+            nx.eigenvector_centrality(nx.Graph())
+
+    def test_null_numpy(self):
+        with pytest.raises(nx.NetworkXException):
+            nx.eigenvector_centrality_numpy(nx.Graph())
+
+    @pytest.mark.parametrize(
+        "G",
+        [
+            nx.empty_graph(3),
+            nx.DiGraph([(0, 1), (1, 2)]),
+        ],
+    )
+    def test_disconnected_numpy(self, G):
+        msg = "does not give consistent results for disconnected"
+        with pytest.raises(nx.AmbiguousSolution, match=msg):
+            nx.eigenvector_centrality_numpy(G)
+
+    def test_zero_nstart(self):
+        G = nx.Graph([(1, 2), (1, 3), (2, 3)])
+        with pytest.raises(
+            nx.NetworkXException, match="initial vector cannot have all zero values"
+        ):
+            nx.eigenvector_centrality(G, nstart={v: 0 for v in G})
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_group.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_group.py
new file mode 100644
index 00000000..82343f28
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_group.py
@@ -0,0 +1,277 @@
+"""
+Tests for Group Centrality Measures
+"""
+
+import pytest
+
+import networkx as nx
+
+
+class TestGroupBetweennessCentrality:
+    def test_group_betweenness_single_node(self):
+        """
+        Group betweenness centrality for single node group
+        """
+        G = nx.path_graph(5)
+        C = [1]
+        b = nx.group_betweenness_centrality(
+            G, C, weight=None, normalized=False, endpoints=False
+        )
+        b_answer = 3.0
+        assert b == b_answer
+
+    def test_group_betweenness_with_endpoints(self):
+        """
+        Group betweenness centrality for a single node group, counting endpoints
+        """
+        G = nx.path_graph(5)
+        C = [1]
+        b = nx.group_betweenness_centrality(
+            G, C, weight=None, normalized=False, endpoints=True
+        )
+        b_answer = 7.0
+        assert b == b_answer
+
+    def test_group_betweenness_normalized(self):
+        """
+        Group betweenness centrality for group with more than
+        1 node and normalized
+        """
+        G = nx.path_graph(5)
+        C = [1, 3]
+        b = nx.group_betweenness_centrality(
+            G, C, weight=None, normalized=True, endpoints=False
+        )
+        b_answer = 1.0
+        assert b == b_answer
+
+    def test_two_group_betweenness_value_zero(self):
+        """
+        Group betweenness centrality for two groups, one of which has value 0
+        """
+        G = nx.cycle_graph(7)
+        C = [[0, 1, 6], [0, 1, 5]]
+        b = nx.group_betweenness_centrality(G, C, weight=None, normalized=False)
+        b_answer = [0.0, 3.0]
+        assert b == b_answer
+
+    def test_group_betweenness_value_zero(self):
+        """
+        Group betweenness centrality value of 0
+        """
+        G = nx.cycle_graph(6)
+        C = [0, 1, 5]
+        b = nx.group_betweenness_centrality(G, C, weight=None, normalized=False)
+        b_answer = 0.0
+        assert b == b_answer
+
+    def test_group_betweenness_disconnected_graph(self):
+        """
+        Group betweenness centrality in a disconnected graph
+        """
+        G = nx.path_graph(5)
+        G.remove_edge(0, 1)
+        C = [1]
+        b = nx.group_betweenness_centrality(G, C, weight=None, normalized=False)
+        b_answer = 0.0
+        assert b == b_answer
+
+    def test_group_betweenness_node_not_in_graph(self):
+        """
+        Node(s) in C not in graph, raises NodeNotFound exception
+        """
+        with pytest.raises(nx.NodeNotFound):
+            nx.group_betweenness_centrality(nx.path_graph(5), [4, 7, 8])
+
+    def test_group_betweenness_directed_weighted(self):
+        """
+        Group betweenness centrality in a directed and weighted graph
+        """
+        G = nx.DiGraph()
+        G.add_edge(1, 0, weight=1)
+        G.add_edge(0, 2, weight=2)
+        G.add_edge(1, 2, weight=3)
+        G.add_edge(3, 1, weight=4)
+        G.add_edge(2, 3, weight=1)
+        G.add_edge(4, 3, weight=6)
+        G.add_edge(2, 4, weight=7)
+        C = [1, 2]
+        b = nx.group_betweenness_centrality(G, C, weight="weight", normalized=False)
+        b_answer = 5.0
+        assert b == b_answer
+
+
+class TestProminentGroup:
+    np = pytest.importorskip("numpy")
+    pd = pytest.importorskip("pandas")
+
+    def test_prominent_group_single_node(self):
+        """
+        Prominent group for single node
+        """
+        G = nx.path_graph(5)
+        k = 1
+        b, g = nx.prominent_group(G, k, normalized=False, endpoints=False)
+        b_answer, g_answer = 4.0, [2]
+        assert b == b_answer and g == g_answer
+
+    def test_prominent_group_with_c(self):
+        """
+        Prominent group without some nodes
+        """
+        G = nx.path_graph(5)
+        k = 1
+        b, g = nx.prominent_group(G, k, normalized=False, C=[2])
+        b_answer, g_answer = 3.0, [1]
+        assert b == b_answer and g == g_answer
+
+    def test_prominent_group_normalized_endpoints(self):
+        """
+        Prominent group with normalized result, with endpoints
+        """
+        G = nx.cycle_graph(7)
+        k = 2
+        b, g = nx.prominent_group(G, k, normalized=True, endpoints=True)
+        b_answer, g_answer = 1.7, [2, 5]
+        assert b == b_answer and g == g_answer
+
+    def test_prominent_group_disconnected_graph(self):
+        """
+        Prominent group of disconnected graph
+        """
+        G = nx.path_graph(6)
+        G.remove_edge(0, 1)
+        k = 1
+        b, g = nx.prominent_group(G, k, weight=None, normalized=False)
+        b_answer, g_answer = 4.0, [3]
+        assert b == b_answer and g == g_answer
+
+    def test_prominent_group_node_not_in_graph(self):
+        """
+        Node(s) in C not in graph, raises NodeNotFound exception
+        """
+        with pytest.raises(nx.NodeNotFound):
+            nx.prominent_group(nx.path_graph(5), 1, C=[10])
+
+    def test_group_betweenness_directed_weighted(self):
+        """
+        Group betweenness centrality in a directed and weighted graph
+        """
+        G = nx.DiGraph()
+        G.add_edge(1, 0, weight=1)
+        G.add_edge(0, 2, weight=2)
+        G.add_edge(1, 2, weight=3)
+        G.add_edge(3, 1, weight=4)
+        G.add_edge(2, 3, weight=1)
+        G.add_edge(4, 3, weight=6)
+        G.add_edge(2, 4, weight=7)
+        k = 2
+        b, g = nx.prominent_group(G, k, weight="weight", normalized=False)
+        b_answer, g_answer = 5.0, [1, 2]
+        assert b == b_answer and g == g_answer
+
+    def test_prominent_group_greedy_algorithm(self):
+        """
+        Group betweenness centrality in a greedy algorithm
+        """
+        G = nx.cycle_graph(7)
+        k = 2
+        b, g = nx.prominent_group(G, k, normalized=True, endpoints=True, greedy=True)
+        b_answer, g_answer = 1.7, [6, 3]
+        assert b == b_answer and g == g_answer
+
+
+class TestGroupClosenessCentrality:
+    def test_group_closeness_single_node(self):
+        """
+        Group closeness centrality for a single node group
+        """
+        G = nx.path_graph(5)
+        c = nx.group_closeness_centrality(G, [1])
+        c_answer = nx.closeness_centrality(G, 1)
+        assert c == c_answer
+
+    def test_group_closeness_disconnected(self):
+        """
+        Group closeness centrality for a disconnected graph
+        """
+        G = nx.Graph()
+        G.add_nodes_from([1, 2, 3, 4])
+        c = nx.group_closeness_centrality(G, [1, 2])
+        c_answer = 0
+        assert c == c_answer
+
+    def test_group_closeness_multiple_node(self):
+        """
+        Group closeness centrality for a group with more than
+        1 node
+        """
+        G = nx.path_graph(4)
+        c = nx.group_closeness_centrality(G, [1, 2])
+        c_answer = 1
+        assert c == c_answer
+
+    def test_group_closeness_node_not_in_graph(self):
+        """
+        Node(s) in S not in graph, raises NodeNotFound exception
+        """
+        with pytest.raises(nx.NodeNotFound):
+            nx.group_closeness_centrality(nx.path_graph(5), [6, 7, 8])
+
+
+class TestGroupDegreeCentrality:
+    def test_group_degree_centrality_single_node(self):
+        """
+        Group degree centrality for a single node group
+        """
+        G = nx.path_graph(4)
+        d = nx.group_degree_centrality(G, [1])
+        d_answer = nx.degree_centrality(G)[1]
+        assert d == d_answer
+
+    def test_group_degree_centrality_multiple_node(self):
+        """
+        Group degree centrality for group with more than
+        1 node
+        """
+        G = nx.Graph()
+        G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
+        G.add_edges_from(
+            [(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)]
+        )
+        d = nx.group_degree_centrality(G, [1, 2])
+        d_answer = 1
+        assert d == d_answer
+
+    def test_group_in_degree_centrality(self):
+        """
+        Group in-degree centrality in a DiGraph
+        """
+        G = nx.DiGraph()
+        G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
+        G.add_edges_from(
+            [(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)]
+        )
+        d = nx.group_in_degree_centrality(G, [1, 2])
+        d_answer = 0
+        assert d == d_answer
+
+    def test_group_out_degree_centrality(self):
+        """
+        Group out-degree centrality in a DiGraph
+        """
+        G = nx.DiGraph()
+        G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
+        G.add_edges_from(
+            [(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)]
+        )
+        d = nx.group_out_degree_centrality(G, [1, 2])
+        d_answer = 1
+        assert d == d_answer
+
+    def test_group_degree_centrality_node_not_in_graph(self):
+        """
+        Node(s) in S not in graph, raises NetworkXError
+        """
+        with pytest.raises(nx.NetworkXError):
+            nx.group_degree_centrality(nx.path_graph(5), [6, 7, 8])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_harmonic_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_harmonic_centrality.py
new file mode 100644
index 00000000..4b3dc4ac
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_harmonic_centrality.py
@@ -0,0 +1,122 @@
+"""
+Tests for harmonic centrality.
+"""
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms.centrality import harmonic_centrality
+
+
+class TestClosenessCentrality:
+    @classmethod
+    def setup_class(cls):
+        cls.P3 = nx.path_graph(3)
+        cls.P4 = nx.path_graph(4)
+        cls.K5 = nx.complete_graph(5)
+
+        cls.C4 = nx.cycle_graph(4)
+        cls.C4_directed = nx.cycle_graph(4, create_using=nx.DiGraph)
+
+        cls.C5 = nx.cycle_graph(5)
+
+        cls.T = nx.balanced_tree(r=2, h=2)
+
+        cls.Gb = nx.DiGraph()
+        cls.Gb.add_edges_from([(0, 1), (0, 2), (0, 4), (2, 1), (2, 3), (4, 3)])
+
+    def test_p3_harmonic(self):
+        c = harmonic_centrality(self.P3)
+        d = {0: 1.5, 1: 2, 2: 1.5}
+        for n in sorted(self.P3):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_p4_harmonic(self):
+        c = harmonic_centrality(self.P4)
+        d = {0: 1.8333333, 1: 2.5, 2: 2.5, 3: 1.8333333}
+        for n in sorted(self.P4):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_clique_complete(self):
+        c = harmonic_centrality(self.K5)
+        d = {0: 4, 1: 4, 2: 4, 3: 4, 4: 4}
+        for n in sorted(self.P3):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_cycle_C4(self):
+        c = harmonic_centrality(self.C4)
+        d = {0: 2.5, 1: 2.5, 2: 2.5, 3: 2.5}
+        for n in sorted(self.C4):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_cycle_C5(self):
+        c = harmonic_centrality(self.C5)
+        d = {0: 3, 1: 3, 2: 3, 3: 3, 4: 3, 5: 4}
+        for n in sorted(self.C5):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_bal_tree(self):
+        c = harmonic_centrality(self.T)
+        d = {0: 4.0, 1: 4.1666, 2: 4.1666, 3: 2.8333, 4: 2.8333, 5: 2.8333, 6: 2.8333}
+        for n in sorted(self.T):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_exampleGraph(self):
+        c = harmonic_centrality(self.Gb)
+        d = {0: 0, 1: 2, 2: 1, 3: 2.5, 4: 1}
+        for n in sorted(self.Gb):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_weighted_harmonic(self):
+        XG = nx.DiGraph()
+        XG.add_weighted_edges_from(
+            [
+                ("a", "b", 10),
+                ("d", "c", 5),
+                ("a", "c", 1),
+                ("e", "f", 2),
+                ("f", "c", 1),
+                ("a", "f", 3),
+            ]
+        )
+        c = harmonic_centrality(XG, distance="weight")
+        d = {"a": 0, "b": 0.1, "c": 2.533, "d": 0, "e": 0, "f": 0.83333}
+        for n in sorted(XG):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_empty(self):
+        G = nx.DiGraph()
+        c = harmonic_centrality(G, distance="weight")
+        d = {}
+        assert c == d
+
+    def test_singleton(self):
+        G = nx.DiGraph()
+        G.add_node(0)
+        c = harmonic_centrality(G, distance="weight")
+        d = {0: 0}
+        assert c == d
+
+    def test_cycle_c4_directed(self):
+        c = harmonic_centrality(self.C4_directed, nbunch=[0, 1], sources=[1, 2])
+        d = {0: 0.833, 1: 0.333}
+        for n in [0, 1]:
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_cycle_c4_directed_subset(self):
+        c = harmonic_centrality(self.C4_directed, nbunch=[0, 1])
+        d = 1.833
+        for n in [0, 1]:
+            assert c[n] == pytest.approx(d, abs=1e-3)
+
+    def test_p3_harmonic_subset(self):
+        c = harmonic_centrality(self.P3, sources=[0, 1])
+        d = {0: 1, 1: 1, 2: 1.5}
+        for n in self.P3:
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_p4_harmonic_subset(self):
+        c = harmonic_centrality(self.P4, nbunch=[2, 3], sources=[0, 1])
+        d = {2: 1.5, 3: 0.8333333}
+        for n in [2, 3]:
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_katz_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_katz_centrality.py
new file mode 100644
index 00000000..0927f00b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_katz_centrality.py
@@ -0,0 +1,345 @@
+import math
+
+import pytest
+
+import networkx as nx
+
+
+class TestKatzCentrality:
+    def test_K5(self):
+        """Katz centrality: K5"""
+        G = nx.complete_graph(5)
+        alpha = 0.1
+        b = nx.katz_centrality(G, alpha)
+        v = math.sqrt(1 / 5.0)
+        b_answer = dict.fromkeys(G, v)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        nstart = {n: 1 for n in G}
+        b = nx.katz_centrality(G, alpha, nstart=nstart)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_P3(self):
+        """Katz centrality: P3"""
+        alpha = 0.1
+        G = nx.path_graph(3)
+        b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
+        b = nx.katz_centrality(G, alpha)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+
+    def test_maxiter(self):
+        with pytest.raises(nx.PowerIterationFailedConvergence):
+            nx.katz_centrality(nx.path_graph(3), 0.1, max_iter=0)
+
+    def test_beta_as_scalar(self):
+        alpha = 0.1
+        beta = 0.1
+        b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
+        G = nx.path_graph(3)
+        b = nx.katz_centrality(G, alpha, beta)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+
+    def test_beta_as_dict(self):
+        alpha = 0.1
+        beta = {0: 1.0, 1: 1.0, 2: 1.0}
+        b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
+        G = nx.path_graph(3)
+        b = nx.katz_centrality(G, alpha, beta)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+
+    def test_multiple_alpha(self):
+        alpha_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]
+        for alpha in alpha_list:
+            b_answer = {
+                0.1: {
+                    0: 0.5598852584152165,
+                    1: 0.6107839182711449,
+                    2: 0.5598852584152162,
+                },
+                0.2: {
+                    0: 0.5454545454545454,
+                    1: 0.6363636363636365,
+                    2: 0.5454545454545454,
+                },
+                0.3: {
+                    0: 0.5333964609104419,
+                    1: 0.6564879518897746,
+                    2: 0.5333964609104419,
+                },
+                0.4: {
+                    0: 0.5232045649263551,
+                    1: 0.6726915834767423,
+                    2: 0.5232045649263551,
+                },
+                0.5: {
+                    0: 0.5144957746691622,
+                    1: 0.6859943117075809,
+                    2: 0.5144957746691622,
+                },
+                0.6: {
+                    0: 0.5069794004195823,
+                    1: 0.6970966755769258,
+                    2: 0.5069794004195823,
+                },
+            }
+            G = nx.path_graph(3)
+            b = nx.katz_centrality(G, alpha)
+            for n in sorted(G):
+                assert b[n] == pytest.approx(b_answer[alpha][n], abs=1e-4)
+
+    def test_multigraph(self):
+        with pytest.raises(nx.NetworkXException):
+            nx.katz_centrality(nx.MultiGraph(), 0.1)
+
+    def test_empty(self):
+        e = nx.katz_centrality(nx.Graph(), 0.1)
+        assert e == {}
+
+    def test_bad_beta(self):
+        with pytest.raises(nx.NetworkXException):
+            G = nx.Graph([(0, 1)])
+            beta = {0: 77}
+            nx.katz_centrality(G, 0.1, beta=beta)
+
+    def test_bad_beta_number(self):
+        with pytest.raises(nx.NetworkXException):
+            G = nx.Graph([(0, 1)])
+            nx.katz_centrality(G, 0.1, beta="foo")
+
+
+class TestKatzCentralityNumpy:
+    @classmethod
+    def setup_class(cls):
+        global np
+        np = pytest.importorskip("numpy")
+        pytest.importorskip("scipy")
+
+    def test_K5(self):
+        """Katz centrality: K5"""
+        G = nx.complete_graph(5)
+        alpha = 0.1
+        b = nx.katz_centrality(G, alpha)
+        v = math.sqrt(1 / 5.0)
+        b_answer = dict.fromkeys(G, v)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        b = nx.eigenvector_centrality_numpy(G)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-3)
+
+    def test_P3(self):
+        """Katz centrality: P3"""
+        alpha = 0.1
+        G = nx.path_graph(3)
+        b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
+        b = nx.katz_centrality_numpy(G, alpha)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+
+    def test_beta_as_scalar(self):
+        alpha = 0.1
+        beta = 0.1
+        b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
+        G = nx.path_graph(3)
+        b = nx.katz_centrality_numpy(G, alpha, beta)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+
+    def test_beta_as_dict(self):
+        alpha = 0.1
+        beta = {0: 1.0, 1: 1.0, 2: 1.0}
+        b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
+        G = nx.path_graph(3)
+        b = nx.katz_centrality_numpy(G, alpha, beta)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+
+    def test_multiple_alpha(self):
+        alpha_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]
+        for alpha in alpha_list:
+            b_answer = {
+                0.1: {
+                    0: 0.5598852584152165,
+                    1: 0.6107839182711449,
+                    2: 0.5598852584152162,
+                },
+                0.2: {
+                    0: 0.5454545454545454,
+                    1: 0.6363636363636365,
+                    2: 0.5454545454545454,
+                },
+                0.3: {
+                    0: 0.5333964609104419,
+                    1: 0.6564879518897746,
+                    2: 0.5333964609104419,
+                },
+                0.4: {
+                    0: 0.5232045649263551,
+                    1: 0.6726915834767423,
+                    2: 0.5232045649263551,
+                },
+                0.5: {
+                    0: 0.5144957746691622,
+                    1: 0.6859943117075809,
+                    2: 0.5144957746691622,
+                },
+                0.6: {
+                    0: 0.5069794004195823,
+                    1: 0.6970966755769258,
+                    2: 0.5069794004195823,
+                },
+            }
+            G = nx.path_graph(3)
+            b = nx.katz_centrality_numpy(G, alpha)
+            for n in sorted(G):
+                assert b[n] == pytest.approx(b_answer[alpha][n], abs=1e-4)
+
+    def test_multigraph(self):
+        with pytest.raises(nx.NetworkXException):
+            nx.katz_centrality(nx.MultiGraph(), 0.1)
+
+    def test_empty(self):
+        e = nx.katz_centrality(nx.Graph(), 0.1)
+        assert e == {}
+
+    def test_bad_beta(self):
+        with pytest.raises(nx.NetworkXException):
+            G = nx.Graph([(0, 1)])
+            beta = {0: 77}
+            nx.katz_centrality_numpy(G, 0.1, beta=beta)
+
+    def test_bad_beta_numbe(self):
+        with pytest.raises(nx.NetworkXException):
+            G = nx.Graph([(0, 1)])
+            nx.katz_centrality_numpy(G, 0.1, beta="foo")
+
+    def test_K5_unweighted(self):
+        """Katz centrality: K5"""
+        G = nx.complete_graph(5)
+        alpha = 0.1
+        b = nx.katz_centrality(G, alpha, weight=None)
+        v = math.sqrt(1 / 5.0)
+        b_answer = dict.fromkeys(G, v)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        b = nx.eigenvector_centrality_numpy(G, weight=None)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-3)
+
+    def test_P3_unweighted(self):
+        """Katz centrality: P3"""
+        alpha = 0.1
+        G = nx.path_graph(3)
+        b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
+        b = nx.katz_centrality_numpy(G, alpha, weight=None)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+
+
+class TestKatzCentralityDirected:
+    @classmethod
+    def setup_class(cls):
+        G = nx.DiGraph()
+        edges = [
+            (1, 2),
+            (1, 3),
+            (2, 4),
+            (3, 2),
+            (3, 5),
+            (4, 2),
+            (4, 5),
+            (4, 6),
+            (5, 6),
+            (5, 7),
+            (5, 8),
+            (6, 8),
+            (7, 1),
+            (7, 5),
+            (7, 8),
+            (8, 6),
+            (8, 7),
+        ]
+        G.add_edges_from(edges, weight=2.0)
+        cls.G = G.reverse()
+        cls.G.alpha = 0.1
+        cls.G.evc = [
+            0.3289589783189635,
+            0.2832077296243516,
+            0.3425906003685471,
+            0.3970420865198392,
+            0.41074871061646284,
+            0.272257430756461,
+            0.4201989685435462,
+            0.34229059218038554,
+        ]
+
+        H = nx.DiGraph(edges)
+        cls.H = G.reverse()
+        cls.H.alpha = 0.1
+        cls.H.evc = [
+            0.3289589783189635,
+            0.2832077296243516,
+            0.3425906003685471,
+            0.3970420865198392,
+            0.41074871061646284,
+            0.272257430756461,
+            0.4201989685435462,
+            0.34229059218038554,
+        ]
+
+    def test_katz_centrality_weighted(self):
+        G = self.G
+        alpha = self.G.alpha
+        p = nx.katz_centrality(G, alpha, weight="weight")
+        for a, b in zip(list(p.values()), self.G.evc):
+            assert a == pytest.approx(b, abs=1e-7)
+
+    def test_katz_centrality_unweighted(self):
+        H = self.H
+        alpha = self.H.alpha
+        p = nx.katz_centrality(H, alpha, weight="weight")
+        for a, b in zip(list(p.values()), self.H.evc):
+            assert a == pytest.approx(b, abs=1e-7)
+
+
+class TestKatzCentralityDirectedNumpy(TestKatzCentralityDirected):
+    @classmethod
+    def setup_class(cls):
+        global np
+        np = pytest.importorskip("numpy")
+        pytest.importorskip("scipy")
+        super().setup_class()
+
+    def test_katz_centrality_weighted(self):
+        G = self.G
+        alpha = self.G.alpha
+        p = nx.katz_centrality_numpy(G, alpha, weight="weight")
+        for a, b in zip(list(p.values()), self.G.evc):
+            assert a == pytest.approx(b, abs=1e-7)
+
+    def test_katz_centrality_unweighted(self):
+        H = self.H
+        alpha = self.H.alpha
+        p = nx.katz_centrality_numpy(H, alpha, weight="weight")
+        for a, b in zip(list(p.values()), self.H.evc):
+            assert a == pytest.approx(b, abs=1e-7)
+
+
+class TestKatzEigenvectorVKatz:
+    @classmethod
+    def setup_class(cls):
+        global np
+        np = pytest.importorskip("numpy")
+        pytest.importorskip("scipy")
+
+    def test_eigenvector_v_katz_random(self):
+        G = nx.gnp_random_graph(10, 0.5, seed=1234)
+        l = max(np.linalg.eigvals(nx.adjacency_matrix(G).todense()))
+        e = nx.eigenvector_centrality_numpy(G)
+        k = nx.katz_centrality_numpy(G, 1.0 / l)
+        for n in G:
+            assert e[n] == pytest.approx(k[n], abs=1e-7)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_laplacian_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_laplacian_centrality.py
new file mode 100644
index 00000000..21aa28b0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_laplacian_centrality.py
@@ -0,0 +1,221 @@
+import pytest
+
+import networkx as nx
+
+np = pytest.importorskip("numpy")
+sp = pytest.importorskip("scipy")
+
+
+def test_laplacian_centrality_null_graph():
+    G = nx.Graph()
+    with pytest.raises(nx.NetworkXPointlessConcept):
+        d = nx.laplacian_centrality(G, normalized=False)
+
+
+def test_laplacian_centrality_single_node():
+    """See gh-6571"""
+    G = nx.empty_graph(1)
+    assert nx.laplacian_centrality(G, normalized=False) == {0: 0}
+    with pytest.raises(ZeroDivisionError):
+        nx.laplacian_centrality(G, normalized=True)
+
+
+def test_laplacian_centrality_unconnected_nodes():
+    """laplacian_centrality on an unconnected node graph should return 0
+
+    For graphs without edges, the Laplacian energy is 0 and is unchanged with
+    node removal, so::
+
+        LC(v) = LE(G) - LE(G - v) = 0 - 0 = 0
+    """
+    G = nx.empty_graph(3)
+    assert nx.laplacian_centrality(G, normalized=False) == {0: 0, 1: 0, 2: 0}
+
+
+def test_laplacian_centrality_empty_graph():
+    G = nx.empty_graph(3)
+    with pytest.raises(ZeroDivisionError):
+        d = nx.laplacian_centrality(G, normalized=True)
+
+
+def test_laplacian_centrality_E():
+    E = nx.Graph()
+    E.add_weighted_edges_from(
+        [(0, 1, 4), (4, 5, 1), (0, 2, 2), (2, 1, 1), (1, 3, 2), (1, 4, 2)]
+    )
+    d = nx.laplacian_centrality(E)
+    exact = {
+        0: 0.700000,
+        1: 0.900000,
+        2: 0.280000,
+        3: 0.220000,
+        4: 0.260000,
+        5: 0.040000,
+    }
+
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check not normalized
+    full_energy = 200
+    dnn = nx.laplacian_centrality(E, normalized=False)
+    for n, dc in dnn.items():
+        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-7)
+
+    # Check unweighted not-normalized version
+    duw_nn = nx.laplacian_centrality(E, normalized=False, weight=None)
+    print(duw_nn)
+    exact_uw_nn = {
+        0: 18,
+        1: 34,
+        2: 18,
+        3: 10,
+        4: 16,
+        5: 6,
+    }
+    for n, dc in duw_nn.items():
+        assert exact_uw_nn[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check unweighted version
+    duw = nx.laplacian_centrality(E, weight=None)
+    full_energy = 42
+    for n, dc in duw.items():
+        assert exact_uw_nn[n] / full_energy == pytest.approx(dc, abs=1e-7)
+
+
+def test_laplacian_centrality_KC():
+    KC = nx.karate_club_graph()
+    d = nx.laplacian_centrality(KC)
+    exact = {
+        0: 0.2543593,
+        1: 0.1724524,
+        2: 0.2166053,
+        3: 0.0964646,
+        4: 0.0350344,
+        5: 0.0571109,
+        6: 0.0540713,
+        7: 0.0788674,
+        8: 0.1222204,
+        9: 0.0217565,
+        10: 0.0308751,
+        11: 0.0215965,
+        12: 0.0174372,
+        13: 0.118861,
+        14: 0.0366341,
+        15: 0.0548712,
+        16: 0.0172772,
+        17: 0.0191969,
+        18: 0.0225564,
+        19: 0.0331147,
+        20: 0.0279955,
+        21: 0.0246361,
+        22: 0.0382339,
+        23: 0.1294193,
+        24: 0.0227164,
+        25: 0.0644697,
+        26: 0.0281555,
+        27: 0.075188,
+        28: 0.0364742,
+        29: 0.0707087,
+        30: 0.0708687,
+        31: 0.131019,
+        32: 0.2370821,
+        33: 0.3066709,
+    }
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check not normalized
+    full_energy = 12502
+    dnn = nx.laplacian_centrality(KC, normalized=False)
+    for n, dc in dnn.items():
+        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-3)
+
+
+def test_laplacian_centrality_K():
+    K = nx.krackhardt_kite_graph()
+    d = nx.laplacian_centrality(K)
+    exact = {
+        0: 0.3010753,
+        1: 0.3010753,
+        2: 0.2258065,
+        3: 0.483871,
+        4: 0.2258065,
+        5: 0.3870968,
+        6: 0.3870968,
+        7: 0.1935484,
+        8: 0.0752688,
+        9: 0.0322581,
+    }
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check not normalized
+    full_energy = 186
+    dnn = nx.laplacian_centrality(K, normalized=False)
+    for n, dc in dnn.items():
+        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-3)
+
+
+def test_laplacian_centrality_P3():
+    P3 = nx.path_graph(3)
+    d = nx.laplacian_centrality(P3)
+    exact = {0: 0.6, 1: 1.0, 2: 0.6}
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+
+def test_laplacian_centrality_K5():
+    K5 = nx.complete_graph(5)
+    d = nx.laplacian_centrality(K5)
+    exact = {0: 0.52, 1: 0.52, 2: 0.52, 3: 0.52, 4: 0.52}
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+
+def test_laplacian_centrality_FF():
+    FF = nx.florentine_families_graph()
+    d = nx.laplacian_centrality(FF)
+    exact = {
+        "Acciaiuoli": 0.0804598,
+        "Medici": 0.4022989,
+        "Castellani": 0.1724138,
+        "Peruzzi": 0.183908,
+        "Strozzi": 0.2528736,
+        "Barbadori": 0.137931,
+        "Ridolfi": 0.2183908,
+        "Tornabuoni": 0.2183908,
+        "Albizzi": 0.1954023,
+        "Salviati": 0.1149425,
+        "Pazzi": 0.0344828,
+        "Bischeri": 0.1954023,
+        "Guadagni": 0.2298851,
+        "Ginori": 0.045977,
+        "Lamberteschi": 0.0574713,
+    }
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+
+def test_laplacian_centrality_DG():
+    DG = nx.DiGraph([(0, 5), (1, 5), (2, 5), (3, 5), (4, 5), (5, 6), (5, 7), (5, 8)])
+    d = nx.laplacian_centrality(DG)
+    exact = {
+        0: 0.2123352,
+        5: 0.515391,
+        1: 0.2123352,
+        2: 0.2123352,
+        3: 0.2123352,
+        4: 0.2123352,
+        6: 0.2952031,
+        7: 0.2952031,
+        8: 0.2952031,
+    }
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check not normalized
+    full_energy = 9.50704
+    dnn = nx.laplacian_centrality(DG, normalized=False)
+    for n, dc in dnn.items():
+        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-4)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_load_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_load_centrality.py
new file mode 100644
index 00000000..bf096039
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_load_centrality.py
@@ -0,0 +1,344 @@
+import pytest
+
+import networkx as nx
+
+
+class TestLoadCentrality:
+    @classmethod
+    def setup_class(cls):
+        G = nx.Graph()
+        G.add_edge(0, 1, weight=3)
+        G.add_edge(0, 2, weight=2)
+        G.add_edge(0, 3, weight=6)
+        G.add_edge(0, 4, weight=4)
+        G.add_edge(1, 3, weight=5)
+        G.add_edge(1, 5, weight=5)
+        G.add_edge(2, 4, weight=1)
+        G.add_edge(3, 4, weight=2)
+        G.add_edge(3, 5, weight=1)
+        G.add_edge(4, 5, weight=4)
+        cls.G = G
+        cls.exact_weighted = {0: 4.0, 1: 0.0, 2: 8.0, 3: 6.0, 4: 8.0, 5: 0.0}
+        cls.K = nx.krackhardt_kite_graph()
+        cls.P3 = nx.path_graph(3)
+        cls.P4 = nx.path_graph(4)
+        cls.K5 = nx.complete_graph(5)
+        cls.P2 = nx.path_graph(2)
+
+        cls.C4 = nx.cycle_graph(4)
+        cls.T = nx.balanced_tree(r=2, h=2)
+        cls.Gb = nx.Graph()
+        cls.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])
+        cls.F = nx.florentine_families_graph()
+        cls.LM = nx.les_miserables_graph()
+        cls.D = nx.cycle_graph(3, create_using=nx.DiGraph())
+        cls.D.add_edges_from([(3, 0), (4, 3)])
+
+    def test_not_strongly_connected(self):
+        b = nx.load_centrality(self.D)
+        result = {0: 5.0 / 12, 1: 1.0 / 4, 2: 1.0 / 12, 3: 1.0 / 4, 4: 0.000}
+        for n in sorted(self.D):
+            assert result[n] == pytest.approx(b[n], abs=1e-3)
+            assert result[n] == pytest.approx(nx.load_centrality(self.D, n), abs=1e-3)
+
+    def test_P2_normalized_load(self):
+        G = self.P2
+        c = nx.load_centrality(G, normalized=True)
+        d = {0: 0.000, 1: 0.000}
+        for n in sorted(G):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_weighted_load(self):
+        b = nx.load_centrality(self.G, weight="weight", normalized=False)
+        for n in sorted(self.G):
+            assert b[n] == self.exact_weighted[n]
+
+    def test_k5_load(self):
+        G = self.K5
+        c = nx.load_centrality(G)
+        d = {0: 0.000, 1: 0.000, 2: 0.000, 3: 0.000, 4: 0.000}
+        for n in sorted(G):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_p3_load(self):
+        G = self.P3
+        c = nx.load_centrality(G)
+        d = {0: 0.000, 1: 1.000, 2: 0.000}
+        for n in sorted(G):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+        c = nx.load_centrality(G, v=1)
+        assert c == pytest.approx(1.0, abs=1e-7)
+        c = nx.load_centrality(G, v=1, normalized=True)
+        assert c == pytest.approx(1.0, abs=1e-7)
+
+    def test_p2_load(self):
+        G = nx.path_graph(2)
+        c = nx.load_centrality(G)
+        d = {0: 0.000, 1: 0.000}
+        for n in sorted(G):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_krackhardt_load(self):
+        G = self.K
+        c = nx.load_centrality(G)
+        d = {
+            0: 0.023,
+            1: 0.023,
+            2: 0.000,
+            3: 0.102,
+            4: 0.000,
+            5: 0.231,
+            6: 0.231,
+            7: 0.389,
+            8: 0.222,
+            9: 0.000,
+        }
+        for n in sorted(G):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_florentine_families_load(self):
+        G = self.F
+        c = nx.load_centrality(G)
+        d = {
+            "Acciaiuoli": 0.000,
+            "Albizzi": 0.211,
+            "Barbadori": 0.093,
+            "Bischeri": 0.104,
+            "Castellani": 0.055,
+            "Ginori": 0.000,
+            "Guadagni": 0.251,
+            "Lamberteschi": 0.000,
+            "Medici": 0.522,
+            "Pazzi": 0.000,
+            "Peruzzi": 0.022,
+            "Ridolfi": 0.117,
+            "Salviati": 0.143,
+            "Strozzi": 0.106,
+            "Tornabuoni": 0.090,
+        }
+        for n in sorted(G):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_les_miserables_load(self):
+        G = self.LM
+        c = nx.load_centrality(G)
+        d = {
+            "Napoleon": 0.000,
+            "Myriel": 0.177,
+            "MlleBaptistine": 0.000,
+            "MmeMagloire": 0.000,
+            "CountessDeLo": 0.000,
+            "Geborand": 0.000,
+            "Champtercier": 0.000,
+            "Cravatte": 0.000,
+            "Count": 0.000,
+            "OldMan": 0.000,
+            "Valjean": 0.567,
+            "Labarre": 0.000,
+            "Marguerite": 0.000,
+            "MmeDeR": 0.000,
+            "Isabeau": 0.000,
+            "Gervais": 0.000,
+            "Listolier": 0.000,
+            "Tholomyes": 0.043,
+            "Fameuil": 0.000,
+            "Blacheville": 0.000,
+            "Favourite": 0.000,
+            "Dahlia": 0.000,
+            "Zephine": 0.000,
+            "Fantine": 0.128,
+            "MmeThenardier": 0.029,
+            "Thenardier": 0.075,
+            "Cosette": 0.024,
+            "Javert": 0.054,
+            "Fauchelevent": 0.026,
+            "Bamatabois": 0.008,
+            "Perpetue": 0.000,
+            "Simplice": 0.009,
+            "Scaufflaire": 0.000,
+            "Woman1": 0.000,
+            "Judge": 0.000,
+            "Champmathieu": 0.000,
+            "Brevet": 0.000,
+            "Chenildieu": 0.000,
+            "Cochepaille": 0.000,
+            "Pontmercy": 0.007,
+            "Boulatruelle": 0.000,
+            "Eponine": 0.012,
+            "Anzelma": 0.000,
+            "Woman2": 0.000,
+            "MotherInnocent": 0.000,
+            "Gribier": 0.000,
+            "MmeBurgon": 0.026,
+            "Jondrette": 0.000,
+            "Gavroche": 0.164,
+            "Gillenormand": 0.021,
+            "Magnon": 0.000,
+            "MlleGillenormand": 0.047,
+            "MmePontmercy": 0.000,
+            "MlleVaubois": 0.000,
+            "LtGillenormand": 0.000,
+            "Marius": 0.133,
+            "BaronessT": 0.000,
+            "Mabeuf": 0.028,
+            "Enjolras": 0.041,
+            "Combeferre": 0.001,
+            "Prouvaire": 0.000,
+            "Feuilly": 0.001,
+            "Courfeyrac": 0.006,
+            "Bahorel": 0.002,
+            "Bossuet": 0.032,
+            "Joly": 0.002,
+            "Grantaire": 0.000,
+            "MotherPlutarch": 0.000,
+            "Gueulemer": 0.005,
+            "Babet": 0.005,
+            "Claquesous": 0.005,
+            "Montparnasse": 0.004,
+            "Toussaint": 0.000,
+            "Child1": 0.000,
+            "Child2": 0.000,
+            "Brujon": 0.000,
+            "MmeHucheloup": 0.000,
+        }
+        for n in sorted(G):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_unnormalized_k5_load(self):
+        G = self.K5
+        c = nx.load_centrality(G, normalized=False)
+        d = {0: 0.000, 1: 0.000, 2: 0.000, 3: 0.000, 4: 0.000}
+        for n in sorted(G):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_unnormalized_p3_load(self):
+        G = self.P3
+        c = nx.load_centrality(G, normalized=False)
+        d = {0: 0.000, 1: 2.000, 2: 0.000}
+        for n in sorted(G):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_unnormalized_krackhardt_load(self):
+        G = self.K
+        c = nx.load_centrality(G, normalized=False)
+        d = {
+            0: 1.667,
+            1: 1.667,
+            2: 0.000,
+            3: 7.333,
+            4: 0.000,
+            5: 16.667,
+            6: 16.667,
+            7: 28.000,
+            8: 16.000,
+            9: 0.000,
+        }
+
+        for n in sorted(G):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_unnormalized_florentine_families_load(self):
+        G = self.F
+        c = nx.load_centrality(G, normalized=False)
+
+        d = {
+            "Acciaiuoli": 0.000,
+            "Albizzi": 38.333,
+            "Barbadori": 17.000,
+            "Bischeri": 19.000,
+            "Castellani": 10.000,
+            "Ginori": 0.000,
+            "Guadagni": 45.667,
+            "Lamberteschi": 0.000,
+            "Medici": 95.000,
+            "Pazzi": 0.000,
+            "Peruzzi": 4.000,
+            "Ridolfi": 21.333,
+            "Salviati": 26.000,
+            "Strozzi": 19.333,
+            "Tornabuoni": 16.333,
+        }
+        for n in sorted(G):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_load_betweenness_difference(self):
+        # Difference Between Load and Betweenness
+        # ---------------------------------------
+        # The smallest graph that shows the difference between load
+        # and betweenness is G=ladder_graph(3) (Graph B below)
+
+        # Graph A and B are from Tao Zhou, Jian-Guo Liu, Bing-Hong
+        # Wang: Comment on "Scientific collaboration
+        # networks. II. Shortest paths, weighted networks, and
+        # centrality". https://arxiv.org/pdf/physics/0511084
+
+        # Notice that unlike here, their calculation adds 1 to the
+        # betweenness of every node i for every path from i to every
+        # other node.  This is exactly what it should be, based on
+        # Eqn. (1) in their paper: the eqn is B(v) = \sum_{s\neq t,
+        # s\neq v}{\frac{\sigma_{st}(v)}{\sigma_{st}}}, therefore,
+        # they allow v to be the target node.
+
+        # We follow Brandes 2001, who follows Freeman 1977, in making
+        # the sum for betweenness of v exclude paths where v is either
+        # the source or target node.  To agree with their numbers, we
+        # must additionally remove edge (4,8) from the graph; see AC
+        # example following (there is a mistake in the figure in their
+        # paper - personal communication).
+
+        # A = nx.Graph()
+        # A.add_edges_from([(0,1), (1,2), (1,3), (2,4),
+        #                  (3,5), (4,6), (4,7), (4,8),
+        #                  (5,8), (6,9), (7,9), (8,9)])
+        B = nx.Graph()  # ladder_graph(3)
+        B.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])
+        c = nx.load_centrality(B, normalized=False)
+        d = {0: 1.750, 1: 1.750, 2: 6.500, 3: 6.500, 4: 1.750, 5: 1.750}
+        for n in sorted(B):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_c4_edge_load(self):
+        G = self.C4
+        c = nx.edge_load_centrality(G)
+        d = {(0, 1): 6.000, (0, 3): 6.000, (1, 2): 6.000, (2, 3): 6.000}
+        for n in G.edges():
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_p4_edge_load(self):
+        G = self.P4
+        c = nx.edge_load_centrality(G)
+        d = {(0, 1): 6.000, (1, 2): 8.000, (2, 3): 6.000}
+        for n in G.edges():
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_k5_edge_load(self):
+        G = self.K5
+        c = nx.edge_load_centrality(G)
+        d = {
+            (0, 1): 5.000,
+            (0, 2): 5.000,
+            (0, 3): 5.000,
+            (0, 4): 5.000,
+            (1, 2): 5.000,
+            (1, 3): 5.000,
+            (1, 4): 5.000,
+            (2, 3): 5.000,
+            (2, 4): 5.000,
+            (3, 4): 5.000,
+        }
+        for n in G.edges():
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_tree_edge_load(self):
+        G = self.T
+        c = nx.edge_load_centrality(G)
+        d = {
+            (0, 1): 24.000,
+            (0, 2): 24.000,
+            (1, 3): 12.000,
+            (1, 4): 12.000,
+            (2, 5): 12.000,
+            (2, 6): 12.000,
+        }
+        for n in G.edges():
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_percolation_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_percolation_centrality.py
new file mode 100644
index 00000000..0cb8f529
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_percolation_centrality.py
@@ -0,0 +1,87 @@
+import pytest
+
+import networkx as nx
+
+
+def example1a_G():
+    G = nx.Graph()
+    G.add_node(1, percolation=0.1)
+    G.add_node(2, percolation=0.2)
+    G.add_node(3, percolation=0.2)
+    G.add_node(4, percolation=0.2)
+    G.add_node(5, percolation=0.3)
+    G.add_node(6, percolation=0.2)
+    G.add_node(7, percolation=0.5)
+    G.add_node(8, percolation=0.5)
+    G.add_edges_from([(1, 4), (2, 4), (3, 4), (4, 5), (5, 6), (6, 7), (6, 8)])
+    return G
+
+
+def example1b_G():
+    G = nx.Graph()
+    G.add_node(1, percolation=0.3)
+    G.add_node(2, percolation=0.5)
+    G.add_node(3, percolation=0.5)
+    G.add_node(4, percolation=0.2)
+    G.add_node(5, percolation=0.3)
+    G.add_node(6, percolation=0.2)
+    G.add_node(7, percolation=0.1)
+    G.add_node(8, percolation=0.1)
+    G.add_edges_from([(1, 4), (2, 4), (3, 4), (4, 5), (5, 6), (6, 7), (6, 8)])
+    return G
+
+
+def test_percolation_example1a():
+    """percolation centrality: example 1a"""
+    G = example1a_G()
+    p = nx.percolation_centrality(G)
+    p_answer = {4: 0.625, 6: 0.667}
+    for n, k in p_answer.items():
+        assert p[n] == pytest.approx(k, abs=1e-3)
+
+
+def test_percolation_example1b():
+    """percolation centrality: example 1b"""
+    G = example1b_G()
+    p = nx.percolation_centrality(G)
+    p_answer = {4: 0.825, 6: 0.4}
+    for n, k in p_answer.items():
+        assert p[n] == pytest.approx(k, abs=1e-3)
+
+
+def test_converge_to_betweenness():
+    """percolation centrality: should converge to betweenness
+    centrality when all nodes are percolated the same"""
+    # taken from betweenness test test_florentine_families_graph
+    G = nx.florentine_families_graph()
+    b_answer = {
+        "Acciaiuoli": 0.000,
+        "Albizzi": 0.212,
+        "Barbadori": 0.093,
+        "Bischeri": 0.104,
+        "Castellani": 0.055,
+        "Ginori": 0.000,
+        "Guadagni": 0.255,
+        "Lamberteschi": 0.000,
+        "Medici": 0.522,
+        "Pazzi": 0.000,
+        "Peruzzi": 0.022,
+        "Ridolfi": 0.114,
+        "Salviati": 0.143,
+        "Strozzi": 0.103,
+        "Tornabuoni": 0.092,
+    }
+
+    # If no initial state is provided, state for
+    # every node defaults to 1
+    p_answer = nx.percolation_centrality(G)
+    assert p_answer == pytest.approx(b_answer, abs=1e-3)
+
+    p_states = {k: 0.3 for k, v in b_answer.items()}
+    p_answer = nx.percolation_centrality(G, states=p_states)
+    assert p_answer == pytest.approx(b_answer, abs=1e-3)
+
+
+def test_default_percolation():
+    G = nx.erdos_renyi_graph(42, 0.42, seed=42)
+    assert nx.percolation_centrality(G) == pytest.approx(nx.betweenness_centrality(G))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_reaching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_reaching.py
new file mode 100644
index 00000000..35d50e70
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_reaching.py
@@ -0,0 +1,140 @@
+"""Unit tests for the :mod:`networkx.algorithms.centrality.reaching` module."""
+
+import pytest
+
+import networkx as nx
+
+
+class TestGlobalReachingCentrality:
+    """Unit tests for the global reaching centrality function."""
+
+    def test_non_positive_weights(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.DiGraph()
+            nx.global_reaching_centrality(G, weight="weight")
+
+    def test_negatively_weighted(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.Graph()
+            G.add_weighted_edges_from([(0, 1, -2), (1, 2, +1)])
+            nx.global_reaching_centrality(G, weight="weight")
+
+    def test_directed_star(self):
+        G = nx.DiGraph()
+        G.add_weighted_edges_from([(1, 2, 0.5), (1, 3, 0.5)])
+        grc = nx.global_reaching_centrality
+        assert grc(G, normalized=False, weight="weight") == 0.5
+        assert grc(G) == 1
+
+    def test_undirected_unweighted_star(self):
+        G = nx.star_graph(2)
+        grc = nx.global_reaching_centrality
+        assert grc(G, normalized=False, weight=None) == 0.25
+
+    def test_undirected_weighted_star(self):
+        G = nx.Graph()
+        G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)])
+        grc = nx.global_reaching_centrality
+        assert grc(G, normalized=False, weight="weight") == 0.375
+
+    def test_cycle_directed_unweighted(self):
+        G = nx.DiGraph()
+        G.add_edge(1, 2)
+        G.add_edge(2, 1)
+        assert nx.global_reaching_centrality(G, weight=None) == 0
+
+    def test_cycle_undirected_unweighted(self):
+        G = nx.Graph()
+        G.add_edge(1, 2)
+        assert nx.global_reaching_centrality(G, weight=None) == 0
+
+    def test_cycle_directed_weighted(self):
+        G = nx.DiGraph()
+        G.add_weighted_edges_from([(1, 2, 1), (2, 1, 1)])
+        assert nx.global_reaching_centrality(G) == 0
+
+    def test_cycle_undirected_weighted(self):
+        G = nx.Graph()
+        G.add_edge(1, 2, weight=1)
+        grc = nx.global_reaching_centrality
+        assert grc(G, normalized=False) == 0
+
+    def test_directed_weighted(self):
+        G = nx.DiGraph()
+        G.add_edge("A", "B", weight=5)
+        G.add_edge("B", "C", weight=1)
+        G.add_edge("B", "D", weight=0.25)
+        G.add_edge("D", "E", weight=1)
+
+        denom = len(G) - 1
+        A_local = sum([5, 3, 2.625, 2.0833333333333]) / denom
+        B_local = sum([1, 0.25, 0.625]) / denom
+        C_local = 0
+        D_local = sum([1]) / denom
+        E_local = 0
+
+        local_reach_ctrs = [A_local, C_local, B_local, D_local, E_local]
+        max_local = max(local_reach_ctrs)
+        expected = sum(max_local - lrc for lrc in local_reach_ctrs) / denom
+        grc = nx.global_reaching_centrality
+        actual = grc(G, normalized=False, weight="weight")
+        assert expected == pytest.approx(actual, abs=1e-7)
+
+    def test_single_node_with_cycle(self):
+        G = nx.DiGraph([(1, 1)])
+        with pytest.raises(nx.NetworkXError, match="local_reaching_centrality"):
+            nx.global_reaching_centrality(G)
+
+    def test_single_node_with_weighted_cycle(self):
+        G = nx.DiGraph()
+        G.add_weighted_edges_from([(1, 1, 2)])
+        with pytest.raises(nx.NetworkXError, match="local_reaching_centrality"):
+            nx.global_reaching_centrality(G, weight="weight")
+
+
+class TestLocalReachingCentrality:
+    """Unit tests for the local reaching centrality function."""
+
+    def test_non_positive_weights(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.DiGraph()
+            G.add_weighted_edges_from([(0, 1, 0)])
+            nx.local_reaching_centrality(G, 0, weight="weight")
+
+    def test_negatively_weighted(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.Graph()
+            G.add_weighted_edges_from([(0, 1, -2), (1, 2, +1)])
+            nx.local_reaching_centrality(G, 0, weight="weight")
+
+    def test_undirected_unweighted_star(self):
+        G = nx.star_graph(2)
+        grc = nx.local_reaching_centrality
+        assert grc(G, 1, weight=None, normalized=False) == 0.75
+
+    def test_undirected_weighted_star(self):
+        G = nx.Graph()
+        G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)])
+        centrality = nx.local_reaching_centrality(
+            G, 1, normalized=False, weight="weight"
+        )
+        assert centrality == 1.5
+
+    def test_undirected_weighted_normalized(self):
+        G = nx.Graph()
+        G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)])
+        centrality = nx.local_reaching_centrality(
+            G, 1, normalized=True, weight="weight"
+        )
+        assert centrality == 1.0
+
+    def test_single_node_with_cycle(self):
+        G = nx.DiGraph([(1, 1)])
+        with pytest.raises(nx.NetworkXError, match="local_reaching_centrality"):
+            nx.local_reaching_centrality(G, 1)
+
+    def test_single_node_with_weighted_cycle(self):
+        G = nx.DiGraph()
+        G.add_weighted_edges_from([(1, 1, 2)])
+        with pytest.raises(nx.NetworkXError, match="local_reaching_centrality"):
+            nx.local_reaching_centrality(G, 1, weight="weight")
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_second_order_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_second_order_centrality.py
new file mode 100644
index 00000000..cc304786
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_second_order_centrality.py
@@ -0,0 +1,82 @@
+"""
+Tests for second order centrality.
+"""
+
+import pytest
+
+pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+import networkx as nx
+
+
+def test_empty():
+    with pytest.raises(nx.NetworkXException):
+        G = nx.empty_graph()
+        nx.second_order_centrality(G)
+
+
+def test_non_connected():
+    with pytest.raises(nx.NetworkXException):
+        G = nx.Graph()
+        G.add_node(0)
+        G.add_node(1)
+        nx.second_order_centrality(G)
+
+
+def test_non_negative_edge_weights():
+    with pytest.raises(nx.NetworkXException):
+        G = nx.path_graph(2)
+        G.add_edge(0, 1, weight=-1)
+        nx.second_order_centrality(G)
+
+
+def test_weight_attribute():
+    G = nx.Graph()
+    G.add_weighted_edges_from([(0, 1, 1.0), (1, 2, 3.5)], weight="w")
+    expected = {0: 3.431, 1: 3.082, 2: 5.612}
+    b = nx.second_order_centrality(G, weight="w")
+
+    for n in sorted(G):
+        assert b[n] == pytest.approx(expected[n], abs=1e-2)
+
+
+def test_one_node_graph():
+    """Second order centrality: single node"""
+    G = nx.Graph()
+    G.add_node(0)
+    G.add_edge(0, 0)
+    assert nx.second_order_centrality(G)[0] == 0
+
+
+def test_P3():
+    """Second order centrality: line graph, as defined in paper"""
+    G = nx.path_graph(3)
+    b_answer = {0: 3.741, 1: 1.414, 2: 3.741}
+
+    b = nx.second_order_centrality(G)
+
+    for n in sorted(G):
+        assert b[n] == pytest.approx(b_answer[n], abs=1e-2)
+
+
+def test_K3():
+    """Second order centrality: complete graph, as defined in paper"""
+    G = nx.complete_graph(3)
+    b_answer = {0: 1.414, 1: 1.414, 2: 1.414}
+
+    b = nx.second_order_centrality(G)
+
+    for n in sorted(G):
+        assert b[n] == pytest.approx(b_answer[n], abs=1e-2)
+
+
+def test_ring_graph():
+    """Second order centrality: ring graph, as defined in paper"""
+    G = nx.cycle_graph(5)
+    b_answer = {0: 4.472, 1: 4.472, 2: 4.472, 3: 4.472, 4: 4.472}
+
+    b = nx.second_order_centrality(G)
+
+    for n in sorted(G):
+        assert b[n] == pytest.approx(b_answer[n], abs=1e-2)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_subgraph.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_subgraph.py
new file mode 100644
index 00000000..71092751
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_subgraph.py
@@ -0,0 +1,110 @@
+import pytest
+
+pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+import networkx as nx
+from networkx.algorithms.centrality.subgraph_alg import (
+    communicability_betweenness_centrality,
+    estrada_index,
+    subgraph_centrality,
+    subgraph_centrality_exp,
+)
+
+
+class TestSubgraph:
+    def test_subgraph_centrality(self):
+        answer = {0: 1.5430806348152433, 1: 1.5430806348152433}
+        result = subgraph_centrality(nx.path_graph(2))
+        for k, v in result.items():
+            assert answer[k] == pytest.approx(v, abs=1e-7)
+
+        answer1 = {
+            "1": 1.6445956054135658,
+            "Albert": 2.4368257358712189,
+            "Aric": 2.4368257358712193,
+            "Dan": 3.1306328496328168,
+            "Franck": 2.3876142275231915,
+        }
+        G1 = nx.Graph(
+            [
+                ("Franck", "Aric"),
+                ("Aric", "Dan"),
+                ("Dan", "Albert"),
+                ("Albert", "Franck"),
+                ("Dan", "1"),
+                ("Franck", "Albert"),
+            ]
+        )
+        result1 = subgraph_centrality(G1)
+        for k, v in result1.items():
+            assert answer1[k] == pytest.approx(v, abs=1e-7)
+        result1 = subgraph_centrality_exp(G1)
+        for k, v in result1.items():
+            assert answer1[k] == pytest.approx(v, abs=1e-7)
+
+    def test_subgraph_centrality_big_graph(self):
+        g199 = nx.complete_graph(199)
+        g200 = nx.complete_graph(200)
+
+        comm199 = nx.subgraph_centrality(g199)
+        comm199_exp = nx.subgraph_centrality_exp(g199)
+
+        comm200 = nx.subgraph_centrality(g200)
+        comm200_exp = nx.subgraph_centrality_exp(g200)
+
+    def test_communicability_betweenness_centrality_small(self):
+        result = communicability_betweenness_centrality(nx.path_graph(2))
+        assert result == {0: 0, 1: 0}
+
+        result = communicability_betweenness_centrality(nx.path_graph(1))
+        assert result == {0: 0}
+
+        result = communicability_betweenness_centrality(nx.path_graph(0))
+        assert result == {}
+
+        answer = {0: 0.1411224421177313, 1: 1.0, 2: 0.1411224421177313}
+        result = communicability_betweenness_centrality(nx.path_graph(3))
+        for k, v in result.items():
+            assert answer[k] == pytest.approx(v, abs=1e-7)
+
+        result = communicability_betweenness_centrality(nx.complete_graph(3))
+        for k, v in result.items():
+            assert 0.49786143366223296 == pytest.approx(v, abs=1e-7)
+
+    def test_communicability_betweenness_centrality(self):
+        answer = {
+            0: 0.07017447951484615,
+            1: 0.71565598701107991,
+            2: 0.71565598701107991,
+            3: 0.07017447951484615,
+        }
+        result = communicability_betweenness_centrality(nx.path_graph(4))
+        for k, v in result.items():
+            assert answer[k] == pytest.approx(v, abs=1e-7)
+
+        answer1 = {
+            "1": 0.060039074193949521,
+            "Albert": 0.315470761661372,
+            "Aric": 0.31547076166137211,
+            "Dan": 0.68297778678316201,
+            "Franck": 0.21977926617449497,
+        }
+        G1 = nx.Graph(
+            [
+                ("Franck", "Aric"),
+                ("Aric", "Dan"),
+                ("Dan", "Albert"),
+                ("Albert", "Franck"),
+                ("Dan", "1"),
+                ("Franck", "Albert"),
+            ]
+        )
+        result1 = communicability_betweenness_centrality(G1)
+        for k, v in result1.items():
+            assert answer1[k] == pytest.approx(v, abs=1e-7)
+
+    def test_estrada_index(self):
+        answer = 1041.2470334195475
+        result = estrada_index(nx.karate_club_graph())
+        assert answer == pytest.approx(result, abs=1e-7)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_trophic.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_trophic.py
new file mode 100644
index 00000000..e6880d52
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_trophic.py
@@ -0,0 +1,302 @@
+"""Test trophic levels, trophic differences and trophic coherence"""
+
+import pytest
+
+np = pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+import networkx as nx
+
+
def test_trophic_levels():
    """A directed path a -> b -> c has trophic levels 1, 2, 3."""
    G = nx.DiGraph([("a", "b"), ("b", "c")])
    assert nx.trophic_levels(G) == {"a": 1, "b": 2, "c": 3}
+
+
def test_trophic_levels_levine():
    """Example from Figure 5 in Stephen Levine (1980) J. theor. Biol. 83,
    195-207
    """
    weighted_edges = [
        (1, 2, 1.0),
        (1, 3, 0.2),
        (1, 4, 0.8),
        (2, 3, 0.2),
        (2, 5, 0.3),
        (4, 3, 0.6),
        (4, 5, 0.7),
        (5, 4, 0.2),
    ]
    S = nx.DiGraph()
    S.add_weighted_edges_from(weighted_edges)

    # keep an untouched copy; the mutating checks below exercise
    # intermediate implementation details first
    S2 = S.copy()

    # drop nodes of in-degree zero (basal nodes)
    basal = [node for node, deg in S.in_degree if deg == 0]
    S.remove_nodes_from(basal)

    # adjacency matrix, transposed to match the implementation
    q = nx.linalg.graphmatrix.adjacency_matrix(S).T

    # fmt: off
    expected_q = np.array([
        [0, 0, 0., 0],
        [0.2, 0, 0.6, 0],
        [0, 0, 0, 0.2],
        [0.3, 0, 0.7, 0]
    ])
    # fmt: on
    assert np.array_equal(q.todense(), expected_q)

    # must be square, size of number of nodes
    assert len(q.shape) == 2
    nn = q.shape[0]
    assert q.shape == (nn, nn)
    assert nn == len(S)

    i = np.eye(nn)
    n = np.linalg.inv(i - q)
    y = np.asarray(n) @ np.ones(nn)

    assert np.allclose(y, np.array([1, 2.07906977, 1.46511628, 2.3255814]))

    expected_levels = {1: 1, 2: 2, 3: 3.07906977, 4: 2.46511628, 5: 3.3255814}
    for node, level in nx.trophic_levels(S2).items():
        assert expected_levels[node] == pytest.approx(level, abs=1e-7)
+
+
def test_trophic_levels_simple():
    """Single edge 1 -> 0: the source is basal, the target one level up."""
    G = nx.from_numpy_array(np.array([[0, 0], [1, 0]]), create_using=nx.DiGraph)
    levels = nx.trophic_levels(G)
    assert levels[0] == pytest.approx(2, abs=1e-7)
    assert levels[1] == pytest.approx(1, abs=1e-7)
+
+
def test_trophic_levels_more_complex():
    """Trophic levels of a 4-node chain and a 4-node DAG with shortcut edges."""
    # fmt: off
    matrix = np.array([
        [0, 1, 0, 0],
        [0, 0, 1, 0],
        [0, 0, 0, 1],
        [0, 0, 0, 0]
    ])
    # fmt: on
    G = nx.from_numpy_array(matrix, create_using=nx.DiGraph)
    d = nx.trophic_levels(G)
    expected_result = [1, 2, 3, 4]
    for ind in range(4):
        assert d[ind] == pytest.approx(expected_result[ind], abs=1e-7)

    # fmt: off
    matrix = np.array([
        [0, 1, 1, 0],
        [0, 0, 1, 1],
        [0, 0, 0, 1],
        [0, 0, 0, 0]
    ])
    # fmt: on
    G = nx.from_numpy_array(matrix, create_using=nx.DiGraph)
    d = nx.trophic_levels(G)
    # Nodes 2 and 3 have multiple prey, so their levels are averages of the
    # prey levels plus one.
    expected_result = [1, 2, 2.5, 3.25]
    for ind in range(4):
        assert d[ind] == pytest.approx(expected_result[ind], abs=1e-7)
+
+
def test_trophic_levels_even_more_complex():
    """Compare trophic_levels against a linear system solved by hand."""
    # fmt: off
    # Another, bigger matrix
    adjacency = np.array([
        [0, 0, 0, 0, 0],
        [0, 1, 0, 1, 0],
        [1, 0, 0, 0, 0],
        [0, 1, 0, 0, 0],
        [0, 0, 0, 1, 0]
    ])
    # Generated this linear system using pen and paper:
    K = np.array([
        [1, 0, -1, 0, 0],
        [0, 0.5, 0, -0.5, 0],
        [0, 0, 1, 0, 0],
        [0, -0.5, 0, 1, -0.5],
        [0, 0, 0, 0, 1],
    ])
    # fmt: on
    by_hand = np.ravel(np.linalg.inv(K) @ np.ones(5))
    G = nx.from_numpy_array(adjacency, create_using=nx.DiGraph)
    computed = nx.trophic_levels(G)

    for node in range(5):
        assert by_hand[node] == pytest.approx(computed[node], abs=1e-7)
+
+
def test_trophic_levels_singular_matrix():
    """Should raise an error with graphs with only non-basal nodes"""
    # identity adjacency: every node has a self-loop, so no basal nodes exist
    G = nx.from_numpy_array(np.identity(4), create_using=nx.DiGraph)
    expected_msg = (
        "Trophic levels are only defined for graphs where every node "
        "has a path from a basal node (basal nodes are nodes with no "
        "incoming edges)."
    )
    with pytest.raises(nx.NetworkXError) as excinfo:
        nx.trophic_levels(G)
    assert expected_msg in str(excinfo.value)
+
+
def test_trophic_levels_singular_with_basal():
    """Should fail to compute if there are any parts of the graph which are not
    reachable from any basal node (with in-degree zero).
    """
    expected_msg = (
        "Trophic levels are only defined for graphs where every node "
        "has a path from a basal node (basal nodes are nodes with no "
        "incoming edges)."
    )

    G = nx.DiGraph()
    # a has in-degree zero
    G.add_edge("a", "b")
    # b is one level above a, c and d
    G.add_edge("c", "b")
    G.add_edge("d", "b")
    # c and d form a loop, neither are reachable from a
    G.add_edge("c", "d")
    G.add_edge("d", "c")

    with pytest.raises(nx.NetworkXError) as excinfo:
        nx.trophic_levels(G)
    assert expected_msg in str(excinfo.value)

    # if self-loops are allowed, smaller example:
    H = nx.DiGraph()
    H.add_edge("a", "b")  # a has in-degree zero
    H.add_edge("c", "b")  # b is one level above a and c
    H.add_edge("c", "c")  # c has a self-loop
    with pytest.raises(nx.NetworkXError) as excinfo:
        nx.trophic_levels(H)
    assert expected_msg in str(excinfo.value)
+
+
def test_trophic_differences():
    """Edge-wise differences of trophic levels for two small DAGs."""
    single = nx.from_numpy_array(np.array([[0, 1], [0, 0]]), create_using=nx.DiGraph)
    assert nx.trophic_differences(single)[(0, 1)] == pytest.approx(1, abs=1e-7)

    # fmt: off
    matrix_b = np.array([
        [0, 1, 1, 0],
        [0, 0, 1, 1],
        [0, 0, 0, 1],
        [0, 0, 0, 0]
    ])
    # fmt: on
    G = nx.from_numpy_array(matrix_b, create_using=nx.DiGraph)
    diffs = nx.trophic_differences(G)

    expected = {(0, 1): 1, (0, 2): 1.5, (1, 2): 0.5, (1, 3): 1.25, (2, 3): 0.75}
    for edge, value in expected.items():
        assert diffs[edge] == pytest.approx(value, abs=1e-7)
+
+
def test_trophic_incoherence_parameter_no_cannibalism():
    # A single edge has only one trophic difference, so the spread q is 0.
    matrix_a = np.array([[0, 1], [0, 0]])
    G = nx.from_numpy_array(matrix_a, create_using=nx.DiGraph)
    q = nx.trophic_incoherence_parameter(G, cannibalism=False)
    assert q == pytest.approx(0, abs=1e-7)

    # fmt: off
    matrix_b = np.array([
        [0, 1, 1, 0],
        [0, 0, 1, 1],
        [0, 0, 0, 1],
        [0, 0, 0, 0]
    ])
    # fmt: on
    G = nx.from_numpy_array(matrix_b, create_using=nx.DiGraph)
    q = nx.trophic_incoherence_parameter(G, cannibalism=False)
    assert q == pytest.approx(np.std([1, 1.5, 0.5, 0.75, 1.25]), abs=1e-7)

    # fmt: off
    matrix_c = np.array([
        [0, 1, 1, 0],
        [0, 1, 1, 1],
        [0, 0, 0, 1],
        [0, 0, 0, 1]
    ])
    # fmt: on
    G = nx.from_numpy_array(matrix_c, create_using=nx.DiGraph)
    q = nx.trophic_incoherence_parameter(G, cannibalism=False)
    # Self-links at (1, 1) and (3, 3) are ignored with cannibalism=False, so
    # the result matches matrix_b above.
    assert q == pytest.approx(np.std([1, 1.5, 0.5, 0.75, 1.25]), abs=1e-7)

    # no self-loops case
    # fmt: off
    matrix_d = np.array([
        [0, 1, 1, 0],
        [0, 0, 1, 1],
        [0, 0, 0, 1],
        [0, 0, 0, 0]
    ])
    # fmt: on
    G = nx.from_numpy_array(matrix_d, create_using=nx.DiGraph)
    q = nx.trophic_incoherence_parameter(G, cannibalism=False)
    # No self-links to ignore here (matrix_d equals matrix_b), so q is the same.
    assert q == pytest.approx(np.std([1, 1.5, 0.5, 0.75, 1.25]), abs=1e-7)
+
+
def test_trophic_incoherence_parameter_cannibalism():
    """Self-loops are kept in the calculation when cannibalism=True."""
    single = nx.from_numpy_array(np.array([[0, 1], [0, 0]]), create_using=nx.DiGraph)
    q_single = nx.trophic_incoherence_parameter(single, cannibalism=True)
    assert q_single == pytest.approx(0, abs=1e-7)

    # fmt: off
    with_loop = np.array([
        [0, 0, 0, 0, 0],
        [0, 1, 0, 1, 0],
        [1, 0, 0, 0, 0],
        [0, 1, 0, 0, 0],
        [0, 0, 0, 1, 0]
    ])
    # fmt: on
    G = nx.from_numpy_array(with_loop, create_using=nx.DiGraph)
    q_loop = nx.trophic_incoherence_parameter(G, cannibalism=True)
    assert q_loop == pytest.approx(2, abs=1e-7)

    # fmt: off
    dag = np.array([
        [0, 1, 1, 0],
        [0, 0, 1, 1],
        [0, 0, 0, 1],
        [0, 0, 0, 0]
    ])
    # fmt: on
    H = nx.from_numpy_array(dag, create_using=nx.DiGraph)
    q_dag = nx.trophic_incoherence_parameter(H, cannibalism=True)
    # no self-loops present, so this matches the plain standard deviation
    assert q_dag == pytest.approx(np.std([1, 1.5, 0.5, 0.75, 1.25]), abs=1e-7)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_voterank.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_voterank.py
new file mode 100644
index 00000000..a5cfb610
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_voterank.py
@@ -0,0 +1,64 @@
+"""
+Unit tests for VoteRank.
+"""
+
+import networkx as nx
+
+
class TestVoteRankCentrality:
    def test_voterank_centrality_1(self):
        """Example graph present in the reference paper."""
        edges = [
            (7, 8),
            (7, 5),
            (7, 9),
            (5, 0),
            (0, 1),
            (0, 2),
            (0, 3),
            (0, 4),
            (1, 6),
            (2, 6),
            (3, 6),
            (4, 6),
        ]
        G = nx.Graph(edges)
        assert nx.voterank(G) == [0, 7, 6]

    def test_voterank_emptygraph(self):
        """An empty graph yields no influential nodes."""
        assert nx.voterank(nx.Graph()) == []

    def test_voterank_centrality_2(self):
        """Graph unit test."""
        G = nx.florentine_families_graph()
        expected = ["Medici", "Strozzi", "Guadagni", "Castellani"]
        assert nx.voterank(G, 4) == expected

    def test_voterank_centrality_3(self):
        """DiGraph unit test."""
        G = nx.gnc_graph(10, seed=7)
        assert nx.voterank(G, 4) == [3, 6, 8]

    def test_voterank_centrality_4(self):
        """MultiGraph unit test."""
        G = nx.MultiGraph(
            [(0, 1), (0, 1), (1, 2), (2, 5), (2, 5), (5, 6), (5, 6), (2, 4), (4, 3)]
        )
        assert nx.voterank(G) == [2, 1, 5, 4]

    def test_voterank_centrality_5(self):
        """MultiDiGraph unit test."""
        G = nx.MultiDiGraph(
            [(0, 1), (0, 1), (1, 2), (2, 5), (2, 5), (5, 6), (5, 6), (2, 4), (4, 3)]
        )
        assert nx.voterank(G) == [2, 0, 5, 4]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/trophic.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/trophic.py
new file mode 100644
index 00000000..9e461ced
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/trophic.py
@@ -0,0 +1,163 @@
+"""Trophic levels"""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["trophic_levels", "trophic_differences", "trophic_incoherence_parameter"]
+
+
@not_implemented_for("undirected")
@nx._dispatchable(edge_attrs="weight")
def trophic_levels(G, weight="weight"):
    r"""Compute the trophic levels of nodes.

    The trophic level of a node $i$ is

    .. math::

        s_i = 1 + \frac{1}{k^{in}_i} \sum_{j} a_{ij} s_j

    where $k^{in}_i$ is the in-degree of i

    .. math::

        k^{in}_i = \sum_{j} a_{ij}

    and nodes with $k^{in}_i = 0$ have $s_i = 1$ by convention.

    These are calculated using the method outlined in Levine [1]_.

    Parameters
    ----------
    G : DiGraph
        A directed networkx graph

    weight : string, optional (default="weight")
        Edge attribute used as the edge weight.

    Returns
    -------
    nodes : dict
        Dictionary of nodes with trophic level as the value.

    Raises
    ------
    NetworkXError
        If some node has no path from a basal node (in-degree zero), in
        which case the defining linear system is singular.

    References
    ----------
    .. [1] Stephen Levine (1980) J. theor. Biol. 83, 195-207
    """
    import numpy as np

    # transpose so row i holds the weights of node i's incoming edges
    adj = nx.adjacency_matrix(G, weight=weight).T.toarray()

    # mask out rows/columns whose weighted in-degree is zero
    in_weight = adj.sum(axis=1)
    nonbasal = in_weight != 0
    p = adj[nonbasal][:, nonbasal]
    # normalise so the in-weights along each remaining row sum to 1
    p = p / in_weight[nonbasal][:, np.newaxis]

    # solve for the levels of non-basal nodes via (I - p)^{-1}
    identity = np.eye(p.shape[0])
    try:
        inverse = np.linalg.inv(identity - p)
    except np.linalg.LinAlgError as err:
        # singular matrix: some node is unreachable from every basal node
        msg = (
            "Trophic levels are only defined for graphs where every "
            "node has a path from a basal node (basal nodes are nodes "
            "with no incoming edges)."
        )
        raise nx.NetworkXError(msg) from err
    y = inverse.sum(axis=1) + 1

    levels = {}

    # basal nodes (in-degree zero) have trophic level 1 by convention
    for node_id, degree in G.in_degree:
        if degree == 0:
            levels[node_id] = 1

    # remaining nodes take the computed values, in G's node order
    idx = 0
    for node_id, degree in G.in_degree:
        if degree != 0:
            levels[node_id] = y.item(idx)
            idx += 1

    return levels
+
+
@not_implemented_for("undirected")
@nx._dispatchable(edge_attrs="weight")
def trophic_differences(G, weight="weight"):
    r"""Compute the trophic differences of the edges of a directed graph.

    The trophic difference $x_ij$ for each edge is defined in Johnson et al.
    [1]_ as:

    .. math::
        x_ij = s_j - s_i

    Where $s_i$ is the trophic level of node $i$.

    Parameters
    ----------
    G : DiGraph
        A directed networkx graph

    weight : string, optional (default="weight")
        Edge attribute used as the edge weight.

    Returns
    -------
    diffs : dict
        Dictionary of edges with trophic differences as the value.

    References
    ----------
    .. [1] Samuel Johnson, Virginia Dominguez-Garcia, Luca Donetti, Miguel A.
        Munoz (2014) PNAS "Trophic coherence determines food-web stability"
    """
    levels = trophic_levels(G, weight=weight)
    # difference across an edge u -> v is level(v) - level(u)
    return {(u, v): levels[v] - levels[u] for u, v in G.edges}
+
+
@not_implemented_for("undirected")
@nx._dispatchable(edge_attrs="weight")
def trophic_incoherence_parameter(G, weight="weight", cannibalism=False):
    r"""Compute the trophic incoherence parameter of a graph.

    Trophic coherence is defined as the homogeneity of the distribution of
    trophic distances: the more similar, the more coherent. This is measured by
    the standard deviation of the trophic differences and referred to as the
    trophic incoherence parameter $q$ by [1].

    Parameters
    ----------
    G : DiGraph
        A directed networkx graph

    weight : string, optional (default="weight")
        Edge attribute used as the edge weight.

    cannibalism: Boolean
        If set to False, self edges are not considered in the calculation

    Returns
    -------
    trophic_incoherence_parameter : float
        The trophic coherence of a graph

    References
    ----------
    .. [1] Samuel Johnson, Virginia Dominguez-Garcia, Luca Donetti, Miguel A.
        Munoz (2014) PNAS "Trophic coherence determines food-web stability"
    """
    import numpy as np

    if not cannibalism:
        self_loops = list(nx.selfloop_edges(G))
        if self_loops:
            # work on a copy so the caller's graph is left untouched
            G = G.copy()
            G.remove_edges_from(self_loops)
    diffs = trophic_differences(G, weight=weight)
    return float(np.std(list(diffs.values())))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/voterank_alg.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/voterank_alg.py
new file mode 100644
index 00000000..9b510b28
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/voterank_alg.py
@@ -0,0 +1,95 @@
+"""Algorithm to select influential nodes in a graph using VoteRank."""
+
+import networkx as nx
+
+__all__ = ["voterank"]
+
+
@nx._dispatchable
def voterank(G, number_of_nodes=None):
    """Select a list of influential nodes in a graph using VoteRank algorithm

    VoteRank [1]_ computes a ranking of the nodes in a graph G based on a
    voting scheme. With VoteRank, all nodes vote for each of its in-neighbors
    and the node with the highest votes is elected iteratively. The voting
    ability of out-neighbors of elected nodes is decreased in subsequent turns.

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    number_of_nodes : integer, optional
        Number of ranked nodes to extract (default all nodes).

    Returns
    -------
    voterank : list
        Ordered list of computed seeds.
        Only nodes with positive number of votes are returned.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 4)])
    >>> nx.voterank(G)
    [0, 1]

    The algorithm can be used both for undirected and directed graphs.
    However, the directed version is different in two ways:
    (i) nodes only vote for their in-neighbors and
    (ii) only the voting ability of elected node and its out-neighbors are updated:

    >>> G = nx.DiGraph([(0, 1), (2, 1), (2, 3), (3, 4)])
    >>> nx.voterank(G)
    [2, 3]

    Notes
    -----
    Each edge is treated independently in case of multigraphs.

    References
    ----------
    .. [1] Zhang, J.-X. et al. (2016).
        Identifying a set of influential spreaders in complex networks.
        Sci. Rep. 6, 27823; doi: 10.1038/srep27823.
    """
    influential_nodes = []
    # vote_rank[n] is a mutable pair [score, voting ability]
    vote_rank = {}
    if len(G) == 0:
        return influential_nodes
    if number_of_nodes is None or number_of_nodes > len(G):
        number_of_nodes = len(G)
    if G.is_directed():
        # For directed graphs compute average out-degree
        avgDegree = sum(deg for _, deg in G.out_degree()) / len(G)
    else:
        # For undirected graphs compute average degree
        avgDegree = sum(deg for _, deg in G.degree()) / len(G)
    # step 1 - initiate all nodes to (0,1) (score, voting ability)
    for n in G.nodes():
        vote_rank[n] = [0, 1]
    # Repeat steps 1b to 4 until num_seeds are elected.
    for _ in range(number_of_nodes):
        # step 1b - reset rank (scores are recomputed from scratch each round)
        for n in G.nodes():
            vote_rank[n][0] = 0
        # step 2 - vote
        for n, nbr in G.edges():
            # In directed graphs nodes only vote for their in-neighbors
            vote_rank[n][0] += vote_rank[nbr][1]
            if not G.is_directed():
                vote_rank[nbr][0] += vote_rank[n][1]
        # previously elected nodes cannot be elected again
        for n in influential_nodes:
            vote_rank[n][0] = 0
        # step 3 - select top node
        n = max(G.nodes, key=lambda x: vote_rank[x][0])
        if vote_rank[n][0] == 0:
            # nobody received any votes: stop early and return what we have
            return influential_nodes
        influential_nodes.append(n)
        # weaken the selected node so it no longer scores or votes
        vote_rank[n] = [0, 0]
        # step 4 - update voterank properties
        for _, nbr in G.edges(n):
            # out-neighbors of the elected node lose 1/avgDegree voting
            # ability, floored at zero
            vote_rank[nbr][1] -= 1 / avgDegree
            vote_rank[nbr][1] = max(vote_rank[nbr][1], 0)
    return influential_nodes
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/chains.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/chains.py
new file mode 100644
index 00000000..ae342d9c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/chains.py
@@ -0,0 +1,172 @@
+"""Functions for finding chains in a graph."""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["chain_decomposition"]
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def chain_decomposition(G, root=None):
    """Returns the chain decomposition of a graph.

    The *chain decomposition* of a graph with respect a depth-first
    search tree is a set of cycles or paths derived from the set of
    fundamental cycles of the tree in the following manner. Consider
    each fundamental cycle with respect to the given tree, represented
    as a list of edges beginning with the nontree edge oriented away
    from the root of the tree. For each fundamental cycle, if it
    overlaps with any previous fundamental cycle, just take the initial
    non-overlapping segment, which is a path instead of a cycle. Each
    cycle or path is called a *chain*. For more information, see [1]_.

    Parameters
    ----------
    G : undirected graph

    root : node (optional)
       A node in the graph `G`. If specified, only the chain
       decomposition for the connected component containing this node
       will be returned. This node indicates the root of the depth-first
       search tree.

    Yields
    ------
    chain : list
       A list of edges representing a chain. There is no guarantee on
       the orientation of the edges in each chain (for example, if a
       chain includes the edge joining nodes 1 and 2, the chain may
       include either (1, 2) or (2, 1)).

    Raises
    ------
    NodeNotFound
       If `root` is not in the graph `G`.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> list(nx.chain_decomposition(G))
    [[(4, 5), (5, 3), (3, 4)]]

    Notes
    -----
    The worst-case running time of this implementation is linear in the
    number of nodes and number of edges [1]_.

    References
    ----------
    .. [1] Jens M. Schmidt (2013). "A simple test on 2-vertex-
       and 2-edge-connectivity." *Information Processing Letters*,
       113, 241–244. Elsevier. <https://doi.org/10.1016/j.ipl.2013.01.016>

    """

    def _dfs_cycle_forest(G, root=None):
        """Builds a directed graph composed of cycles from the given graph.

        `G` is an undirected simple graph. `root` is a node in the graph
        from which the depth-first search is started.

        This function returns both the depth-first search cycle graph
        (as a :class:`~networkx.DiGraph`) and the list of nodes in
        depth-first preorder. The depth-first search cycle graph is a
        directed graph whose edges are the edges of `G` oriented toward
        the root if the edge is a tree edge and away from the root if
        the edge is a non-tree edge. If `root` is not specified, this
        performs a depth-first search on each connected component of `G`
        and returns a directed forest instead.

        If `root` is not in the graph, this raises :exc:`KeyError`.

        """
        # Create a directed graph from the depth-first search tree with
        # root node `root` in which tree edges are directed toward the
        # root and nontree edges are directed away from the root. For
        # each node with an incident nontree edge, this creates a
        # directed cycle starting with the nontree edge and returning to
        # that node.
        #
        # The `parent` node attribute stores the parent of each node in
        # the DFS tree. The `nontree` edge attribute indicates whether
        # the edge is a tree edge or a nontree edge.
        #
        # We also store the order of the nodes found in the depth-first
        # search in the `nodes` list.
        H = nx.DiGraph()
        nodes = []
        for u, v, d in nx.dfs_labeled_edges(G, source=root):
            if d == "forward":
                # `dfs_labeled_edges()` yields (root, root, 'forward')
                # if it is beginning the search on a new connected
                # component.
                if u == v:
                    H.add_node(v, parent=None)
                    nodes.append(v)
                else:
                    H.add_node(v, parent=u)
                    H.add_edge(v, u, nontree=False)
                    nodes.append(v)
            # `dfs_labeled_edges` considers nontree edges in both
            # orientations, so we need to not add the edge if its
            # other orientation has already been added.
            elif d == "nontree" and v not in H[u]:
                H.add_edge(v, u, nontree=True)
            else:
                # Do nothing on 'reverse' edges; we only care about
                # forward and nontree edges.
                pass
        return H, nodes

    def _build_chain(G, u, v, visited):
        """Generate the chain starting from the given nontree edge.

        `G` is a DFS cycle graph as constructed by
        :func:`_dfs_cycle_forest`. The edge (`u`, `v`) is a nontree edge
        that begins a chain. `visited` is a set representing the nodes
        in `G` that have already been visited.

        This function yields the edges in an initial segment of the
        fundamental cycle of `G` starting with the nontree edge (`u`,
        `v`) that includes all the edges up until the first node that
        appears in `visited`. The tree edges are given by the 'parent'
        node attribute. The `visited` set is updated to add each node in
        an edge yielded by this function.

        """
        # Walk tree edges toward the root until hitting an already-visited
        # node; that final edge closes the chain.
        while v not in visited:
            yield u, v
            visited.add(v)
            u, v = v, G.nodes[v]["parent"]
        yield u, v

    # Check if the root is in the graph G. If not, raise NodeNotFound
    if root is not None and root not in G:
        raise nx.NodeNotFound(f"Root node {root} is not in graph")

    # Create a directed version of H that has the DFS edges directed
    # toward the root and the nontree edges directed away from the root
    # (in each connected component).
    H, nodes = _dfs_cycle_forest(G, root)

    # Visit the nodes again in DFS order. For each node, and for each
    # nontree edge leaving that node, compute the fundamental cycle for
    # that nontree edge starting with that edge. If the fundamental
    # cycle overlaps with any visited nodes, just take the prefix of the
    # cycle up to the point of visited nodes.
    #
    # We repeat this process for each connected component (implicitly,
    # since `nodes` already has a list of the nodes grouped by connected
    # component).
    visited = set()
    for u in nodes:
        visited.add(u)
        # For each nontree edge going out of node u...
        edges = ((u, v) for u, v, d in H.out_edges(u, data="nontree") if d)
        for u, v in edges:
            # Create the cycle or cycle prefix starting with the
            # nontree edge.
            chain = list(_build_chain(H, u, v, visited))
            yield chain
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/chordal.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/chordal.py
new file mode 100644
index 00000000..ab71c243
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/chordal.py
@@ -0,0 +1,443 @@
+"""
+Algorithms for chordal graphs.
+
+A graph is chordal if every cycle of length at least 4 has a chord
+(an edge joining two nodes not adjacent in the cycle).
+https://en.wikipedia.org/wiki/Chordal_graph
+"""
+
+import sys
+
+import networkx as nx
+from networkx.algorithms.components import connected_components
+from networkx.utils import arbitrary_element, not_implemented_for
+
+__all__ = [
+    "is_chordal",
+    "find_induced_nodes",
+    "chordal_graph_cliques",
+    "chordal_graph_treewidth",
+    "NetworkXTreewidthBoundExceeded",
+    "complete_to_chordal_graph",
+]
+
+
class NetworkXTreewidthBoundExceeded(nx.NetworkXException):
    """Exception raised when a user-supplied treewidth bound has been
    provided and the search has exceeded it."""
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_chordal(G):
    """Checks whether G is a chordal graph.

    A graph is chordal if every cycle of length at least 4 has a chord
    (an edge joining two nodes not adjacent in the cycle).

    Parameters
    ----------
    G : graph
      A NetworkX graph.

    Returns
    -------
    chordal : bool
      True if G is a chordal graph and False otherwise.

    Raises
    ------
    NetworkXNotImplemented
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.

    Notes
    -----
    The routine performs a maximum cardinality search over the nodes and
    reports False as soon as the separator for some node fails to be a
    clique.  Based on the algorithms in [1]_.  Self loops are ignored.

    References
    ----------
    .. [1] R. E. Tarjan and M. Yannakakis, Simple linear-time algorithms
       to test chordality of graphs, test acyclicity of hypergraphs, and
       selectively reduce acyclic hypergraphs, SIAM J. Comput., 13 (1984),
       pp. 566-579.
    """
    # A graph on at most three nodes cannot contain a chordless cycle of
    # length four or more, so it is trivially chordal.
    if len(G.nodes) <= 3:
        return True
    # The maximum cardinality search returns a non-empty witness triple
    # exactly when the graph is not chordal.
    return not _find_chordality_breaker(G)
+
+
@nx._dispatchable
def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize):
    """Returns the set of induced nodes in the path from s to t.

    Parameters
    ----------
    G : graph
      A chordal NetworkX graph
    s : node
        Source node to look for induced nodes
    t : node
        Destination node to look for induced nodes
    treewidth_bound: float
        Maximum treewidth acceptable for the graph H. The search
        for induced nodes will end as soon as the treewidth_bound is exceeded.

    Returns
    -------
    induced_nodes : Set of nodes
        The set of induced nodes in the path from s to t in G

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        If the input graph is an instance of one of these classes, a
        :exc:`NetworkXError` is raised.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G = nx.generators.classic.path_graph(10)
    >>> induced_nodes = nx.find_induced_nodes(G, 1, 9, 2)
    >>> sorted(induced_nodes)
    [1, 2, 3, 4, 5, 6, 7, 8, 9]

    Notes
    -----
    G must be a chordal graph and (s,t) an edge that is not in G.

    If a treewidth_bound is provided, the search for induced nodes will end
    as soon as the treewidth_bound is exceeded.

    The algorithm is inspired by Algorithm 4 in [1]_.
    A formal definition of induced node can also be found on that reference.

    Self Loops are ignored

    References
    ----------
    .. [1] Learning Bounded Treewidth Bayesian Networks.
       Gal Elidan, Stephen Gould; JMLR, 9(Dec):2699--2731, 2008.
       http://jmlr.csail.mit.edu/papers/volume9/elidan08a/elidan08a.pdf
    """
    if not is_chordal(G):
        raise nx.NetworkXError("Input graph is not chordal.")

    # Work on a copy with the (s, t) edge added: unless s and t are already
    # adjacent this creates at least one chordless cycle through s and t.
    H = nx.Graph(G)
    H.add_edge(s, t)
    induced_nodes = set()
    # Repeatedly find a witness (u, v, w) that H is non-chordal; its nodes
    # lie on an induced path from s to t.
    triplet = _find_chordality_breaker(H, s, treewidth_bound)
    while triplet:
        (u, v, w) = triplet
        induced_nodes.update(triplet)
        # Connect s to every witness node so the same chordless cycle is not
        # rediscovered on the next iteration.
        for n in triplet:
            if n != s:
                H.add_edge(s, n)
        triplet = _find_chordality_breaker(H, s, treewidth_bound)
    if induced_nodes:
        # Add t and the second node in the induced path from s to t.
        induced_nodes.add(t)
        for u in G[s]:
            # The neighbor of s on the induced path touches exactly two of
            # the nodes collected so far.
            if len(induced_nodes & set(G[u])) == 2:
                induced_nodes.add(u)
                break
    return induced_nodes
+
+
@nx._dispatchable
def chordal_graph_cliques(G):
    """Returns all maximal cliques of a chordal graph.

    The algorithm breaks the graph in connected components and performs a
    maximum cardinality search in each component to get the cliques.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    Yields
    ------
    frozenset of nodes
        Maximal cliques, each of which is a frozenset of
        nodes in `G`. The order of cliques is arbitrary.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ...     (7, 8),
    ... ]
    >>> G = nx.Graph(e)
    >>> G.add_node(9)
    >>> cliques = [c for c in chordal_graph_cliques(G)]
    >>> cliques[0]
    frozenset({1, 2, 3})
    """
    # Process each connected component independently.
    for C in (G.subgraph(c).copy() for c in connected_components(G)):
        if C.number_of_nodes() == 1:
            # An isolated node is a maximal clique by itself, but a self
            # loop makes the graph non-chordal by this routine's convention.
            if nx.number_of_selfloops(C) > 0:
                raise nx.NetworkXError("Input graph is not chordal.")
            yield frozenset(C.nodes())
        else:
            # Maximum cardinality search: repeatedly number the node with
            # the most already-numbered neighbors.
            unnumbered = set(C.nodes())
            v = arbitrary_element(C)
            unnumbered.remove(v)
            numbered = {v}
            # Candidate clique grown so far along the search.
            clique_wanna_be = {v}
            while unnumbered:
                v = _max_cardinality_node(C, unnumbered, numbered)
                unnumbered.remove(v)
                numbered.add(v)
                # In a chordal graph v's already-numbered neighbors must
                # form a clique.
                new_clique_wanna_be = set(C.neighbors(v)) & numbered
                sg = C.subgraph(clique_wanna_be)
                if _is_complete_graph(sg):
                    new_clique_wanna_be.add(v)
                    # Emit the previous candidate only when it is not
                    # contained in (i.e. not subsumed by) the new one.
                    if not new_clique_wanna_be >= clique_wanna_be:
                        yield frozenset(clique_wanna_be)
                    clique_wanna_be = new_clique_wanna_be
                else:
                    raise nx.NetworkXError("Input graph is not chordal.")
            yield frozenset(clique_wanna_be)
+
+
@nx._dispatchable
def chordal_graph_treewidth(G):
    """Returns the treewidth of the chordal graph G.

    For a chordal graph the treewidth equals the size of its largest
    maximal clique minus one.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    Returns
    -------
    treewidth : int
        The size of the largest clique in the graph minus one.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Tree_decomposition#Treewidth
    """
    if not is_chordal(G):
        raise nx.NetworkXError("Input graph is not chordal.")

    # Largest maximal-clique size; -1 is the sentinel used when the graph
    # yields no cliques at all.
    largest = max((len(c) for c in nx.chordal_graph_cliques(G)), default=-1)
    return largest - 1
+
+
def _is_complete_graph(G):
    """Return True if the simple graph ``G`` is complete.

    Raises
    ------
    NetworkXError
        If ``G`` contains a self loop (self loops are not allowed here).
    """
    if nx.number_of_selfloops(G) > 0:
        raise nx.NetworkXError("Self loop found in _is_complete_graph()")
    n = G.number_of_nodes()
    if n < 2:
        return True
    # n * (n - 1) is always even, so integer floor division is exact.  Using
    # ``//`` keeps the comparison int-to-int instead of comparing the edge
    # count against a float (which loses precision for very large n).
    return G.number_of_edges() == n * (n - 1) // 2
+
+
+def _find_missing_edge(G):
+    """Given a non-complete graph G, returns a missing edge."""
+    nodes = set(G)
+    for u in G:
+        missing = nodes - set(list(G[u].keys()) + [u])
+        if missing:
+            return (u, missing.pop())
+
+
+def _max_cardinality_node(G, choices, wanna_connect):
+    """Returns a the node in choices that has more connections in G
+    to nodes in wanna_connect.
+    """
+    max_number = -1
+    for x in choices:
+        number = len([y for y in G[x] if y in wanna_connect])
+        if number > max_number:
+            max_number = number
+            max_cardinality_node = x
+    return max_cardinality_node
+
+
def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize):
    """Given a graph G, starts a max cardinality search
    (starting from s if s is given and from an arbitrary node otherwise)
    trying to find a non-chordal cycle.

    If it does find one, it returns (u,v,w) where u,v,w are the three
    nodes that together with s are involved in the cycle.

    Returns an empty tuple when no such cycle is found (G looks chordal
    from this search).  It ignores any self loops.

    Raises
    ------
    nx.NetworkXPointlessConcept
        If ``G`` has no nodes.
    nx.NetworkXTreewidthBoundExceeded
        If the clique sizes seen during the search exceed ``treewidth_bound``.
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    unnumbered = set(G)
    if s is None:
        s = arbitrary_element(G)
    unnumbered.remove(s)
    numbered = {s}
    current_treewidth = -1
    while unnumbered:  # and current_treewidth <= treewidth_bound:
        # Maximum cardinality search: pick the unnumbered node with the most
        # already-numbered neighbors.
        v = _max_cardinality_node(G, unnumbered, numbered)
        unnumbered.remove(v)
        numbered.add(v)
        # In a chordal graph, v's already-numbered neighbors must be a clique.
        clique_wanna_be = set(G[v]) & numbered
        sg = G.subgraph(clique_wanna_be)
        if _is_complete_graph(sg):
            # The graph seems to be chordal by now. We update the treewidth
            current_treewidth = max(current_treewidth, len(clique_wanna_be))
            if current_treewidth > treewidth_bound:
                raise nx.NetworkXTreewidthBoundExceeded(
                    f"treewidth_bound exceeded: {current_treewidth}"
                )
        else:
            # sg is not a clique,
            # look for an edge that is not included in sg
            (u, w) = _find_missing_edge(sg)
            return (u, v, w)
    return ()
+
+
@not_implemented_for("directed")
@nx._dispatchable(returns_graph=True)
def complete_to_chordal_graph(G):
    """Return a copy of G completed to a chordal graph

    Adds edges to a copy of G to create a chordal graph. A graph G=(V,E) is
    called chordal if for each cycle with length bigger than 3, there exist
    two non-adjacent nodes connected by an edge (called a chord).

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    H : NetworkX graph
        The chordal enhancement of G
    alpha : Dictionary
            The elimination ordering of nodes of G

    Notes
    -----
    There are different approaches to calculate the chordal
    enhancement of a graph. The algorithm used here is called
    MCS-M and gives at least minimal (local) triangulation of graph. Note
    that this triangulation is not necessarily a global minimum.

    https://en.wikipedia.org/wiki/Chordal_graph

    References
    ----------
    .. [1] Berry, Anne & Blair, Jean & Heggernes, Pinar & Peyton, Barry. (2004)
           Maximum Cardinality Search for Computing Minimal Triangulations of
           Graphs.  Algorithmica. 39. 287-298. 10.1007/s00453-004-1084-3.

    Examples
    --------
    >>> from networkx.algorithms.chordal import complete_to_chordal_graph
    >>> G = nx.wheel_graph(10)
    >>> H, alpha = complete_to_chordal_graph(G)
    """
    H = G.copy()
    # alpha maps each node to its position in the elimination ordering.
    alpha = {node: 0 for node in H}
    # Already chordal: nothing to add, ordering left all-zero.
    if nx.is_chordal(H):
        return H, alpha
    # Fill-in edges (chords) to be added at the end.
    chords = set()
    weight = {node: 0 for node in H.nodes()}
    unnumbered_nodes = list(H.nodes())
    # Number the nodes from n down to 1 (MCS-M visits in reverse order).
    for i in range(len(H.nodes()), 0, -1):
        # get the node in unnumbered_nodes with the maximum weight
        z = max(unnumbered_nodes, key=lambda node: weight[node])
        unnumbered_nodes.remove(z)
        alpha[z] = i
        update_nodes = []
        for y in unnumbered_nodes:
            if G.has_edge(y, z):
                update_nodes.append(y)
            else:
                # y_weight will be bigger than node weights between y and z
                y_weight = weight[y]
                lower_nodes = [
                    node for node in unnumbered_nodes if weight[node] < y_weight
                ]
                # If y reaches z through strictly lighter nodes, (z, y)
                # becomes a chord of the triangulation.
                if nx.has_path(H.subgraph(lower_nodes + [z, y]), y, z):
                    update_nodes.append(y)
                    chords.add((z, y))
        # during calculation of paths the weights should not be updated
        for node in update_nodes:
            weight[node] += 1
    H.add_edges_from(chords)
    return H, alpha
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/clique.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/clique.py
new file mode 100644
index 00000000..57b588ae
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/clique.py
@@ -0,0 +1,755 @@
+"""Functions for finding and manipulating cliques.
+
+Finding the largest clique in a graph is NP-complete problem, so most of
+these algorithms have an exponential running time; for more information,
+see the Wikipedia article on the clique problem [1]_.
+
+.. [1] clique problem: https://en.wikipedia.org/wiki/Clique_problem
+
+"""
+
+from collections import defaultdict, deque
+from itertools import chain, combinations, islice
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = [
+    "find_cliques",
+    "find_cliques_recursive",
+    "make_max_clique_graph",
+    "make_clique_bipartite",
+    "node_clique_number",
+    "number_of_cliques",
+    "enumerate_all_cliques",
+    "max_weight_clique",
+]
+
+
@not_implemented_for("directed")
@nx._dispatchable
def enumerate_all_cliques(G):
    """Returns all cliques in an undirected graph.

    This function returns an iterator over cliques, each of which is a
    list of nodes.  Cliques are yielded in order of increasing size:
    first all cliques of size one, then size two, and so on.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    Returns
    -------
    iterator
        An iterator over cliques, each of which is a list of nodes in
        `G`. The cliques are ordered according to size.

    Notes
    -----
    In the worst case the number of cliques is exponential in the number
    of nodes; this function keeps only the current candidate lists in
    memory during the search.  The implementation is adapted from the
    algorithm by Zhang et al. (2005) [1]_ to output all cliques
    discovered.  Self-loops and parallel edges are ignored.

    References
    ----------
    .. [1] Yun Zhang, Abu-Khzam, F.N., Baldwin, N.E., Chesler, E.J.,
           Langston, M.A., Samatova, N.F.,
           "Genome-Scale Computational Approaches to Memory-Intensive
           Applications in Systems Biology".
           *Supercomputing*, 2005. Proceedings of the ACM/IEEE SC 2005
           Conference, pp. 12, 12--18 Nov. 2005.
           <https://doi.org/10.1109/SC.2005.29>.
    """
    rank = {}
    later_nbrs = {}
    for node in G:
        rank[node] = len(rank)
        # Neighbors of ``node`` that come later in G's iteration order
        # (this also drops self-loops, since ``node`` is already ranked).
        later_nbrs[node] = {nbr for nbr in G[node] if nbr not in rank}

    # Each queue entry is (clique, common neighbors that can extend it),
    # with both components sorted consistently with G's iteration order.
    queue = deque(
        ([node], sorted(later_nbrs[node], key=rank.__getitem__)) for node in G
    )
    # BFS over cliques guarantees the size-ordered output; extending only
    # by later-ranked common neighbors avoids duplicates.
    while queue:
        clique, common = map(list, queue.popleft())
        yield clique
        for i, node in enumerate(common):
            # Generators keep memory bounded by the frontier, not by the
            # total number of cliques.
            queue.append(
                (
                    chain(clique, [node]),
                    filter(later_nbrs[node].__contains__, islice(common, i + 1, None)),
                )
            )
+
+
@not_implemented_for("directed")
@nx._dispatchable
def find_cliques(G, nodes=None):
    """Returns all maximal cliques in an undirected graph.

    For each node *n*, a *maximal clique for n* is a largest complete
    subgraph containing *n*.  The largest maximal clique is sometimes
    called the *maximum clique*.

    This function returns an iterator over cliques, each of which is a
    list of nodes.  It is an iterative implementation, so it does not
    suffer from recursion depth issues.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    nodes : list, optional (default=None)
        If provided, only yield *maximal cliques* containing all nodes in
        `nodes`.  If `nodes` isn't a clique itself, a ValueError is raised.

    Returns
    -------
    iterator
        An iterator over maximal cliques, each of which is a list of
        nodes in `G`.  The order of cliques is arbitrary.

    Raises
    ------
    ValueError
        If `nodes` is not a clique.

    See Also
    --------
    find_cliques_recursive
        A recursive version of the same algorithm.

    Notes
    -----
    Based on the Bron--Kerbosch algorithm [1]_ as adapted by Tomita,
    Tanaka and Takahashi (2006) [2]_ and discussed in Cazals and Karande
    (2008) [3]_, with the recursion unrolled onto an explicit stack.
    Self-loops and parallel edges are ignored.  In the worst case the
    number of maximal cliques is exponential in the number of nodes.

    References
    ----------
    .. [1] Bron, C. and Kerbosch, J.
       "Algorithm 457: finding all cliques of an undirected graph".
       *Communications of the ACM* 16, 9 (Sep. 1973), 575--577.
       <http://portal.acm.org/citation.cfm?doid=362342.362367>

    .. [2] Etsuji Tomita, Akira Tanaka, Haruhisa Takahashi,
       "The worst-case time complexity for generating all maximal
       cliques and computational experiments",
       *Theoretical Computer Science*, Volume 363, Issue 1, 2006,
       Pages 28--42. <https://doi.org/10.1016/j.tcs.2006.06.015>

    .. [3] F. Cazals, C. Karande,
       "A note on the problem of reporting maximal cliques",
       *Theoretical Computer Science*, Volume 407, Issues 1--3, 2008,
       Pages 564--568. <https://doi.org/10.1016/j.tcs.2008.05.010>
    """
    if len(G) == 0:
        return

    # Self-loop-free adjacency sets.
    adj = {u: {v for v in G[u] if v != u} for u in G}

    # Seed the clique with the required ``nodes`` and restrict the
    # candidate set to their common neighbors.
    clique = nodes[:] if nodes is not None else []
    candidates = set(G)
    for member in clique:
        if member not in candidates:
            raise ValueError(f"The given `nodes` {nodes} do not form a clique")
        candidates &= adj[member]

    if not candidates:
        yield clique[:]
        return

    subgraph = candidates.copy()
    frames = []
    clique.append(None)

    # Tomita-style pivot: extend only by candidates NOT adjacent to the
    # pivot, which prunes the search without losing maximal cliques.
    pivot = max(subgraph, key=lambda n: len(candidates & adj[n]))
    to_extend = candidates - adj[pivot]

    try:
        while True:
            if to_extend:
                v = to_extend.pop()
                candidates.remove(v)
                clique[-1] = v
                adj_v = adj[v]
                deeper_subgraph = subgraph & adj_v
                if not deeper_subgraph:
                    # v cannot be extended further: a maximal clique.
                    yield clique[:]
                else:
                    deeper_candidates = candidates & adj_v
                    if deeper_candidates:
                        # Descend: save the current frame, then restrict to
                        # v's neighborhood.
                        frames.append((subgraph, candidates, to_extend))
                        clique.append(None)
                        subgraph = deeper_subgraph
                        candidates = deeper_candidates
                        pivot = max(subgraph, key=lambda n: len(candidates & adj[n]))
                        to_extend = candidates - adj[pivot]
            else:
                # Backtrack one frame; popping the empty stack raises
                # IndexError, which terminates the search.
                clique.pop()
                subgraph, candidates, to_extend = frames.pop()
    except IndexError:
        pass
+
+
# TODO Should this also be not implemented for directed graphs?
@nx._dispatchable
def find_cliques_recursive(G, nodes=None):
    """Returns all maximal cliques in a graph.

    For each node *v*, a *maximal clique for v* is a largest complete
    subgraph containing *v*.  The largest maximal clique is sometimes
    called the *maximum clique*.

    This function returns an iterator over cliques, each of which is a
    list of nodes.  It is a recursive implementation, so it may suffer
    from recursion depth issues, but is included for pedagogical reasons.
    For a non-recursive implementation, see :func:`find_cliques`.

    Parameters
    ----------
    G : NetworkX graph

    nodes : list, optional (default=None)
        If provided, only yield *maximal cliques* containing all nodes in
        `nodes`.  If `nodes` isn't a clique itself, a ValueError is raised.

    Returns
    -------
    iterator
        An iterator over maximal cliques, each of which is a list of
        nodes in `G`.  The order of cliques is arbitrary.

    Raises
    ------
    ValueError
        If `nodes` is not a clique.

    See Also
    --------
    find_cliques
        An iterative version of the same algorithm. See docstring for examples.

    Notes
    -----
    Based on the Bron--Kerbosch algorithm [1]_ as adapted by Tomita,
    Tanaka and Takahashi (2006) [2]_ and discussed in Cazals and Karande
    (2008) [3]_.  Self-loops and parallel edges are ignored.

    References
    ----------
    .. [1] Bron, C. and Kerbosch, J.
       "Algorithm 457: finding all cliques of an undirected graph".
       *Communications of the ACM* 16, 9 (Sep. 1973), 575--577.
       <http://portal.acm.org/citation.cfm?doid=362342.362367>

    .. [2] Etsuji Tomita, Akira Tanaka, Haruhisa Takahashi,
       "The worst-case time complexity for generating all maximal
       cliques and computational experiments",
       *Theoretical Computer Science*, Volume 363, Issue 1, 2006,
       Pages 28--42. <https://doi.org/10.1016/j.tcs.2006.06.015>

    .. [3] F. Cazals, C. Karande,
       "A note on the problem of reporting maximal cliques",
       *Theoretical Computer Science*, Volume 407, Issues 1--3, 2008,
       Pages 564--568. <https://doi.org/10.1016/j.tcs.2008.05.010>
    """
    if len(G) == 0:
        return iter([])

    # Self-loop-free adjacency sets.
    adj = {u: {v for v in G[u] if v != u} for u in G}

    # Seed the clique with the required ``nodes`` and restrict the
    # candidate set to their common neighbors.
    base = nodes[:] if nodes is not None else []
    candidates = set(G)
    for member in base:
        if member not in candidates:
            raise ValueError(f"The given `nodes` {nodes} do not form a clique")
        candidates &= adj[member]

    if not candidates:
        return iter([base])

    def _expand(subgraph, cand):
        # Tomita-style pivot: only extend by candidates not adjacent to it.
        pivot = max(subgraph, key=lambda n: len(cand & adj[n]))
        for v in cand - adj[pivot]:
            cand.remove(v)
            base.append(v)
            adj_v = adj[v]
            deeper_subgraph = subgraph & adj_v
            if not deeper_subgraph:
                # v cannot be extended further: a maximal clique.
                yield base[:]
            else:
                deeper_cand = cand & adj_v
                if deeper_cand:
                    yield from _expand(deeper_subgraph, deeper_cand)
            base.pop()

    return _expand(candidates.copy(), candidates)
+
+
@nx._dispatchable(returns_graph=True)
def make_max_clique_graph(G, create_using=None):
    """Returns the maximal clique graph of the given graph.

    The nodes of the maximal clique graph of `G` are the cliques of
    `G` and an edge joins two cliques if the cliques are not disjoint.

    Parameters
    ----------
    G : NetworkX graph

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
       Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    NetworkX graph
        A graph whose nodes are the cliques of `G` and whose edges
        join two cliques if they are not disjoint.

    Notes
    -----
    Equivalent to projecting :func:`make_clique_bipartite` onto its
    clique nodes and relabeling, but faster since it skips the
    intermediate bipartite graph.
    """
    B = G.__class__() if create_using is None else nx.empty_graph(0, create_using)
    # Number the maximal cliques; the numbers become the new node labels.
    indexed_cliques = [(i, set(c)) for i, c in enumerate(find_cliques(G))]
    B.add_nodes_from(i for i, _ in indexed_cliques)
    # Two clique-nodes are joined whenever the cliques share a member.
    B.add_edges_from(
        (i, j)
        for (i, c1), (j, c2) in combinations(indexed_cliques, 2)
        if c1 & c2
    )
    return B
+
+
@nx._dispatchable(returns_graph=True)
def make_clique_bipartite(G, fpos=None, create_using=None, name=None):
    """Returns the bipartite clique graph corresponding to `G`.

    In the returned bipartite graph, the "bottom" nodes are the nodes of
    `G` and the "top" nodes represent the maximal cliques of `G`.
    There is an edge from node *v* to clique *C* in the returned graph
    if and only if *v* is an element of *C*.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    fpos : bool
        Accepted for backward compatibility but not used by this
        implementation; no ``pos`` attribute is added to the returned
        graph.

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
       Graph type to create. If graph instance, then cleared before populated.

    name : optional
        Not used; any value passed is overwritten internally.

    Returns
    -------
    NetworkX graph
        A bipartite graph whose "bottom" set is the nodes of the graph
        `G`, whose "top" set is the cliques of `G`, and whose edges
        join nodes of `G` to the cliques that contain them.

        The nodes of the graph `G` have the node attribute
        'bipartite' set to 1 and the nodes representing cliques
        have the node attribute 'bipartite' set to 0, as is the
        convention for bipartite graphs in NetworkX.

    """
    B = nx.empty_graph(0, create_using)
    # Defensive: make sure a pre-existing instance passed via create_using
    # starts out empty.
    B.clear()
    # The "bottom" nodes in the bipartite graph are the nodes of the
    # original graph, G.
    B.add_nodes_from(G, bipartite=1)
    for i, cl in enumerate(find_cliques(G)):
        # The "top" nodes in the bipartite graph are the cliques. These
        # nodes get negative numbers as labels.
        name = -i - 1
        B.add_node(name, bipartite=0)
        B.add_edges_from((v, name) for v in cl)
    return B
+
+
@nx._dispatchable
def node_clique_number(G, nodes=None, cliques=None, separate_nodes=False):
    """Returns the size of the largest maximal clique containing each given node.

    Returns a single or list depending on input nodes.
    An optional list of cliques can be input if already computed.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    nodes : node or iterable of nodes, optional (default=None)
        The node(s) for which the clique number is desired.  If None,
        a value is returned for every node in `G`.

    cliques : list, optional (default=None)
        A list of cliques, each of which is itself a list of nodes.
        If not specified, the list of all cliques will be computed
        using :func:`find_cliques`.

    separate_nodes : bool, optional (default=False)
        Present in the signature but not used by this implementation.

    Returns
    -------
    int or dict
        If `nodes` is a single node, returns the size of the
        largest maximal clique in `G` containing that node.
        Otherwise return a dict keyed by node to the size
        of the largest maximal clique containing that node.

    See Also
    --------
    find_cliques
        find_cliques yields the maximal cliques of G.
        It accepts a `nodes` argument which restricts consideration to
        maximal cliques containing all the given `nodes`.
        The search for the cliques is optimized for `nodes`.
    """
    if cliques is None:
        if nodes is not None:
            # Use ego_graph to decrease size of graph
            # check for single node
            if nodes in G:
                return max(len(c) for c in find_cliques(nx.ego_graph(G, nodes)))
            # handle multiple nodes
            return {
                n: max(len(c) for c in find_cliques(nx.ego_graph(G, n))) for n in nodes
            }

        # nodes is None--find all cliques
        cliques = list(find_cliques(G))

    # single node requested
    if nodes in G:
        return max(len(c) for c in cliques if nodes in c)

    # multiple nodes requested
    # preprocess all nodes (faster than one at a time for even 2 nodes)
    size_for_n = defaultdict(int)
    for c in cliques:
        size_of_c = len(c)
        for n in c:
            if size_for_n[n] < size_of_c:
                size_for_n[n] = size_of_c
    if nodes is None:
        return size_for_n
    return {n: size_for_n[n] for n in nodes}
+
+
def number_of_cliques(G, nodes=None, cliques=None):
    """Returns the number of maximal cliques containing each given node.

    Returns a single count if `nodes` is one node, otherwise a dict
    keyed by node.  A precomputed list of maximal cliques may be
    supplied via `cliques` to avoid recomputing them.
    """
    if cliques is None:
        cliques = list(find_cliques(G))

    if nodes is None:
        nodes = list(G.nodes())  # none, get entire graph

    def _count_for(node):
        # Number of maximal cliques that include `node`.
        return sum(1 for clq in cliques if node in clq)

    if isinstance(nodes, list):
        return {node: _count_for(node) for node in nodes}
    # A non-list argument is treated as a single node.
    return _count_for(nodes)
+
+
class MaxWeightClique:
    """A class for the maximum weight clique algorithm.

    This class is a helper for the `max_weight_clique` function.  The class
    should not normally be used directly.

    The search is a branch-and-bound over cliques: `expand` grows a current
    clique, and `find_branching_nodes` uses a greedy weighted
    independent-set cover to bound how much weight the remaining
    candidates can add, pruning nodes that cannot beat the incumbent.

    Parameters
    ----------
    G : NetworkX graph
        The undirected graph for which a maximum weight clique is sought
    weight : string or None, optional (default='weight')
        The node attribute that holds the integer value used as a weight.
        If None, then each node has weight 1.

    Attributes
    ----------
    G : NetworkX graph
        The undirected graph for which a maximum weight clique is sought
    node_weights: dict
        The weight of each node
    incumbent_nodes : list
        The nodes of the incumbent clique (the best clique found so far)
    incumbent_weight: int
        The weight of the incumbent clique
    """

    def __init__(self, G, weight):
        self.G = G
        self.incumbent_nodes = []
        self.incumbent_weight = 0

        if weight is None:
            # Unweighted case: every node counts 1, so a maximum weight
            # clique is simply a maximum clique.
            self.node_weights = {v: 1 for v in G.nodes()}
        else:
            # Validate up front that every node carries an integer weight
            # so the search never fails mid-recursion.
            for v in G.nodes():
                if weight not in G.nodes[v]:
                    errmsg = f"Node {v!r} does not have the requested weight field."
                    raise KeyError(errmsg)
                if not isinstance(G.nodes[v][weight], int):
                    errmsg = f"The {weight!r} field of node {v!r} is not an integer."
                    raise ValueError(errmsg)
            self.node_weights = {v: G.nodes[v][weight] for v in G.nodes()}

    def update_incumbent_if_improved(self, C, C_weight):
        """Update the incumbent if the node set C has greater weight.

        C is assumed to be a clique.
        """
        if C_weight > self.incumbent_weight:
            # Copy C so later in-place mutation of the search state
            # cannot corrupt the stored best clique.
            self.incumbent_nodes = C[:]
            self.incumbent_weight = C_weight

    def greedily_find_independent_set(self, P):
        """Greedily find an independent set of nodes from a set of
        nodes P."""
        independent_set = []
        P = P[:]
        while P:
            # Take the first remaining node, then discard it and all of
            # its neighbors; what survives is independent of the chosen node.
            v = P[0]
            independent_set.append(v)
            P = [w for w in P if v != w and not self.G.has_edge(v, w)]
        return independent_set

    def find_branching_nodes(self, P, target):
        """Find a set of nodes to branch on.

        Covers P by greedy independent sets; since a clique contains at
        most one node of each independent set, the accumulated minimum
        weights bound the extra weight obtainable.  Nodes left in P when
        the bound exceeds `target` are the ones worth branching on.
        """
        residual_wt = {v: self.node_weights[v] for v in P}
        total_wt = 0
        P = P[:]
        while P:
            independent_set = self.greedily_find_independent_set(P)
            min_wt_in_class = min(residual_wt[v] for v in independent_set)
            total_wt += min_wt_in_class
            if total_wt > target:
                # Bound exceeded: remaining nodes might improve on the
                # incumbent, so they must be explored.
                break
            # Charge each covered node; fully-charged nodes are pruned.
            for v in independent_set:
                residual_wt[v] -= min_wt_in_class
            P = [v for v in P if residual_wt[v] != 0]
        return P

    def expand(self, C, C_weight, P):
        """Look for the best clique that contains all the nodes in C and zero or
        more of the nodes in P, backtracking if it can be shown that no such
        clique has greater weight than the incumbent.
        """
        self.update_incumbent_if_improved(C, C_weight)
        branching_nodes = self.find_branching_nodes(P, self.incumbent_weight - C_weight)
        while branching_nodes:
            v = branching_nodes.pop()
            P.remove(v)
            new_C = C + [v]
            new_C_weight = C_weight + self.node_weights[v]
            # Only nodes adjacent to v can extend the enlarged clique.
            new_P = [w for w in P if self.G.has_edge(v, w)]
            self.expand(new_C, new_C_weight, new_P)

    def find_max_weight_clique(self):
        """Find a maximum weight clique."""
        # Sort nodes in reverse order of degree for speed
        nodes = sorted(self.G.nodes(), key=lambda v: self.G.degree(v), reverse=True)
        # Non-positive-weight nodes can never improve a clique's weight.
        nodes = [v for v in nodes if self.node_weights[v] > 0]
        self.expand([], 0, nodes)
+
+
@not_implemented_for("directed")
@nx._dispatchable(node_attrs="weight")
def max_weight_clique(G, weight="weight"):
    """Find a maximum weight clique in G.

    A *clique* in a graph is a set of nodes such that every two distinct nodes
    are adjacent.  The *weight* of a clique is the sum of the weights of its
    nodes, and a *maximum weight clique* is one whose weight is not exceeded
    by any other clique in G.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph
    weight : string or None, optional (default='weight')
        The node attribute that holds the integer value used as a weight.
        If None, then each node has weight 1.

    Returns
    -------
    clique : list
        the nodes of a maximum weight clique
    weight : int
        the weight of a maximum weight clique

    Notes
    -----
    The implementation is recursive, so very large cliques (of size close
    to the interpreter's recursion depth limit) may exhaust the stack.

    At each search node the algorithm greedily builds a weighted
    independent-set cover of part of the graph to obtain a small set of
    branching nodes.  It closely follows the algorithm of Tavares et
    al. [1]_, except that this version does not use bitsets.  This family
    of branch-and-bound algorithms for maximum weight clique (equivalently,
    maximum weight independent set on the complement graph) has a long
    history; see Algorithm B of Warren and Hicks [2]_ and the references
    therein.

    References
    ----------
    .. [1] Tavares, W.A., Neto, M.B.C., Rodrigues, C.D., Michelon, P.: Um
           algoritmo de branch and bound para o problema da clique máxima
           ponderada.  Proceedings of XLVII SBPO 1 (2015).

    .. [2] Warren, Jeffrey S, Hicks, Illya V.: Combinatorial Branch-and-Bound
           for the Maximum Weight Independent Set Problem.  Technical Report,
           Texas A&M University (2016).
    """
    searcher = MaxWeightClique(G, weight)
    searcher.find_max_weight_clique()
    return searcher.incumbent_nodes, searcher.incumbent_weight
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/cluster.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/cluster.py
new file mode 100644
index 00000000..6c91ad28
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/cluster.py
@@ -0,0 +1,609 @@
+"""Algorithms to characterize the number of triangles in a graph."""
+
+from collections import Counter
+from itertools import chain, combinations
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = [
+    "triangles",
+    "average_clustering",
+    "clustering",
+    "transitivity",
+    "square_clustering",
+    "generalized_degree",
+]
+
+
@not_implemented_for("directed")
@nx._dispatchable
def triangles(G, nodes=None):
    """Compute the number of triangles.

    Finds the number of triangles that include a node as one vertex.

    Parameters
    ----------
    G : graph
       A networkx graph

    nodes : node, iterable of nodes, or None (default=None)
        If a singleton node, return the number of triangles for that node.
        If an iterable, compute the number of triangles for each of those nodes.
        If `None` (the default) compute the number of triangles for all nodes in `G`.

    Returns
    -------
    out : dict or int
       If `nodes` is a container of nodes, returns number of triangles keyed by node (dict).
       If `nodes` is a specific node, returns number of triangles for the node (int).

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.triangles(G, 0))
    6
    >>> print(nx.triangles(G))
    {0: 6, 1: 6, 2: 6, 3: 6, 4: 6}
    >>> print(list(nx.triangles(G, [0, 1]).values()))
    [6, 6]

    Notes
    -----
    Self loops are ignored.

    """
    if nodes is not None:
        if nodes in G:
            # Singleton node: the iterator double counts triangles, so halve.
            return next(_triangles_and_degree_iter(G, nodes))[2] // 2
        # Container of nodes: map each node to its (halved) triangle count.
        return {v: t // 2 for v, _, t, _ in _triangles_and_degree_iter(G, nodes)}

    # Whole-graph case: count each triangle exactly once by only looking
    # "forward" at neighbors that have not been visited yet.
    forward_nbrs = {}
    for node, nbrdict in G.adjacency():
        forward_nbrs[node] = {
            nbr for nbr in nbrdict if nbr not in forward_nbrs and nbr != node
        }

    # Seed every node with zero so isolated nodes appear in the result.
    counts = Counter(dict.fromkeys(G, 0))
    for u, u_forward in forward_nbrs.items():
        for w in u_forward:
            # Any forward neighbor shared by u and w closes a triangle.
            shared = u_forward & forward_nbrs[w]
            found = len(shared)
            counts[u] += found
            counts[w] += found
            counts.update(shared)

    return dict(counts)
+
+
@not_implemented_for("multigraph")
def _triangles_and_degree_iter(G, nodes=None):
    """Yield (node, degree, triangles, generalized degree) tuples.

    Triangles are double counted here (once per direction), so callers
    typically divide by 2.  See degree(), triangles() and
    generalized_degree() for definitions and details.
    """
    node_nbrs_pairs = (
        G.adj.items() if nodes is None else ((n, G[n]) for n in G.nbunch_iter(nodes))
    )

    for node, nbrdict in node_nbrs_pairs:
        nbrs = set(nbrdict) - {node}
        # Generalized degree: triangle multiplicity -> number of incident
        # edges participating in that many triangles.
        gen_degree = Counter(len(nbrs & (set(G[w]) - {w})) for w in nbrs)
        doubled_triangles = sum(mult * cnt for mult, cnt in gen_degree.items())
        yield (node, len(nbrs), doubled_triangles, gen_degree)
+
+
@not_implemented_for("multigraph")
def _weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"):
    """Return an iterator of (node, degree, weighted_triangles).

    Used for weighted clustering.
    Note: this returns the geometric average weight of edges in the triangle.
    Also, each triangle is counted twice (each direction).
    So you may want to divide by 2.

    """
    import numpy as np

    # Normalize edge weights by the maximum weight in the graph; a missing
    # weight attribute defaults to 1.
    if weight is None or G.number_of_edges() == 0:
        max_weight = 1
    else:
        max_weight = max(d.get(weight, 1) for u, v, d in G.edges(data=True))
    if nodes is None:
        nodes_nbrs = G.adj.items()
    else:
        nodes_nbrs = ((n, G[n]) for n in G.nbunch_iter(nodes))

    def wt(u, v):
        # Normalized weight of edge (u, v).
        return G[u][v].get(weight, 1) / max_weight

    for i, nbrs in nodes_nbrs:
        inbrs = set(nbrs) - {i}
        weighted_triangles = 0
        seen = set()
        for j in inbrs:
            seen.add(j)
            # This avoids counting twice -- we double at the end.
            jnbrs = set(G[j]) - seen
            # Only compute the edge weight once, before the inner inner
            # loop.
            wij = wt(i, j)
            # Sum the geometric mean (cube root of the product) of the
            # three edge weights over all triangles (i, j, k).
            weighted_triangles += np.cbrt(
                [(wij * wt(j, k) * wt(k, i)) for k in inbrs & jnbrs]
            ).sum()
        yield (i, len(inbrs), 2 * float(weighted_triangles))
+
+
@not_implemented_for("multigraph")
def _directed_triangles_and_degree_iter(G, nodes=None):
    """Return an iterator of
    (node, total_degree, reciprocal_degree, directed_triangles).

    Used for directed clustering.
    Note that unlike `_triangles_and_degree_iter()`, this function counts
    directed triangles so does not count triangles twice.
    """
    nodes_nbrs = ((n, G._pred[n], G._succ[n]) for n in G.nbunch_iter(nodes))

    for i, preds, succs in nodes_nbrs:
        ipreds = set(preds) - {i}
        isuccs = set(succs) - {i}

        directed_triangles = 0
        for j in chain(ipreds, isuccs):
            jpreds = set(G._pred[j]) - {j}
            jsuccs = set(G._succ[j]) - {j}
            # Each of the four intersections contributes one directed
            # triangle per member; adding the cardinalities directly is
            # equivalent to (and cheaper than) counting the elements of a
            # chained generator over the same four sets.
            directed_triangles += (
                len(ipreds & jpreds)
                + len(ipreds & jsuccs)
                + len(isuccs & jpreds)
                + len(isuccs & jsuccs)
            )
        dtotal = len(ipreds) + len(isuccs)
        dbidirectional = len(ipreds & isuccs)
        yield (i, dtotal, dbidirectional, directed_triangles)
+
+
@not_implemented_for("multigraph")
def _directed_weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"):
    """Return an iterator of
    (node, total_degree, reciprocal_degree, directed_weighted_triangles).

    Used for directed weighted clustering.
    Note that unlike `_weighted_triangles_and_degree_iter()`, this function counts
    directed triangles so does not count triangles twice.

    Each triangle's contribution is the geometric mean (cube root of the
    product) of its three normalized edge weights; the eight accumulation
    terms below cover the eight edge-orientation patterns a directed
    triangle through node i can have.
    """
    import numpy as np

    # Normalize edge weights by the maximum weight in the graph; a missing
    # weight attribute defaults to 1.
    if weight is None or G.number_of_edges() == 0:
        max_weight = 1
    else:
        max_weight = max(d.get(weight, 1) for u, v, d in G.edges(data=True))

    nodes_nbrs = ((n, G._pred[n], G._succ[n]) for n in G.nbunch_iter(nodes))

    def wt(u, v):
        # Normalized weight of edge (u, v).
        return G[u][v].get(weight, 1) / max_weight

    for i, preds, succs in nodes_nbrs:
        ipreds = set(preds) - {i}
        isuccs = set(succs) - {i}

        directed_triangles = 0
        # Case 1: j is a predecessor of i (edge j -> i); enumerate the four
        # orientations of the remaining two edges via k.
        for j in ipreds:
            jpreds = set(G._pred[j]) - {j}
            jsuccs = set(G._succ[j]) - {j}
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs]
            ).sum()

        # Case 2: j is a successor of i (edge i -> j); same four
        # orientations for the remaining edges.
        for j in isuccs:
            jpreds = set(G._pred[j]) - {j}
            jsuccs = set(G._succ[j]) - {j}
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs]
            ).sum()

        dtotal = len(ipreds) + len(isuccs)
        dbidirectional = len(ipreds & isuccs)
        yield (i, dtotal, dbidirectional, float(directed_triangles))
+
+
@nx._dispatchable(edge_attrs="weight")
def average_clustering(G, nodes=None, weight=None, count_zeros=True):
    r"""Compute the average clustering coefficient for the graph G.

    The clustering coefficient for the graph is the average,

    .. math::

       C = \frac{1}{n}\sum_{v \in G} c_v,

    where :math:`n` is the number of nodes in `G`.

    Parameters
    ----------
    G : graph

    nodes : container of nodes, optional (default=all nodes in G)
       Compute average clustering for nodes in this container.

    weight : string or None, optional (default=None)
       The edge attribute that holds the numerical value used as a weight.
       If None, then each edge has weight 1.

    count_zeros : bool
       If False include only the nodes with nonzero clustering in the average.

    Returns
    -------
    avg : float
       Average clustering

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.average_clustering(G))
    1.0

    Notes
    -----
    This is a space saving routine; it might be faster
    to use the clustering function to get a list and then take the average.

    Self loops are ignored.

    References
    ----------
    .. [1] Generalizations of the clustering coefficient to weighted
       complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela,
       K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007).
       http://jponnela.com/web_documents/a9.pdf
    .. [2] Marcus Kaiser,  Mean clustering coefficients: the role of isolated
       nodes and leafs on clustering measures for small-world networks.
       https://arxiv.org/abs/0802.2512
    """
    coefficients = list(clustering(G, nodes, weight=weight).values())
    if not count_zeros:
        # Keep only strictly nonzero coefficients (sign-agnostic, since
        # weighted clustering may be negative with signed weights).
        coefficients = [c for c in coefficients if abs(c) > 0]
    return sum(coefficients) / len(coefficients)
+
+
@nx._dispatchable(edge_attrs="weight")
def clustering(G, nodes=None, weight=None):
    r"""Compute the clustering coefficient for nodes.

    For unweighted graphs, the clustering of a node :math:`u`
    is the fraction of possible triangles through that node that exist,

    .. math::

      c_u = \frac{2 T(u)}{deg(u)(deg(u)-1)},

    where :math:`T(u)` is the number of triangles through node :math:`u` and
    :math:`deg(u)` is the degree of :math:`u`.

    For weighted graphs, there are several ways to define clustering [1]_.
    the one used here is defined
    as the geometric average of the subgraph edge weights [2]_,

    .. math::

       c_u = \frac{1}{deg(u)(deg(u)-1))}
             \sum_{vw} (\hat{w}_{uv} \hat{w}_{uw} \hat{w}_{vw})^{1/3}.

    The edge weights :math:`\hat{w}_{uv}` are normalized by the maximum weight
    in the network :math:`\hat{w}_{uv} = w_{uv}/\max(w)`.

    The value of :math:`c_u` is assigned to 0 if :math:`deg(u) < 2`.

    Additionally, this weighted definition has been generalized to support negative edge weights [3]_.

    For directed graphs, the clustering is similarly defined as the fraction
    of all possible directed triangles or geometric average of the subgraph
    edge weights for unweighted and weighted directed graph respectively [4]_.

    .. math::

       c_u = \frac{T(u)}{2(deg^{tot}(u)(deg^{tot}(u)-1) - 2deg^{\leftrightarrow}(u))},

    where :math:`T(u)` is the number of directed triangles through node
    :math:`u`, :math:`deg^{tot}(u)` is the sum of in degree and out degree of
    :math:`u` and :math:`deg^{\leftrightarrow}(u)` is the reciprocal degree of
    :math:`u`.


    Parameters
    ----------
    G : graph

    nodes : node, iterable of nodes, or None (default=None)
        If a singleton node, return the clustering coefficient for that node.
        If an iterable, compute the clustering coefficient for each of those nodes.
        If `None` (the default) compute the clustering coefficient for all nodes in `G`.

    weight : string or None, optional (default=None)
       The edge attribute that holds the numerical value used as a weight.
       If None, then each edge has weight 1.

    Returns
    -------
    out : float, or dictionary
       Clustering coefficient at specified nodes

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.clustering(G, 0))
    1.0
    >>> print(nx.clustering(G))
    {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}

    Notes
    -----
    Self loops are ignored.

    References
    ----------
    .. [1] Generalizations of the clustering coefficient to weighted
       complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela,
       K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007).
       http://jponnela.com/web_documents/a9.pdf
    .. [2] Intensity and coherence of motifs in weighted complex
       networks by J. P. Onnela, J. Saramäki, J. Kertész, and K. Kaski,
       Physical Review E, 71(6), 065103 (2005).
    .. [3] Generalization of Clustering Coefficients to Signed Correlation Networks
       by G. Costantini and M. Perugini, PloS one, 9(2), e88669 (2014).
    .. [4] Clustering in complex directed networks by G. Fagiolo,
       Physical Review E, 76(2), 026107 (2007).
    """
    # Dispatch to the matching triangle iterator; the directed formulas
    # follow Fagiolo [4]_, the weighted ones Onnela et al. [2]_.
    if G.is_directed():
        if weight is not None:
            td_iter = _directed_weighted_triangles_and_degree_iter(G, nodes, weight)
            clusterc = {
                v: 0 if t == 0 else t / ((dt * (dt - 1) - 2 * db) * 2)
                for v, dt, db, t in td_iter
            }
        else:
            td_iter = _directed_triangles_and_degree_iter(G, nodes)
            clusterc = {
                v: 0 if t == 0 else t / ((dt * (dt - 1) - 2 * db) * 2)
                for v, dt, db, t in td_iter
            }
    else:
        # The formula 2*T/(d*(d-1)) from docs is t/(d*(d-1)) here b/c t==2*T
        if weight is not None:
            td_iter = _weighted_triangles_and_degree_iter(G, nodes, weight)
            clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for v, d, t in td_iter}
        else:
            td_iter = _triangles_and_degree_iter(G, nodes)
            clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for v, d, t, _ in td_iter}
    if nodes in G:
        # Return the value of the sole entry in the dictionary.
        return clusterc[nodes]
    return clusterc
+
+
@nx._dispatchable
def transitivity(G):
    r"""Compute graph transitivity, the fraction of all possible triangles
    present in G.

    Possible triangles are identified by the number of "triads"
    (two edges with a shared vertex).

    The transitivity is

    .. math::

        T = 3\frac{\#triangles}{\#triads}.

    Parameters
    ----------
    G : graph

    Returns
    -------
    out : float
       Transitivity

    Notes
    -----
    Self loops are ignored.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.transitivity(G))
    1.0
    """
    # Accumulate doubled triangle counts and triad counts in one pass.
    triangle_total = 0
    triad_total = 0
    for _, degree, doubled_triangles, _ in _triangles_and_degree_iter(G):
        triangle_total += doubled_triangles
        triad_total += degree * (degree - 1)
    # Covers both the empty graph and the triangle-free graph.
    return 0 if triangle_total == 0 else triangle_total / triad_total
+
+
@nx._dispatchable
def square_clustering(G, nodes=None):
    r"""Compute the squares clustering coefficient for nodes.

    For each node return the fraction of possible squares that exist at
    the node [1]_

    .. math::
       C_4(v) = \frac{ \sum_{u=1}^{k_v}
       \sum_{w=u+1}^{k_v} q_v(u,w) }{ \sum_{u=1}^{k_v}
       \sum_{w=u+1}^{k_v} [a_v(u,w) + q_v(u,w)]},

    where :math:`q_v(u,w)` are the number of common neighbors of :math:`u` and
    :math:`w` other than :math:`v` (ie squares), and :math:`a_v(u,w) = (k_u -
    (1+q_v(u,w)+\theta_{uv})) + (k_w - (1+q_v(u,w)+\theta_{uw}))`, where
    :math:`\theta_{uw} = 1` if :math:`u` and :math:`w` are connected and 0
    otherwise. [2]_

    Parameters
    ----------
    G : graph

    nodes : container of nodes, optional (default=all nodes in G)
       Compute clustering for nodes in this container.

    Returns
    -------
    c4 : dictionary
       A dictionary keyed by node with the square clustering coefficient value.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.square_clustering(G, 0))
    1.0
    >>> print(nx.square_clustering(G))
    {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}

    Notes
    -----
    While :math:`C_3(v)` (triangle clustering) gives the probability that
    two neighbors of node v are connected with each other, :math:`C_4(v)` is
    the probability that two neighbors of node v share a common
    neighbor different from v. This algorithm can be applied to both
    bipartite and unipartite networks.

    References
    ----------
    .. [1] Pedro G. Lind, Marta C. González, and Hans J. Herrmann. 2005
        Cycles and clustering in bipartite networks.
        Physical Review E (72) 056127.
    .. [2] Zhang, Peng et al. Clustering Coefficient and Community Structure of
        Bipartite Networks. Physica A: Statistical Mechanics and its Applications 387.27 (2008): 6869–6875.
        https://arxiv.org/abs/0710.0117v1
    """
    if nodes is None:
        node_iter = G
    else:
        node_iter = G.nbunch_iter(nodes)
    clustering = {}
    for v in node_iter:
        clustering[v] = 0
        potential = 0
        for u, w in combinations(G[v], 2):
            # q_v(u, w): common neighbors of u and w other than v — each
            # one closes a square through v.
            squares = len((set(G[u]) & set(G[w])) - {v})
            clustering[v] += squares
            # degm counts neighbors of u (resp. w) that cannot form a new
            # square: the q_v(u,w) shared ones, v itself, and (if u and w
            # are adjacent) each other.
            degm = squares + 1
            if w in G[u]:
                degm += 1
            # a_v(u, w) + q_v(u, w): the denominator contribution.
            potential += (len(G[u]) - degm) + (len(G[w]) - degm) + squares
        if potential > 0:
            clustering[v] /= potential
    if nodes in G:
        # Return the value of the sole entry in the dictionary.
        return clustering[nodes]
    return clustering
+
+
@not_implemented_for("directed")
@nx._dispatchable
def generalized_degree(G, nodes=None):
    r"""Compute the generalized degree for nodes.

    For each node, the generalized degree shows how many edges of given
    triangle multiplicity the node is connected to. The triangle multiplicity
    of an edge is the number of triangles an edge participates in. The
    generalized degree of node :math:`i` can be written as a vector
    :math:`\mathbf{k}_i=(k_i^{(0)}, \dotsc, k_i^{(N-2)})` where
    :math:`k_i^{(j)}` is the number of edges attached to node :math:`i` that
    participate in :math:`j` triangles.

    Parameters
    ----------
    G : graph

    nodes : container of nodes, optional (default=all nodes in G)
       Compute the generalized degree for nodes in this container.

    Returns
    -------
    out : Counter, or dictionary of Counters
       Generalized degree of specified nodes. The Counter is keyed by edge
       triangle multiplicity.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.generalized_degree(G, 0))
    Counter({3: 4})
    >>> print(nx.generalized_degree(G))
    {0: Counter({3: 4}), 1: Counter({3: 4}), 2: Counter({3: 4}), 3: Counter({3: 4}), 4: Counter({3: 4})}

    To recover the number of triangles attached to a node:

    >>> k1 = nx.generalized_degree(G, 0)
    >>> sum([k * v for k, v in k1.items()]) / 2 == nx.triangles(G, 0)
    True

    Notes
    -----
    Self loops are ignored.

    In a network of N nodes, the highest triangle multiplicity an edge can have
    is N-2.

    The return value does not include a `zero` entry if no edges of a
    particular triangle multiplicity are present.

    The number of triangles node :math:`i` is attached to can be recovered from
    the generalized degree :math:`\mathbf{k}_i=(k_i^{(0)}, \dotsc,
    k_i^{(N-2)})` by :math:`(k_i^{(1)}+2k_i^{(2)}+\dotsc +(N-2)k_i^{(N-2)})/2`.

    References
    ----------
    .. [1] Networks with arbitrary edge multiplicities by V. Zlatić,
        D. Garlaschelli and G. Caldarelli, EPL (Europhysics Letters),
        Volume 97, Number 2 (2012).
        https://iopscience.iop.org/article/10.1209/0295-5075/97/28005
    """
    if nodes in G:
        # Single node: the generalized degree is the fourth item yielded.
        _, _, _, gen_deg = next(_triangles_and_degree_iter(G, nodes))
        return gen_deg
    return {
        node: gen_deg
        for node, _, _, gen_deg in _triangles_and_degree_iter(G, nodes)
    }
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__init__.py
new file mode 100644
index 00000000..39381d9f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__init__.py
@@ -0,0 +1,4 @@
+from networkx.algorithms.coloring.greedy_coloring import *
+from networkx.algorithms.coloring.equitable_coloring import equitable_color
+
+__all__ = ["greedy_color", "equitable_color"]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/equitable_coloring.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/equitable_coloring.py
new file mode 100644
index 00000000..e464a074
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/equitable_coloring.py
@@ -0,0 +1,505 @@
+"""
+Equitable coloring of graphs with bounded degree.
+"""
+
+from collections import defaultdict
+
+import networkx as nx
+
+__all__ = ["equitable_color"]
+
+
+@nx._dispatchable
+def is_coloring(G, coloring):
+    """Determine if the coloring is a valid coloring for the graph G."""
+    # Verify that the coloring is valid.
+    return all(coloring[s] != coloring[d] for s, d in G.edges)
+
+
+@nx._dispatchable
+def is_equitable(G, coloring, num_colors=None):
+    """Determines if the coloring is valid and equitable for the graph G."""
+
+    if not is_coloring(G, coloring):
+        return False
+
+    # Verify whether it is equitable.
+    color_set_size = defaultdict(int)
+    for color in coloring.values():
+        color_set_size[color] += 1
+
+    if num_colors is not None:
+        for color in range(num_colors):
+            if color not in color_set_size:
+                # These colors do not have any vertices attached to them.
+                color_set_size[color] = 0
+
+    # If there are more than 2 distinct values, the coloring cannot be equitable
+    all_set_sizes = set(color_set_size.values())
+    if len(all_set_sizes) == 0 and num_colors is None:  # Was an empty graph
+        return True
+    elif len(all_set_sizes) == 1:
+        return True
+    elif len(all_set_sizes) == 2:
+        a, b = list(all_set_sizes)
+        return abs(a - b) <= 1
+    else:  # len(all_set_sizes) > 2:
+        return False
+
+
+def make_C_from_F(F):
+    C = defaultdict(list)
+    for node, color in F.items():
+        C[color].append(node)
+
+    return C
+
+
+def make_N_from_L_C(L, C):
+    nodes = L.keys()
+    colors = C.keys()
+    return {
+        (node, color): sum(1 for v in L[node] if v in C[color])
+        for node in nodes
+        for color in colors
+    }
+
+
+def make_H_from_C_N(C, N):
+    return {
+        (c1, c2): sum(1 for node in C[c1] if N[(node, c2)] == 0) for c1 in C for c2 in C
+    }
+
+
+def change_color(u, X, Y, N, H, F, C, L):
+    """Change the color of 'u' from X to Y and update N, H, F, C."""
+    assert F[u] == X and X != Y
+
+    # Change the class of 'u' from X to Y
+    F[u] = Y
+
+    for k in C:
+        # 'u' witnesses an edge from k -> Y instead of from k -> X now.
+        if N[u, k] == 0:
+            H[(X, k)] -= 1
+            H[(Y, k)] += 1
+
+    for v in L[u]:
+        # 'v' has lost a neighbor in X and gained one in Y
+        N[(v, X)] -= 1
+        N[(v, Y)] += 1
+
+        if N[(v, X)] == 0:
+            # 'v' witnesses F[v] -> X
+            H[(F[v], X)] += 1
+
+        if N[(v, Y)] == 1:
+            # 'v' no longer witnesses F[v] -> Y
+            H[(F[v], Y)] -= 1
+
+    C[X].remove(u)
+    C[Y].append(u)
+
+
+def move_witnesses(src_color, dst_color, N, H, F, C, T_cal, L):
+    """Move witness along a path from src_color to dst_color."""
+    X = src_color
+    while X != dst_color:
+        Y = T_cal[X]
+        # Move _any_ witness from X to Y = T_cal[X]
+        w = next(x for x in C[X] if N[(x, Y)] == 0)
+        change_color(w, X, Y, N=N, H=H, F=F, C=C, L=L)
+        X = Y
+
+
+@nx._dispatchable(mutates_input=True)
+def pad_graph(G, num_colors):
+    """Add a disconnected complete clique K_p such that the number of nodes in
+    the graph becomes a multiple of `num_colors`.
+
+    Assumes that the graph's nodes are labelled using integers.
+
+    Returns the number of nodes with each color.
+    """
+
+    n_ = len(G)
+    r = num_colors - 1
+
+    # Ensure that the number of nodes in G is a multiple of (r + 1)
+    s = n_ // (r + 1)
+    if n_ != s * (r + 1):
+        p = (r + 1) - n_ % (r + 1)
+        s += 1
+
+        # Complete graph K_p between (imaginary) nodes [n_, ... , n_ + p - 1]
+        K = nx.relabel_nodes(nx.complete_graph(p), {idx: idx + n_ for idx in range(p)})
+        G.add_edges_from(K.edges)
+
+    return s
+
+
+def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None):
+    """Procedure P as described in the paper."""
+
+    if excluded_colors is None:
+        excluded_colors = set()
+
+    A_cal = set()
+    T_cal = {}
+    R_cal = []
+
+    # BFS to determine A_cal, i.e. colors reachable from V-
+    reachable = [V_minus]
+    marked = set(reachable)
+    idx = 0
+
+    while idx < len(reachable):
+        pop = reachable[idx]
+        idx += 1
+
+        A_cal.add(pop)
+        R_cal.append(pop)
+
+        # TODO: Checking whether a color has been visited can be made faster by
+        # using a look-up table instead of testing for membership in a set by a
+        # logarithmic factor.
+        next_layer = []
+        for k in C:
+            if (
+                H[(k, pop)] > 0
+                and k not in A_cal
+                and k not in excluded_colors
+                and k not in marked
+            ):
+                next_layer.append(k)
+
+        for dst in next_layer:
+            # Record that `dst` can reach `pop`
+            T_cal[dst] = pop
+
+        marked.update(next_layer)
+        reachable.extend(next_layer)
+
+    # Variables for the algorithm
+    b = len(C) - len(A_cal)
+
+    if V_plus in A_cal:
+        # Easy case: V+ is in A_cal
+        # Move one node from V+ to V- using T_cal to find the parents.
+        move_witnesses(V_plus, V_minus, N=N, H=H, F=F, C=C, T_cal=T_cal, L=L)
+    else:
+        # If there is a solo edge, we can resolve the situation by
+        # moving witnesses from B to A, making G[A] equitable, and then
+        # recursively balancing G[B - w] with a different V_minus
+        # but the same V_plus.
+
+        A_0 = set()
+        A_cal_0 = set()
+        num_terminal_sets_found = 0
+        made_equitable = False
+
+        for W_1 in R_cal[::-1]:
+            for v in C[W_1]:
+                X = None
+
+                for U in C:
+                    if N[(v, U)] == 0 and U in A_cal and U != W_1:
+                        X = U
+
+                # v does not witness an edge in H[A_cal]
+                if X is None:
+                    continue
+
+                for U in C:
+                    # Note: Departing from the paper here.
+                    if N[(v, U)] >= 1 and U not in A_cal:
+                        X_prime = U
+                        w = v
+
+                        try:
+                            # Finding the solo neighbor of w in X_prime
+                            y = next(
+                                node
+                                for node in L[w]
+                                if F[node] == X_prime and N[(node, W_1)] == 1
+                            )
+                        except StopIteration:
+                            pass
+                        else:
+                            W = W_1
+
+                            # Move w from W to X, now X has one extra node.
+                            change_color(w, W, X, N=N, H=H, F=F, C=C, L=L)
+
+                            # Move witness from X to V_minus, making the coloring
+                            # equitable.
+                            move_witnesses(
+                                src_color=X,
+                                dst_color=V_minus,
+                                N=N,
+                                H=H,
+                                F=F,
+                                C=C,
+                                T_cal=T_cal,
+                                L=L,
+                            )
+
+                            # Move y from X_prime to W, making W the correct size.
+                            change_color(y, X_prime, W, N=N, H=H, F=F, C=C, L=L)
+
+                            # Then call the procedure on G[B - y]
+                            procedure_P(
+                                V_minus=X_prime,
+                                V_plus=V_plus,
+                                N=N,
+                                H=H,
+                                C=C,
+                                F=F,
+                                L=L,
+                                excluded_colors=excluded_colors.union(A_cal),
+                            )
+                            made_equitable = True
+                            break
+
+                if made_equitable:
+                    break
+            else:
+                # No node in W_1 was found such that
+                # it had a solo-neighbor.
+                A_cal_0.add(W_1)
+                A_0.update(C[W_1])
+                num_terminal_sets_found += 1
+
+            if num_terminal_sets_found == b:
+                # Otherwise, construct the maximal independent set and find
+                # a pair of z_1, z_2 as in Case II.
+
+                # BFS to determine B_cal': the set of colors reachable from V+
+                B_cal_prime = set()
+                T_cal_prime = {}
+
+                reachable = [V_plus]
+                marked = set(reachable)
+                idx = 0
+                while idx < len(reachable):
+                    pop = reachable[idx]
+                    idx += 1
+
+                    B_cal_prime.add(pop)
+
+                    # No need to check for excluded_colors here because
+                    # they only exclude colors from A_cal
+                    next_layer = [
+                        k
+                        for k in C
+                        if H[(pop, k)] > 0 and k not in B_cal_prime and k not in marked
+                    ]
+
+                    for dst in next_layer:
+                        T_cal_prime[pop] = dst
+
+                    marked.update(next_layer)
+                    reachable.extend(next_layer)
+
+                # Construct the independent set of G[B']
+                I_set = set()
+                I_covered = set()
+                W_covering = {}
+
+                B_prime = [node for k in B_cal_prime for node in C[k]]
+
+                # Add the nodes in V_plus to I first.
+                for z in C[V_plus] + B_prime:
+                    if z in I_covered or F[z] not in B_cal_prime:
+                        continue
+
+                    I_set.add(z)
+                    I_covered.add(z)
+                    I_covered.update(list(L[z]))
+
+                    for w in L[z]:
+                        if F[w] in A_cal_0 and N[(z, F[w])] == 1:
+                            if w not in W_covering:
+                                W_covering[w] = z
+                            else:
+                                # Found z1, z2 which have the same solo
+                                # neighbor in some W
+                                z_1 = W_covering[w]
+                                # z_2 = z
+
+                                Z = F[z_1]
+                                W = F[w]
+
+                                # shift nodes along W, V-
+                                move_witnesses(
+                                    W, V_minus, N=N, H=H, F=F, C=C, T_cal=T_cal, L=L
+                                )
+
+                                # shift nodes along V+ to Z
+                                move_witnesses(
+                                    V_plus,
+                                    Z,
+                                    N=N,
+                                    H=H,
+                                    F=F,
+                                    C=C,
+                                    T_cal=T_cal_prime,
+                                    L=L,
+                                )
+
+                                # change color of z_1 to W
+                                change_color(z_1, Z, W, N=N, H=H, F=F, C=C, L=L)
+
+                                # change color of w to some color in B_cal
+                                W_plus = next(
+                                    k for k in C if N[(w, k)] == 0 and k not in A_cal
+                                )
+                                change_color(w, W, W_plus, N=N, H=H, F=F, C=C, L=L)
+
+                                # recurse with G[B \cup W*]
+                                excluded_colors.update(
+                                    [k for k in C if k != W and k not in B_cal_prime]
+                                )
+                                procedure_P(
+                                    V_minus=W,
+                                    V_plus=W_plus,
+                                    N=N,
+                                    H=H,
+                                    C=C,
+                                    F=F,
+                                    L=L,
+                                    excluded_colors=excluded_colors,
+                                )
+
+                                made_equitable = True
+                                break
+
+                    if made_equitable:
+                        break
+                else:
+                    assert False, (
+                        "Must find a w which is the solo neighbor "
+                        "of two vertices in B_cal_prime."
+                    )
+
+            if made_equitable:
+                break
+
+
+@nx._dispatchable
+def equitable_color(G, num_colors):
+    """Provides an equitable coloring for nodes of `G`.
+
+    Attempts to color a graph using `num_colors` colors, where no neighbors of
+    a node can have same color as the node itself and the number of nodes with
+    each color differ by at most 1. `num_colors` must be greater than the
+    maximum degree of `G`. The algorithm is described in [1]_ and has
+    complexity O(num_colors * n**2).
+
+    Parameters
+    ----------
+    G : networkX graph
+       The nodes of this graph will be colored.
+
+    num_colors : number of colors to use
+       This number must be at least one more than the maximum degree of nodes
+       in the graph.
+
+    Returns
+    -------
+    A dictionary with keys representing nodes and values representing
+    corresponding coloring.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(4)
+    >>> nx.coloring.equitable_color(G, num_colors=3)  # doctest: +SKIP
+    {0: 2, 1: 1, 2: 2, 3: 0}
+
+    Raises
+    ------
+    NetworkXAlgorithmError
+        If `num_colors` is not at least the maximum degree of the graph `G`
+
+    References
+    ----------
+    .. [1] Kierstead, H. A., Kostochka, A. V., Mydlarz, M., & Szemerédi, E.
+        (2010). A fast algorithm for equitable coloring. Combinatorica, 30(2),
+        217-224.
+    """
+
+    # Map nodes to integers for simplicity later.
+    nodes_to_int = {}
+    int_to_nodes = {}
+
+    for idx, node in enumerate(G.nodes):
+        nodes_to_int[node] = idx
+        int_to_nodes[idx] = node
+
+    G = nx.relabel_nodes(G, nodes_to_int, copy=True)
+
+    # Basic graph statistics and sanity check.
+    if len(G.nodes) > 0:
+        r_ = max(G.degree(node) for node in G.nodes)
+    else:
+        r_ = 0
+
+    if r_ >= num_colors:
+        raise nx.NetworkXAlgorithmError(
+            f"Graph has maximum degree {r_}, needs "
+            f"{r_ + 1} (> {num_colors}) colors for guaranteed coloring."
+        )
+
+    # Ensure that the number of nodes in G is a multiple of (r + 1)
+    pad_graph(G, num_colors)
+
+    # Starting the algorithm.
+    # L = {node: list(G.neighbors(node)) for node in G.nodes}
+    L_ = {node: [] for node in G.nodes}
+
+    # Arbitrary equitable allocation of colors to nodes.
+    F = {node: idx % num_colors for idx, node in enumerate(G.nodes)}
+
+    C = make_C_from_F(F)
+
+    # The neighborhood is empty initially.
+    N = make_N_from_L_C(L_, C)
+
+    # Currently all nodes witness all edges.
+    H = make_H_from_C_N(C, N)
+
+    # Start of algorithm.
+    edges_seen = set()
+
+    for u in sorted(G.nodes):
+        for v in sorted(G.neighbors(u)):
+            # Do not double count edges if (v, u) has already been seen.
+            if (v, u) in edges_seen:
+                continue
+
+            edges_seen.add((u, v))
+
+            L_[u].append(v)
+            L_[v].append(u)
+
+            N[(u, F[v])] += 1
+            N[(v, F[u])] += 1
+
+            if F[u] != F[v]:
+                # Were 'u' and 'v' witnesses for F[u] -> F[v] or F[v] -> F[u]?
+                if N[(u, F[v])] == 1:
+                    H[F[u], F[v]] -= 1  # u cannot witness an edge between F[u], F[v]
+
+                if N[(v, F[u])] == 1:
+                    H[F[v], F[u]] -= 1  # v cannot witness an edge between F[v], F[u]
+
+        if N[(u, F[u])] != 0:
+            # Find the first color where 'u' does not have any neighbors.
+            Y = next(k for k in C if N[(u, k)] == 0)
+            X = F[u]
+            change_color(u, X, Y, N=N, H=H, F=F, C=C, L=L_)
+
+            # Procedure P
+            procedure_P(V_minus=X, V_plus=Y, N=N, H=H, F=F, C=C, L=L_)
+
+    return {int_to_nodes[x]: F[x] for x in int_to_nodes}
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/greedy_coloring.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/greedy_coloring.py
new file mode 100644
index 00000000..9be07803
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/greedy_coloring.py
@@ -0,0 +1,565 @@
+"""
+Greedy graph coloring using various strategies.
+"""
+
+import itertools
+from collections import defaultdict, deque
+
+import networkx as nx
+from networkx.utils import arbitrary_element, py_random_state
+
+__all__ = [
+    "greedy_color",
+    "strategy_connected_sequential",
+    "strategy_connected_sequential_bfs",
+    "strategy_connected_sequential_dfs",
+    "strategy_independent_set",
+    "strategy_largest_first",
+    "strategy_random_sequential",
+    "strategy_saturation_largest_first",
+    "strategy_smallest_last",
+]
+
+
+def strategy_largest_first(G, colors):
+    """Returns a list of the nodes of ``G`` in decreasing order by
+    degree.
+
+    ``G`` is a NetworkX graph. ``colors`` is ignored.
+
+    """
+    return sorted(G, key=G.degree, reverse=True)
+
+
+@py_random_state(2)
+def strategy_random_sequential(G, colors, seed=None):
+    """Returns a random permutation of the nodes of ``G`` as a list.
+
+    ``G`` is a NetworkX graph. ``colors`` is ignored.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+    """
+    nodes = list(G)
+    seed.shuffle(nodes)
+    return nodes
+
+
+def strategy_smallest_last(G, colors):
+    """Returns a deque of the nodes of ``G``, "smallest" last.
+
+    Specifically, the degrees of each node are tracked in a bucket queue.
+    From this, the node of minimum degree is repeatedly popped from the
+    graph, updating its neighbors' degrees.
+
+    ``G`` is a NetworkX graph. ``colors`` is ignored.
+
+    This implementation of the strategy runs in $O(n + m)$ time
+    (ignoring polylogarithmic factors), where $n$ is the number of nodes
+    and $m$ is the number of edges.
+
+    This strategy is related to :func:`strategy_independent_set`: if we
+    interpret each node removed as an independent set of size one, then
+    this strategy chooses an independent set of size one instead of a
+    maximal independent set.
+
+    """
+    H = G.copy()
+    result = deque()
+
+    # Build initial degree list (i.e. the bucket queue data structure)
+    degrees = defaultdict(set)  # set(), for fast random-access removals
+    lbound = float("inf")
+    for node, d in H.degree():
+        degrees[d].add(node)
+        lbound = min(lbound, d)  # Lower bound on min-degree.
+
+    def find_min_degree():
+        # Save time by starting the iterator at `lbound`, not 0.
+        # The value that we find will be our new `lbound`, which we set later.
+        return next(d for d in itertools.count(lbound) if d in degrees)
+
+    for _ in G:
+        # Pop a min-degree node and add it to the list.
+        min_degree = find_min_degree()
+        u = degrees[min_degree].pop()
+        if not degrees[min_degree]:  # Clean up the degree list.
+            del degrees[min_degree]
+        result.appendleft(u)
+
+        # Update degrees of removed node's neighbors.
+        for v in H[u]:
+            degree = H.degree(v)
+            degrees[degree].remove(v)
+            if not degrees[degree]:  # Clean up the degree list.
+                del degrees[degree]
+            degrees[degree - 1].add(v)
+
+        # Finally, remove the node.
+        H.remove_node(u)
+        lbound = min_degree - 1  # Subtract 1 in case of tied neighbors.
+
+    return result
+
+
+def _maximal_independent_set(G):
+    """Returns a maximal independent set of nodes in ``G`` by repeatedly
+    choosing an independent node of minimum degree (with respect to the
+    subgraph of unchosen nodes).
+
+    """
+    result = set()
+    remaining = set(G)
+    while remaining:
+        G = G.subgraph(remaining)
+        v = min(remaining, key=G.degree)
+        result.add(v)
+        remaining -= set(G[v]) | {v}
+    return result
+
+
+def strategy_independent_set(G, colors):
+    """Uses a greedy independent set removal strategy to determine the
+    colors.
+
+    This function updates ``colors`` **in-place** and return ``None``,
+    unlike the other strategy functions in this module.
+
+    This algorithm repeatedly finds and removes a maximal independent
+    set, assigning each node in the set an unused color.
+
+    ``G`` is a NetworkX graph.
+
+    This strategy is related to :func:`strategy_smallest_last`: in that
+    strategy, an independent set of size one is chosen at each step
+    instead of a maximal independent set.
+
+    """
+    remaining_nodes = set(G)
+    while len(remaining_nodes) > 0:
+        nodes = _maximal_independent_set(G.subgraph(remaining_nodes))
+        remaining_nodes -= nodes
+        yield from nodes
+
+
+def strategy_connected_sequential_bfs(G, colors):
+    """Returns an iterable over nodes in ``G`` in the order given by a
+    breadth-first traversal.
+
+    The generated sequence has the property that for each node except
+    the first, at least one neighbor appeared earlier in the sequence.
+
+    ``G`` is a NetworkX graph. ``colors`` is ignored.
+
+    """
+    return strategy_connected_sequential(G, colors, "bfs")
+
+
+def strategy_connected_sequential_dfs(G, colors):
+    """Returns an iterable over nodes in ``G`` in the order given by a
+    depth-first traversal.
+
+    The generated sequence has the property that for each node except
+    the first, at least one neighbor appeared earlier in the sequence.
+
+    ``G`` is a NetworkX graph. ``colors`` is ignored.
+
+    """
+    return strategy_connected_sequential(G, colors, "dfs")
+
+
+def strategy_connected_sequential(G, colors, traversal="bfs"):
+    """Returns an iterable over nodes in ``G`` in the order given by a
+    breadth-first or depth-first traversal.
+
+    ``traversal`` must be one of the strings ``'dfs'`` or ``'bfs'``,
+    representing depth-first traversal or breadth-first traversal,
+    respectively.
+
+    The generated sequence has the property that for each node except
+    the first, at least one neighbor appeared earlier in the sequence.
+
+    ``G`` is a NetworkX graph. ``colors`` is ignored.
+
+    """
+    if traversal == "bfs":
+        traverse = nx.bfs_edges
+    elif traversal == "dfs":
+        traverse = nx.dfs_edges
+    else:
+        raise nx.NetworkXError(
+            "Please specify one of the strings 'bfs' or"
+            " 'dfs' for connected sequential ordering"
+        )
+    for component in nx.connected_components(G):
+        source = arbitrary_element(component)
+        # Yield the source node, then all the nodes in the specified
+        # traversal order.
+        yield source
+        for _, end in traverse(G.subgraph(component), source):
+            yield end
+
+
+def strategy_saturation_largest_first(G, colors):
+    """Iterates over all the nodes of ``G`` in "saturation order" (also
+    known as "DSATUR").
+
+    ``G`` is a NetworkX graph. ``colors`` is a dictionary mapping nodes of
+    ``G`` to colors, for those nodes that have already been colored.
+
+    """
+    distinct_colors = {v: set() for v in G}
+
+    # Add the node color assignments given in colors to the
+    # distinct colors set for each neighbor of that node
+    for node, color in colors.items():
+        for neighbor in G[node]:
+            distinct_colors[neighbor].add(color)
+
+    # Check that the color assignments in colors are valid
+    # i.e. no neighboring nodes have the same color
+    if len(colors) >= 2:
+        for node, color in colors.items():
+            if color in distinct_colors[node]:
+                raise nx.NetworkXError("Neighboring nodes must have different colors")
+
+    # If 0 nodes have been colored, simply choose the node of highest degree.
+    if not colors:
+        node = max(G, key=G.degree)
+        yield node
+        # Add the color 0 to the distinct colors set for each
+        # neighbor of that node.
+        for v in G[node]:
+            distinct_colors[v].add(0)
+
+    while len(G) != len(colors):
+        # Update the distinct color sets for the neighbors.
+        for node, color in colors.items():
+            for neighbor in G[node]:
+                distinct_colors[neighbor].add(color)
+
+        # Compute the maximum saturation and the set of nodes that
+        # achieve that saturation.
+        saturation = {v: len(c) for v, c in distinct_colors.items() if v not in colors}
+        # Yield the node with the highest saturation, and break ties by
+        # degree.
+        node = max(saturation, key=lambda v: (saturation[v], G.degree(v)))
+        yield node
+
+
+#: Dictionary mapping name of a strategy as a string to the strategy function.
+STRATEGIES = {
+    "largest_first": strategy_largest_first,
+    "random_sequential": strategy_random_sequential,
+    "smallest_last": strategy_smallest_last,
+    "independent_set": strategy_independent_set,
+    "connected_sequential_bfs": strategy_connected_sequential_bfs,
+    "connected_sequential_dfs": strategy_connected_sequential_dfs,
+    "connected_sequential": strategy_connected_sequential,
+    "saturation_largest_first": strategy_saturation_largest_first,
+    "DSATUR": strategy_saturation_largest_first,
+}
+
+
+@nx._dispatchable
+def greedy_color(G, strategy="largest_first", interchange=False):
+    """Color a graph using various strategies of greedy graph coloring.
+
+    Attempts to color a graph using as few colors as possible, where no
+    neighbors of a node can have same color as the node itself. The
+    given strategy determines the order in which nodes are colored.
+
+    The strategies are described in [1]_, and smallest-last is based on
+    [2]_.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    strategy : string or function(G, colors)
+       A function (or a string representing a function) that provides
+       the coloring strategy, by returning nodes in the ordering they
+       should be colored. ``G`` is the graph, and ``colors`` is a
+       dictionary of the currently assigned colors, keyed by nodes. The
+       function must return an iterable over all the nodes in ``G``.
+
+       If the strategy function is an iterator generator (that is, a
+       function with ``yield`` statements), keep in mind that the
+       ``colors`` dictionary will be updated after each ``yield``, since
+       this function chooses colors greedily.
+
+       If ``strategy`` is a string, it must be one of the following,
+       each of which represents one of the built-in strategy functions.
+
+       * ``'largest_first'``
+       * ``'random_sequential'``
+       * ``'smallest_last'``
+       * ``'independent_set'``
+       * ``'connected_sequential_bfs'``
+       * ``'connected_sequential_dfs'``
+       * ``'connected_sequential'`` (alias for the previous strategy)
+       * ``'saturation_largest_first'``
+       * ``'DSATUR'`` (alias for the previous strategy)
+
+    interchange: bool
+       Will use the color interchange algorithm described by [3]_ if set
+       to ``True``.
+
+       Note that ``saturation_largest_first`` and ``independent_set``
+       do not work with interchange. Furthermore, if you use
+       interchange with your own strategy function, you cannot rely
+       on the values in the ``colors`` argument.
+
+    Returns
+    -------
+    A dictionary with keys representing nodes and values representing
+    corresponding coloring.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(4)
+    >>> d = nx.coloring.greedy_color(G, strategy="largest_first")
+    >>> d in [{0: 0, 1: 1, 2: 0, 3: 1}, {0: 1, 1: 0, 2: 1, 3: 0}]
+    True
+
+    Raises
+    ------
+    NetworkXPointlessConcept
+        If ``strategy`` is ``saturation_largest_first`` or
+        ``independent_set`` and ``interchange`` is ``True``.
+
+    References
+    ----------
+    .. [1] Adrian Kosowski, and Krzysztof Manuszewski,
+       Classical Coloring of Graphs, Graph Colorings, 2-19, 2004.
+       ISBN 0-8218-3458-4.
+    .. [2] David W. Matula, and Leland L. Beck, "Smallest-last
+       ordering and clustering and graph coloring algorithms." *J. ACM* 30,
+       3 (July 1983), 417–427. <https://doi.org/10.1145/2402.322385>
+    .. [3] Maciej M. Sysło, Narsingh Deo, Janusz S. Kowalik,
+       Discrete Optimization Algorithms with Pascal Programs, 415-424, 1983.
+       ISBN 0-486-45353-7.
+
+    """
+    if len(G) == 0:
+        return {}
+    # Determine the strategy provided by the caller.
+    strategy = STRATEGIES.get(strategy, strategy)
+    if not callable(strategy):
+        raise nx.NetworkXError(
+            f"strategy must be callable or a valid string. {strategy} not valid."
+        )
+    # Perform some validation on the arguments before executing any
+    # strategy functions.
+    if interchange:
+        if strategy is strategy_independent_set:
+            msg = "interchange cannot be used with independent_set"
+            raise nx.NetworkXPointlessConcept(msg)
+        if strategy is strategy_saturation_largest_first:
+            msg = "interchange cannot be used with" " saturation_largest_first"
+            raise nx.NetworkXPointlessConcept(msg)
+    colors = {}
+    nodes = strategy(G, colors)
+    if interchange:
+        return _greedy_coloring_with_interchange(G, nodes)
+    for u in nodes:
+        # Set to keep track of colors of neighbors
+        nbr_colors = {colors[v] for v in G[u] if v in colors}
+        # Find the first unused color.
+        for color in itertools.count():
+            if color not in nbr_colors:
+                break
+        # Assign the new color to the current node.
+        colors[u] = color
+    return colors
+
+
+# Tools for coloring with interchanges
+class _Node:
+    __slots__ = ["node_id", "color", "adj_list", "adj_color"]
+
+    def __init__(self, node_id, n):
+        self.node_id = node_id
+        self.color = -1
+        self.adj_list = None
+        self.adj_color = [None for _ in range(n)]
+
+    def __repr__(self):
+        return (
+            f"Node_id: {self.node_id}, Color: {self.color}, "
+            f"Adj_list: ({self.adj_list}), adj_color: ({self.adj_color})"
+        )
+
+    def assign_color(self, adj_entry, color):
+        adj_entry.col_prev = None
+        adj_entry.col_next = self.adj_color[color]
+        self.adj_color[color] = adj_entry
+        if adj_entry.col_next is not None:
+            adj_entry.col_next.col_prev = adj_entry
+
+    def clear_color(self, adj_entry, color):
+        if adj_entry.col_prev is None:
+            self.adj_color[color] = adj_entry.col_next
+        else:
+            adj_entry.col_prev.col_next = adj_entry.col_next
+        if adj_entry.col_next is not None:
+            adj_entry.col_next.col_prev = adj_entry.col_prev
+
+    def iter_neighbors(self):
+        adj_node = self.adj_list
+        while adj_node is not None:
+            yield adj_node
+            adj_node = adj_node.next
+
+    def iter_neighbors_color(self, color):
+        adj_color_node = self.adj_color[color]
+        while adj_color_node is not None:
+            yield adj_color_node.node_id
+            adj_color_node = adj_color_node.col_next
+
+
+class _AdjEntry:
+    __slots__ = ["node_id", "next", "mate", "col_next", "col_prev"]
+
+    def __init__(self, node_id):
+        self.node_id = node_id
+        self.next = None
+        self.mate = None
+        self.col_next = None
+        self.col_prev = None
+
+    def __repr__(self):
+        col_next = None if self.col_next is None else self.col_next.node_id
+        col_prev = None if self.col_prev is None else self.col_prev.node_id
+        return (
+            f"Node_id: {self.node_id}, Next: ({self.next}), "
+            f"Mate: ({self.mate.node_id}), "
+            f"col_next: ({col_next}), col_prev: ({col_prev})"
+        )
+
+
def _greedy_coloring_with_interchange(G, nodes):
    """Return a coloring for `original_graph` using interchange approach

    This procedure is an adaption of the algorithm described by [1]_,
    and is an implementation of coloring with interchange. Please be
    advised, that the datastructures used are rather complex because
    they are optimized to minimize the time spent identifying
    subcomponents of the graph, which are possible candidates for color
    interchange.

    Parameters
    ----------
    G : NetworkX graph
        The graph to be colored

    nodes : list
        nodes ordered using the strategy of choice

    Returns
    -------
    dict :
        A dictionary keyed by node to a color value

    References
    ----------
    .. [1] Maciej M. Syslo, Narsingh Deo, Janusz S. Kowalik,
       Discrete Optimization Algorithms with Pascal Programs, 415-424, 1983.
       ISBN 0-486-45353-7.
    """
    n = len(G)

    # One _Node record per vertex; all colors start at -1 ("uncolored").
    graph = {node: _Node(node, n) for node in G}

    # Mirror each edge as two mated adjacency entries, each pushed onto
    # the front of its endpoint's intrusive adjacency list.
    for node1, node2 in G.edges():
        adj_entry1 = _AdjEntry(node2)
        adj_entry2 = _AdjEntry(node1)
        adj_entry1.mate = adj_entry2
        adj_entry2.mate = adj_entry1
        node1_head = graph[node1].adj_list
        adj_entry1.next = node1_head
        graph[node1].adj_list = adj_entry1
        node2_head = graph[node2].adj_list
        adj_entry2.next = node2_head
        graph[node2].adj_list = adj_entry2

    # k tracks the highest color index used so far.
    k = 0
    for node in nodes:
        # Find the smallest possible, unused color
        neighbors = graph[node].iter_neighbors()
        col_used = {graph[adj_node.node_id].color for adj_node in neighbors}
        # -1 marks uncolored neighbors; it is not a real color.
        col_used.discard(-1)
        k1 = next(itertools.dropwhile(lambda x: x in col_used, itertools.count()))

        # k1 is now the lowest available color
        if k1 > k:
            # A brand-new color would be needed. Before opening one, try
            # to free an existing color by swapping the colors of a
            # two-colored subcomponent (the "interchange" of [1]_).
            connected = True
            visited = set()
            col1 = -1
            col2 = -1
            while connected and col1 < k:
                col1 += 1
                neighbor_cols = graph[node].iter_neighbors_color(col1)
                col1_adj = list(neighbor_cols)

                col2 = col1
                while connected and col2 < k:
                    col2 += 1
                    # Breadth-first walk of the component of the
                    # (col1, col2)-colored subgraph reachable from
                    # ``node``'s col1-neighbors.
                    visited = set(col1_adj)
                    frontier = list(col1_adj)
                    i = 0
                    while i < len(frontier):
                        search_node = frontier[i]
                        i += 1
                        col_opp = col2 if graph[search_node].color == col1 else col1
                        neighbor_cols = graph[search_node].iter_neighbors_color(col_opp)

                        for neighbor in neighbor_cols:
                            if neighbor not in visited:
                                visited.add(neighbor)
                                frontier.append(neighbor)

                    # Search if node is not adj to any col2 vertex
                    connected = (
                        len(
                            visited.intersection(graph[node].iter_neighbors_color(col2))
                        )
                        > 0
                    )

            # If connected is false then we can swap !!!
            if not connected:
                # Update all the nodes in the component
                for search_node in visited:
                    graph[search_node].color = (
                        col2 if graph[search_node].color == col1 else col1
                    )
                    # Swap the two per-color adjacency lists to match.
                    col2_adj = graph[search_node].adj_color[col2]
                    graph[search_node].adj_color[col2] = graph[search_node].adj_color[
                        col1
                    ]
                    graph[search_node].adj_color[col1] = col2_adj

                # Update all the neighboring nodes
                for search_node in visited:
                    col = graph[search_node].color
                    col_opp = col1 if col == col2 else col2
                    for adj_node in graph[search_node].iter_neighbors():
                        if graph[adj_node.node_id].color != col_opp:
                            # Direct reference to entry
                            adj_mate = adj_node.mate
                            graph[adj_node.node_id].clear_color(adj_mate, col_opp)
                            graph[adj_node.node_id].assign_color(adj_mate, col)
                # The swap freed col1 for ``node``.
                k1 = col1

        # We can color this node color k1
        graph[node].color = k1
        k = max(k1, k)

        # Update the neighbors of this node
        for adj_node in graph[node].iter_neighbors():
            adj_mate = adj_node.mate
            graph[adj_node.node_id].assign_color(adj_mate, k1)

    return {node.node_id: node.color for node in graph.values()}
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/test_coloring.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/test_coloring.py
new file mode 100644
index 00000000..1e5a913c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/test_coloring.py
@@ -0,0 +1,863 @@
+"""Greedy coloring test suite."""
+
+import itertools
+
+import pytest
+
+import networkx as nx
+
# Direct handles to the equitable-coloring validity helpers exercised below.
is_coloring = nx.algorithms.coloring.equitable_coloring.is_coloring
is_equitable = nx.algorithms.coloring.equitable_coloring.is_equitable


# Every strategy name accepted by ``nx.coloring.greedy_color``.
ALL_STRATEGIES = [
    "largest_first",
    "random_sequential",
    "smallest_last",
    "independent_set",
    "connected_sequential_bfs",
    "connected_sequential_dfs",
    "connected_sequential",
    "saturation_largest_first",
    "DSATUR",
]

# List of strategies where interchange=True results in an error
INTERCHANGE_INVALID = ["independent_set", "saturation_largest_first", "DSATUR"]
+
+
class TestColoring:
    """Tests for greedy and equitable graph coloring.

    Fix over the previous revision: ``test_basic_cases`` had a dead outer
    ``for interchange in [True, False]`` loop whose variable was never
    used, so every basic case was checked twice; the loop is removed.
    """

    def test_basic_cases(self):
        def check_basic_case(graph_func, n_nodes, strategy, interchange):
            graph = graph_func()
            coloring = nx.coloring.greedy_color(
                graph, strategy=strategy, interchange=interchange
            )
            assert verify_length(coloring, n_nodes)
            assert verify_coloring(graph, coloring)

        for graph_func, n_nodes in BASIC_TEST_CASES.items():
            for strategy in ALL_STRATEGIES:
                # Every strategy without interchange; interchange only
                # where it is supported.
                check_basic_case(graph_func, n_nodes, strategy, False)
                if strategy not in INTERCHANGE_INVALID:
                    check_basic_case(graph_func, n_nodes, strategy, True)

    def test_special_cases(self):
        def check_special_case(strategy, graph_func, interchange, colors):
            graph = graph_func()
            coloring = nx.coloring.greedy_color(
                graph, strategy=strategy, interchange=interchange
            )
            # A scalar expected color count is treated as a 1-element list.
            if not hasattr(colors, "__len__"):
                colors = [colors]
            assert any(verify_length(coloring, n_colors) for n_colors in colors)
            assert verify_coloring(graph, coloring)

        for strategy, arglist in SPECIAL_TEST_CASES.items():
            for args in arglist:
                check_special_case(strategy, args[0], args[1], args[2])

    def test_interchange_invalid(self):
        # interchange=True must be rejected for these strategies.
        graph = one_node_graph()
        for strategy in INTERCHANGE_INVALID:
            pytest.raises(
                nx.NetworkXPointlessConcept,
                nx.coloring.greedy_color,
                graph,
                strategy=strategy,
                interchange=True,
            )

    def test_bad_inputs(self):
        graph = one_node_graph()
        pytest.raises(
            nx.NetworkXError,
            nx.coloring.greedy_color,
            graph,
            strategy="invalid strategy",
        )

    def test_strategy_as_function(self):
        # Passing the strategy callable must match passing its name.
        graph = lf_shc()
        colors_1 = nx.coloring.greedy_color(graph, "largest_first")
        colors_2 = nx.coloring.greedy_color(graph, nx.coloring.strategy_largest_first)
        assert colors_1 == colors_2

    def test_seed_argument(self):
        graph = lf_shc()
        rs = nx.coloring.strategy_random_sequential
        c1 = nx.coloring.greedy_color(graph, lambda g, c: rs(g, c, seed=1))
        for u, v in graph.edges:
            assert c1[u] != c1[v]

    def test_is_coloring(self):
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2)])
        coloring = {0: 0, 1: 1, 2: 0}
        assert is_coloring(G, coloring)

        coloring[0] = 1
        assert not is_coloring(G, coloring)
        assert not is_equitable(G, coloring)

    def test_is_equitable(self):
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2)])
        coloring = {0: 0, 1: 1, 2: 0}
        assert is_equitable(G, coloring)

        G.add_edges_from([(2, 3), (2, 4), (2, 5)])
        coloring[3] = 1
        coloring[4] = 1
        coloring[5] = 1
        assert is_coloring(G, coloring)
        assert not is_equitable(G, coloring)

    def test_num_colors(self):
        # Too few colors for a star graph must raise.
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (0, 3)])
        pytest.raises(nx.NetworkXAlgorithmError, nx.coloring.equitable_color, G, 2)

    def test_equitable_color(self):
        G = nx.fast_gnp_random_graph(n=10, p=0.2, seed=42)
        coloring = nx.coloring.equitable_color(G, max_degree(G) + 1)
        assert is_equitable(G, coloring)

    def test_equitable_color_empty(self):
        G = nx.empty_graph()
        coloring = nx.coloring.equitable_color(G, max_degree(G) + 1)
        assert is_equitable(G, coloring)

    def test_equitable_color_large(self):
        G = nx.fast_gnp_random_graph(100, 0.1, seed=42)
        coloring = nx.coloring.equitable_color(G, max_degree(G) + 1)
        assert is_equitable(G, coloring, num_colors=max_degree(G) + 1)

    def test_case_V_plus_not_in_A_cal(self):
        # Hand crafted case to avoid the easy case.
        L = {
            0: [2, 5],
            1: [3, 4],
            2: [0, 8],
            3: [1, 7],
            4: [1, 6],
            5: [0, 6],
            6: [4, 5],
            7: [3],
            8: [2],
        }

        F = {
            # Color 0
            0: 0,
            1: 0,
            # Color 1
            2: 1,
            3: 1,
            4: 1,
            5: 1,
            # Color 2
            6: 2,
            7: 2,
            8: 2,
        }

        C = nx.algorithms.coloring.equitable_coloring.make_C_from_F(F)
        N = nx.algorithms.coloring.equitable_coloring.make_N_from_L_C(L, C)
        H = nx.algorithms.coloring.equitable_coloring.make_H_from_C_N(C, N)

        nx.algorithms.coloring.equitable_coloring.procedure_P(
            V_minus=0, V_plus=1, N=N, H=H, F=F, C=C, L=L
        )
        check_state(L=L, N=N, H=H, F=F, C=C)

    def test_cast_no_solo(self):
        L = {
            0: [8, 9],
            1: [10, 11],
            2: [8],
            3: [9],
            4: [10, 11],
            5: [8],
            6: [9],
            7: [10, 11],
            8: [0, 2, 5],
            9: [0, 3, 6],
            10: [1, 4, 7],
            11: [1, 4, 7],
        }

        F = {0: 0, 1: 0, 2: 2, 3: 2, 4: 2, 5: 3, 6: 3, 7: 3, 8: 1, 9: 1, 10: 1, 11: 1}

        C = nx.algorithms.coloring.equitable_coloring.make_C_from_F(F)
        N = nx.algorithms.coloring.equitable_coloring.make_N_from_L_C(L, C)
        H = nx.algorithms.coloring.equitable_coloring.make_H_from_C_N(C, N)

        nx.algorithms.coloring.equitable_coloring.procedure_P(
            V_minus=0, V_plus=1, N=N, H=H, F=F, C=C, L=L
        )
        check_state(L=L, N=N, H=H, F=F, C=C)

    def test_hard_prob(self):
        # Tests for two levels of recursion.
        num_colors, s = 5, 5

        G = nx.Graph()
        G.add_edges_from(
            [
                (0, 10),
                (0, 11),
                (0, 12),
                (0, 23),
                (10, 4),
                (10, 9),
                (10, 20),
                (11, 4),
                (11, 8),
                (11, 16),
                (12, 9),
                (12, 22),
                (12, 23),
                (23, 7),
                (1, 17),
                (1, 18),
                (1, 19),
                (1, 24),
                (17, 5),
                (17, 13),
                (17, 22),
                (18, 5),
                (19, 5),
                (19, 6),
                (19, 8),
                (24, 7),
                (24, 16),
                (2, 4),
                (2, 13),
                (2, 14),
                (2, 15),
                (4, 6),
                (13, 5),
                (13, 21),
                (14, 6),
                (14, 15),
                (15, 6),
                (15, 21),
                (3, 16),
                (3, 20),
                (3, 21),
                (3, 22),
                (16, 8),
                (20, 8),
                (21, 9),
                (22, 7),
            ]
        )
        F = {node: node // s for node in range(num_colors * s)}
        F[s - 1] = num_colors - 1

        params = make_params_from_graph(G=G, F=F)

        nx.algorithms.coloring.equitable_coloring.procedure_P(
            V_minus=0, V_plus=num_colors - 1, **params
        )
        check_state(**params)

    def test_hardest_prob(self):
        # Tests for two levels of recursion.
        num_colors, s = 10, 4

        G = nx.Graph()
        G.add_edges_from(
            [
                (0, 19),
                (0, 24),
                (0, 29),
                (0, 30),
                (0, 35),
                (19, 3),
                (19, 7),
                (19, 9),
                (19, 15),
                (19, 21),
                (19, 24),
                (19, 30),
                (19, 38),
                (24, 5),
                (24, 11),
                (24, 13),
                (24, 20),
                (24, 30),
                (24, 37),
                (24, 38),
                (29, 6),
                (29, 10),
                (29, 13),
                (29, 15),
                (29, 16),
                (29, 17),
                (29, 20),
                (29, 26),
                (30, 6),
                (30, 10),
                (30, 15),
                (30, 22),
                (30, 23),
                (30, 39),
                (35, 6),
                (35, 9),
                (35, 14),
                (35, 18),
                (35, 22),
                (35, 23),
                (35, 25),
                (35, 27),
                (1, 20),
                (1, 26),
                (1, 31),
                (1, 34),
                (1, 38),
                (20, 4),
                (20, 8),
                (20, 14),
                (20, 18),
                (20, 28),
                (20, 33),
                (26, 7),
                (26, 10),
                (26, 14),
                (26, 18),
                (26, 21),
                (26, 32),
                (26, 39),
                (31, 5),
                (31, 8),
                (31, 13),
                (31, 16),
                (31, 17),
                (31, 21),
                (31, 25),
                (31, 27),
                (34, 7),
                (34, 8),
                (34, 13),
                (34, 18),
                (34, 22),
                (34, 23),
                (34, 25),
                (34, 27),
                (38, 4),
                (38, 9),
                (38, 12),
                (38, 14),
                (38, 21),
                (38, 27),
                (2, 3),
                (2, 18),
                (2, 21),
                (2, 28),
                (2, 32),
                (2, 33),
                (2, 36),
                (2, 37),
                (2, 39),
                (3, 5),
                (3, 9),
                (3, 13),
                (3, 22),
                (3, 23),
                (3, 25),
                (3, 27),
                (18, 6),
                (18, 11),
                (18, 15),
                (18, 39),
                (21, 4),
                (21, 10),
                (21, 14),
                (21, 36),
                (28, 6),
                (28, 10),
                (28, 14),
                (28, 16),
                (28, 17),
                (28, 25),
                (28, 27),
                (32, 5),
                (32, 10),
                (32, 12),
                (32, 16),
                (32, 17),
                (32, 22),
                (32, 23),
                (33, 7),
                (33, 10),
                (33, 12),
                (33, 16),
                (33, 17),
                (33, 25),
                (33, 27),
                (36, 5),
                (36, 8),
                (36, 15),
                (36, 16),
                (36, 17),
                (36, 25),
                (36, 27),
                (37, 5),
                (37, 11),
                (37, 15),
                (37, 16),
                (37, 17),
                (37, 22),
                (37, 23),
                (39, 7),
                (39, 8),
                (39, 15),
                (39, 22),
                (39, 23),
            ]
        )
        F = {node: node // s for node in range(num_colors * s)}
        F[s - 1] = num_colors - 1  # V- = 0, V+ = num_colors - 1

        params = make_params_from_graph(G=G, F=F)

        nx.algorithms.coloring.equitable_coloring.procedure_P(
            V_minus=0, V_plus=num_colors - 1, **params
        )
        check_state(**params)

    def test_strategy_saturation_largest_first(self):
        def color_remaining_nodes(
            G,
            colored_nodes,
            full_color_assignment=None,
            nodes_to_add_between_calls=1,
        ):
            color_assignments = []
            aux_colored_nodes = colored_nodes.copy()

            node_iterator = nx.algorithms.coloring.greedy_coloring.strategy_saturation_largest_first(
                G, aux_colored_nodes
            )

            for u in node_iterator:
                # Set to keep track of colors of neighbors
                nbr_colors = {
                    aux_colored_nodes[v] for v in G[u] if v in aux_colored_nodes
                }
                # Find the first unused color.
                for color in itertools.count():
                    if color not in nbr_colors:
                        break
                aux_colored_nodes[u] = color
                color_assignments.append((u, color))

                # Color nodes between iterations
                for _ in range(nodes_to_add_between_calls - 1):
                    if not len(color_assignments) + len(colored_nodes) >= len(
                        full_color_assignment
                    ):
                        full_color_assignment_node, color = full_color_assignment[
                            len(color_assignments) + len(colored_nodes)
                        ]

                        # Assign the new color to the current node.
                        aux_colored_nodes[full_color_assignment_node] = color
                        color_assignments.append((full_color_assignment_node, color))

            return color_assignments, aux_colored_nodes

        for G, _, _ in SPECIAL_TEST_CASES["saturation_largest_first"]:
            G = G()

            # Check that function still works when nodes are colored between iterations
            for nodes_to_add_between_calls in range(1, 5):
                # Get a full color assignment, (including the order in which nodes were colored)
                colored_nodes = {}
                full_color_assignment, full_colored_nodes = color_remaining_nodes(
                    G, colored_nodes
                )

                # For each node in the color assignment, add it to colored_nodes and re-run the function
                for ind, (node, color) in enumerate(full_color_assignment):
                    colored_nodes[node] = color

                    (
                        partial_color_assignment,
                        partial_colored_nodes,
                    ) = color_remaining_nodes(
                        G,
                        colored_nodes,
                        full_color_assignment=full_color_assignment,
                        nodes_to_add_between_calls=nodes_to_add_between_calls,
                    )

                    # Check that the color assignment and order of remaining nodes are the same
                    assert full_color_assignment[ind + 1 :] == partial_color_assignment
                    assert full_colored_nodes == partial_colored_nodes
+
+
+#  ############################  Utility functions ############################
def verify_coloring(graph, coloring):
    """Return True iff ``coloring`` covers every node of ``graph`` and no
    two adjacent nodes share a color."""
    for node in graph.nodes():
        if node not in coloring:
            return False
        own = coloring[node]
        if any(coloring[nbr] == own for nbr in graph.neighbors(node)):
            return False
    return True
+
+
def verify_length(coloring, expected):
    """Return True iff ``coloring`` uses exactly ``expected`` colors."""
    return len(dict_to_sets(coloring)) == expected


def dict_to_sets(colors):
    """Convert a node->color dict into a list of per-color node sets."""
    if not colors:
        return []

    groups = [set() for _ in range(max(colors.values()) + 1)]
    for node, color in colors.items():
        groups[color].add(node)

    return groups
+
+
+#  ############################  Graph Generation ############################
+
+
def empty_graph():
    """Graph with no nodes and no edges."""
    return nx.Graph()


def one_node_graph():
    """Single isolated node."""
    G = nx.Graph()
    G.add_nodes_from([1])
    return G


def two_node_graph():
    """Two nodes joined by a single edge."""
    G = nx.Graph()
    G.add_nodes_from([1, 2])
    G.add_edges_from([(1, 2)])
    return G


def three_node_clique():
    """Complete graph on three nodes (a triangle)."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3])
    G.add_edges_from([(1, 2), (1, 3), (2, 3)])
    return G


def disconnected():
    """Two disjoint three-node paths."""
    G = nx.Graph()
    G.add_edges_from([(1, 2), (2, 3), (4, 5), (5, 6)])
    return G
+
+
def rs_shc():
    """Path on four nodes."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4])
    G.add_edges_from([(1, 2), (2, 3), (3, 4)])
    return G


def slf_shc():
    """Seven-node, ten-edge fixture."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 5, 6, 7])
    G.add_edges_from(
        [(1, 2), (1, 5), (1, 6), (2, 3), (2, 7), (3, 4), (3, 7), (4, 5), (4, 6), (5, 6)]
    )
    return G


def slf_hc():
    """Eight-node, twelve-edge fixture."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
    G.add_edges_from(
        [
            (1, 2),
            (1, 3),
            (1, 4),
            (1, 5),
            (2, 3),
            (2, 4),
            (2, 6),
            (5, 7),
            (5, 8),
            (6, 7),
            (6, 8),
            (7, 8),
        ]
    )
    return G
+
+
def lf_shc():
    """Path on six nodes (listed out of numeric order)."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 5, 6])
    G.add_edges_from([(6, 1), (1, 4), (4, 3), (3, 2), (2, 5)])
    return G


def lf_hc():
    """Seven-node, eleven-edge fixture."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 5, 6, 7])
    G.add_edges_from(
        [
            (1, 7),
            (1, 6),
            (1, 3),
            (1, 4),
            (7, 2),
            (2, 6),
            (2, 3),
            (2, 5),
            (5, 3),
            (5, 4),
            (4, 3),
        ]
    )
    return G
+
+
def sl_shc():
    """Six-node, nine-edge fixture (two stacked triangles)."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 5, 6])
    G.add_edges_from(
        [(1, 2), (1, 3), (2, 3), (1, 4), (2, 5), (3, 6), (4, 5), (4, 6), (5, 6)]
    )
    return G


def sl_hc():
    """Eight-node, sixteen-edge fixture."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
    G.add_edges_from(
        [
            (1, 2),
            (1, 3),
            (1, 5),
            (1, 7),
            (2, 3),
            (2, 4),
            (2, 8),
            (8, 4),
            (8, 6),
            (8, 7),
            (7, 5),
            (7, 6),
            (3, 4),
            (4, 6),
            (6, 5),
            (5, 3),
        ]
    )
    return G
+
+
def gis_shc():
    """Path on four nodes."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4])
    G.add_edges_from([(1, 2), (2, 3), (3, 4)])
    return G


def gis_hc():
    """Six-node, five-edge fixture (two stars joined by an edge)."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 5, 6])
    G.add_edges_from([(1, 5), (2, 5), (3, 6), (4, 6), (5, 6)])
    return G


def cs_shc():
    """Five-node, seven-edge fixture."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 5])
    G.add_edges_from([(1, 2), (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (4, 5)])
    return G
+
+
def rsi_shc():
    """Six-node, eight-edge fixture."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 5, 6])
    G.add_edges_from(
        [(1, 2), (1, 5), (1, 6), (2, 3), (3, 4), (4, 5), (4, 6), (5, 6)]
    )
    return G


def lfi_shc():
    """Seven-node, ten-edge fixture (same edges as ``slf_shc``)."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 5, 6, 7])
    G.add_edges_from(
        [(1, 2), (1, 5), (1, 6), (2, 3), (2, 7), (3, 4), (3, 7), (4, 5), (4, 6), (5, 6)]
    )
    return G


def lfi_hc():
    """Nine-node, fourteen-edge fixture."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9])
    G.add_edges_from(
        [
            (1, 2),
            (1, 5),
            (1, 6),
            (1, 7),
            (2, 3),
            (2, 8),
            (2, 9),
            (3, 4),
            (3, 8),
            (3, 9),
            (4, 5),
            (4, 6),
            (4, 7),
            (5, 6),
        ]
    )
    return G
+
+
def sli_shc():
    """Seven-node, eleven-edge fixture."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 5, 6, 7])
    G.add_edges_from(
        [
            (1, 2),
            (1, 3),
            (1, 5),
            (1, 7),
            (2, 3),
            (2, 6),
            (3, 4),
            (4, 5),
            (4, 6),
            (5, 7),
            (6, 7),
        ]
    )
    return G


def sli_hc():
    """Nine-node, twenty-two-edge fixture."""
    G = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9])
    G.add_edges_from(
        [
            (1, 2),
            (1, 3),
            (1, 4),
            (1, 5),
            (2, 3),
            (2, 7),
            (2, 8),
            (2, 9),
            (3, 6),
            (3, 7),
            (3, 9),
            (4, 5),
            (4, 6),
            (4, 8),
            (4, 9),
            (5, 6),
            (5, 7),
            (5, 8),
            (6, 7),
            (6, 9),
            (7, 8),
            (8, 9),
        ]
    )
    return G
+
+
# --------------------------------------------------------------------------
# Basic tests for all strategies
# For each basic graph function, specify the number of expected colors.
# (The value is the exact color count every strategy should produce.)
BASIC_TEST_CASES = {
    empty_graph: 0,
    one_node_graph: 1,
    two_node_graph: 2,
    disconnected: 2,
    three_node_clique: 3,
}
+
+
# --------------------------------------------------------------------------
# Special test cases. Each strategy has a list of tuples of the form
# (graph function, interchange, valid # of colors)
# A scalar third element means exactly that many colors; a tuple means
# any of the listed counts is acceptable.
SPECIAL_TEST_CASES = {
    "random_sequential": [
        (rs_shc, False, (2, 3)),
        (rs_shc, True, 2),
        (rsi_shc, True, (3, 4)),
    ],
    "saturation_largest_first": [(slf_shc, False, (3, 4)), (slf_hc, False, 4)],
    "largest_first": [
        (lf_shc, False, (2, 3)),
        (lf_hc, False, 4),
        (lf_shc, True, 2),
        (lf_hc, True, 3),
        (lfi_shc, True, (3, 4)),
        (lfi_hc, True, 4),
    ],
    "smallest_last": [
        (sl_shc, False, (3, 4)),
        (sl_hc, False, 5),
        (sl_shc, True, 3),
        (sl_hc, True, 4),
        (sli_shc, True, (3, 4)),
        (sli_hc, True, 5),
    ],
    "independent_set": [(gis_shc, False, (2, 3)), (gis_hc, False, 3)],
    "connected_sequential": [(cs_shc, False, (3, 4)), (cs_shc, True, 3)],
    "connected_sequential_dfs": [(cs_shc, False, (3, 4))],
}
+
+
+# --------------------------------------------------------------------------
+# Helper functions to test
+# (graph function, interchange, valid # of colors)
+
+
def check_state(L, N, H, F, C):
    """Assert the invariants of an equitable-coloring state.

    ``L`` is the adjacency-list dict, ``F`` the node->color map, ``C`` the
    color->nodes map, and ``N``/``H`` the derived counter tables.
    """
    s = len(C[0])
    num_colors = len(C)

    for u in L:
        # Every node must miss at least one color, adjacency must be
        # symmetric, and no edge may be monochromatic.
        assert len(L[u]) < num_colors
        for v in L[u]:
            assert u in L[v]
            assert F[u] != F[v]
    # Every color class holds exactly s nodes.
    assert all(len(C[x]) == s for x in C)
    # Counter tables: H entries are nonnegative, and no node counts
    # neighbors of its own color.
    assert all(H[(c1, c2)] >= 0 for c1 in C for c2 in C)
    assert all(N[(u, F[u])] == 0 for u in F)
+
+
def max_degree(G):
    """Get the maximum degree of any node in G (0 for an empty graph)."""
    if len(G.nodes) == 0:
        return 0
    return max(G.degree(node) for node in G.nodes)
+
+
def make_params_from_graph(G, F):
    """Returns {N, L, H, C} from the given graph."""
    num_nodes = len(G)
    # NOTE(review): assumes the nodes of ``G`` are exactly 0..num_nodes-1,
    # which holds for the hand-built graphs in this file — confirm before
    # reusing elsewhere.
    L = {u: [] for u in range(num_nodes)}
    for u, v in G.edges:
        L[u].append(v)
        L[v].append(u)

    # Derive the color classes (C) and the N/H counter tables from F.
    C = nx.algorithms.coloring.equitable_coloring.make_C_from_F(F)
    N = nx.algorithms.coloring.equitable_coloring.make_N_from_L_C(L, C)
    H = nx.algorithms.coloring.equitable_coloring.make_H_from_C_N(C, N)

    return {"N": N, "F": F, "C": C, "H": H, "L": L}
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/communicability_alg.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/communicability_alg.py
new file mode 100644
index 00000000..dea156b6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/communicability_alg.py
@@ -0,0 +1,163 @@
+"""
+Communicability.
+"""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["communicability", "communicability_exp"]
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def communicability(G):
    r"""Returns communicability between all pairs of nodes in G.

    The communicability between pairs of nodes in G is the sum of
    walks of different lengths starting at node u and ending at node v.

    Parameters
    ----------
    G: graph

    Returns
    -------
    comm: dictionary of dictionaries
        Dictionary of dictionaries keyed by nodes with communicability
        as the value.

    Raises
    ------
    NetworkXError
       If the graph is not undirected and simple.

    See Also
    --------
    communicability_exp:
       Communicability between all pairs of nodes in G  using spectral
       decomposition.
    communicability_betweenness_centrality:
       Communicability betweenness centrality for each node in G.

    Notes
    -----
    This algorithm uses a spectral decomposition of the adjacency matrix.
    Let G=(V,E) be a simple undirected graph.  Using the connection between
    the powers  of the adjacency matrix and the number of walks in the graph,
    the communicability  between nodes `u` and `v` based on the graph spectrum
    is [1]_

    .. math::
        C(u,v)=\sum_{j=1}^{n}\phi_{j}(u)\phi_{j}(v)e^{\lambda_{j}},

    where `\phi_{j}(u)` is the `u\rm{th}` element of the `j\rm{th}` orthonormal
    eigenvector of the adjacency matrix associated with the eigenvalue
    `\lambda_{j}`.

    References
    ----------
    .. [1] Ernesto Estrada, Naomichi Hatano,
       "Communicability in complex networks",
       Phys. Rev. E 77, 036111 (2008).
       https://arxiv.org/abs/0707.0756

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> c = nx.communicability(G)
    """
    import numpy as np

    nodelist = list(G)  # ordering of nodes in matrix
    A = nx.to_numpy_array(G, nodelist)
    # convert to 0-1 matrix (edge weights are deliberately ignored)
    A[A != 0.0] = 1
    w, vec = np.linalg.eigh(A)
    expw = np.exp(w)
    # The communicability matrix is V diag(e^w) V^T.  Computing it as one
    # matrix product replaces the original O(n^3) pure-Python loop over all
    # node pairs and eigenvalues with a single vectorized BLAS call.
    comm_matrix = (vec * expw) @ vec.T
    mapping = dict(zip(nodelist, range(len(nodelist))))
    # Unpack the matrix into the documented dict-of-dicts return format.
    c = {}
    for u in G:
        c[u] = {}
        for v in G:
            c[u][v] = float(comm_matrix[mapping[u], mapping[v]])
    return c
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def communicability_exp(G):
    r"""Returns communicability between all pairs of nodes in G.

    Communicability between a pair of nodes (u, v) in G is the sum of
    walks of different lengths starting at node u and ending at node v.

    Parameters
    ----------
    G: graph

    Returns
    -------
    comm: dictionary of dictionaries
        Dictionary of dictionaries keyed by nodes with communicability
        as the value.

    Raises
    ------
    NetworkXError
        If the graph is not undirected and simple.

    See Also
    --------
    communicability:
       Communicability between pairs of nodes in G.
    communicability_betweenness_centrality:
       Communicability betweenness centrality for each node in G.

    Notes
    -----
    This algorithm uses matrix exponentiation of the adjacency matrix.

    Let G=(V,E) be a simple undirected graph.  Using the connection between
    the powers of the adjacency matrix and the number of walks in the graph,
    the communicability between nodes u and v is [1]_,

    .. math::
        C(u,v) = (e^A)_{uv},

    where `A` is the adjacency matrix of G.

    References
    ----------
    .. [1] Ernesto Estrada, Naomichi Hatano,
       "Communicability in complex networks",
       Phys. Rev. E 77, 036111 (2008).
       https://arxiv.org/abs/0707.0756

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> c = nx.communicability_exp(G)
    """
    import scipy as sp

    order = list(G)  # fixes the row/column ordering of the matrix
    adjacency = nx.to_numpy_array(G, order)
    # binarize: any nonzero weight counts as a single edge
    adjacency[adjacency != 0.0] = 1
    # the matrix exponential of A is the communicability matrix
    exp_adj = sp.linalg.expm(adjacency)
    index = {node: i for i, node in enumerate(order)}
    return {u: {v: float(exp_adj[index[u], index[v]]) for v in G} for u in G}
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__init__.py
new file mode 100644
index 00000000..4dfa8481
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__init__.py
@@ -0,0 +1,26 @@
+"""Functions for computing and measuring community structure.
+
+The ``community`` subpackage can be accessed by using :mod:`networkx.community`, then accessing the
+functions as attributes of ``community``. For example::
+
+    >>> import networkx as nx
+    >>> G = nx.barbell_graph(5, 1)
+    >>> communities_generator = nx.community.girvan_newman(G)
+    >>> top_level_communities = next(communities_generator)
+    >>> next_level_communities = next(communities_generator)
+    >>> sorted(map(sorted, next_level_communities))
+    [[0, 1, 2, 3, 4], [5], [6, 7, 8, 9, 10]]
+
+"""
+
+from networkx.algorithms.community.asyn_fluid import *
+from networkx.algorithms.community.centrality import *
+from networkx.algorithms.community.divisive import *
+from networkx.algorithms.community.kclique import *
+from networkx.algorithms.community.kernighan_lin import *
+from networkx.algorithms.community.label_propagation import *
+from networkx.algorithms.community.lukes import *
+from networkx.algorithms.community.modularity_max import *
+from networkx.algorithms.community.quality import *
+from networkx.algorithms.community.community_utils import *
+from networkx.algorithms.community.louvain import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/asyn_fluid.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/asyn_fluid.py
new file mode 100644
index 00000000..fea72c1b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/asyn_fluid.py
@@ -0,0 +1,151 @@
+"""Asynchronous Fluid Communities algorithm for community detection."""
+
+from collections import Counter
+
+import networkx as nx
+from networkx.algorithms.components import is_connected
+from networkx.exception import NetworkXError
+from networkx.utils import groups, not_implemented_for, py_random_state
+
+__all__ = ["asyn_fluidc"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@py_random_state(3)
+@nx._dispatchable
+def asyn_fluidc(G, k, max_iter=100, seed=None):
+    """Returns communities in `G` as detected by Fluid Communities algorithm.
+
+    The asynchronous fluid communities algorithm is described in
+    [1]_. The algorithm is based on the simple idea of fluids interacting
+    in an environment, expanding and pushing each other. Its initialization is
+    random, so found communities may vary on different executions.
+
+    The algorithm proceeds as follows. First each of the initial k communities
+    is initialized in a random vertex in the graph. Then the algorithm iterates
+    over all vertices in a random order, updating the community of each vertex
+    based on its own community and the communities of its neighbors. This
+    process is performed several times until convergence.
+    At all times, each community has a total density of 1, which is equally
+    distributed among the vertices it contains. If a vertex changes of
+    community, vertex densities of affected communities are adjusted
+    immediately. When a complete iteration over all vertices is done, such that
+    no vertex changes the community it belongs to, the algorithm has converged
+    and returns.
+
+    This is the original version of the algorithm described in [1]_.
+    Unfortunately, it does not support weighted graphs yet.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Graph must be simple and undirected.
+
+    k : integer
+        The number of communities to be found.
+
+    max_iter : integer
+        The number of maximum iterations allowed. By default 100.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    communities : iterable
+        Iterable of communities given as sets of nodes.
+
+    Notes
+    -----
+    k variable is not an optional argument.
+
+    References
+    ----------
+    .. [1] Parés F., Garcia-Gasulla D. et al. "Fluid Communities: A
+       Competitive and Highly Scalable Community Detection Algorithm".
+       [https://arxiv.org/pdf/1703.09307.pdf].
+    """
+    # Initial checks
+    if not isinstance(k, int):
+        raise NetworkXError("k must be an integer.")
+    if not k > 0:
+        raise NetworkXError("k must be greater than 0.")
+    if not is_connected(G):
+        raise NetworkXError("Fluid Communities require connected Graphs.")
+    if len(G) < k:
+        raise NetworkXError("k cannot be bigger than the number of nodes.")
+    # Initialization
+    # density[com] is the per-vertex density of community `com`:
+    # max_density / (number of vertices currently in com).
+    max_density = 1.0
+    vertices = list(G)
+    seed.shuffle(vertices)
+    # Seed each of the k communities at a distinct random vertex;
+    # all other vertices start with no community assigned.
+    communities = {n: i for i, n in enumerate(vertices[:k])}
+    density = {}
+    com_to_numvertices = {}
+    for vertex in communities:
+        com_to_numvertices[communities[vertex]] = 1
+        density[communities[vertex]] = max_density
+    # Set up control variables and start iterating
+    iter_count = 0
+    cont = True
+    while cont:
+        cont = False
+        iter_count += 1
+        # Loop over all vertices in graph in a random order
+        vertices = list(G)
+        seed.shuffle(vertices)
+        for vertex in vertices:
+            # Updating rule
+            com_counter = Counter()
+            # Take into account self vertex community
+            try:
+                com_counter.update({communities[vertex]: density[communities[vertex]]})
+            except KeyError:
+                # vertex has not been assigned to a community yet
+                pass
+            # Gather neighbor vertex communities
+            for v in G[vertex]:
+                try:
+                    com_counter.update({communities[v]: density[communities[v]]})
+                except KeyError:
+                    # unassigned neighbors contribute nothing
+                    continue
+            # Check which is the community with highest density
+            # -1 is a sentinel meaning "no new community chosen yet"
+            new_com = -1
+            if len(com_counter.keys()) > 0:
+                max_freq = max(com_counter.values())
+                # Near-ties (within 1e-4) all count as candidate winners.
+                best_communities = [
+                    com
+                    for com, freq in com_counter.items()
+                    if (max_freq - freq) < 0.0001
+                ]
+                # If actual vertex com in best communities, it is preserved
+                try:
+                    if communities[vertex] in best_communities:
+                        new_com = communities[vertex]
+                except KeyError:
+                    pass
+                # If vertex community changes...
+                if new_com == -1:
+                    # Set flag of non-convergence
+                    cont = True
+                    # Randomly chose a new community from candidates
+                    new_com = seed.choice(best_communities)
+                    # Update previous community status
+                    try:
+                        com_to_numvertices[communities[vertex]] -= 1
+                        density[communities[vertex]] = (
+                            max_density / com_to_numvertices[communities[vertex]]
+                        )
+                    except KeyError:
+                        # vertex had no previous community to shrink
+                        pass
+                    # Update new community status
+                    communities[vertex] = new_com
+                    com_to_numvertices[communities[vertex]] += 1
+                    density[communities[vertex]] = (
+                        max_density / com_to_numvertices[communities[vertex]]
+                    )
+        # If maximum iterations reached --> output actual results
+        if iter_count > max_iter:
+            break
+    # Return results by grouping communities as list of vertices
+    return iter(groups(communities).values())
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/centrality.py
new file mode 100644
index 00000000..43281701
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/centrality.py
@@ -0,0 +1,171 @@
+"""Functions for computing communities based on centrality notions."""
+
+import networkx as nx
+
+__all__ = ["girvan_newman"]
+
+
@nx._dispatchable(preserve_edge_attrs="most_valuable_edge")
def girvan_newman(G, most_valuable_edge=None):
    """Finds communities in a graph using the Girvan–Newman method.

    Parameters
    ----------
    G : NetworkX graph

    most_valuable_edge : function
        Function that takes a graph as input and outputs an edge. The
        edge returned by this function will be recomputed and removed at
        each iteration of the algorithm.

        If not specified, the edge with the highest
        :func:`networkx.edge_betweenness_centrality` will be used.

    Returns
    -------
    iterator
        Iterator over tuples of sets of nodes in `G`. Each set of nodes
        is a community, each tuple is a sequence of communities at a
        particular level of the algorithm.

    Examples
    --------
    To get the first pair of communities::

        >>> G = nx.path_graph(10)
        >>> comp = nx.community.girvan_newman(G)
        >>> tuple(sorted(c) for c in next(comp))
        ([0, 1, 2, 3, 4], [5, 6, 7, 8, 9])

    To get only the first *k* tuples of communities, use
    :func:`itertools.islice`; to stop once the number of communities
    exceeds *k*, use :func:`itertools.takewhile`::

        >>> import itertools
        >>> G = nx.path_graph(8)
        >>> k = 2
        >>> comp = nx.community.girvan_newman(G)
        >>> for communities in itertools.islice(comp, k):
        ...     print(tuple(sorted(c) for c in communities))
        ...
        ([0, 1, 2, 3], [4, 5, 6, 7])
        ([0, 1], [2, 3], [4, 5, 6, 7])

    A custom ranking function — for example one that selects the
    heaviest edge, computes weighted betweenness, or adds random noise
    to the centrality scores — can be supplied through
    `most_valuable_edge`::

        >>> from networkx import edge_betweenness_centrality as betweenness
        >>> def most_central_edge(G):
        ...     centrality = betweenness(G, weight="weight")
        ...     return max(centrality, key=centrality.get)
        ...
        >>> G = nx.path_graph(10)
        >>> comp = nx.community.girvan_newman(G, most_valuable_edge=most_central_edge)
        >>> tuple(sorted(c) for c in next(comp))
        ([0, 1, 2, 3, 4], [5, 6, 7, 8, 9])

    Notes
    -----
    The Girvan–Newman algorithm detects communities by progressively
    removing edges from the original graph. The algorithm removes the
    "most valuable" edge, traditionally the edge with the highest
    betweenness centrality, at each step. As the graph breaks down into
    pieces, the tightly knit community structure is exposed and the
    result can be depicted as a dendrogram.

    """
    # An edgeless graph decomposes straight into its connected components.
    if G.number_of_edges() == 0:
        yield tuple(nx.connected_components(G))
        return

    # Default edge-ranking: highest edge betweenness centrality.  The
    # graph is guaranteed non-empty here, so max() is always well-defined.
    if most_valuable_edge is None:

        def most_valuable_edge(G):
            """Pick the edge with maximal betweenness centrality in `G`."""
            betweenness = nx.edge_betweenness_centrality(G)
            return max(betweenness, key=betweenness.get)

    # Work on an undirected copy (keeps edge data, e.g. weights) and strip
    # self-loops — removing them never changes the connected components.
    working = G.copy().to_undirected()
    working.remove_edges_from(nx.selfloop_edges(working))
    while working.number_of_edges() > 0:
        yield _without_most_central_edges(working, most_valuable_edge)
+
+
def _without_most_central_edges(G, most_valuable_edge):
    """Returns the connected components of the graph that results from
    repeatedly removing the most "valuable" edge in the graph.

    `G` must be a non-empty graph. This function modifies the graph `G`
    in-place; that is, it removes edges on the graph `G`.

    `most_valuable_edge` is a function that takes the graph `G` as input
    (or a subgraph with one or more edges of `G` removed) and returns an
    edge. That edge will be removed and this process will be repeated
    until the number of connected components in the graph increases.

    """
    starting_count = nx.number_connected_components(G)
    while True:
        # Remove the currently most valuable edge, then re-check whether
        # the graph has split into more components than it started with.
        G.remove_edge(*most_valuable_edge(G))
        components = tuple(nx.connected_components(G))
        if len(components) > starting_count:
            return components
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/community_utils.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/community_utils.py
new file mode 100644
index 00000000..ba73a6b3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/community_utils.py
@@ -0,0 +1,30 @@
+"""Helper functions for community-finding algorithms."""
+
+import networkx as nx
+
+__all__ = ["is_partition"]
+
+
@nx._dispatchable
def is_partition(G, communities):
    """Returns *True* if `communities` is a partition of the nodes of `G`.

    A partition of a universe set is a family of pairwise disjoint sets
    whose union is the entire universe set.

    Parameters
    ----------
    G : NetworkX graph.

    communities : list or iterable of sets of nodes
        If not a list, the iterable is converted internally to a list.
        If it is an iterator it is exhausted.

    """
    if not isinstance(communities, list):
        communities = list(communities)
    # Nodes of G that appear in at least one community (duplicates collapse
    # in the set), versus the total multiplicity across all communities.
    covered = {node for block in communities for node in block if node in G}
    total_members = sum(len(block) for block in communities)
    # Equality of all three counts means: every node covered, no node
    # outside G counted, and no node in more than one community.
    return len(G) == len(covered) == total_members
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/divisive.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/divisive.py
new file mode 100644
index 00000000..be3c7d86
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/divisive.py
@@ -0,0 +1,216 @@
+import functools
+
+import networkx as nx
+
+__all__ = [
+    "edge_betweenness_partition",
+    "edge_current_flow_betweenness_partition",
+]
+
+
@nx._dispatchable(edge_attrs="weight")
def edge_betweenness_partition(G, number_of_sets, *, weight=None):
    """Partition created by iteratively removing the highest edge betweenness edge.

    This algorithm works by calculating the edge betweenness for all
    edges and removing the edge with the highest value. It is then
    determined whether the graph has been broken into at least
    `number_of_sets` connected components.
    If not the process is repeated.

    Parameters
    ----------
    G : NetworkX Graph, DiGraph or MultiGraph
      Graph to be partitioned

    number_of_sets : int
      Number of sets in the desired partition of the graph

    weight : key, optional, default=None
      The key to use if using weights for edge betweenness calculation

    Returns
    -------
    C : list of sets
      Partition of the nodes of G

    Raises
    ------
    NetworkXError
      If number_of_sets is <= 0 or if number_of_sets > len(G)

    Examples
    --------
    >>> G = nx.karate_club_graph()
    >>> part = nx.community.edge_betweenness_partition(G, 2)
    >>> {0, 1, 3, 4, 5, 6, 7, 10, 11, 12, 13, 16, 17, 19, 21} in part
    True

    See Also
    --------
    edge_current_flow_betweenness_partition

    Notes
    -----
    This algorithm is fairly slow, as both the calculation of connected
    components and edge betweenness relies on all pairs shortest
    path algorithms. They could potentially be combined to cut down
    on overall computation time.

    References
    ----------
    .. [1] Santo Fortunato 'Community Detection in Graphs' Physical Reports
       Volume 486, Issue 3-5 p. 75-174
       http://arxiv.org/abs/0906.0612
    """
    # Validate the requested partition size, handling the two trivial
    # partitions (everything together / everything apart) directly.
    if number_of_sets <= 0:
        raise nx.NetworkXError("number_of_sets must be >0")
    if number_of_sets == 1:
        return [set(G)]
    if number_of_sets == len(G):
        return [{n} for n in G]
    if number_of_sets > len(G):
        raise nx.NetworkXError("number_of_sets must be <= len(G)")

    work = G.copy()
    components = list(nx.connected_components(work))
    # Keep deleting the highest-betweenness edge (recomputed each round)
    # until the graph has fragmented into enough components.
    while len(components) < number_of_sets:
        scores = nx.edge_betweenness_centrality(work, weight=weight)
        work.remove_edge(*max(scores, key=scores.get))
        components = list(nx.connected_components(work))
    return components
+
+
+@nx._dispatchable(edge_attrs="weight")
+def edge_current_flow_betweenness_partition(G, number_of_sets, *, weight=None):
+    """Partition created by removing the highest edge current flow betweenness edge.
+
+    This algorithm works by calculating the edge current flow
+    betweenness for all edges and removing the edge with the
+    highest value. It is then determined whether the graph has
+    been broken into at least `number_of_sets` connected
+    components. If not the process is repeated.
+
+    Parameters
+    ----------
+    G : NetworkX Graph, DiGraph or MultiGraph
+      Graph to be partitioned
+
+    number_of_sets : int
+      Number of sets in the desired partition of the graph
+
+    weight : key, optional (default=None)
+      The edge attribute key to use as weights for
+      edge current flow betweenness calculations
+
+    Returns
+    -------
+    C : list of sets
+      Partition of G
+
+    Raises
+    ------
+    NetworkXError
+      If number_of_sets is <= 0 or number_of_sets > len(G)
+
+    Examples
+    --------
+    >>> G = nx.karate_club_graph()
+    >>> part = nx.community.edge_current_flow_betweenness_partition(G, 2)
+    >>> {0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 16, 17, 19, 21} in part
+    True
+    >>> {8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33} in part
+    True
+
+
+    See Also
+    --------
+    edge_betweenness_partition
+
+    Notes
+    -----
+    This algorithm is extremely slow, as the recalculation of the edge
+    current flow betweenness is extremely slow.
+
+    References
+    ----------
+    .. [1] Santo Fortunato 'Community Detection in Graphs' Physical Reports
+       Volume 486, Issue 3-5 p. 75-174
+       http://arxiv.org/abs/0906.0612
+    """
+    # Trivial sizes are handled directly; invalid sizes raise.
+    if number_of_sets <= 0:
+        raise nx.NetworkXError("number_of_sets must be >0")
+    elif number_of_sets == 1:
+        return [set(G)]
+    elif number_of_sets == len(G):
+        return [{n} for n in G]
+    elif number_of_sets > len(G):
+        raise nx.NetworkXError("number_of_sets must be <= len(G)")
+
+    # Unnormalized centrality with the caller's weight key baked in.
+    rank = functools.partial(
+        nx.edge_current_flow_betweenness_centrality, normalized=False, weight=weight
+    )
+
+    # current flow requires a connected network so we track the components explicitly
+    H = G.copy()
+    partition = list(nx.connected_components(H))
+    if len(partition) > 1:
+        Hcc_subgraphs = [H.subgraph(cc).copy() for cc in partition]
+    else:
+        Hcc_subgraphs = [H]
+
+    # ranking maps each edge to its current-flow betweenness, computed
+    # per connected component; it is refreshed incrementally below.
+    ranking = {}
+    for Hcc in Hcc_subgraphs:
+        ranking.update(rank(Hcc))
+
+    # Remove the globally highest-ranked edge, then recompute rankings
+    # only inside the component that contained that edge.
+    while len(partition) < number_of_sets:
+        edge = max(ranking, key=ranking.get)
+        for cc, Hcc in zip(partition, Hcc_subgraphs):
+            if edge[0] in cc:
+                Hcc.remove_edge(*edge)
+                del ranking[edge]
+                splitcc_list = list(nx.connected_components(Hcc))
+                if len(splitcc_list) > 1:
+                    # there are 2 connected components. split off smaller one
+                    cc_new = min(splitcc_list, key=len)
+                    Hcc_new = Hcc.subgraph(cc_new).copy()
+                    # update edge rankings for Hcc_new
+                    newranks = rank(Hcc_new)
+                    # undirected edges may be reported as (v, u); write to
+                    # whichever orientation is already cached in ranking
+                    for e, r in newranks.items():
+                        ranking[e if e in ranking else e[::-1]] = r
+                    # append new cc and Hcc to their lists.
+                    partition.append(cc_new)
+                    Hcc_subgraphs.append(Hcc_new)
+
+                    # leave existing cc and Hcc in their lists, but shrink them
+                    Hcc.remove_nodes_from(cc_new)
+                    cc.difference_update(cc_new)
+                # update edge rankings for Hcc whether it was split or not
+                newranks = rank(Hcc)
+                for e, r in newranks.items():
+                    ranking[e if e in ranking else e[::-1]] = r
+                break
+    return partition
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/kclique.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/kclique.py
new file mode 100644
index 00000000..c7249104
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/kclique.py
@@ -0,0 +1,79 @@
+from collections import defaultdict
+
+import networkx as nx
+
+__all__ = ["k_clique_communities"]
+
+
@nx._dispatchable
def k_clique_communities(G, k, cliques=None):
    """Find k-clique communities in graph using the percolation method.

    A k-clique community is the union of all cliques of size k that
    can be reached through adjacent (sharing k-1 nodes) k-cliques.

    Parameters
    ----------
    G : NetworkX graph

    k : int
       Size of smallest clique

    cliques: list or generator
       Precomputed cliques (use networkx.find_cliques(G))

    Returns
    -------
    Yields sets of nodes, one for each k-clique community.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> K5 = nx.convert_node_labels_to_integers(G, first_label=2)
    >>> G.add_edges_from(K5.edges())
    >>> c = list(nx.community.k_clique_communities(G, 4))
    >>> sorted(list(c[0]))
    [0, 1, 2, 3, 4, 5, 6]
    >>> list(nx.community.k_clique_communities(G, 6))
    []

    References
    ----------
    .. [1] Gergely Palla, Imre Derényi, Illés Farkas1, and Tamás Vicsek,
       Uncovering the overlapping community structure of complex networks
       in nature and society Nature 435, 814-818, 2005,
       doi:10.1038/nature03607
    """
    if k < 2:
        raise nx.NetworkXError(f"k={k}, k must be greater than 1.")
    if cliques is None:
        cliques = nx.find_cliques(G)
    # Keep only the cliques large enough to percolate, as hashable sets.
    large_cliques = [frozenset(c) for c in cliques if len(c) >= k]

    # Map each node to all of the large cliques that contain it.
    node_cliques = defaultdict(list)
    for cq in large_cliques:
        for node in cq:
            node_cliques[node].append(cq)

    # Build the percolation graph: cliques are nodes, connected whenever
    # two cliques overlap in at least k-1 of their members.
    percolation = nx.Graph()
    percolation.add_nodes_from(large_cliques)
    for cq in large_cliques:
        for other in _get_adjacent_cliques(cq, node_cliques):
            if len(cq & other) >= (k - 1):
                percolation.add_edge(cq, other)

    # Each connected component of the percolation graph is a community:
    # the union of its member cliques.
    for component in nx.connected_components(percolation):
        yield frozenset.union(*component)
+
+
+def _get_adjacent_cliques(clique, membership_dict):
+    adjacent_cliques = set()
+    for n in clique:
+        for adj_clique in membership_dict[n]:
+            if clique != adj_clique:
+                adjacent_cliques.add(adj_clique)
+    return adjacent_cliques
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/kernighan_lin.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/kernighan_lin.py
new file mode 100644
index 00000000..f6397d82
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/kernighan_lin.py
@@ -0,0 +1,139 @@
+"""Functions for computing the Kernighan–Lin bipartition algorithm."""
+
+from itertools import count
+
+import networkx as nx
+from networkx.algorithms.community.community_utils import is_partition
+from networkx.utils import BinaryHeap, not_implemented_for, py_random_state
+
+__all__ = ["kernighan_lin_bisection"]
+
+
+def _kernighan_lin_sweep(edges, side):
+    """
+    This is a modified form of Kernighan-Lin, which moves single nodes at a
+    time, alternating between sides to keep the bisection balanced.  We keep
+    two min-heaps of swap costs to make optimal-next-move selection fast.
+    """
+    costs0, costs1 = costs = BinaryHeap(), BinaryHeap()
+    for u, side_u, edges_u in zip(count(), side, edges):
+        cost_u = sum(w if side[v] else -w for v, w in edges_u)
+        costs[side_u].insert(u, cost_u if side_u else -cost_u)
+
+    def _update_costs(costs_x, x):
+        for y, w in edges[x]:
+            costs_y = costs[side[y]]
+            cost_y = costs_y.get(y)
+            if cost_y is not None:
+                cost_y += 2 * (-w if costs_x is costs_y else w)
+                costs_y.insert(y, cost_y, True)
+
+    i = 0
+    totcost = 0
+    while costs0 and costs1:
+        u, cost_u = costs0.pop()
+        _update_costs(costs0, u)
+        v, cost_v = costs1.pop()
+        _update_costs(costs1, v)
+        totcost += cost_u + cost_v
+        i += 1
+        yield totcost, i, (u, v)
+
+
+@not_implemented_for("directed")
+@py_random_state(4)
+@nx._dispatchable(edge_attrs="weight")
+def kernighan_lin_bisection(G, partition=None, max_iter=10, weight="weight", seed=None):
+    """Partition a graph into two blocks using the Kernighan–Lin
+    algorithm.
+
+    This algorithm partitions a network into two sets by iteratively
+    swapping pairs of nodes to reduce the edge cut between the two sets.  The
+    pairs are chosen according to a modified form of Kernighan-Lin [1]_, which
+    moves node individually, alternating between sides to keep the bisection
+    balanced.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Graph must be undirected.
+
+    partition : tuple
+        Pair of iterables containing an initial partition. If not
+        specified, a random balanced partition is used.
+
+    max_iter : int
+        Maximum number of times to attempt swaps to find an
+        improvement before giving up.
+
+    weight : key
+        Edge data key to use as weight. If None, the weights are all
+        set to one.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+        Only used if partition is None
+
+    Returns
+    -------
+    partition : tuple
+        A pair of sets of nodes representing the bipartition.
+
+    Raises
+    ------
+    NetworkXError
+        If partition is not a valid partition of the nodes of the graph.
+
+    References
+    ----------
+    .. [1] Kernighan, B. W.; Lin, Shen (1970).
+       "An efficient heuristic procedure for partitioning graphs."
+       *Bell Systems Technical Journal* 49: 291--307.
+       Oxford University Press 2011.
+
+    """
+    n = len(G)
+    labels = list(G)
+    seed.shuffle(labels)
+    index = {v: i for i, v in enumerate(labels)}
+
+    if partition is None:
+        side = [0] * (n // 2) + [1] * ((n + 1) // 2)
+    else:
+        try:
+            A, B = partition
+        except (TypeError, ValueError) as err:
+            raise nx.NetworkXError("partition must be two sets") from err
+        if not is_partition(G, (A, B)):
+            raise nx.NetworkXError("partition invalid")
+        side = [0] * n
+        for a in A:
+            side[index[a]] = 1
+
+    if G.is_multigraph():
+        edges = [
+            [
+                (index[u], sum(e.get(weight, 1) for e in d.values()))
+                for u, d in G[v].items()
+            ]
+            for v in labels
+        ]
+    else:
+        edges = [
+            [(index[u], e.get(weight, 1)) for u, e in G[v].items()] for v in labels
+        ]
+
+    for i in range(max_iter):
+        costs = list(_kernighan_lin_sweep(edges, side))
+        min_cost, min_i, _ = min(costs)
+        if min_cost >= 0:
+            break
+
+        for _, _, (u, v) in costs[:min_i]:
+            side[u] = 1
+            side[v] = 0
+
+    A = {u for u, s in zip(labels, side) if s == 0}
+    B = {u for u, s in zip(labels, side) if s == 1}
+    return A, B
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/label_propagation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/label_propagation.py
new file mode 100644
index 00000000..74880286
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/label_propagation.py
@@ -0,0 +1,338 @@
+"""
+Label propagation community detection algorithms.
+"""
+
+from collections import Counter, defaultdict, deque
+
+import networkx as nx
+from networkx.utils import groups, not_implemented_for, py_random_state
+
+__all__ = [
+    "label_propagation_communities",
+    "asyn_lpa_communities",
+    "fast_label_propagation_communities",
+]
+
+
+@py_random_state("seed")
+@nx._dispatchable(edge_attrs="weight")
+def fast_label_propagation_communities(G, *, weight=None, seed=None):
+    """Returns communities in `G` as detected by fast label propagation.
+
+    The fast label propagation algorithm is described in [1]_. The algorithm is
+    probabilistic and the found communities may vary in different executions.
+
+    The algorithm operates as follows. First, the community label of each node is
+    set to a unique label. The algorithm then repeatedly updates the labels of
+    the nodes to the most frequent label in their neighborhood. In case of ties,
+    a random label is chosen from the most frequent labels.
+
+    The algorithm maintains a queue of nodes that still need to be processed.
+    Initially, all nodes are added to the queue in a random order. Then the nodes
+    are removed from the queue one by one and processed. If a node updates its label,
+    all its neighbors that have a different label are added to the queue (if not
+    already in the queue). The algorithm stops when the queue is empty.
+
+    Parameters
+    ----------
+    G : Graph, DiGraph, MultiGraph, or MultiDiGraph
+        Any NetworkX graph.
+
+    weight : string, or None (default)
+        The edge attribute representing a non-negative weight of an edge. If None,
+        each edge is assumed to have weight one. The weight of an edge is used in
+        determining the frequency with which a label appears among the neighbors of
+        a node (edge with weight `w` is equivalent to `w` unweighted edges).
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state. See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    communities : iterable
+        Iterable of communities given as sets of nodes.
+
+    Notes
+    -----
+    Edge directions are ignored for directed graphs.
+    Edge weights must be non-negative numbers.
+
+    References
+    ----------
+    .. [1] Vincent A. Traag & Lovro Šubelj. "Large network community detection by
+       fast label propagation." Scientific Reports 13 (2023): 2701.
+       https://doi.org/10.1038/s41598-023-29610-z
+    """
+
+    # Queue of nodes to be processed.
+    nodes_queue = deque(G)
+    seed.shuffle(nodes_queue)
+
+    # Set of nodes in the queue.
+    nodes_set = set(G)
+
+    # Assign unique label to each node.
+    comms = {node: i for i, node in enumerate(G)}
+
+    while nodes_queue:
+        # Remove next node from the queue to process.
+        node = nodes_queue.popleft()
+        nodes_set.remove(node)
+
+        # Isolated nodes retain their initial label.
+        if G.degree(node) > 0:
+            # Compute frequency of labels in node's neighborhood.
+            label_freqs = _fast_label_count(G, comms, node, weight)
+            max_freq = max(label_freqs.values())
+
+            # Always sample new label from most frequent labels.
+            comm = seed.choice(
+                [comm for comm in label_freqs if label_freqs[comm] == max_freq]
+            )
+
+            if comms[node] != comm:
+                comms[node] = comm
+
+                # Add neighbors that have different label to the queue.
+                for nbr in nx.all_neighbors(G, node):
+                    if comms[nbr] != comm and nbr not in nodes_set:
+                        nodes_queue.append(nbr)
+                        nodes_set.add(nbr)
+
+    yield from groups(comms).values()
+
+
+def _fast_label_count(G, comms, node, weight=None):
+    """Computes the frequency of labels in the neighborhood of a node.
+
+    Returns a dictionary keyed by label to the frequency of that label.
+    """
+
+    if weight is None:
+        # Unweighted (un)directed simple graph.
+        if not G.is_multigraph():
+            label_freqs = Counter(map(comms.get, nx.all_neighbors(G, node)))
+
+        # Unweighted (un)directed multigraph.
+        else:
+            label_freqs = defaultdict(int)
+            for nbr in G[node]:
+                label_freqs[comms[nbr]] += len(G[node][nbr])
+
+            if G.is_directed():
+                for nbr in G.pred[node]:
+                    label_freqs[comms[nbr]] += len(G.pred[node][nbr])
+
+    else:
+        # Weighted undirected simple/multigraph.
+        label_freqs = defaultdict(float)
+        for _, nbr, w in G.edges(node, data=weight, default=1):
+            label_freqs[comms[nbr]] += w
+
+        # Weighted directed simple/multigraph.
+        if G.is_directed():
+            for nbr, _, w in G.in_edges(node, data=weight, default=1):
+                label_freqs[comms[nbr]] += w
+
+    return label_freqs
+
+
+@py_random_state(2)
+@nx._dispatchable(edge_attrs="weight")
+def asyn_lpa_communities(G, weight=None, seed=None):
+    """Returns communities in `G` as detected by asynchronous label
+    propagation.
+
+    The asynchronous label propagation algorithm is described in
+    [1]_. The algorithm is probabilistic and the found communities may
+    vary on different executions.
+
+    The algorithm proceeds as follows. After initializing each node with
+    a unique label, the algorithm repeatedly sets the label of a node to
+    be the label that appears most frequently among that nodes
+    neighbors. The algorithm halts when each node has the label that
+    appears most frequently among its neighbors. The algorithm is
+    asynchronous because each node is updated without waiting for
+    updates on the remaining nodes.
+
+    This generalized version of the algorithm in [1]_ accepts edge
+    weights.
+
+    Parameters
+    ----------
+    G : Graph
+
+    weight : string
+        The edge attribute representing the weight of an edge.
+        If None, each edge is assumed to have weight one. In this
+        algorithm, the weight of an edge is used in determining the
+        frequency with which a label appears among the neighbors of a
+        node: a higher weight means the label appears more often.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    communities : iterable
+        Iterable of communities given as sets of nodes.
+
+    Notes
+    -----
+    Edge weight attributes must be numerical.
+
+    References
+    ----------
+    .. [1] Raghavan, Usha Nandini, Réka Albert, and Soundar Kumara. "Near
+           linear time algorithm to detect community structures in large-scale
+           networks." Physical Review E 76.3 (2007): 036106.
+    """
+
+    labels = {n: i for i, n in enumerate(G)}
+    cont = True
+
+    while cont:
+        cont = False
+        nodes = list(G)
+        seed.shuffle(nodes)
+
+        for node in nodes:
+            if not G[node]:
+                continue
+
+            # Get label frequencies among adjacent nodes.
+            # Depending on the order they are processed in,
+            # some nodes will be in iteration t and others in t-1,
+            # making the algorithm asynchronous.
+            if weight is None:
+                # initialising a Counter from an iterator of labels is
+                # faster for getting unweighted label frequencies
+                label_freq = Counter(map(labels.get, G[node]))
+            else:
+                # updating a defaultdict is substantially faster
+                # for getting weighted label frequencies
+                label_freq = defaultdict(float)
+                for _, v, wt in G.edges(node, data=weight, default=1):
+                    label_freq[labels[v]] += wt
+
+            # Get the labels that appear with maximum frequency.
+            max_freq = max(label_freq.values())
+            best_labels = [
+                label for label, freq in label_freq.items() if freq == max_freq
+            ]
+
+            # If the node does not have one of the maximum frequency labels,
+            # randomly choose one of them and update the node's label.
+            # Continue the iteration as long as at least one node
+            # doesn't have a maximum frequency label.
+            if labels[node] not in best_labels:
+                labels[node] = seed.choice(best_labels)
+                cont = True
+
+    yield from groups(labels).values()
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def label_propagation_communities(G):
+    """Generates community sets determined by label propagation
+
+    Finds communities in `G` using a semi-synchronous label propagation
+    method [1]_. This method combines the advantages of both the synchronous
+    and asynchronous models. Not implemented for directed graphs.
+
+    Parameters
+    ----------
+    G : graph
+        An undirected NetworkX graph.
+
+    Returns
+    -------
+    communities : iterable
+        A dict_values object that contains a set of nodes for each community.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+       If the graph is directed
+
+    References
+    ----------
+    .. [1] Cordasco, G., & Gargano, L. (2010, December). Community detection
+       via semi-synchronous label propagation algorithms. In Business
+       Applications of Social Network Analysis (BASNA), 2010 IEEE International
+       Workshop on (pp. 1-8). IEEE.
+    """
+    coloring = _color_network(G)
+    # Create a unique label for each node in the graph
+    labeling = {v: k for k, v in enumerate(G)}
+    while not _labeling_complete(labeling, G):
+        # Update the labels of every node with the same color.
+        for color, nodes in coloring.items():
+            for n in nodes:
+                _update_label(n, labeling, G)
+
+    clusters = defaultdict(set)
+    for node, label in labeling.items():
+        clusters[label].add(node)
+    return clusters.values()
+
+
+def _color_network(G):
+    """Colors the network so that neighboring nodes all have distinct colors.
+
+    Returns a dict keyed by color to a set of nodes with that color.
+    """
+    coloring = {}  # color => set(node)
+    colors = nx.coloring.greedy_color(G)
+    for node, color in colors.items():
+        if color in coloring:
+            coloring[color].add(node)
+        else:
+            coloring[color] = {node}
+    return coloring
+
+
+def _labeling_complete(labeling, G):
+    """Determines whether or not LPA is done.
+
+    Label propagation is complete when all nodes have a label that is
+    in the set of highest frequency labels amongst its neighbors.
+
+    Nodes with no neighbors are considered complete.
+    """
+    return all(
+        labeling[v] in _most_frequent_labels(v, labeling, G) for v in G if len(G[v]) > 0
+    )
+
+
+def _most_frequent_labels(node, labeling, G):
+    """Returns a set of all labels with maximum frequency in `labeling`.
+
+    Input `labeling` should be a dict keyed by node to labels.
+    """
+    if not G[node]:
+        # Nodes with no neighbors are themselves a community and are labeled
+        # accordingly, hence the immediate if statement.
+        return {labeling[node]}
+
+    # Compute the frequencies of all neighbors of node
+    freqs = Counter(labeling[q] for q in G[node])
+    max_freq = max(freqs.values())
+    return {label for label, freq in freqs.items() if freq == max_freq}
+
+
+def _update_label(node, labeling, G):
+    """Updates the label of a node using the Prec-Max tie breaking algorithm
+
+    The algorithm is explained in: 'Community Detection via Semi-Synchronous
+    Label Propagation Algorithms' Cordasco and Gargano, 2011
+    """
+    high_labels = _most_frequent_labels(node, labeling, G)
+    if len(high_labels) == 1:
+        labeling[node] = high_labels.pop()
+    elif len(high_labels) > 1:
+        # Prec-Max
+        if labeling[node] not in high_labels:
+            labeling[node] = max(high_labels)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/louvain.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/louvain.py
new file mode 100644
index 00000000..959c93a5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/louvain.py
@@ -0,0 +1,382 @@
+"""Function for detecting communities based on Louvain Community Detection
+Algorithm"""
+
+import itertools
+from collections import defaultdict, deque
+
+import networkx as nx
+from networkx.algorithms.community import modularity
+from networkx.utils import py_random_state
+
+__all__ = ["louvain_communities", "louvain_partitions"]
+
+
+@py_random_state("seed")
+@nx._dispatchable(edge_attrs="weight")
+def louvain_communities(
+    G, weight="weight", resolution=1, threshold=0.0000001, max_level=None, seed=None
+):
+    r"""Find the best partition of a graph using the Louvain Community Detection
+    Algorithm.
+
+    Louvain Community Detection Algorithm is a simple method to extract the community
+    structure of a network. This is a heuristic method based on modularity optimization. [1]_
+
+    The algorithm works in 2 steps. On the first step it assigns every node to be
+    in its own community and then for each node it tries to find the maximum positive
+    modularity gain by moving each node to all of its neighbor communities. If no positive
+    gain is achieved the node remains in its original community.
+
+    The modularity gain obtained by moving an isolated node $i$ into a community $C$ can
+    easily be calculated by the following formula (combining [1]_ [2]_ and some algebra):
+
+    .. math::
+        \Delta Q = \frac{k_{i,in}}{2m} - \gamma\frac{ \Sigma_{tot} \cdot k_i}{2m^2}
+
+    where $m$ is the size of the graph, $k_{i,in}$ is the sum of the weights of the links
+    from $i$ to nodes in $C$, $k_i$ is the sum of the weights of the links incident to node $i$,
+    $\Sigma_{tot}$ is the sum of the weights of the links incident to nodes in $C$ and $\gamma$
+    is the resolution parameter.
+
+    For the directed case the modularity gain can be computed using this formula according to [3]_
+
+    .. math::
+        \Delta Q = \frac{k_{i,in}}{m}
+        - \gamma\frac{k_i^{out} \cdot\Sigma_{tot}^{in} + k_i^{in} \cdot \Sigma_{tot}^{out}}{m^2}
+
+    where $k_i^{out}$, $k_i^{in}$ are the outer and inner weighted degrees of node $i$ and
+    $\Sigma_{tot}^{in}$, $\Sigma_{tot}^{out}$ are the sum of in-going and out-going links incident
+    to nodes in $C$.
+
+    The first phase continues until no individual move can improve the modularity.
+
+    The second phase consists in building a new network whose nodes are now the communities
+    found in the first phase. To do so, the weights of the links between the new nodes are given by
+    the sum of the weight of the links between nodes in the corresponding two communities. Once this
+    phase is complete it is possible to reapply the first phase creating bigger communities with
+    increased modularity.
+
+    The above two phases are executed until no modularity gain is achieved (or is less than
+    the `threshold`, or until `max_levels` is reached).
+
+    Be careful with self-loops in the input graph. These are treated as
+    previously reduced communities -- as if the process had been started
+    in the middle of the algorithm. Large self-loop edge weights thus
+    represent strong communities and in practice may be hard to add
+    other nodes to.  If your input graph edge weights for self-loops
+    do not represent already reduced communities you may want to remove
+    the self-loops before inputting that graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+    weight : string or None, optional (default="weight")
+        The name of an edge attribute that holds the numerical value
+        used as a weight. If None then each edge has weight 1.
+    resolution : float, optional (default=1)
+        If resolution is less than 1, the algorithm favors larger communities.
+        Greater than 1 favors smaller communities
+    threshold : float, optional (default=0.0000001)
+        Modularity gain threshold for each level. If the gain of modularity
+        between 2 levels of the algorithm is less than the given threshold
+        then the algorithm stops and returns the resulting communities.
+    max_level : int or None, optional (default=None)
+        The maximum number of levels (steps of the algorithm) to compute.
+        Must be a positive integer or None. If None, then there is no max
+        level and the threshold parameter determines the stopping condition.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    list
+        A list of sets (partition of `G`). Each set represents one community and contains
+        all the nodes that constitute it.
+
+    Examples
+    --------
+    >>> import networkx as nx
+    >>> G = nx.petersen_graph()
+    >>> nx.community.louvain_communities(G, seed=123)
+    [{0, 4, 5, 7, 9}, {1, 2, 3, 6, 8}]
+
+    Notes
+    -----
+    The order in which the nodes are considered can affect the final output. In the algorithm
+    the ordering happens using a random shuffle.
+
+    References
+    ----------
+    .. [1] Blondel, V.D. et al. Fast unfolding of communities in
+       large networks. J. Stat. Mech 10008, 1-12(2008). https://doi.org/10.1088/1742-5468/2008/10/P10008
+    .. [2] Traag, V.A., Waltman, L. & van Eck, N.J. From Louvain to Leiden: guaranteeing
+       well-connected communities. Sci Rep 9, 5233 (2019). https://doi.org/10.1038/s41598-019-41695-z
+    .. [3] Nicolas Dugué, Anthony Perez. Directed Louvain : maximizing modularity in directed networks.
+        [Research Report] Université d’Orléans. 2015. hal-01231784. https://hal.archives-ouvertes.fr/hal-01231784
+
+    See Also
+    --------
+    louvain_partitions
+    """
+
+    partitions = louvain_partitions(G, weight, resolution, threshold, seed)
+    if max_level is not None:
+        if max_level <= 0:
+            raise ValueError("max_level argument must be a positive integer or None")
+        partitions = itertools.islice(partitions, max_level)
+    final_partition = deque(partitions, maxlen=1)
+    return final_partition.pop()
+
+
+@py_random_state("seed")
+@nx._dispatchable(edge_attrs="weight")
+def louvain_partitions(
+    G, weight="weight", resolution=1, threshold=0.0000001, seed=None
+):
+    """Yields partitions for each level of the Louvain Community Detection Algorithm
+
+    Louvain Community Detection Algorithm is a simple method to extract the community
+    structure of a network. This is a heuristic method based on modularity optimization. [1]_
+
+    The partitions at each level (step of the algorithm) form a dendrogram of communities.
+    A dendrogram is a diagram representing a tree and each level represents
+    a partition of the G graph. The top level contains the smallest communities
+    and as you traverse to the bottom of the tree the communities get bigger
+    and the overall modularity increases making the partition better.
+
+    Each level is generated by executing the two phases of the Louvain Community
+    Detection Algorithm.
+
+    Be careful with self-loops in the input graph. These are treated as
+    previously reduced communities -- as if the process had been started
+    in the middle of the algorithm. Large self-loop edge weights thus
+    represent strong communities and in practice may be hard to add
+    other nodes to.  If your input graph edge weights for self-loops
+    do not represent already reduced communities you may want to remove
+    the self-loops before inputting that graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+    weight : string or None, optional (default="weight")
+     The name of an edge attribute that holds the numerical value
+     used as a weight. If None then each edge has weight 1.
+    resolution : float, optional (default=1)
+        If resolution is less than 1, the algorithm favors larger communities.
+        Greater than 1 favors smaller communities
+    threshold : float, optional (default=0.0000001)
+     Modularity gain threshold for each level. If the gain of modularity
+     between 2 levels of the algorithm is less than the given threshold
+     then the algorithm stops and returns the resulting communities.
+    seed : integer, random_state, or None (default)
+     Indicator of random number generation state.
+     See :ref:`Randomness<randomness>`.
+
+    Yields
+    ------
+    list
+        A list of sets (partition of `G`). Each set represents one community and contains
+        all the nodes that constitute it.
+
+    References
+    ----------
+    .. [1] Blondel, V.D. et al. Fast unfolding of communities in
+       large networks. J. Stat. Mech 10008, 1-12(2008)
+
+    See Also
+    --------
+    louvain_communities
+    """
+
+    partition = [{u} for u in G.nodes()]
+    if nx.is_empty(G):
+        yield partition
+        return
+    mod = modularity(G, partition, resolution=resolution, weight=weight)
+    is_directed = G.is_directed()
+    if G.is_multigraph():
+        graph = _convert_multigraph(G, weight, is_directed)
+    else:
+        graph = G.__class__()
+        graph.add_nodes_from(G)
+        graph.add_weighted_edges_from(G.edges(data=weight, default=1))
+
+    m = graph.size(weight="weight")
+    partition, inner_partition, improvement = _one_level(
+        graph, m, partition, resolution, is_directed, seed
+    )
+    improvement = True
+    while improvement:
+        # gh-5901 protect the sets in the yielded list from further manipulation here
+        yield [s.copy() for s in partition]
+        new_mod = modularity(
+            graph, inner_partition, resolution=resolution, weight="weight"
+        )
+        if new_mod - mod <= threshold:
+            return
+        mod = new_mod
+        graph = _gen_graph(graph, inner_partition)
+        partition, inner_partition, improvement = _one_level(
+            graph, m, partition, resolution, is_directed, seed
+        )
+
+
+def _one_level(G, m, partition, resolution=1, is_directed=False, seed=None):
+    """Calculate one level of the Louvain partitions tree
+
+    Parameters
+    ----------
+    G : NetworkX Graph/DiGraph
+        The graph from which to detect communities
+    m : number
+        The size of the graph `G`.
+    partition : list of sets of nodes
+        A valid partition of the graph `G`
+    resolution : positive number
+        The resolution parameter for computing the modularity of a partition
+    is_directed : bool
+        True if `G` is a directed graph.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    """
+    node2com = {u: i for i, u in enumerate(G.nodes())}
+    inner_partition = [{u} for u in G.nodes()]
+    if is_directed:
+        in_degrees = dict(G.in_degree(weight="weight"))
+        out_degrees = dict(G.out_degree(weight="weight"))
+        Stot_in = list(in_degrees.values())
+        Stot_out = list(out_degrees.values())
+        # Calculate weights for both in and out neighbors without considering self-loops
+        nbrs = {}
+        for u in G:
+            nbrs[u] = defaultdict(float)
+            for _, n, wt in G.out_edges(u, data="weight"):
+                if u != n:
+                    nbrs[u][n] += wt
+            for n, _, wt in G.in_edges(u, data="weight"):
+                if u != n:
+                    nbrs[u][n] += wt
+    else:
+        degrees = dict(G.degree(weight="weight"))
+        Stot = list(degrees.values())
+        nbrs = {u: {v: data["weight"] for v, data in G[u].items() if v != u} for u in G}
+    rand_nodes = list(G.nodes)
+    seed.shuffle(rand_nodes)
+    nb_moves = 1
+    improvement = False
+    while nb_moves > 0:
+        nb_moves = 0
+        for u in rand_nodes:
+            best_mod = 0
+            best_com = node2com[u]
+            weights2com = _neighbor_weights(nbrs[u], node2com)
+            if is_directed:
+                in_degree = in_degrees[u]
+                out_degree = out_degrees[u]
+                Stot_in[best_com] -= in_degree
+                Stot_out[best_com] -= out_degree
+                remove_cost = (
+                    -weights2com[best_com] / m
+                    + resolution
+                    * (out_degree * Stot_in[best_com] + in_degree * Stot_out[best_com])
+                    / m**2
+                )
+            else:
+                degree = degrees[u]
+                Stot[best_com] -= degree
+                remove_cost = -weights2com[best_com] / m + resolution * (
+                    Stot[best_com] * degree
+                ) / (2 * m**2)
+            for nbr_com, wt in weights2com.items():
+                if is_directed:
+                    gain = (
+                        remove_cost
+                        + wt / m
+                        - resolution
+                        * (
+                            out_degree * Stot_in[nbr_com]
+                            + in_degree * Stot_out[nbr_com]
+                        )
+                        / m**2
+                    )
+                else:
+                    gain = (
+                        remove_cost
+                        + wt / m
+                        - resolution * (Stot[nbr_com] * degree) / (2 * m**2)
+                    )
+                if gain > best_mod:
+                    best_mod = gain
+                    best_com = nbr_com
+            if is_directed:
+                Stot_in[best_com] += in_degree
+                Stot_out[best_com] += out_degree
+            else:
+                Stot[best_com] += degree
+            if best_com != node2com[u]:
+                com = G.nodes[u].get("nodes", {u})
+                partition[node2com[u]].difference_update(com)
+                inner_partition[node2com[u]].remove(u)
+                partition[best_com].update(com)
+                inner_partition[best_com].add(u)
+                improvement = True
+                nb_moves += 1
+                node2com[u] = best_com
+    partition = list(filter(len, partition))
+    inner_partition = list(filter(len, inner_partition))
+    return partition, inner_partition, improvement
+
+
+def _neighbor_weights(nbrs, node2com):
+    """Calculate weights between node and its neighbor communities.
+
+    Parameters
+    ----------
+    nbrs : dictionary
+           Dictionary with nodes' neighbors as keys and their edge weight as value.
+    node2com : dictionary
+           Dictionary with all graph's nodes as keys and their community index as value.
+
+    """
+    weights = defaultdict(float)
+    for nbr, wt in nbrs.items():
+        weights[node2com[nbr]] += wt
+    return weights
+
+
+def _gen_graph(G, partition):
+    """Generate a new graph based on the partitions of a given graph"""
+    H = G.__class__()
+    node2com = {}
+    for i, part in enumerate(partition):
+        nodes = set()
+        for node in part:
+            node2com[node] = i
+            nodes.update(G.nodes[node].get("nodes", {node}))
+        H.add_node(i, nodes=nodes)
+
+    for node1, node2, wt in G.edges(data=True):
+        wt = wt["weight"]
+        com1 = node2com[node1]
+        com2 = node2com[node2]
+        temp = H.get_edge_data(com1, com2, {"weight": 0})["weight"]
+        H.add_edge(com1, com2, weight=wt + temp)
+    return H
+
+
+def _convert_multigraph(G, weight, is_directed):
+    """Convert a Multigraph to normal Graph"""
+    if is_directed:
+        H = nx.DiGraph()
+    else:
+        H = nx.Graph()
+    H.add_nodes_from(G)
+    for u, v, wt in G.edges(data=weight, default=1):
+        if H.has_edge(u, v):
+            H[u][v]["weight"] += wt
+        else:
+            H.add_edge(u, v, weight=wt)
+    return H
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/lukes.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/lukes.py
new file mode 100644
index 00000000..08dd7cd5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/lukes.py
@@ -0,0 +1,227 @@
+"""Lukes Algorithm for exact optimal weighted tree partitioning."""
+
+from copy import deepcopy
+from functools import lru_cache
+from random import choice
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["lukes_partitioning"]
+
+D_EDGE_W = "weight"
+D_EDGE_VALUE = 1.0
+D_NODE_W = "weight"
+D_NODE_VALUE = 1
+PKEY = "partitions"
+CLUSTER_EVAL_CACHE_SIZE = 2048
+
+
+def _split_n_from(n, min_size_of_first_part):
+    # splits j in two parts of which the first is at least
+    # the second argument
+    assert n >= min_size_of_first_part
+    for p1 in range(min_size_of_first_part, n + 1):
+        yield p1, n - p1
+
+
+@nx._dispatchable(node_attrs="node_weight", edge_attrs="edge_weight")
+def lukes_partitioning(G, max_size, node_weight=None, edge_weight=None):
+    """Optimal partitioning of a weighted tree using the Lukes algorithm.
+
+    This algorithm partitions a connected, acyclic graph featuring integer
+    node weights and float edge weights. The resulting clusters are such
+    that the total weight of the nodes in each cluster does not exceed
+    max_size and that the weight of the edges that are cut by the partition
+    is minimum. The algorithm is based on [1]_.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    max_size : int
+        Maximum weight a partition can have in terms of sum of
+        node_weight for all nodes in the partition
+
+    edge_weight : key
+        Edge data key to use as weight. If None, the weights are all
+        set to one.
+
+    node_weight : key
+        Node data key to use as weight. If None, the weights are all
+        set to one. The data must be int.
+
+    Returns
+    -------
+    partition : list
+        A list of sets of nodes representing the clusters of the
+        partition.
+
+    Raises
+    ------
+    NotATree
+        If G is not a tree.
+    TypeError
+        If any of the values of node_weight is not int.
+
+    References
+    ----------
+    .. [1] Lukes, J. A. (1974).
+       "Efficient Algorithm for the Partitioning of Trees."
+       IBM Journal of Research and Development, 18(3), 217–224.
+
+    """
+    # First sanity check and tree preparation
+    if not nx.is_tree(G):
+        raise nx.NotATree("lukes_partitioning works only on trees")
+    else:
+        if nx.is_directed(G):
+            root = [n for n, d in G.in_degree() if d == 0]
+            assert len(root) == 1
+            root = root[0]
+            t_G = deepcopy(G)
+        else:
+            root = choice(list(G.nodes))
+            # this has the desirable side effect of not inheriting attributes
+            t_G = nx.dfs_tree(G, root)
+
+    # Since we do not want to screw up the original graph,
+    # if we have a blank attribute, we make a deepcopy
+    if edge_weight is None or node_weight is None:
+        safe_G = deepcopy(G)
+        if edge_weight is None:
+            nx.set_edge_attributes(safe_G, D_EDGE_VALUE, D_EDGE_W)
+            edge_weight = D_EDGE_W
+        if node_weight is None:
+            nx.set_node_attributes(safe_G, D_NODE_VALUE, D_NODE_W)
+            node_weight = D_NODE_W
+    else:
+        safe_G = G
+
+    # Second sanity check
+    # The values of node_weight MUST BE int.
+    # I cannot see any room for duck typing without incurring serious
+    # danger of subtle bugs.
+    all_n_attr = nx.get_node_attributes(safe_G, node_weight).values()
+    for x in all_n_attr:
+        if not isinstance(x, int):
+            raise TypeError(
+                "lukes_partitioning needs integer "
+                f"values for node_weight ({node_weight})"
+            )
+
+    # SUBROUTINES -----------------------
+    # these functions are defined here for two reasons:
+    # - brevity: we can leverage global "safe_G"
+    # - caching: signatures are hashable
+
+    @not_implemented_for("undirected")
+    # this is intended to be called only on t_G
+    def _leaves(gr):
+        for x in gr.nodes:
+            if not nx.descendants(gr, x):
+                yield x
+
+    @not_implemented_for("undirected")
+    def _a_parent_of_leaves_only(gr):
+        tleaves = set(_leaves(gr))
+        for n in set(gr.nodes) - tleaves:
+            if all(x in tleaves for x in nx.descendants(gr, n)):
+                return n
+
+    @lru_cache(CLUSTER_EVAL_CACHE_SIZE)
+    def _value_of_cluster(cluster):
+        valid_edges = [e for e in safe_G.edges if e[0] in cluster and e[1] in cluster]
+        return sum(safe_G.edges[e][edge_weight] for e in valid_edges)
+
+    def _value_of_partition(partition):
+        return sum(_value_of_cluster(frozenset(c)) for c in partition)
+
+    @lru_cache(CLUSTER_EVAL_CACHE_SIZE)
+    def _weight_of_cluster(cluster):
+        return sum(safe_G.nodes[n][node_weight] for n in cluster)
+
+    def _pivot(partition, node):
+        ccx = [c for c in partition if node in c]
+        assert len(ccx) == 1
+        return ccx[0]
+
+    def _concatenate_or_merge(partition_1, partition_2, x, i, ref_weight):
+        ccx = _pivot(partition_1, x)
+        cci = _pivot(partition_2, i)
+        merged_xi = ccx.union(cci)
+
+        # We first check if we can do the merge.
+        # If so, we do the actual calculations, otherwise we concatenate
+        if _weight_of_cluster(frozenset(merged_xi)) <= ref_weight:
+            cp1 = list(filter(lambda x: x != ccx, partition_1))
+            cp2 = list(filter(lambda x: x != cci, partition_2))
+
+            option_2 = [merged_xi] + cp1 + cp2
+            return option_2, _value_of_partition(option_2)
+        else:
+            option_1 = partition_1 + partition_2
+            return option_1, _value_of_partition(option_1)
+
+    # INITIALIZATION -----------------------
+    leaves = set(_leaves(t_G))
+    for lv in leaves:
+        t_G.nodes[lv][PKEY] = {}
+        slot = safe_G.nodes[lv][node_weight]
+        t_G.nodes[lv][PKEY][slot] = [{lv}]
+        t_G.nodes[lv][PKEY][0] = [{lv}]
+
+    for inner in [x for x in t_G.nodes if x not in leaves]:
+        t_G.nodes[inner][PKEY] = {}
+        slot = safe_G.nodes[inner][node_weight]
+        t_G.nodes[inner][PKEY][slot] = [{inner}]
+    nx._clear_cache(t_G)
+
+    # CORE ALGORITHM -----------------------
+    while True:
+        x_node = _a_parent_of_leaves_only(t_G)
+        weight_of_x = safe_G.nodes[x_node][node_weight]
+        best_value = 0
+        best_partition = None
+        bp_buffer = {}
+        x_descendants = nx.descendants(t_G, x_node)
+        for i_node in x_descendants:
+            for j in range(weight_of_x, max_size + 1):
+                for a, b in _split_n_from(j, weight_of_x):
+                    if (
+                        a not in t_G.nodes[x_node][PKEY]
+                        or b not in t_G.nodes[i_node][PKEY]
+                    ):
+                        # it's not possible to form this particular weight sum
+                        continue
+
+                    part1 = t_G.nodes[x_node][PKEY][a]
+                    part2 = t_G.nodes[i_node][PKEY][b]
+                    part, value = _concatenate_or_merge(part1, part2, x_node, i_node, j)
+
+                    if j not in bp_buffer or bp_buffer[j][1] < value:
+                        # we annotate in the buffer the best partition for j
+                        bp_buffer[j] = part, value
+
+                    # we also keep track of the overall best partition
+                    if best_value <= value:
+                        best_value = value
+                        best_partition = part
+
+            # as illustrated in Lukes, once we finished a child, we can
+            # discharge the partitions we found into the graph
+            # (the key phrase is make all x == x')
+            # so that they are used by the subsequent children
+            for w, (best_part_for_vl, vl) in bp_buffer.items():
+                t_G.nodes[x_node][PKEY][w] = best_part_for_vl
+            bp_buffer.clear()
+
+        # the absolute best partition for this node
+        # across all weights has to be stored at 0
+        t_G.nodes[x_node][PKEY][0] = best_partition
+        t_G.remove_nodes_from(x_descendants)
+
+        if x_node == root:
+            # the 0-labeled partition of root
+            # is the optimal one for the whole tree
+            return t_G.nodes[root][PKEY][0]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/modularity_max.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/modularity_max.py
new file mode 100644
index 00000000..f465e01c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/modularity_max.py
@@ -0,0 +1,451 @@
+"""Functions for detecting communities based on modularity."""
+
+from collections import defaultdict
+
+import networkx as nx
+from networkx.algorithms.community.quality import modularity
+from networkx.utils import not_implemented_for
+from networkx.utils.mapped_queue import MappedQueue
+
+__all__ = [
+    "greedy_modularity_communities",
+    "naive_greedy_modularity_communities",
+]
+
+
+def _greedy_modularity_communities_generator(G, weight=None, resolution=1):
+    r"""Yield community partitions of G and the modularity change at each step.
+
+    This function performs Clauset-Newman-Moore greedy modularity maximization [2]_
+    At each step of the process it yields the change in modularity that will occur in
+    the next step followed by yielding the new community partition after that step.
+
+    Greedy modularity maximization begins with each node in its own community
+    and repeatedly joins the pair of communities that lead to the largest
+    modularity until one community contains all nodes (the partition has one set).
+
+    This function maximizes the generalized modularity, where `resolution`
+    is the resolution parameter, often expressed as $\gamma$.
+    See :func:`~networkx.algorithms.community.quality.modularity`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    weight : string or None, optional (default=None)
+        The name of an edge attribute that holds the numerical value used
+        as a weight.  If None, then each edge has weight 1.
+        The degree is the sum of the edge weights adjacent to the node.
+
+    resolution : float (default=1)
+        If resolution is less than 1, modularity favors larger communities.
+        Greater than 1 favors smaller communities.
+
+    Yields
+    ------
+    Alternating yield statements produce the following two objects:
+
+    communities: dict_values
+        A dict_values of frozensets of nodes, one for each community.
+        This represents a partition of the nodes of the graph into communities.
+        The first yield is the partition with each node in its own community.
+
+    dq: float
+        The change in modularity when merging the next two communities
+        that leads to the largest modularity.
+
+    See Also
+    --------
+    modularity
+
+    References
+    ----------
+    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
+       Oxford University Press 2011.
+    .. [2] Clauset, A., Newman, M. E., & Moore, C.
+       "Finding community structure in very large networks."
+       Physical Review E 70(6), 2004.
+    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community
+       Detection" Phys. Rev. E74, 2006.
+    .. [4] Newman, M. E. J."Analysis of weighted networks"
+       Physical Review E 70(5 Pt 2):056131, 2004.
+    """
+    directed = G.is_directed()
+    N = G.number_of_nodes()
+
+    # Count edges (or the sum of edge-weights for weighted graphs)
+    m = G.size(weight)
+    q0 = 1 / m
+
+    # Calculate degrees (notation from the papers)
+    # a : the fraction of (weighted) out-degree for each node
+    # b : the fraction of (weighted) in-degree for each node
+    if directed:
+        a = {node: deg_out * q0 for node, deg_out in G.out_degree(weight=weight)}
+        b = {node: deg_in * q0 for node, deg_in in G.in_degree(weight=weight)}
+    else:
+        a = b = {node: deg * q0 * 0.5 for node, deg in G.degree(weight=weight)}
+
+    # this preliminary step collects the edge weights for each node pair
+    # It handles multigraph and digraph and works fine for graph.
+    dq_dict = defaultdict(lambda: defaultdict(float))
+    for u, v, wt in G.edges(data=weight, default=1):
+        if u == v:
+            continue
+        dq_dict[u][v] += wt
+        dq_dict[v][u] += wt
+
+    # now scale and subtract the expected edge-weights term
+    for u, nbrdict in dq_dict.items():
+        for v, wt in nbrdict.items():
+            dq_dict[u][v] = q0 * wt - resolution * (a[u] * b[v] + b[u] * a[v])
+
+    # Use -dq to get a max_heap instead of a min_heap
+    # dq_heap holds a heap for each node's neighbors
+    dq_heap = {u: MappedQueue({(u, v): -dq for v, dq in dq_dict[u].items()}) for u in G}
+    # H -> all_dq_heap holds a heap with the best items for each node
+    H = MappedQueue([dq_heap[n].heap[0] for n in G if len(dq_heap[n]) > 0])
+
+    # Initialize single-node communities
+    communities = {n: frozenset([n]) for n in G}
+    yield communities.values()
+
+    # Merge the two communities that lead to the largest modularity
+    while len(H) > 1:
+        # Find best merge
+        # Remove from heap of row maxes
+        # Ties will be broken by choosing the pair with lowest min community id
+        try:
+            negdq, u, v = H.pop()
+        except IndexError:
+            break
+        dq = -negdq
+        yield dq
+        # Remove best merge from row u heap
+        dq_heap[u].pop()
+        # Push new row max onto H
+        if len(dq_heap[u]) > 0:
+            H.push(dq_heap[u].heap[0])
+        # If this element was also at the root of row v, we need to remove the
+        # duplicate entry from H
+        if dq_heap[v].heap[0] == (v, u):
+            H.remove((v, u))
+            # Remove best merge from row v heap
+            dq_heap[v].remove((v, u))
+            # Push new row max onto H
+            if len(dq_heap[v]) > 0:
+                H.push(dq_heap[v].heap[0])
+        else:
+            # Duplicate wasn't in H, just remove from row v heap
+            dq_heap[v].remove((v, u))
+
+        # Perform merge
+        communities[v] = frozenset(communities[u] | communities[v])
+        del communities[u]
+
+        # Get neighbor communities connected to the merged communities
+        u_nbrs = set(dq_dict[u])
+        v_nbrs = set(dq_dict[v])
+        all_nbrs = (u_nbrs | v_nbrs) - {u, v}
+        both_nbrs = u_nbrs & v_nbrs
+        # Update dq for merge of u into v
+        for w in all_nbrs:
+            # Calculate new dq value
+            if w in both_nbrs:
+                dq_vw = dq_dict[v][w] + dq_dict[u][w]
+            elif w in v_nbrs:
+                dq_vw = dq_dict[v][w] - resolution * (a[u] * b[w] + a[w] * b[u])
+            else:  # w in u_nbrs
+                dq_vw = dq_dict[u][w] - resolution * (a[v] * b[w] + a[w] * b[v])
+            # Update rows v and w
+            for row, col in [(v, w), (w, v)]:
+                dq_heap_row = dq_heap[row]
+                # Update dict for v,w only (u is removed below)
+                dq_dict[row][col] = dq_vw
+                # Save old max of per-row heap
+                if len(dq_heap_row) > 0:
+                    d_oldmax = dq_heap_row.heap[0]
+                else:
+                    d_oldmax = None
+                # Add/update heaps
+                d = (row, col)
+                d_negdq = -dq_vw
+                # Save old value for finding heap index
+                if w in v_nbrs:
+                    # Update existing element in per-row heap
+                    dq_heap_row.update(d, d, priority=d_negdq)
+                else:
+                    # We're creating a new nonzero element, add to heap
+                    dq_heap_row.push(d, priority=d_negdq)
+                # Update heap of row maxes if necessary
+                if d_oldmax is None:
+                    # No entries previously in this row, push new max
+                    H.push(d, priority=d_negdq)
+                else:
+                    # We've updated an entry in this row, has the max changed?
+                    row_max = dq_heap_row.heap[0]
+                    if d_oldmax != row_max or d_oldmax.priority != row_max.priority:
+                        H.update(d_oldmax, row_max)
+
+        # Remove row/col u from dq_dict matrix
+        for w in dq_dict[u]:
+            # Remove from dict
+            dq_old = dq_dict[w][u]
+            del dq_dict[w][u]
+            # Remove from heaps if we haven't already
+            if w != v:
+                # Remove both row and column
+                for row, col in [(w, u), (u, w)]:
+                    dq_heap_row = dq_heap[row]
+                    # Check if replaced dq is row max
+                    d_old = (row, col)
+                    if dq_heap_row.heap[0] == d_old:
+                        # Update per-row heap and heap of row maxes
+                        dq_heap_row.remove(d_old)
+                        H.remove(d_old)
+                        # Update row max
+                        if len(dq_heap_row) > 0:
+                            H.push(dq_heap_row.heap[0])
+                    else:
+                        # Only update per-row heap
+                        dq_heap_row.remove(d_old)
+
+        del dq_dict[u]
+        # Mark row u as deleted, but keep placeholder
+        dq_heap[u] = MappedQueue()
+        # Merge u into v and update a
+        a[v] += a[u]
+        a[u] = 0
+        if directed:
+            b[v] += b[u]
+            b[u] = 0
+
+        yield communities.values()
+
+
+@nx._dispatchable(edge_attrs="weight")
+def greedy_modularity_communities(
+    G,
+    weight=None,
+    resolution=1,
+    cutoff=1,
+    best_n=None,
+):
+    r"""Find communities in G using greedy modularity maximization.
+
+    This function uses Clauset-Newman-Moore greedy modularity maximization [2]_
+    to find the community partition with the largest modularity.
+
+    Greedy modularity maximization begins with each node in its own community
+    and repeatedly joins the pair of communities that lead to the largest
+    modularity until no further increase in modularity is possible (a maximum).
+    Two keyword arguments adjust the stopping condition. `cutoff` is a lower
+    limit on the number of communities so you can stop the process before
+    reaching a maximum (used to save computation time). `best_n` is an upper
+    limit on the number of communities so you can make the process continue
+    until at most n communities remain even if the maximum modularity occurs
+    for more. To obtain exactly n communities, set both `cutoff` and `best_n` to n.
+
+    This function maximizes the generalized modularity, where `resolution`
+    is the resolution parameter, often expressed as $\gamma$.
+    See :func:`~networkx.algorithms.community.quality.modularity`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    weight : string or None, optional (default=None)
+        The name of an edge attribute that holds the numerical value used
+        as a weight.  If None, then each edge has weight 1.
+        The degree is the sum of the edge weights adjacent to the node.
+
+    resolution : float, optional (default=1)
+        If resolution is less than 1, modularity favors larger communities.
+        Greater than 1 favors smaller communities.
+
+    cutoff : int, optional (default=1)
+        A minimum number of communities below which the merging process stops.
+        The process stops at this number of communities even if modularity
+        is not maximized. The goal is to let the user stop the process early.
+        The process stops before the cutoff if it finds a maximum of modularity.
+
+    best_n : int or None, optional (default=None)
+        A maximum number of communities above which the merging process will
+        not stop. This forces community merging to continue after modularity
+        starts to decrease until `best_n` communities remain.
+        If ``None``, don't force it to continue beyond a maximum.
+
+    Raises
+    ------
+    ValueError : If the `cutoff` or `best_n`  value is not in the range
+        ``[1, G.number_of_nodes()]``, or if `best_n` < `cutoff`.
+
+    Returns
+    -------
+    communities: list
+        A list of frozensets of nodes, one for each community.
+        Sorted by length with largest communities first.
+
+    Examples
+    --------
+    >>> G = nx.karate_club_graph()
+    >>> c = nx.community.greedy_modularity_communities(G)
+    >>> sorted(c[0])
+    [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]
+
+    See Also
+    --------
+    modularity
+
+    References
+    ----------
+    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
+       Oxford University Press 2011.
+    .. [2] Clauset, A., Newman, M. E., & Moore, C.
+       "Finding community structure in very large networks."
+       Physical Review E 70(6), 2004.
+    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community
+       Detection" Phys. Rev. E74, 2006.
+    .. [4] Newman, M. E. J."Analysis of weighted networks"
+       Physical Review E 70(5 Pt 2):056131, 2004.
+    """
+    if not G.size():
+        return [{n} for n in G]
+
+    if (cutoff < 1) or (cutoff > G.number_of_nodes()):
+        raise ValueError(f"cutoff must be between 1 and {len(G)}. Got {cutoff}.")
+    if best_n is not None:
+        if (best_n < 1) or (best_n > G.number_of_nodes()):
+            raise ValueError(f"best_n must be between 1 and {len(G)}. Got {best_n}.")
+        if best_n < cutoff:
+            raise ValueError(f"Must have best_n >= cutoff. Got {best_n} < {cutoff}")
+        if best_n == 1:
+            return [set(G)]
+    else:
+        best_n = G.number_of_nodes()
+
+    # retrieve generator object to construct output
+    community_gen = _greedy_modularity_communities_generator(
+        G, weight=weight, resolution=resolution
+    )
+
+    # construct the first best community
+    communities = next(community_gen)
+
+    # continue merging communities until one of the breaking criteria is satisfied
+    while len(communities) > cutoff:
+        try:
+            dq = next(community_gen)
+        # StopIteration occurs when communities are the connected components
+        except StopIteration:
+            communities = sorted(communities, key=len, reverse=True)
+            # if best_n requires more merging, merge big sets for highest modularity
+            while len(communities) > best_n:
+                comm1, comm2, *rest = communities
+                communities = [comm1 ^ comm2]
+                communities.extend(rest)
+            return communities
+
+        # keep going unless max_mod is reached or best_n says to merge more
+        if dq < 0 and len(communities) <= best_n:
+            break
+        communities = next(community_gen)
+
+    return sorted(communities, key=len, reverse=True)
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs="weight")
+def naive_greedy_modularity_communities(G, resolution=1, weight=None):
+    r"""Find communities in G using greedy modularity maximization.
+
+    This implementation is O(n^4), much slower than alternatives, but it is
+    provided as an easy-to-understand reference implementation.
+
+    Greedy modularity maximization begins with each node in its own community
+    and joins the pair of communities that most increases modularity until no
+    such pair exists.
+
+    This function maximizes the generalized modularity, where `resolution`
+    is the resolution parameter, often expressed as $\gamma$.
+    See :func:`~networkx.algorithms.community.quality.modularity`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Graph must be simple and undirected.
+
+    resolution : float (default=1)
+        If resolution is less than 1, modularity favors larger communities.
+        Greater than 1 favors smaller communities.
+
+    weight : string or None, optional (default=None)
+        The name of an edge attribute that holds the numerical value used
+        as a weight.  If None, then each edge has weight 1.
+        The degree is the sum of the edge weights adjacent to the node.
+
+    Returns
+    -------
+    list
+        A list of sets of nodes, one for each community.
+        Sorted by length with largest communities first.
+
+    Examples
+    --------
+    >>> G = nx.karate_club_graph()
+    >>> c = nx.community.naive_greedy_modularity_communities(G)
+    >>> sorted(c[0])
+    [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]
+
+    See Also
+    --------
+    greedy_modularity_communities
+    modularity
+    """
+    # First create one community for each node
+    communities = [frozenset([u]) for u in G.nodes()]
+    # Track merges
+    merges = []
+    # Greedily merge communities until no improvement is possible
+    old_modularity = None
+    new_modularity = modularity(G, communities, resolution=resolution, weight=weight)
+    while old_modularity is None or new_modularity > old_modularity:
+        # Save modularity for comparison
+        old_modularity = new_modularity
+        # Find best pair to merge
+        trial_communities = list(communities)
+        to_merge = None
+        for i, u in enumerate(communities):
+            for j, v in enumerate(communities):
+                # Skip i==j and empty communities
+                if j <= i or len(u) == 0 or len(v) == 0:
+                    continue
+                # Merge communities u and v
+                trial_communities[j] = u | v
+                trial_communities[i] = frozenset([])
+                trial_modularity = modularity(
+                    G, trial_communities, resolution=resolution, weight=weight
+                )
+                if trial_modularity >= new_modularity:
+                    # Check if strictly better or tie
+                    if trial_modularity > new_modularity:
+                        # Found new best, save modularity and group indexes
+                        new_modularity = trial_modularity
+                        to_merge = (i, j, new_modularity - old_modularity)
+                    elif to_merge and min(i, j) < min(to_merge[0], to_merge[1]):
+                        # Break ties by choosing pair with lowest min id
+                        new_modularity = trial_modularity
+                        to_merge = (i, j, new_modularity - old_modularity)
+                # Un-merge
+                trial_communities[i] = u
+                trial_communities[j] = v
+        if to_merge is not None:
+            # If the best merge improves modularity, use it
+            merges.append(to_merge)
+            i, j, dq = to_merge
+            u, v = communities[i], communities[j]
+            communities[j] = u | v
+            communities[i] = frozenset([])
+    # Remove empty communities and sort
+    return sorted((c for c in communities if len(c) > 0), key=len, reverse=True)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/quality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/quality.py
new file mode 100644
index 00000000..f09a6d45
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/quality.py
@@ -0,0 +1,346 @@
+"""Functions for measuring the quality of a partition (into
+communities).
+
+"""
+
+from itertools import combinations
+
+import networkx as nx
+from networkx import NetworkXError
+from networkx.algorithms.community.community_utils import is_partition
+from networkx.utils.decorators import argmap
+
+__all__ = ["modularity", "partition_quality"]
+
+
class NotAPartition(NetworkXError):
    """Raised if a given collection is not a partition."""

    def __init__(self, G, collection):
        # Build the message inline; callers only ever see the formatted text.
        super().__init__(f"{collection} is not a valid partition of the graph {G}")
+
+
def _require_partition(G, partition):
    """Decorator to check that a valid partition is input to a function

    Raises :exc:`networkx.NetworkXError` if the partition is not valid.

    This decorator should be used on functions whose first two arguments
    are a graph and a partition of the nodes of that graph (in that
    order)::

        >>> @require_partition
        ... def foo(G, partition):
        ...     print("partition is valid!")
        ...
        >>> G = nx.complete_graph(5)
        >>> partition = [{0, 1}, {2, 3}, {4}]
        >>> foo(G, partition)
        partition is valid!
        >>> partition = [{0}, {2, 3}, {4}]
        >>> foo(G, partition)
        Traceback (most recent call last):
          ...
        networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G
        >>> partition = [{0, 1}, {1, 2, 3}, {4}]
        >>> foo(G, partition)
        Traceback (most recent call last):
          ...
        networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G

    """
    # Guard clause: reject invalid partitions up front, then pass both
    # arguments through unchanged for the decorated function.
    if not is_partition(G, partition):
        raise nx.NetworkXError("`partition` is not a valid partition of the nodes of G")
    return G, partition


require_partition = argmap(_require_partition, (0, 1))
+
+
@nx._dispatchable
def intra_community_edges(G, partition):
    """Returns the number of intra-community edges for a partition of `G`.

    Parameters
    ----------
    G : NetworkX graph.

    partition : iterable of sets of nodes
        This must be a partition of the nodes of `G`.

    The "intra-community edges" are those edges joining a pair of nodes
    in the same block of the partition.

    """
    # Each block induces a subgraph whose size() is the number of edges
    # with both endpoints inside that block; sum these per-block counts.
    total = 0
    for block in partition:
        total += G.subgraph(block).size()
    return total
+
+
@nx._dispatchable
def inter_community_edges(G, partition):
    """Returns the number of inter-community edges for a partition of `G`.

    Parameters
    ----------
    G : NetworkX graph.

    partition : iterable of sets of nodes
        This must be a partition of the nodes of `G`.

    The *inter-community edges* are those edges joining a pair of nodes
    in different blocks of the partition.

    Implementation note: this function creates an intermediate graph
    that may require the same amount of memory as that of `G`.

    """
    # Alternate implementation that does not require constructing a new
    # graph object (but does require constructing an affiliation
    # dictionary):
    #
    #     aff = dict(chain.from_iterable(((v, block) for v in block)
    #                                    for block in partition))
    #     return sum(1 for u, v in G.edges() if aff[u] != aff[v])
    #
    # A multigraph quotient keeps one parallel edge per original
    # inter-community edge, so size() counts each of them exactly once.
    MG = nx.MultiDiGraph if G.is_directed() else nx.MultiGraph
    return nx.quotient_graph(G, partition, create_using=MG).size()
+
+
@nx._dispatchable
def inter_community_non_edges(G, partition):
    """Returns the number of inter-community non-edges according to the
    given partition of the nodes of `G`.

    Parameters
    ----------
    G : NetworkX graph.

    partition : iterable of sets of nodes
        This must be a partition of the nodes of `G`.

    A *non-edge* is a pair of nodes (undirected if `G` is undirected)
    that are not adjacent in `G`. The *inter-community non-edges* are
    those non-edges on a pair of nodes in different blocks of the
    partition.

    Implementation note: this function creates two intermediate graphs,
    which may require up to twice the amount of memory as required to
    store `G`.

    """
    # A non-edge of G is exactly an edge of its complement, so counting
    # inter-community edges in the complement yields the answer.
    #
    # Alternate implementation that does not require constructing two
    # new graph objects (but does require constructing an affiliation
    # dictionary):
    #
    #     aff = dict(chain.from_iterable(((v, block) for v in block)
    #                                    for block in partition))
    #     return sum(1 for u, v in nx.non_edges(G) if aff[u] != aff[v])
    #
    complement_graph = nx.complement(G)
    return inter_community_edges(complement_graph, partition)
+
+
@nx._dispatchable(edge_attrs="weight")
def modularity(G, communities, weight="weight", resolution=1):
    r"""Returns the modularity of the given partition of the graph.

    Modularity is defined in [1]_ as

    .. math::
        Q = \frac{1}{2m} \sum_{ij} \left( A_{ij} - \gamma\frac{k_ik_j}{2m}\right)
            \delta(c_i,c_j)

    where $m$ is the number of edges (or sum of all edge weights as in [5]_),
    $A$ is the adjacency matrix of `G`, $k_i$ is the (weighted) degree of $i$,
    $\gamma$ is the resolution parameter, and $\delta(c_i, c_j)$ is 1 if $i$ and
    $j$ are in the same community else 0.

    According to [2]_ (and verified by some algebra) this can be reduced to

    .. math::
       Q = \sum_{c=1}^{n}
       \left[ \frac{L_c}{m} - \gamma\left( \frac{k_c}{2m} \right) ^2 \right]

    where the sum iterates over all communities $c$, $m$ is the number of edges,
    $L_c$ is the number of intra-community links for community $c$,
    $k_c$ is the sum of degrees of the nodes in community $c$,
    and $\gamma$ is the resolution parameter.

    The resolution parameter sets an arbitrary tradeoff between intra-group
    edges and inter-group edges. More complex grouping patterns can be
    discovered by analyzing the same network with multiple values of gamma
    and then combining the results [3]_. That said, it is very common to
    simply use gamma=1. More on the choice of gamma is in [4]_.

    The second formula is the one actually used in calculation of the modularity.
    For directed graphs the second formula replaces $k_c$ with $k^{in}_c k^{out}_c$.

    Parameters
    ----------
    G : NetworkX Graph

    communities : list or iterable of set of nodes
        These node sets must represent a partition of G's nodes.

    weight : string or None, optional (default="weight")
        The edge attribute that holds the numerical value used
        as a weight. If None or an edge does not have that attribute,
        then that edge has weight 1.

    resolution : float (default=1)
        If resolution is less than 1, modularity favors larger communities.
        Greater than 1 favors smaller communities.

    Returns
    -------
    Q : float
        The modularity of the partition.

    Raises
    ------
    NotAPartition
        If `communities` is not a partition of the nodes of `G`.

    Examples
    --------
    >>> G = nx.barbell_graph(3, 0)
    >>> nx.community.modularity(G, [{0, 1, 2}, {3, 4, 5}])
    0.35714285714285715
    >>> nx.community.modularity(G, nx.community.label_propagation_communities(G))
    0.35714285714285715

    References
    ----------
    .. [1] M. E. J. Newman "Networks: An Introduction", page 224.
       Oxford University Press, 2011.
    .. [2] Clauset, Aaron, Mark EJ Newman, and Cristopher Moore.
       "Finding community structure in very large networks."
       Phys. Rev. E 70.6 (2004). <https://arxiv.org/abs/cond-mat/0408187>
    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community Detection"
       Phys. Rev. E 74, 016110, 2006. https://doi.org/10.1103/PhysRevE.74.016110
    .. [4] M. E. J. Newman, "Equivalence between modularity optimization and
       maximum likelihood methods for community detection"
       Phys. Rev. E 94, 052315, 2016. https://doi.org/10.1103/PhysRevE.94.052315
    .. [5] Blondel, V.D. et al. "Fast unfolding of communities in large
       networks" J. Stat. Mech 10008, 1-12 (2008).
       https://doi.org/10.1088/1742-5468/2008/10/P10008
    """
    # Materialize so we can validate and iterate more than once.
    communities = communities if isinstance(communities, list) else list(communities)
    if not is_partition(G, communities):
        raise NotAPartition(G, communities)

    directed = G.is_directed()
    if directed:
        out_degree = dict(G.out_degree(weight=weight))
        in_degree = dict(G.in_degree(weight=weight))
        m = sum(out_degree.values())
        norm = 1 / m**2
    else:
        # Undirected: in- and out-degrees coincide; the degree sum is 2m.
        out_degree = in_degree = dict(G.degree(weight=weight))
        deg_sum = sum(out_degree.values())
        m = deg_sum / 2
        norm = 1 / deg_sum**2

    # Accumulate the per-community terms of the second formula above.
    Q = 0
    for community in communities:
        comm = set(community)
        # Total weight of edges with both endpoints inside the community.
        link_weight = sum(
            wt for u, v, wt in G.edges(comm, data=weight, default=1) if v in comm
        )
        out_degree_sum = sum(out_degree[u] for u in comm)
        in_degree_sum = (
            sum(in_degree[u] for u in comm) if directed else out_degree_sum
        )
        Q += link_weight / m - resolution * out_degree_sum * in_degree_sum * norm
    return Q
+
+
@require_partition
@nx._dispatchable
def partition_quality(G, partition):
    """Returns the coverage and performance of a partition of G.

    The *coverage* of a partition is the ratio of the number of
    intra-community edges to the total number of edges in the graph.

    The *performance* of a partition is the number of
    intra-community edges plus inter-community non-edges divided by the total
    number of potential edges.

    This algorithm has complexity $O(C^2 + L)$ where C is the number of communities and L is the number of links.

    Parameters
    ----------
    G : NetworkX graph

    partition : sequence
        Partition of the nodes of `G`, represented as a sequence of
        sets of nodes (blocks). Each block of the partition represents a
        community.

    Returns
    -------
    (float, float)
        The (coverage, performance) tuple of the partition, as defined above.

    Raises
    ------
    NetworkXError
        If `partition` is not a valid partition of the nodes of `G`.

    Notes
    -----
    If `G` is a multigraph;
        - for coverage, the multiplicity of edges is counted
        - for performance, the result is -1 (total number of possible edges is not defined)

    References
    ----------
    .. [1] Santo Fortunato.
           "Community Detection in Graphs".
           *Physical Reports*, Volume 486, Issue 3--5 pp. 75--174
           <https://arxiv.org/abs/0906.0612>
    """
    # Map every node to the index of its community block.
    node_community = {
        node: i for i, community in enumerate(partition) for node in community
    }

    multigraph = G.is_multigraph()
    if multigraph:
        # `performance` is not defined for multigraphs.
        possible_inter_community_edges = 0
    else:
        # Quadratic in the number of communities: count all node pairs
        # that straddle two different blocks.
        possible_inter_community_edges = sum(
            len(p1) * len(p2) for p1, p2 in combinations(partition, 2)
        )
        if G.is_directed():
            possible_inter_community_edges *= 2

    # Number of edges in the complete graph on `n` nodes, directed or
    # undirected, depending on `G`.
    n = len(G)
    total_pairs = n * (n - 1) if G.is_directed() else n * (n - 1) // 2

    # One pass over the links: every inter-community edge removes one
    # potential inter-community non-edge.
    intra_community_edges = 0
    inter_community_non_edges = possible_inter_community_edges
    for u, v in G.edges():
        if node_community[u] == node_community[v]:
            intra_community_edges += 1
        else:
            inter_community_non_edges -= 1

    coverage = intra_community_edges / len(G.edges)
    performance = (
        -1.0
        if multigraph
        else (intra_community_edges + inter_community_non_edges) / total_pairs
    )

    return coverage, performance
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_asyn_fluid.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_asyn_fluid.py
new file mode 100644
index 00000000..6c023be7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_asyn_fluid.py
@@ -0,0 +1,136 @@
+import pytest
+
+import networkx as nx
+from networkx import Graph, NetworkXError
+from networkx.algorithms.community import asyn_fluidc
+
+
@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph))
def test_raises_on_directed_and_multigraphs(graph_constructor):
    # Fluid communities are only implemented for undirected simple graphs.
    G = graph_constructor([(0, 1), (1, 2)])
    with pytest.raises(nx.NetworkXNotImplemented):
        nx.community.asyn_fluidc(G, 1)


def test_exceptions():
    test = Graph()
    test.add_node("a")
    # k must be a positive integer no larger than the number of nodes.
    pytest.raises(NetworkXError, asyn_fluidc, test, "hi")
    pytest.raises(NetworkXError, asyn_fluidc, test, -1)
    pytest.raises(NetworkXError, asyn_fluidc, test, 3)
    # A disconnected graph is rejected.
    test.add_node("b")
    pytest.raises(NetworkXError, asyn_fluidc, test, 1)


def test_single_node():
    test = Graph()
    test.add_node("a")

    expected = {frozenset(["a"])}

    found = {frozenset(c) for c in asyn_fluidc(test, 1)}
    assert found == expected


def test_two_nodes():
    test = Graph()
    test.add_edge("a", "b")

    expected = {frozenset(["a"]), frozenset(["b"])}

    found = {frozenset(c) for c in asyn_fluidc(test, 2)}
    assert found == expected


def test_two_clique_communities():
    test = Graph()
    # Two triangles {a,b,c} and {d,e,f} joined by the bridge (c, d).
    # Edge insertion order matches the original test so the seeded
    # result is unchanged.
    test.add_edges_from(
        [
            ("a", "b"),
            ("a", "c"),
            ("b", "c"),
            ("c", "d"),
            ("d", "e"),
            ("d", "f"),
            ("f", "e"),
        ]
    )

    expected = {frozenset(["a", "c", "b"]), frozenset(["e", "d", "f"])}

    found = {frozenset(c) for c in asyn_fluidc(test, 2, seed=7)}
    assert found == expected


def test_five_clique_ring():
    test = Graph()

    # Five 4-cliques labelled "1a".."5d"; node and edge insertion order
    # matches the original hand-written test so the seeded result is
    # unchanged.
    for p in "12345":
        clique = [p + s for s in "abcd"]
        for i, u in enumerate(clique):
            for v in clique[i + 1 :]:
                test.add_edge(u, v)

    # Ring connections between consecutive cliques.
    for u, v in [("1a", "2c"), ("2a", "3c"), ("3a", "4c"), ("4a", "5c"), ("5a", "1c")]:
        test.add_edge(u, v)

    expected = {frozenset(p + s for s in "abcd") for p in "12345"}

    found = {frozenset(c) for c in asyn_fluidc(test, 5, seed=9)}
    assert found == expected
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_centrality.py
new file mode 100644
index 00000000..1a8982f0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_centrality.py
@@ -0,0 +1,85 @@
+"""Unit tests for the :mod:`networkx.algorithms.community.centrality`
+module.
+
+"""
+
+from operator import itemgetter
+
+import networkx as nx
+
+
def set_of_sets(iterable):
    # Normalize a collection of node sets into a hashable, order-free form.
    return {frozenset(s) for s in iterable}


def validate_communities(result, expected):
    assert set_of_sets(result) == set_of_sets(expected)


def validate_possible_communities(result, *expected):
    # The result must match at least one of the acceptable partitions.
    candidates = [set_of_sets(p) for p in expected]
    assert set_of_sets(result) in candidates
+
+
class TestGirvanNewman:
    """Unit tests for the
    :func:`networkx.algorithms.community.centrality.girvan_newman`
    function.

    """

    def test_no_edges(self):
        G = nx.empty_graph(3)
        levels = list(nx.community.girvan_newman(G))
        assert len(levels) == 1
        validate_communities(levels[0], [{0}, {1}, {2}])

    def test_undirected(self):
        # Start with the path graph .-.-.-.
        G = nx.path_graph(4)
        levels = list(nx.community.girvan_newman(G))
        assert len(levels) == 3
        # One removal yields .-. .-.
        validate_communities(levels[0], [{0, 1}, {2, 3}])
        # The next removal yields .-. . . in one of two symmetric ways.
        validate_possible_communities(
            levels[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}]
        )
        # The last removal always yields the edgeless graph.
        validate_communities(levels[2], [{0}, {1}, {2}, {3}])

    def test_directed(self):
        G = nx.DiGraph(nx.path_graph(4))
        levels = list(nx.community.girvan_newman(G))
        assert len(levels) == 3
        validate_communities(levels[0], [{0, 1}, {2, 3}])
        validate_possible_communities(
            levels[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}]
        )
        validate_communities(levels[2], [{0}, {1}, {2}, {3}])

    def test_selfloops(self):
        # Self-loops must not change the removal sequence.
        G = nx.path_graph(4)
        G.add_edge(0, 0)
        G.add_edge(2, 2)
        levels = list(nx.community.girvan_newman(G))
        assert len(levels) == 3
        validate_communities(levels[0], [{0, 1}, {2, 3}])
        validate_possible_communities(
            levels[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}]
        )
        validate_communities(levels[2], [{0}, {1}, {2}, {3}])

    def test_most_valuable_edge(self):
        G = nx.Graph()
        G.add_weighted_edges_from([(0, 1, 3), (1, 2, 2), (2, 3, 1)])

        def heaviest(G):
            # Let the most valuable edge be the one with the highest weight.
            return max(G.edges(data="weight"), key=itemgetter(2))[:2]

        levels = list(nx.community.girvan_newman(G, heaviest))
        assert len(levels) == 3
        validate_communities(levels[0], [{0}, {1, 2, 3}])
        validate_communities(levels[1], [{0}, {1}, {2, 3}])
        validate_communities(levels[2], [{0}, {1}, {2}, {3}])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_divisive.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_divisive.py
new file mode 100644
index 00000000..6331503f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_divisive.py
@@ -0,0 +1,106 @@
+import pytest
+
+import networkx as nx
+
+
def test_edge_betweenness_partition():
    G = nx.barbell_graph(3, 0)
    C = nx.community.edge_betweenness_partition(G, 2)
    answer = [{0, 1, 2}, {3, 4, 5}]
    assert len(C) == len(answer)
    assert all(s in C for s in answer)

    G = nx.barbell_graph(3, 1)
    C = nx.community.edge_betweenness_partition(G, 3)
    answer = [{0, 1, 2}, {4, 5, 6}, {3}]
    assert len(C) == len(answer)
    assert all(s in C for s in answer)

    # One block per node.
    C = nx.community.edge_betweenness_partition(G, 7)
    answer = [{n} for n in G]
    assert len(C) == len(answer)
    assert all(s in C for s in answer)

    # A single block is the whole node set.
    C = nx.community.edge_betweenness_partition(G, 1)
    assert C == [set(G)]

    C = nx.community.edge_betweenness_partition(G, 1, weight="weight")
    assert C == [set(G)]

    # Block counts outside [1, n] are rejected.
    with pytest.raises(nx.NetworkXError):
        nx.community.edge_betweenness_partition(G, 0)

    with pytest.raises(nx.NetworkXError):
        nx.community.edge_betweenness_partition(G, -1)

    with pytest.raises(nx.NetworkXError):
        nx.community.edge_betweenness_partition(G, 10)


def test_edge_current_flow_betweenness_partition():
    pytest.importorskip("scipy")

    G = nx.barbell_graph(3, 0)
    C = nx.community.edge_current_flow_betweenness_partition(G, 2)
    answer = [{0, 1, 2}, {3, 4, 5}]
    assert len(C) == len(answer)
    assert all(s in C for s in answer)

    G = nx.barbell_graph(3, 1)
    # The bridge node 3 may end up on either side.
    C = nx.community.edge_current_flow_betweenness_partition(G, 2)
    answers = [[{0, 1, 2, 3}, {4, 5, 6}], [{0, 1, 2}, {3, 4, 5, 6}]]
    assert len(C) == len(answers[0])
    assert any(all(s in answer for s in C) for answer in answers)

    C = nx.community.edge_current_flow_betweenness_partition(G, 3)
    answer = [{0, 1, 2}, {4, 5, 6}, {3}]
    assert len(C) == len(answer)
    assert all(s in C for s in answer)

    C = nx.community.edge_current_flow_betweenness_partition(G, 4)
    answers = [[{1, 2}, {4, 5, 6}, {3}, {0}], [{0, 1, 2}, {5, 6}, {3}, {4}]]
    assert len(C) == len(answers[0])
    assert any(all(s in answer for s in C) for answer in answers)

    C = nx.community.edge_current_flow_betweenness_partition(G, 5)
    answer = [{1, 2}, {5, 6}, {3}, {0}, {4}]
    assert len(C) == len(answer)
    assert all(s in C for s in answer)

    C = nx.community.edge_current_flow_betweenness_partition(G, 6)
    answers = [[{2}, {5, 6}, {3}, {0}, {4}, {1}], [{1, 2}, {6}, {3}, {0}, {4}, {5}]]
    assert len(C) == len(answers[0])
    assert any(all(s in answer for s in C) for answer in answers)

    C = nx.community.edge_current_flow_betweenness_partition(G, 7)
    answer = [{n} for n in G]
    assert len(C) == len(answer)
    assert all(s in C for s in answer)

    C = nx.community.edge_current_flow_betweenness_partition(G, 1)
    assert C == [set(G)]

    C = nx.community.edge_current_flow_betweenness_partition(G, 1, weight="weight")
    assert C == [set(G)]

    with pytest.raises(nx.NetworkXError):
        nx.community.edge_current_flow_betweenness_partition(G, 0)

    with pytest.raises(nx.NetworkXError):
        nx.community.edge_current_flow_betweenness_partition(G, -1)

    with pytest.raises(nx.NetworkXError):
        nx.community.edge_current_flow_betweenness_partition(G, 10)

    # An edgeless graph always splits into singletons.
    N = 10
    G = nx.empty_graph(N)
    for i in range(2, N - 1):
        C = nx.community.edge_current_flow_betweenness_partition(G, i)
        assert C == [{n} for n in G]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kclique.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kclique.py
new file mode 100644
index 00000000..aa0b7e82
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kclique.py
@@ -0,0 +1,91 @@
+from itertools import combinations
+
+import pytest
+
+import networkx as nx
+
+
def test_overlapping_K5():
    # Two 5-cliques sharing the nodes {2, 3, 4}.
    G = nx.Graph()
    G.add_edges_from(combinations(range(5), 2))
    G.add_edges_from(combinations(range(2, 7), 2))
    assert list(nx.community.k_clique_communities(G, 4)) == [frozenset(range(7))]
    assert set(nx.community.k_clique_communities(G, 5)) == {
        frozenset(range(5)),
        frozenset(range(2, 7)),
    }


def test_isolated_K5():
    # Two disjoint 5-cliques.
    G = nx.Graph()
    G.add_edges_from(combinations(range(5), 2))
    G.add_edges_from(combinations(range(5, 10), 2))
    assert set(nx.community.k_clique_communities(G, 5)) == {
        frozenset(range(5)),
        frozenset(range(5, 10)),
    }
+
+
class TestZacharyKarateClub:
    def setup_method(self):
        self.G = nx.karate_club_graph()

    def _check_communities(self, k, expected):
        assert set(nx.community.k_clique_communities(self.G, k)) == expected

    def test_k2(self):
        # clique percolation with k=2 is just connected components
        self._check_communities(2, {frozenset(self.G)})

    def test_k3(self):
        comm1 = frozenset(
            [0, 1, 2, 3, 7, 8, 12, 13, 14, 15, 17, 18, 19, 20, 21, 22, 23]
            + [26, 27, 28, 29, 30, 31, 32, 33]
        )
        comm2 = frozenset([0, 4, 5, 6, 10, 16])
        comm3 = frozenset([24, 25, 31])
        self._check_communities(3, {comm1, comm2, comm3})

    def test_k4(self):
        self._check_communities(
            4,
            {
                frozenset([0, 1, 2, 3, 7, 13]),
                frozenset([8, 32, 30, 33]),
                frozenset([32, 33, 29, 23]),
            },
        )

    def test_k5(self):
        self._check_communities(5, {frozenset([0, 1, 2, 3, 7, 13])})

    def test_k6(self):
        # No community percolates at k=6.
        self._check_communities(6, set())


def test_bad_k():
    # k must be at least 2.
    with pytest.raises(nx.NetworkXError):
        list(nx.community.k_clique_communities(nx.Graph(), 1))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kernighan_lin.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kernighan_lin.py
new file mode 100644
index 00000000..25d53d5f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kernighan_lin.py
@@ -0,0 +1,92 @@
+"""Unit tests for the :mod:`networkx.algorithms.community.kernighan_lin`
+module.
+"""
+
+from itertools import permutations
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms.community import kernighan_lin_bisection
+
+
def assert_partition_equal(x, y):
    # Compare partitions irrespective of block order and block type.
    assert {frozenset(b) for b in x} == {frozenset(b) for b in y}


def test_partition():
    G = nx.barbell_graph(3, 0)
    assert_partition_equal(kernighan_lin_bisection(G), [{0, 1, 2}, {3, 4, 5}])


def test_partition_argument():
    G = nx.barbell_graph(3, 0)
    partition = [{0, 1, 2}, {3, 4, 5}]
    assert_partition_equal(kernighan_lin_bisection(G, partition), partition)


def test_partition_argument_non_integer_nodes():
    G = nx.Graph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
    partition = ({"A", "B"}, {"C", "D"})
    assert_partition_equal(kernighan_lin_bisection(G, partition), partition)


def test_seed_argument():
    G = nx.barbell_graph(3, 0)
    assert_partition_equal(kernighan_lin_bisection(G, seed=1), [{0, 1, 2}, {3, 4, 5}])


def test_non_disjoint_partition():
    G = nx.barbell_graph(3, 0)
    partition = ({0, 1, 2}, {2, 3, 4, 5})
    with pytest.raises(nx.NetworkXError):
        kernighan_lin_bisection(G, partition)


def test_too_many_blocks():
    G = nx.barbell_graph(3, 0)
    partition = ({0, 1}, {2}, {3, 4, 5})
    with pytest.raises(nx.NetworkXError):
        kernighan_lin_bisection(G, partition)


def test_multigraph():
    G = nx.cycle_graph(4)
    M = nx.MultiGraph(G.edges())
    M.add_edges_from(G.edges())
    M.remove_edge(1, 2)
    # The bisection must be invariant under relabeling.
    for labels in permutations(range(4)):
        mapping = dict(zip(M, labels))
        A, B = kernighan_lin_bisection(nx.relabel_nodes(M, mapping), seed=0)
        assert_partition_equal(
            [A, B], [{mapping[0], mapping[1]}, {mapping[2], mapping[3]}]
        )


def test_max_iter_argument():
    # Complete weighted graph on six nodes; one pass of swaps stops early.
    G = nx.Graph()
    G.add_weighted_edges_from(
        [
            ("A", "B", 1),
            ("A", "C", 2),
            ("A", "D", 3),
            ("A", "E", 2),
            ("A", "F", 4),
            ("B", "C", 1),
            ("B", "D", 4),
            ("B", "E", 2),
            ("B", "F", 1),
            ("C", "D", 3),
            ("C", "E", 2),
            ("C", "F", 1),
            ("D", "E", 4),
            ("D", "F", 3),
            ("E", "F", 2),
        ]
    )
    partition = ({"A", "B", "C"}, {"D", "E", "F"})
    C = kernighan_lin_bisection(G, partition, max_iter=1)
    assert_partition_equal(C, ({"A", "F", "C"}, {"D", "E", "B"}))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_label_propagation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_label_propagation.py
new file mode 100644
index 00000000..4be72dbf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_label_propagation.py
@@ -0,0 +1,241 @@
+from itertools import chain, combinations
+
+import pytest
+
+import networkx as nx
+
+
+def test_directed_not_supported():
+    with pytest.raises(nx.NetworkXNotImplemented):
+        # not supported for directed graphs
+        test = nx.DiGraph()
+        test.add_edge("a", "b")
+        test.add_edge("a", "c")
+        test.add_edge("b", "d")
+        result = nx.community.label_propagation_communities(test)
+
+
+def test_iterator_vs_iterable():
+    G = nx.empty_graph("a")
+    assert list(nx.community.label_propagation_communities(G)) == [{"a"}]
+    for community in nx.community.label_propagation_communities(G):
+        assert community == {"a"}
+    pytest.raises(TypeError, next, nx.community.label_propagation_communities(G))
+
+
+def test_one_node():
+    test = nx.Graph()
+    test.add_node("a")
+
+    # The expected communities are:
+    ground_truth = {frozenset(["a"])}
+
+    communities = nx.community.label_propagation_communities(test)
+    result = {frozenset(c) for c in communities}
+    assert result == ground_truth
+
+
+def test_unconnected_communities():
+    test = nx.Graph()
+    # community 1
+    test.add_edge("a", "c")
+    test.add_edge("a", "d")
+    test.add_edge("d", "c")
+    # community 2
+    test.add_edge("b", "e")
+    test.add_edge("e", "f")
+    test.add_edge("f", "b")
+
+    # The expected communities are:
+    ground_truth = {frozenset(["a", "c", "d"]), frozenset(["b", "e", "f"])}
+
+    communities = nx.community.label_propagation_communities(test)
+    result = {frozenset(c) for c in communities}
+    assert result == ground_truth
+
+
+def test_connected_communities():
+    test = nx.Graph()
+    # community 1
+    test.add_edge("a", "b")
+    test.add_edge("c", "a")
+    test.add_edge("c", "b")
+    test.add_edge("d", "a")
+    test.add_edge("d", "b")
+    test.add_edge("d", "c")
+    test.add_edge("e", "a")
+    test.add_edge("e", "b")
+    test.add_edge("e", "c")
+    test.add_edge("e", "d")
+    # community 2
+    test.add_edge("1", "2")
+    test.add_edge("3", "1")
+    test.add_edge("3", "2")
+    test.add_edge("4", "1")
+    test.add_edge("4", "2")
+    test.add_edge("4", "3")
+    test.add_edge("5", "1")
+    test.add_edge("5", "2")
+    test.add_edge("5", "3")
+    test.add_edge("5", "4")
+    # edge between community 1 and 2
+    test.add_edge("a", "1")
+    # community 3
+    test.add_edge("x", "y")
+    # community 4 with only a single node
+    test.add_node("z")
+
+    # The expected communities are:
+    ground_truth1 = {
+        frozenset(["a", "b", "c", "d", "e"]),
+        frozenset(["1", "2", "3", "4", "5"]),
+        frozenset(["x", "y"]),
+        frozenset(["z"]),
+    }
+    ground_truth2 = {
+        frozenset(["a", "b", "c", "d", "e", "1", "2", "3", "4", "5"]),
+        frozenset(["x", "y"]),
+        frozenset(["z"]),
+    }
+    ground_truth = (ground_truth1, ground_truth2)
+
+    communities = nx.community.label_propagation_communities(test)
+    result = {frozenset(c) for c in communities}
+    assert result in ground_truth
+
+
+def test_termination():
+    # ensure termination of asyn_lpa_communities in two cases
+    # that led to an endless loop in a previous version
+    test1 = nx.karate_club_graph()
+    test2 = nx.caveman_graph(2, 10)
+    test2.add_edges_from([(0, 20), (20, 10)])
+    nx.community.asyn_lpa_communities(test1)
+    nx.community.asyn_lpa_communities(test2)
+
+
+class TestAsynLpaCommunities:
+    def _check_communities(self, G, expected):
+        """Checks that the communities computed from the given graph ``G``
+        using the :func:`~networkx.asyn_lpa_communities` function match
+        the set of nodes given in ``expected``.
+
+        ``expected`` must be a :class:`set` of :class:`frozenset`
+        instances, each element of which is a node in the graph.
+
+        """
+        communities = nx.community.asyn_lpa_communities(G)
+        result = {frozenset(c) for c in communities}
+        assert result == expected
+
+    def test_null_graph(self):
+        G = nx.null_graph()
+        ground_truth = set()
+        self._check_communities(G, ground_truth)
+
+    def test_single_node(self):
+        G = nx.empty_graph(1)
+        ground_truth = {frozenset([0])}
+        self._check_communities(G, ground_truth)
+
+    def test_simple_communities(self):
+        # This graph is the disjoint union of two triangles.
+        G = nx.Graph(["ab", "ac", "bc", "de", "df", "fe"])
+        ground_truth = {frozenset("abc"), frozenset("def")}
+        self._check_communities(G, ground_truth)
+
+    def test_seed_argument(self):
+        G = nx.Graph(["ab", "ac", "bc", "de", "df", "fe"])
+        ground_truth = {frozenset("abc"), frozenset("def")}
+        communities = nx.community.asyn_lpa_communities(G, seed=1)
+        result = {frozenset(c) for c in communities}
+        assert result == ground_truth
+
+    def test_several_communities(self):
+        # This graph is the disjoint union of five triangles.
+        ground_truth = {frozenset(range(3 * i, 3 * (i + 1))) for i in range(5)}
+        edges = chain.from_iterable(combinations(c, 2) for c in ground_truth)
+        G = nx.Graph(edges)
+        self._check_communities(G, ground_truth)
+
+
+class TestFastLabelPropagationCommunities:
+    N = 100  # number of nodes
+    K = 15  # average node degree
+
+    def _check_communities(self, G, truth, weight=None, seed=42):
+        C = nx.community.fast_label_propagation_communities(G, weight=weight, seed=seed)
+        assert {frozenset(c) for c in C} == truth
+
+    def test_null_graph(self):
+        G = nx.null_graph()
+        truth = set()
+        self._check_communities(G, truth)
+
+    def test_empty_graph(self):
+        G = nx.empty_graph(self.N)
+        truth = {frozenset([i]) for i in G}
+        self._check_communities(G, truth)
+
+    def test_star_graph(self):
+        G = nx.star_graph(self.N)
+        truth = {frozenset(G)}
+        self._check_communities(G, truth)
+
+    def test_complete_graph(self):
+        G = nx.complete_graph(self.N)
+        truth = {frozenset(G)}
+        self._check_communities(G, truth)
+
+    def test_bipartite_graph(self):
+        G = nx.complete_bipartite_graph(self.N // 2, self.N // 2)
+        truth = {frozenset(G)}
+        self._check_communities(G, truth)
+
+    def test_random_graph(self):
+        G = nx.gnm_random_graph(self.N, self.N * self.K // 2, seed=42)
+        truth = {frozenset(G)}
+        self._check_communities(G, truth)
+
+    def test_disjoin_cliques(self):
+        G = nx.Graph(["ab", "AB", "AC", "BC", "12", "13", "14", "23", "24", "34"])
+        truth = {frozenset("ab"), frozenset("ABC"), frozenset("1234")}
+        self._check_communities(G, truth)
+
+    def test_ring_of_cliques(self):
+        N, K = self.N, self.K
+        G = nx.ring_of_cliques(N, K)
+        truth = {frozenset([K * i + k for k in range(K)]) for i in range(N)}
+        self._check_communities(G, truth)
+
+    def test_larger_graph(self):
+        G = nx.gnm_random_graph(100 * self.N, 50 * self.N * self.K, seed=42)
+        nx.community.fast_label_propagation_communities(G)
+
+    def test_graph_type(self):
+        G1 = nx.complete_graph(self.N, nx.MultiDiGraph())
+        G2 = nx.MultiGraph(G1)
+        G3 = nx.DiGraph(G1)
+        G4 = nx.Graph(G1)
+        truth = {frozenset(G1)}
+        self._check_communities(G1, truth)
+        self._check_communities(G2, truth)
+        self._check_communities(G3, truth)
+        self._check_communities(G4, truth)
+
+    def test_weight_argument(self):
+        G = nx.MultiDiGraph()
+        G.add_edge(1, 2, weight=1.41)
+        G.add_edge(2, 1, weight=1.41)
+        G.add_edge(2, 3)
+        G.add_edge(3, 4, weight=3.14)
+        truth = {frozenset({1, 2}), frozenset({3, 4})}
+        self._check_communities(G, truth, weight="weight")
+
+    def test_seed_argument(self):
+        G = nx.karate_club_graph()
+        C = nx.community.fast_label_propagation_communities(G, seed=2023)
+        truth = {frozenset(c) for c in C}
+        self._check_communities(G, truth, seed=2023)
+        # smoke test that seed=None works
+        C = nx.community.fast_label_propagation_communities(G, seed=None)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_louvain.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_louvain.py
new file mode 100644
index 00000000..816e6f14
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_louvain.py
@@ -0,0 +1,264 @@
+import pytest
+
+import networkx as nx
+
+
+def test_modularity_increase():
+    G = nx.LFR_benchmark_graph(
+        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
+    )
+    partition = [{u} for u in G.nodes()]
+    mod = nx.community.modularity(G, partition)
+    partition = nx.community.louvain_communities(G)
+
+    assert nx.community.modularity(G, partition) > mod
+
+
+def test_valid_partition():
+    G = nx.LFR_benchmark_graph(
+        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
+    )
+    H = G.to_directed()
+    partition = nx.community.louvain_communities(G)
+    partition2 = nx.community.louvain_communities(H)
+
+    assert nx.community.is_partition(G, partition)
+    assert nx.community.is_partition(H, partition2)
+
+
+def test_karate_club_partition():
+    G = nx.karate_club_graph()
+    part = [
+        {0, 1, 2, 3, 7, 9, 11, 12, 13, 17, 19, 21},
+        {16, 4, 5, 6, 10},
+        {23, 25, 27, 28, 24, 31},
+        {32, 33, 8, 14, 15, 18, 20, 22, 26, 29, 30},
+    ]
+    partition = nx.community.louvain_communities(G, seed=2, weight=None)
+
+    assert part == partition
+
+
+def test_partition_iterator():
+    G = nx.path_graph(15)
+    parts_iter = nx.community.louvain_partitions(G, seed=42)
+    first_part = next(parts_iter)
+    first_copy = [s.copy() for s in first_part]
+
+    # gh-5901 reports sets changing after next partition is yielded
+    assert first_copy[0] == first_part[0]
+    second_part = next(parts_iter)
+    assert first_copy[0] == first_part[0]
+
+
+def test_undirected_selfloops():
+    G = nx.karate_club_graph()
+    expected_partition = nx.community.louvain_communities(G, seed=2, weight=None)
+    part = [
+        {0, 1, 2, 3, 7, 9, 11, 12, 13, 17, 19, 21},
+        {16, 4, 5, 6, 10},
+        {23, 25, 27, 28, 24, 31},
+        {32, 33, 8, 14, 15, 18, 20, 22, 26, 29, 30},
+    ]
+    assert expected_partition == part
+
+    G.add_weighted_edges_from([(i, i, i * 1000) for i in range(9)])
+    # large self-loop weight impacts partition
+    partition = nx.community.louvain_communities(G, seed=2, weight="weight")
+    assert part != partition
+
+    # small self-loop weights aren't enough to impact partition in this graph
+    partition = nx.community.louvain_communities(G, seed=2, weight=None)
+    assert part == partition
+
+
+def test_directed_selfloops():
+    G = nx.DiGraph()
+    G.add_nodes_from(range(11))
+    G_edges = [
+        (0, 2),
+        (0, 1),
+        (1, 0),
+        (2, 1),
+        (2, 0),
+        (3, 4),
+        (4, 3),
+        (7, 8),
+        (8, 7),
+        (9, 10),
+        (10, 9),
+    ]
+    G.add_edges_from(G_edges)
+    G_expected_partition = nx.community.louvain_communities(G, seed=123, weight=None)
+
+    G.add_weighted_edges_from([(i, i, i * 1000) for i in range(3)])
+    # large self-loop weight impacts partition
+    G_partition = nx.community.louvain_communities(G, seed=123, weight="weight")
+    assert G_partition != G_expected_partition
+
+    # small self-loop weights aren't enough to impact partition in this graph
+    G_partition = nx.community.louvain_communities(G, seed=123, weight=None)
+    assert G_partition == G_expected_partition
+
+
+def test_directed_partition():
+    """
+    Test 2 cases that were looping infinitely
+    from issues #5175 and #5704
+    """
+    G = nx.DiGraph()
+    H = nx.DiGraph()
+    G.add_nodes_from(range(10))
+    H.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
+    G_edges = [
+        (0, 2),
+        (0, 1),
+        (1, 0),
+        (2, 1),
+        (2, 0),
+        (3, 4),
+        (4, 3),
+        (7, 8),
+        (8, 7),
+        (9, 10),
+        (10, 9),
+    ]
+    H_edges = [
+        (1, 2),
+        (1, 6),
+        (1, 9),
+        (2, 3),
+        (2, 4),
+        (2, 5),
+        (3, 4),
+        (4, 3),
+        (4, 5),
+        (5, 4),
+        (6, 7),
+        (6, 8),
+        (9, 10),
+        (9, 11),
+        (10, 11),
+        (11, 10),
+    ]
+    G.add_edges_from(G_edges)
+    H.add_edges_from(H_edges)
+
+    G_expected_partition = [{0, 1, 2}, {3, 4}, {5}, {6}, {8, 7}, {9, 10}]
+    G_partition = nx.community.louvain_communities(G, seed=123, weight=None)
+
+    H_expected_partition = [{2, 3, 4, 5}, {8, 1, 6, 7}, {9, 10, 11}]
+    H_partition = nx.community.louvain_communities(H, seed=123, weight=None)
+
+    assert G_partition == G_expected_partition
+    assert H_partition == H_expected_partition
+
+
+def test_none_weight_param():
+    G = nx.karate_club_graph()
+    nx.set_edge_attributes(
+        G, {edge: i * i for i, edge in enumerate(G.edges)}, name="foo"
+    )
+
+    part = [
+        {0, 1, 2, 3, 7, 9, 11, 12, 13, 17, 19, 21},
+        {16, 4, 5, 6, 10},
+        {23, 25, 27, 28, 24, 31},
+        {32, 33, 8, 14, 15, 18, 20, 22, 26, 29, 30},
+    ]
+    partition1 = nx.community.louvain_communities(G, weight=None, seed=2)
+    partition2 = nx.community.louvain_communities(G, weight="foo", seed=2)
+    partition3 = nx.community.louvain_communities(G, weight="weight", seed=2)
+
+    assert part == partition1
+    assert part != partition2
+    assert part != partition3
+    assert partition2 != partition3
+
+
+def test_quality():
+    G = nx.LFR_benchmark_graph(
+        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
+    )
+    H = nx.gn_graph(200, seed=1234)
+    I = nx.MultiGraph(G)
+    J = nx.MultiDiGraph(H)
+
+    partition = nx.community.louvain_communities(G)
+    partition2 = nx.community.louvain_communities(H)
+    partition3 = nx.community.louvain_communities(I)
+    partition4 = nx.community.louvain_communities(J)
+
+    quality = nx.community.partition_quality(G, partition)[0]
+    quality2 = nx.community.partition_quality(H, partition2)[0]
+    quality3 = nx.community.partition_quality(I, partition3)[0]
+    quality4 = nx.community.partition_quality(J, partition4)[0]
+
+    assert quality >= 0.65
+    assert quality2 >= 0.65
+    assert quality3 >= 0.65
+    assert quality4 >= 0.65
+
+
+def test_multigraph():
+    G = nx.karate_club_graph()
+    H = nx.MultiGraph(G)
+    G.add_edge(0, 1, weight=10)
+    H.add_edge(0, 1, weight=9)
+    G.add_edge(0, 9, foo=20)
+    H.add_edge(0, 9, foo=20)
+
+    partition1 = nx.community.louvain_communities(G, seed=1234)
+    partition2 = nx.community.louvain_communities(H, seed=1234)
+    partition3 = nx.community.louvain_communities(H, weight="foo", seed=1234)
+
+    assert partition1 == partition2 != partition3
+
+
+def test_resolution():
+    G = nx.LFR_benchmark_graph(
+        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
+    )
+
+    partition1 = nx.community.louvain_communities(G, resolution=0.5, seed=12)
+    partition2 = nx.community.louvain_communities(G, seed=12)
+    partition3 = nx.community.louvain_communities(G, resolution=2, seed=12)
+
+    assert len(partition1) <= len(partition2) <= len(partition3)
+
+
+def test_threshold():
+    G = nx.LFR_benchmark_graph(
+        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
+    )
+    partition1 = nx.community.louvain_communities(G, threshold=0.3, seed=2)
+    partition2 = nx.community.louvain_communities(G, seed=2)
+    mod1 = nx.community.modularity(G, partition1)
+    mod2 = nx.community.modularity(G, partition2)
+
+    assert mod1 <= mod2
+
+
+def test_empty_graph():
+    G = nx.Graph()
+    G.add_nodes_from(range(5))
+    expected = [{0}, {1}, {2}, {3}, {4}]
+    assert nx.community.louvain_communities(G) == expected
+
+
+def test_max_level():
+    G = nx.LFR_benchmark_graph(
+        250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
+    )
+    parts_iter = nx.community.louvain_partitions(G, seed=42)
+    for max_level, expected in enumerate(parts_iter, 1):
+        partition = nx.community.louvain_communities(G, max_level=max_level, seed=42)
+        assert partition == expected
+    assert max_level > 1  # Ensure we are actually testing max_level
+    # max_level is an upper limit; it's okay if we stop before it's hit.
+    partition = nx.community.louvain_communities(G, max_level=max_level + 1, seed=42)
+    assert partition == expected
+    with pytest.raises(
+        ValueError, match="max_level argument must be a positive integer"
+    ):
+        nx.community.louvain_communities(G, max_level=0)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_lukes.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_lukes.py
new file mode 100644
index 00000000..cfa48f0f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_lukes.py
@@ -0,0 +1,152 @@
+from itertools import product
+
+import pytest
+
+import networkx as nx
+
+EWL = "e_weight"
+NWL = "n_weight"
+
+
+# first test from the Lukes original paper
+def paper_1_case(float_edge_wt=False, explicit_node_wt=True, directed=False):
+    # problem-specific constants
+    limit = 3
+
+    # configuration
+    if float_edge_wt:
+        shift = 0.001
+    else:
+        shift = 0
+
+    if directed:
+        example_1 = nx.DiGraph()
+    else:
+        example_1 = nx.Graph()
+
+    # graph creation
+    example_1.add_edge(1, 2, **{EWL: 3 + shift})
+    example_1.add_edge(1, 4, **{EWL: 2 + shift})
+    example_1.add_edge(2, 3, **{EWL: 4 + shift})
+    example_1.add_edge(2, 5, **{EWL: 6 + shift})
+
+    # node weights
+    if explicit_node_wt:
+        nx.set_node_attributes(example_1, 1, NWL)
+        wtu = NWL
+    else:
+        wtu = None
+
+    # partitioning
+    clusters_1 = {
+        frozenset(x)
+        for x in nx.community.lukes_partitioning(
+            example_1, limit, node_weight=wtu, edge_weight=EWL
+        )
+    }
+
+    return clusters_1
+
+
+# second test from the Lukes original paper
+def paper_2_case(explicit_edge_wt=True, directed=False):
+    # problem specific constants
+    byte_block_size = 32
+
+    # configuration
+    if directed:
+        example_2 = nx.DiGraph()
+    else:
+        example_2 = nx.Graph()
+
+    if explicit_edge_wt:
+        edic = {EWL: 1}
+        wtu = EWL
+    else:
+        edic = {}
+        wtu = None
+
+    # graph creation
+    example_2.add_edge("name", "home_address", **edic)
+    example_2.add_edge("name", "education", **edic)
+    example_2.add_edge("education", "bs", **edic)
+    example_2.add_edge("education", "ms", **edic)
+    example_2.add_edge("education", "phd", **edic)
+    example_2.add_edge("name", "telephone", **edic)
+    example_2.add_edge("telephone", "home", **edic)
+    example_2.add_edge("telephone", "office", **edic)
+    example_2.add_edge("office", "no1", **edic)
+    example_2.add_edge("office", "no2", **edic)
+
+    example_2.nodes["name"][NWL] = 20
+    example_2.nodes["education"][NWL] = 10
+    example_2.nodes["bs"][NWL] = 1
+    example_2.nodes["ms"][NWL] = 1
+    example_2.nodes["phd"][NWL] = 1
+    example_2.nodes["home_address"][NWL] = 8
+    example_2.nodes["telephone"][NWL] = 8
+    example_2.nodes["home"][NWL] = 8
+    example_2.nodes["office"][NWL] = 4
+    example_2.nodes["no1"][NWL] = 1
+    example_2.nodes["no2"][NWL] = 1
+
+    # partitioning
+    clusters_2 = {
+        frozenset(x)
+        for x in nx.community.lukes_partitioning(
+            example_2, byte_block_size, node_weight=NWL, edge_weight=wtu
+        )
+    }
+
+    return clusters_2
+
+
+def test_paper_1_case():
+    ground_truth = {frozenset([1, 4]), frozenset([2, 3, 5])}
+
+    tf = (True, False)
+    for flt, nwt, drc in product(tf, tf, tf):
+        part = paper_1_case(flt, nwt, drc)
+        assert part == ground_truth
+
+
+def test_paper_2_case():
+    ground_truth = {
+        frozenset(["education", "bs", "ms", "phd"]),
+        frozenset(["name", "home_address"]),
+        frozenset(["telephone", "home", "office", "no1", "no2"]),
+    }
+
+    tf = (True, False)
+    for ewt, drc in product(tf, tf):
+        part = paper_2_case(ewt, drc)
+        assert part == ground_truth
+
+
+def test_mandatory_tree():
+    not_a_tree = nx.complete_graph(4)
+
+    with pytest.raises(nx.NotATree):
+        nx.community.lukes_partitioning(not_a_tree, 5)
+
+
+def test_mandatory_integrality():
+    byte_block_size = 32
+
+    ex_1_broken = nx.DiGraph()
+
+    ex_1_broken.add_edge(1, 2, **{EWL: 3.2})
+    ex_1_broken.add_edge(1, 4, **{EWL: 2.4})
+    ex_1_broken.add_edge(2, 3, **{EWL: 4.0})
+    ex_1_broken.add_edge(2, 5, **{EWL: 6.3})
+
+    ex_1_broken.nodes[1][NWL] = 1.2  # !
+    ex_1_broken.nodes[2][NWL] = 1
+    ex_1_broken.nodes[3][NWL] = 1
+    ex_1_broken.nodes[4][NWL] = 1
+    ex_1_broken.nodes[5][NWL] = 2
+
+    with pytest.raises(TypeError):
+        nx.community.lukes_partitioning(
+            ex_1_broken, byte_block_size, node_weight=NWL, edge_weight=EWL
+        )
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_modularity_max.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_modularity_max.py
new file mode 100644
index 00000000..0121367f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_modularity_max.py
@@ -0,0 +1,340 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms.community import (
+    greedy_modularity_communities,
+    naive_greedy_modularity_communities,
+)
+
+
+@pytest.mark.parametrize(
+    "func", (greedy_modularity_communities, naive_greedy_modularity_communities)
+)
+def test_modularity_communities(func):
+    G = nx.karate_club_graph()
+    john_a = frozenset(
+        [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]
+    )
+    mr_hi = frozenset([0, 4, 5, 6, 10, 11, 16, 19])
+    overlap = frozenset([1, 2, 3, 7, 9, 12, 13, 17, 21])
+    expected = {john_a, overlap, mr_hi}
+    assert set(func(G, weight=None)) == expected
+
+
+@pytest.mark.parametrize(
+    "func", (greedy_modularity_communities, naive_greedy_modularity_communities)
+)
+def test_modularity_communities_categorical_labels(func):
+    # Using other than 0-starting contiguous integers as node-labels.
+    G = nx.Graph(
+        [
+            ("a", "b"),
+            ("a", "c"),
+            ("b", "c"),
+            ("b", "d"),  # inter-community edge
+            ("d", "e"),
+            ("d", "f"),
+            ("d", "g"),
+            ("f", "g"),
+            ("d", "e"),
+            ("f", "e"),
+        ]
+    )
+    expected = {frozenset({"f", "g", "e", "d"}), frozenset({"a", "b", "c"})}
+    assert set(func(G)) == expected
+
+
+def test_greedy_modularity_communities_components():
+    # Test for gh-5530
+    G = nx.Graph([(0, 1), (2, 3), (4, 5), (5, 6)])
+    # usual case with 3 components
+    assert greedy_modularity_communities(G) == [{4, 5, 6}, {0, 1}, {2, 3}]
+    # best_n can make the algorithm continue even when modularity goes down
+    assert greedy_modularity_communities(G, best_n=3) == [{4, 5, 6}, {0, 1}, {2, 3}]
+    assert greedy_modularity_communities(G, best_n=2) == [{0, 1, 4, 5, 6}, {2, 3}]
+    assert greedy_modularity_communities(G, best_n=1) == [{0, 1, 2, 3, 4, 5, 6}]
+
+
+def test_greedy_modularity_communities_relabeled():
+    # Test for gh-4966
+    G = nx.balanced_tree(2, 2)
+    mapping = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 7: "h"}
+    G = nx.relabel_nodes(G, mapping)
+    expected = [frozenset({"e", "d", "a", "b"}), frozenset({"c", "f", "g"})]
+    assert greedy_modularity_communities(G) == expected
+
+
+def test_greedy_modularity_communities_directed():
+    G = nx.DiGraph(
+        [
+            ("a", "b"),
+            ("a", "c"),
+            ("b", "c"),
+            ("b", "d"),  # inter-community edge
+            ("d", "e"),
+            ("d", "f"),
+            ("d", "g"),
+            ("f", "g"),
+            ("d", "e"),
+            ("f", "e"),
+        ]
+    )
+    expected = [frozenset({"f", "g", "e", "d"}), frozenset({"a", "b", "c"})]
+    assert greedy_modularity_communities(G) == expected
+
+    # with loops
+    G = nx.DiGraph()
+    G.add_edges_from(
+        [(1, 1), (1, 2), (1, 3), (2, 3), (1, 4), (4, 4), (5, 5), (4, 5), (4, 6), (5, 6)]
+    )
+    expected = [frozenset({1, 2, 3}), frozenset({4, 5, 6})]
+    assert greedy_modularity_communities(G) == expected
+
+
+@pytest.mark.parametrize(
+    "func", (greedy_modularity_communities, naive_greedy_modularity_communities)
+)
+def test_modularity_communities_weighted(func):
+    G = nx.balanced_tree(2, 3)
+    for a, b in G.edges:
+        if ((a == 1) or (a == 2)) and (b != 0):
+            G[a][b]["weight"] = 10.0
+        else:
+            G[a][b]["weight"] = 1.0
+
+    expected = [{0, 1, 3, 4, 7, 8, 9, 10}, {2, 5, 6, 11, 12, 13, 14}]
+
+    assert func(G, weight="weight") == expected
+    assert func(G, weight="weight", resolution=0.9) == expected
+    assert func(G, weight="weight", resolution=0.3) == expected
+    assert func(G, weight="weight", resolution=1.1) != expected
+
+
+def test_modularity_communities_floating_point():
+    # check for floating point error when used as key in the mapped_queue dict.
+    # Test for gh-4992 and gh-5000
+    G = nx.Graph()
+    G.add_weighted_edges_from(
+        [(0, 1, 12), (1, 4, 71), (2, 3, 15), (2, 4, 10), (3, 6, 13)]
+    )
+    expected = [{0, 1, 4}, {2, 3, 6}]
+    assert greedy_modularity_communities(G, weight="weight") == expected
+    assert (
+        greedy_modularity_communities(G, weight="weight", resolution=0.99) == expected
+    )
+
+
+def test_modularity_communities_directed_weighted():
+    G = nx.DiGraph()
+    G.add_weighted_edges_from(
+        [
+            (1, 2, 5),
+            (1, 3, 3),
+            (2, 3, 6),
+            (2, 6, 1),
+            (1, 4, 1),
+            (4, 5, 3),
+            (4, 6, 7),
+            (5, 6, 2),
+            (5, 7, 5),
+            (5, 8, 4),
+            (6, 8, 3),
+        ]
+    )
+    expected = [frozenset({4, 5, 6, 7, 8}), frozenset({1, 2, 3})]
+    assert greedy_modularity_communities(G, weight="weight") == expected
+
+    # A large weight of the edge (2, 6) causes 6 to change group, even if it shares
+    # only one connection with the new group and 3 with the old one.
+    G[2][6]["weight"] = 20
+    expected = [frozenset({1, 2, 3, 6}), frozenset({4, 5, 7, 8})]
+    assert greedy_modularity_communities(G, weight="weight") == expected
+
+
+def test_greedy_modularity_communities_multigraph():
+    G = nx.MultiGraph()
+    G.add_edges_from(
+        [
+            (1, 2),
+            (1, 2),
+            (1, 3),
+            (2, 3),
+            (1, 4),
+            (2, 4),
+            (4, 5),
+            (5, 6),
+            (5, 7),
+            (5, 7),
+            (6, 7),
+            (7, 8),
+            (5, 8),
+        ]
+    )
+    expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})]
+    assert greedy_modularity_communities(G) == expected
+
+    # Converting (4, 5) into a multi-edge causes node 4 to change group.
+    G.add_edge(4, 5)
+    expected = [frozenset({4, 5, 6, 7, 8}), frozenset({1, 2, 3})]
+    assert greedy_modularity_communities(G) == expected
+
+
+def test_greedy_modularity_communities_multigraph_weighted():
+    G = nx.MultiGraph()
+    G.add_weighted_edges_from(
+        [
+            (1, 2, 5),
+            (1, 2, 3),
+            (1, 3, 6),
+            (1, 3, 6),
+            (2, 3, 4),
+            (1, 4, 1),
+            (1, 4, 1),
+            (2, 4, 3),
+            (2, 4, 3),
+            (4, 5, 1),
+            (5, 6, 3),
+            (5, 6, 7),
+            (5, 6, 4),
+            (5, 7, 9),
+            (5, 7, 9),
+            (6, 7, 8),
+            (7, 8, 2),
+            (7, 8, 2),
+            (5, 8, 6),
+            (5, 8, 6),
+        ]
+    )
+    expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})]
+    assert greedy_modularity_communities(G, weight="weight") == expected
+
+    # Adding multi-edge (4, 5, 16) causes node 4 to change group.
+    G.add_edge(4, 5, weight=16)
+    expected = [frozenset({4, 5, 6, 7, 8}), frozenset({1, 2, 3})]
+    assert greedy_modularity_communities(G, weight="weight") == expected
+
+    # Increasing the weight of edge (1, 4) causes node 4 to return to the former group.
+    G[1][4][1]["weight"] = 3
+    expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})]
+    assert greedy_modularity_communities(G, weight="weight") == expected
+
+
+def test_greed_modularity_communities_multidigraph():
+    G = nx.MultiDiGraph()
+    G.add_edges_from(
+        [
+            (1, 2),
+            (1, 2),
+            (3, 1),
+            (2, 3),
+            (2, 3),
+            (3, 2),
+            (1, 4),
+            (2, 4),
+            (4, 2),
+            (4, 5),
+            (5, 6),
+            (5, 6),
+            (6, 5),
+            (5, 7),
+            (6, 7),
+            (7, 8),
+            (5, 8),
+            (8, 4),
+        ]
+    )
+    expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})]
+    assert greedy_modularity_communities(G, weight="weight") == expected
+
+
+def test_greed_modularity_communities_multidigraph_weighted():
+    G = nx.MultiDiGraph()
+    G.add_weighted_edges_from(
+        [
+            (1, 2, 5),
+            (1, 2, 3),
+            (3, 1, 6),
+            (1, 3, 6),
+            (3, 2, 4),
+            (1, 4, 2),
+            (1, 4, 5),
+            (2, 4, 3),
+            (3, 2, 8),
+            (4, 2, 3),
+            (4, 3, 5),
+            (4, 5, 2),
+            (5, 6, 3),
+            (5, 6, 7),
+            (6, 5, 4),
+            (5, 7, 9),
+            (5, 7, 9),
+            (7, 6, 8),
+            (7, 8, 2),
+            (8, 7, 2),
+            (5, 8, 6),
+            (5, 8, 6),
+        ]
+    )
+    expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})]
+    assert greedy_modularity_communities(G, weight="weight") == expected
+
+
+def test_resolution_parameter_impact():
+    G = nx.barbell_graph(5, 3)
+
+    gamma = 1
+    expected = [frozenset(range(5)), frozenset(range(8, 13)), frozenset(range(5, 8))]
+    assert greedy_modularity_communities(G, resolution=gamma) == expected
+    assert naive_greedy_modularity_communities(G, resolution=gamma) == expected
+
+    gamma = 2.5
+    expected = [{0, 1, 2, 3}, {9, 10, 11, 12}, {5, 6, 7}, {4}, {8}]
+    assert greedy_modularity_communities(G, resolution=gamma) == expected
+    assert naive_greedy_modularity_communities(G, resolution=gamma) == expected
+
+    gamma = 0.3
+    expected = [frozenset(range(8)), frozenset(range(8, 13))]
+    assert greedy_modularity_communities(G, resolution=gamma) == expected
+    assert naive_greedy_modularity_communities(G, resolution=gamma) == expected
+
+
+def test_cutoff_parameter():
+    G = nx.circular_ladder_graph(4)
+
+    # No aggregation:
+    expected = [{k} for k in range(8)]
+    assert greedy_modularity_communities(G, cutoff=8) == expected
+
+    # Aggregation to half order (number of nodes)
+    expected = [{k, k + 1} for k in range(0, 8, 2)]
+    assert greedy_modularity_communities(G, cutoff=4) == expected
+
+    # Default aggregation case (here, 2 communities emerge)
+    expected = [frozenset(range(4)), frozenset(range(4, 8))]
+    assert greedy_modularity_communities(G, cutoff=1) == expected
+
+
+def test_best_n():
+    G = nx.barbell_graph(5, 3)
+
+    # Same result as without enforcing cutoff:
+    best_n = 3
+    expected = [frozenset(range(5)), frozenset(range(8, 13)), frozenset(range(5, 8))]
+    assert greedy_modularity_communities(G, best_n=best_n) == expected
+
+    # One additional merging step:
+    best_n = 2
+    expected = [frozenset(range(8)), frozenset(range(8, 13))]
+    assert greedy_modularity_communities(G, best_n=best_n) == expected
+
+    # Two additional merging steps:
+    best_n = 1
+    expected = [frozenset(range(13))]
+    assert greedy_modularity_communities(G, best_n=best_n) == expected
+
+
+def test_greedy_modularity_communities_corner_cases():
+    G = nx.empty_graph()
+    assert nx.community.greedy_modularity_communities(G) == []
+    G.add_nodes_from(range(3))
+    assert nx.community.greedy_modularity_communities(G) == [{0}, {1}, {2}]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_quality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_quality.py
new file mode 100644
index 00000000..c502c7e3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_quality.py
@@ -0,0 +1,139 @@
+"""Unit tests for the :mod:`networkx.algorithms.community.quality`
+module.
+
+"""
+
+import pytest
+
+import networkx as nx
+from networkx import barbell_graph
+from networkx.algorithms.community import modularity, partition_quality
+from networkx.algorithms.community.quality import inter_community_edges
+
+
+class TestPerformance:
+    """Unit tests for the :func:`performance` function."""
+
+    def test_bad_partition(self):
+        """Tests that a poor partition has a low performance measure."""
+        G = barbell_graph(3, 0)
+        partition = [{0, 1, 4}, {2, 3, 5}]
+        assert 8 / 15 == pytest.approx(partition_quality(G, partition)[1], abs=1e-7)
+
+    def test_good_partition(self):
+        """Tests that a good partition has a high performance measure."""
+        G = barbell_graph(3, 0)
+        partition = [{0, 1, 2}, {3, 4, 5}]
+        assert 14 / 15 == pytest.approx(partition_quality(G, partition)[1], abs=1e-7)
+
+
+class TestCoverage:
+    """Unit tests for the :func:`coverage` function."""
+
+    def test_bad_partition(self):
+        """Tests that a poor partition has a low coverage measure."""
+        G = barbell_graph(3, 0)
+        partition = [{0, 1, 4}, {2, 3, 5}]
+        assert 3 / 7 == pytest.approx(partition_quality(G, partition)[0], abs=1e-7)
+
+    def test_good_partition(self):
+        """Tests that a good partition has a high coverage measure."""
+        G = barbell_graph(3, 0)
+        partition = [{0, 1, 2}, {3, 4, 5}]
+        assert 6 / 7 == pytest.approx(partition_quality(G, partition)[0], abs=1e-7)
+
+
+def test_modularity():
+    G = nx.barbell_graph(3, 0)
+    C = [{0, 1, 4}, {2, 3, 5}]
+    assert (-16 / (14**2)) == pytest.approx(modularity(G, C), abs=1e-7)
+    C = [{0, 1, 2}, {3, 4, 5}]
+    assert (35 * 2) / (14**2) == pytest.approx(modularity(G, C), abs=1e-7)
+
+    n = 1000
+    G = nx.erdos_renyi_graph(n, 0.09, seed=42, directed=True)
+    C = [set(range(n // 2)), set(range(n // 2, n))]
+    assert 0.00017154251389292754 == pytest.approx(modularity(G, C), abs=1e-7)
+
+    G = nx.margulis_gabber_galil_graph(10)
+    mid_value = G.number_of_nodes() // 2
+    nodes = list(G.nodes)
+    C = [set(nodes[:mid_value]), set(nodes[mid_value:])]
+    assert 0.13 == pytest.approx(modularity(G, C), abs=1e-7)
+
+    G = nx.DiGraph()
+    G.add_edges_from([(2, 1), (2, 3), (3, 4)])
+    C = [{1, 2}, {3, 4}]
+    assert 2 / 9 == pytest.approx(modularity(G, C), abs=1e-7)
+
+
+def test_modularity_resolution():
+    G = nx.barbell_graph(3, 0)
+    C = [{0, 1, 4}, {2, 3, 5}]
+    assert modularity(G, C) == pytest.approx(3 / 7 - 100 / 14**2)
+    gamma = 2
+    result = modularity(G, C, resolution=gamma)
+    assert result == pytest.approx(3 / 7 - gamma * 100 / 14**2)
+    gamma = 0.2
+    result = modularity(G, C, resolution=gamma)
+    assert result == pytest.approx(3 / 7 - gamma * 100 / 14**2)
+
+    C = [{0, 1, 2}, {3, 4, 5}]
+    assert modularity(G, C) == pytest.approx(6 / 7 - 98 / 14**2)
+    gamma = 2
+    result = modularity(G, C, resolution=gamma)
+    assert result == pytest.approx(6 / 7 - gamma * 98 / 14**2)
+    gamma = 0.2
+    result = modularity(G, C, resolution=gamma)
+    assert result == pytest.approx(6 / 7 - gamma * 98 / 14**2)
+
+    G = nx.barbell_graph(5, 3)
+    C = [frozenset(range(5)), frozenset(range(8, 13)), frozenset(range(5, 8))]
+    gamma = 1
+    result = modularity(G, C, resolution=gamma)
+    # This C is maximal for gamma=1:  modularity = 0.518229
+    assert result == pytest.approx((22 / 24) - gamma * (918 / (48**2)))
+    gamma = 2
+    result = modularity(G, C, resolution=gamma)
+    assert result == pytest.approx((22 / 24) - gamma * (918 / (48**2)))
+    gamma = 0.2
+    result = modularity(G, C, resolution=gamma)
+    assert result == pytest.approx((22 / 24) - gamma * (918 / (48**2)))
+
+    C = [{0, 1, 2, 3}, {9, 10, 11, 12}, {5, 6, 7}, {4}, {8}]
+    gamma = 1
+    result = modularity(G, C, resolution=gamma)
+    assert result == pytest.approx((14 / 24) - gamma * (598 / (48**2)))
+    gamma = 2.5
+    result = modularity(G, C, resolution=gamma)
+    # This C is maximal for gamma=2.5:  modularity = -0.06553819
+    assert result == pytest.approx((14 / 24) - gamma * (598 / (48**2)))
+    gamma = 0.2
+    result = modularity(G, C, resolution=gamma)
+    assert result == pytest.approx((14 / 24) - gamma * (598 / (48**2)))
+
+    C = [frozenset(range(8)), frozenset(range(8, 13))]
+    gamma = 1
+    result = modularity(G, C, resolution=gamma)
+    assert result == pytest.approx((23 / 24) - gamma * (1170 / (48**2)))
+    gamma = 2
+    result = modularity(G, C, resolution=gamma)
+    assert result == pytest.approx((23 / 24) - gamma * (1170 / (48**2)))
+    gamma = 0.3
+    result = modularity(G, C, resolution=gamma)
+    # This C is maximal for gamma=0.3:  modularity = 0.805990
+    assert result == pytest.approx((23 / 24) - gamma * (1170 / (48**2)))
+
+
+def test_inter_community_edges_with_digraphs():
+    G = nx.complete_graph(2, create_using=nx.DiGraph())
+    partition = [{0}, {1}]
+    assert inter_community_edges(G, partition) == 2
+
+    G = nx.complete_graph(10, create_using=nx.DiGraph())
+    partition = [{0}, {1, 2}, {3, 4, 5}, {6, 7, 8, 9}]
+    assert inter_community_edges(G, partition) == 70
+
+    G = nx.cycle_graph(4, create_using=nx.DiGraph())
+    partition = [{0, 1}, {2, 3}]
+    assert inter_community_edges(G, partition) == 2
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_utils.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_utils.py
new file mode 100644
index 00000000..ea019db9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_utils.py
@@ -0,0 +1,26 @@
+"""Unit tests for the :mod:`networkx.algorithms.community.utils` module."""
+
+import networkx as nx
+
+
+def test_is_partition():
+    G = nx.empty_graph(3)
+    assert nx.community.is_partition(G, [{0, 1}, {2}])
+    assert nx.community.is_partition(G, ({0, 1}, {2}))
+    assert nx.community.is_partition(G, ([0, 1], [2]))
+    assert nx.community.is_partition(G, [[0, 1], [2]])
+
+
+def test_not_covering():
+    G = nx.empty_graph(3)
+    assert not nx.community.is_partition(G, [{0}, {1}])
+
+
+def test_not_disjoint():
+    G = nx.empty_graph(3)
+    assert not nx.community.is_partition(G, [{0, 1}, {1, 2}])
+
+
+def test_not_node():
+    G = nx.empty_graph(3)
+    assert not nx.community.is_partition(G, [{0, 1}, {3}])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__init__.py
new file mode 100644
index 00000000..f9ae2cab
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__init__.py
@@ -0,0 +1,6 @@
+from .connected import *
+from .strongly_connected import *
+from .weakly_connected import *
+from .attracting import *
+from .biconnected import *
+from .semiconnected import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/attracting.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/attracting.py
new file mode 100644
index 00000000..3d77cd93
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/attracting.py
@@ -0,0 +1,115 @@
+"""Attracting components."""
+
+import networkx as nx
+from networkx.utils.decorators import not_implemented_for
+
+__all__ = [
+    "number_attracting_components",
+    "attracting_components",
+    "is_attracting_component",
+]
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def attracting_components(G):
+    """Generates the attracting components in `G`.
+
+    An attracting component in a directed graph `G` is a strongly connected
+    component with the property that a random walker on the graph will never
+    leave the component, once it enters the component.
+
+    The nodes in attracting components can also be thought of as recurrent
+    nodes.  If a random walker enters the attractor containing the node, then
+    the node will be visited infinitely often.
+
+    To obtain induced subgraphs on each component use:
+    ``(G.subgraph(c).copy() for c in attracting_components(G))``
+
+    Parameters
+    ----------
+    G : DiGraph, MultiDiGraph
+        The graph to be analyzed.
+
+    Returns
+    -------
+    attractors : generator of sets
+        A generator of sets of nodes, one for each attracting component of G.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the input graph is undirected.
+
+    See Also
+    --------
+    number_attracting_components
+    is_attracting_component
+
+    """
+    scc = list(nx.strongly_connected_components(G))
+    cG = nx.condensation(G, scc)
+    for n in cG:
+        if cG.out_degree(n) == 0:
+            yield scc[n]
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def number_attracting_components(G):
+    """Returns the number of attracting components in `G`.
+
+    Parameters
+    ----------
+    G : DiGraph, MultiDiGraph
+        The graph to be analyzed.
+
+    Returns
+    -------
+    n : int
+        The number of attracting components in G.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the input graph is undirected.
+
+    See Also
+    --------
+    attracting_components
+    is_attracting_component
+
+    """
+    return sum(1 for ac in attracting_components(G))
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def is_attracting_component(G):
+    """Returns True if `G` consists of a single attracting component.
+
+    Parameters
+    ----------
+    G : DiGraph, MultiDiGraph
+        The graph to be analyzed.
+
+    Returns
+    -------
+    attracting : bool
+        True if `G` has a single attracting component. Otherwise, False.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the input graph is undirected.
+
+    See Also
+    --------
+    attracting_components
+    number_attracting_components
+
+    """
+    ac = list(attracting_components(G))
+    if len(ac) == 1:
+        return len(ac[0]) == len(G)
+    return False
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/biconnected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/biconnected.py
new file mode 100644
index 00000000..fd0f3865
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/biconnected.py
@@ -0,0 +1,394 @@
+"""Biconnected components and articulation points."""
+
+from itertools import chain
+
+import networkx as nx
+from networkx.utils.decorators import not_implemented_for
+
+__all__ = [
+    "biconnected_components",
+    "biconnected_component_edges",
+    "is_biconnected",
+    "articulation_points",
+]
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def is_biconnected(G):
+    """Returns True if the graph is biconnected, False otherwise.
+
+    A graph is biconnected if, and only if, it cannot be disconnected by
+    removing only one node (and all edges incident on that node).  If
+    removing a node increases the number of disconnected components
+    in the graph, that node is called an articulation point, or cut
+    vertex.  A biconnected graph has no articulation points.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        An undirected graph.
+
+    Returns
+    -------
+    biconnected : bool
+        True if the graph is biconnected, False otherwise.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the input graph is not undirected.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4)
+    >>> print(nx.is_biconnected(G))
+    False
+    >>> G.add_edge(0, 3)
+    >>> print(nx.is_biconnected(G))
+    True
+
+    See Also
+    --------
+    biconnected_components
+    articulation_points
+    biconnected_component_edges
+    is_strongly_connected
+    is_weakly_connected
+    is_connected
+    is_semiconnected
+
+    Notes
+    -----
+    The algorithm to find articulation points and biconnected
+    components is implemented using a non-recursive depth-first-search
+    (DFS) that keeps track of the highest level that back edges reach
+    in the DFS tree.  A node `n` is an articulation point if, and only
+    if, there exists a subtree rooted at `n` such that there is no
+    back edge from any successor of `n` that links to a predecessor of
+    `n` in the DFS tree.  By keeping track of all the edges traversed
+    by the DFS we can obtain the biconnected components because all
+    edges of a bicomponent will be traversed consecutively between
+    articulation points.
+
+    References
+    ----------
+    .. [1] Hopcroft, J.; Tarjan, R. (1973).
+       "Efficient algorithms for graph manipulation".
+       Communications of the ACM 16: 372–378. doi:10.1145/362248.362272
+
+    """
+    bccs = biconnected_components(G)
+    try:
+        bcc = next(bccs)
+    except StopIteration:
+        # No bicomponents (empty graph?)
+        return False
+    try:
+        next(bccs)
+    except StopIteration:
+        # Only one bicomponent
+        return len(bcc) == len(G)
+    else:
+        # Multiple bicomponents
+        return False
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def biconnected_component_edges(G):
+    """Returns a generator of lists of edges, one list for each biconnected
+    component of the input graph.
+
+    Biconnected components are maximal subgraphs such that the removal of a
+    node (and all edges incident on that node) will not disconnect the
+    subgraph.  Note that nodes may be part of more than one biconnected
+    component.  Those nodes are articulation points, or cut vertices.
+    However, each edge belongs to one, and only one, biconnected component.
+
+    Notice that by convention a dyad is considered a biconnected component.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        An undirected graph.
+
+    Returns
+    -------
+    edges : generator of lists
+        Generator of lists of edges, one list for each bicomponent.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the input graph is not undirected.
+
+    Examples
+    --------
+    >>> G = nx.barbell_graph(4, 2)
+    >>> print(nx.is_biconnected(G))
+    False
+    >>> bicomponents_edges = list(nx.biconnected_component_edges(G))
+    >>> len(bicomponents_edges)
+    5
+    >>> G.add_edge(2, 8)
+    >>> print(nx.is_biconnected(G))
+    True
+    >>> bicomponents_edges = list(nx.biconnected_component_edges(G))
+    >>> len(bicomponents_edges)
+    1
+
+    See Also
+    --------
+    is_biconnected,
+    biconnected_components,
+    articulation_points,
+
+    Notes
+    -----
+    The algorithm to find articulation points and biconnected
+    components is implemented using a non-recursive depth-first-search
+    (DFS) that keeps track of the highest level that back edges reach
+    in the DFS tree.  A node `n` is an articulation point if, and only
+    if, there exists a subtree rooted at `n` such that there is no
+    back edge from any successor of `n` that links to a predecessor of
+    `n` in the DFS tree.  By keeping track of all the edges traversed
+    by the DFS we can obtain the biconnected components because all
+    edges of a bicomponent will be traversed consecutively between
+    articulation points.
+
+    References
+    ----------
+    .. [1] Hopcroft, J.; Tarjan, R. (1973).
+           "Efficient algorithms for graph manipulation".
+           Communications of the ACM 16: 372–378. doi:10.1145/362248.362272
+
+    """
+    yield from _biconnected_dfs(G, components=True)
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def biconnected_components(G):
+    """Returns a generator of sets of nodes, one set for each biconnected
+    component of the graph
+
+    Biconnected components are maximal subgraphs such that the removal of a
+    node (and all edges incident on that node) will not disconnect the
+    subgraph. Note that nodes may be part of more than one biconnected
+    component.  Those nodes are articulation points, or cut vertices.  The
+    removal of articulation points will increase the number of connected
+    components of the graph.
+
+    Notice that by convention a dyad is considered a biconnected component.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        An undirected graph.
+
+    Returns
+    -------
+    nodes : generator
+        Generator of sets of nodes, one set for each biconnected component.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the input graph is not undirected.
+
+    Examples
+    --------
+    >>> G = nx.lollipop_graph(5, 1)
+    >>> print(nx.is_biconnected(G))
+    False
+    >>> bicomponents = list(nx.biconnected_components(G))
+    >>> len(bicomponents)
+    2
+    >>> G.add_edge(0, 5)
+    >>> print(nx.is_biconnected(G))
+    True
+    >>> bicomponents = list(nx.biconnected_components(G))
+    >>> len(bicomponents)
+    1
+
+    You can generate a sorted list of biconnected components, largest
+    first, using sort.
+
+    >>> G.remove_edge(0, 5)
+    >>> [len(c) for c in sorted(nx.biconnected_components(G), key=len, reverse=True)]
+    [5, 2]
+
+    If you only want the largest connected component, it's more
+    efficient to use max instead of sort.
+
+    >>> Gc = max(nx.biconnected_components(G), key=len)
+
+    To create the components as subgraphs use:
+    ``(G.subgraph(c).copy() for c in biconnected_components(G))``
+
+    See Also
+    --------
+    is_biconnected
+    articulation_points
+    biconnected_component_edges
+    k_components : this function is a special case where k=2
+    bridge_components : similar to this function, but is defined using
+        2-edge-connectivity instead of 2-node-connectivity.
+
+    Notes
+    -----
+    The algorithm to find articulation points and biconnected
+    components is implemented using a non-recursive depth-first-search
+    (DFS) that keeps track of the highest level that back edges reach
+    in the DFS tree.  A node `n` is an articulation point if, and only
+    if, there exists a subtree rooted at `n` such that there is no
+    back edge from any successor of `n` that links to a predecessor of
+    `n` in the DFS tree.  By keeping track of all the edges traversed
+    by the DFS we can obtain the biconnected components because all
+    edges of a bicomponent will be traversed consecutively between
+    articulation points.
+
+    References
+    ----------
+    .. [1] Hopcroft, J.; Tarjan, R. (1973).
+           "Efficient algorithms for graph manipulation".
+           Communications of the ACM 16: 372–378. doi:10.1145/362248.362272
+
+    """
+    for comp in _biconnected_dfs(G, components=True):
+        yield set(chain.from_iterable(comp))
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def articulation_points(G):
+    """Yield the articulation points, or cut vertices, of a graph.
+
+    An articulation point or cut vertex is any node whose removal (along with
+    all its incident edges) increases the number of connected components of
+    a graph.  An undirected connected graph without articulation points is
+    biconnected. Articulation points belong to more than one biconnected
+    component of a graph.
+
+    Notice that by convention a dyad is considered a biconnected component.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        An undirected graph.
+
+    Yields
+    ------
+    node
+        An articulation point in the graph.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the input graph is not undirected.
+
+    Examples
+    --------
+
+    >>> G = nx.barbell_graph(4, 2)
+    >>> print(nx.is_biconnected(G))
+    False
+    >>> len(list(nx.articulation_points(G)))
+    4
+    >>> G.add_edge(2, 8)
+    >>> print(nx.is_biconnected(G))
+    True
+    >>> len(list(nx.articulation_points(G)))
+    0
+
+    See Also
+    --------
+    is_biconnected
+    biconnected_components
+    biconnected_component_edges
+
+    Notes
+    -----
+    The algorithm to find articulation points and biconnected
+    components is implemented using a non-recursive depth-first-search
+    (DFS) that keeps track of the highest level that back edges reach
+    in the DFS tree.  A node `n` is an articulation point if, and only
+    if, there exists a subtree rooted at `n` such that there is no
+    back edge from any successor of `n` that links to a predecessor of
+    `n` in the DFS tree.  By keeping track of all the edges traversed
+    by the DFS we can obtain the biconnected components because all
+    edges of a bicomponent will be traversed consecutively between
+    articulation points.
+
+    References
+    ----------
+    .. [1] Hopcroft, J.; Tarjan, R. (1973).
+           "Efficient algorithms for graph manipulation".
+           Communications of the ACM 16: 372–378. doi:10.1145/362248.362272
+
+    """
+    seen = set()
+    for articulation in _biconnected_dfs(G, components=False):
+        if articulation not in seen:
+            seen.add(articulation)
+            yield articulation
+
+
+@not_implemented_for("directed")
+def _biconnected_dfs(G, components=True):
+    # depth-first search algorithm to generate articulation points
+    # and biconnected components
+    visited = set()
+    for start in G:
+        if start in visited:
+            continue
+        discovery = {start: 0}  # time of first discovery of node during search
+        low = {start: 0}
+        root_children = 0
+        visited.add(start)
+        edge_stack = []
+        stack = [(start, start, iter(G[start]))]
+        edge_index = {}
+        while stack:
+            grandparent, parent, children = stack[-1]
+            try:
+                child = next(children)
+                if grandparent == child:
+                    continue
+                if child in visited:
+                    if discovery[child] <= discovery[parent]:  # back edge
+                        low[parent] = min(low[parent], discovery[child])
+                        if components:
+                            edge_index[parent, child] = len(edge_stack)
+                            edge_stack.append((parent, child))
+                else:
+                    low[child] = discovery[child] = len(discovery)
+                    visited.add(child)
+                    stack.append((parent, child, iter(G[child])))
+                    if components:
+                        edge_index[parent, child] = len(edge_stack)
+                        edge_stack.append((parent, child))
+
+            except StopIteration:
+                stack.pop()
+                if len(stack) > 1:
+                    if low[parent] >= discovery[grandparent]:
+                        if components:
+                            ind = edge_index[grandparent, parent]
+                            yield edge_stack[ind:]
+                            del edge_stack[ind:]
+
+                        else:
+                            yield grandparent
+                    low[grandparent] = min(low[parent], low[grandparent])
+                elif stack:  # length 1 so grandparent is root
+                    root_children += 1
+                    if components:
+                        ind = edge_index[grandparent, parent]
+                        yield edge_stack[ind:]
+                        del edge_stack[ind:]
+        if not components:
+            # root node is articulation point if it has more than 1 child
+            if root_children > 1:
+                yield start
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/connected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/connected.py
new file mode 100644
index 00000000..ebe0d8c1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/connected.py
@@ -0,0 +1,216 @@
+"""Connected components."""
+
+import networkx as nx
+from networkx.utils.decorators import not_implemented_for
+
+from ...utils import arbitrary_element
+
+__all__ = [
+    "number_connected_components",
+    "connected_components",
+    "is_connected",
+    "node_connected_component",
+]
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def connected_components(G):
+    """Generate connected components.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+       An undirected graph
+
+    Returns
+    -------
+    comp : generator of sets
+       A generator of sets of nodes, one for each component of G.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is directed.
+
+    Examples
+    --------
+    Generate a sorted list of connected components, largest first.
+
+    >>> G = nx.path_graph(4)
+    >>> nx.add_path(G, [10, 11, 12])
+    >>> [len(c) for c in sorted(nx.connected_components(G), key=len, reverse=True)]
+    [4, 3]
+
+    If you only want the largest connected component, it's more
+    efficient to use max instead of sort.
+
+    >>> largest_cc = max(nx.connected_components(G), key=len)
+
+    To create the induced subgraph of each component use:
+
+    >>> S = [G.subgraph(c).copy() for c in nx.connected_components(G)]
+
+    See Also
+    --------
+    strongly_connected_components
+    weakly_connected_components
+
+    Notes
+    -----
+    For undirected graphs only.
+
+    """
+    seen = set()
+    n = len(G)
+    for v in G:
+        if v not in seen:
+            c = _plain_bfs(G, n, v)
+            seen.update(c)
+            yield c
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def number_connected_components(G):
+    """Returns the number of connected components.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+       An undirected graph.
+
+    Returns
+    -------
+    n : integer
+       Number of connected components
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is directed.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (1, 2), (5, 6), (3, 4)])
+    >>> nx.number_connected_components(G)
+    3
+
+    See Also
+    --------
+    connected_components
+    number_weakly_connected_components
+    number_strongly_connected_components
+
+    Notes
+    -----
+    For undirected graphs only.
+
+    """
+    return sum(1 for cc in connected_components(G))
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def is_connected(G):
+    """Returns True if the graph is connected, False otherwise.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+       An undirected graph.
+
+    Returns
+    -------
+    connected : bool
+      True if the graph is connected, False otherwise.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is directed.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4)
+    >>> print(nx.is_connected(G))
+    True
+
+    See Also
+    --------
+    is_strongly_connected
+    is_weakly_connected
+    is_semiconnected
+    is_biconnected
+    connected_components
+
+    Notes
+    -----
+    For undirected graphs only.
+
+    """
+    n = len(G)
+    if n == 0:
+        raise nx.NetworkXPointlessConcept(
+            "Connectivity is undefined for the null graph."
+        )
+    return sum(1 for node in _plain_bfs(G, n, arbitrary_element(G))) == len(G)
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def node_connected_component(G, n):
+    """Returns the set of nodes in the component of graph containing node n.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+       An undirected graph.
+
+    n : node label
+       A node in G
+
+    Returns
+    -------
+    comp : set
+       A set of nodes in the component of G containing node n.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is directed.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (1, 2), (5, 6), (3, 4)])
+    >>> nx.node_connected_component(G, 0)  # nodes of component that contains node 0
+    {0, 1, 2}
+
+    See Also
+    --------
+    connected_components
+
+    Notes
+    -----
+    For undirected graphs only.
+
+    """
+    return _plain_bfs(G, len(G), n)
+
+
+def _plain_bfs(G, n, source):
+    """A fast BFS node generator"""
+    adj = G._adj
+    seen = {source}
+    nextlevel = [source]
+    while nextlevel:
+        thislevel = nextlevel
+        nextlevel = []
+        for v in thislevel:
+            for w in adj[v]:
+                if w not in seen:
+                    seen.add(w)
+                    nextlevel.append(w)
+            if len(seen) == n:
+                return seen
+    return seen
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/semiconnected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/semiconnected.py
new file mode 100644
index 00000000..9ca5d762
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/semiconnected.py
@@ -0,0 +1,71 @@
+"""Semiconnectedness."""
+
+import networkx as nx
+from networkx.utils import not_implemented_for, pairwise
+
+__all__ = ["is_semiconnected"]
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def is_semiconnected(G):
+    r"""Returns True if the graph is semiconnected, False otherwise.
+
+    A graph is semiconnected if and only if for any pair of nodes, either one
+    is reachable from the other, or they are mutually reachable.
+
+    This function uses a theorem that states that a DAG is semiconnected
+    if, for any topological sort, every consecutive pair of nodes $v_i$,
+    $v_{i+1}$ is joined by an edge $(v_i, v_{i+1})$. That allows us to check if a non-DAG `G` is
+    semiconnected by condensing the graph: i.e. constructing a new graph `H`
+    with nodes being the strongly connected components of `G`, and edges
+    (scc_1, scc_2) if there is an edge $(v_1, v_2)$ in `G` for some
+    $v_1 \in scc_1$ and $v_2 \in scc_2$. That results in a DAG, so we compute
+    the topological sort of `H` and check if for every $n$ there is an edge
+    $(scc_n, scc_{n+1})$.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A directed graph.
+
+    Returns
+    -------
+    semiconnected : bool
+        True if the graph is semiconnected, False otherwise.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the input graph is undirected.
+
+    NetworkXPointlessConcept
+        If the graph is empty.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4, create_using=nx.DiGraph())
+    >>> print(nx.is_semiconnected(G))
+    True
+    >>> G = nx.DiGraph([(1, 2), (3, 2)])
+    >>> print(nx.is_semiconnected(G))
+    False
+
+    See Also
+    --------
+    is_strongly_connected
+    is_weakly_connected
+    is_connected
+    is_biconnected
+    """
+    if len(G) == 0:
+        raise nx.NetworkXPointlessConcept(
+            "Connectivity is undefined for the null graph."
+        )
+
+    if not nx.is_weakly_connected(G):
+        return False
+
+    H = nx.condensation(G)
+
+    return all(H.has_edge(u, v) for u, v in pairwise(nx.topological_sort(H)))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/strongly_connected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/strongly_connected.py
new file mode 100644
index 00000000..393728ff
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/strongly_connected.py
@@ -0,0 +1,351 @@
+"""Strongly connected components."""
+
+import networkx as nx
+from networkx.utils.decorators import not_implemented_for
+
+__all__ = [
+    "number_strongly_connected_components",
+    "strongly_connected_components",
+    "is_strongly_connected",
+    "kosaraju_strongly_connected_components",
+    "condensation",
+]
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def strongly_connected_components(G):
+    """Generate nodes in strongly connected components of graph.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        A directed graph.
+
+    Returns
+    -------
+    comp : generator of sets
+        A generator of sets of nodes, one for each strongly connected
+        component of G.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is undirected.
+
+    Examples
+    --------
+    Generate a sorted list of strongly connected components, largest first.
+
+    >>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
+    >>> nx.add_cycle(G, [10, 11, 12])
+    >>> [
+    ...     len(c)
+    ...     for c in sorted(nx.strongly_connected_components(G), key=len, reverse=True)
+    ... ]
+    [4, 3]
+
+    If you only want the largest component, it's more efficient to
+    use max instead of sort.
+
+    >>> largest = max(nx.strongly_connected_components(G), key=len)
+
+    See Also
+    --------
+    connected_components
+    weakly_connected_components
+    kosaraju_strongly_connected_components
+
+    Notes
+    -----
+    Uses Tarjan's algorithm[1]_ with Nuutila's modifications[2]_.
+    Nonrecursive version of algorithm.
+
+    References
+    ----------
+    .. [1] Depth-first search and linear graph algorithms, R. Tarjan
+       SIAM Journal of Computing 1(2):146-160, (1972).
+
+    .. [2] On finding the strongly connected components in a directed graph.
+       E. Nuutila and E. Soisalon-Soinen
+       Information Processing Letters 49(1): 9-14, (1994).
+
+    """
+    preorder = {}
+    lowlink = {}
+    scc_found = set()
+    scc_queue = []
+    i = 0  # Preorder counter
+    neighbors = {v: iter(G[v]) for v in G}
+    for source in G:
+        if source not in scc_found:
+            queue = [source]
+            while queue:
+                v = queue[-1]
+                if v not in preorder:
+                    i = i + 1
+                    preorder[v] = i
+                done = True
+                for w in neighbors[v]:
+                    if w not in preorder:
+                        queue.append(w)
+                        done = False
+                        break
+                if done:
+                    lowlink[v] = preorder[v]
+                    for w in G[v]:
+                        if w not in scc_found:
+                            if preorder[w] > preorder[v]:
+                                lowlink[v] = min([lowlink[v], lowlink[w]])
+                            else:
+                                lowlink[v] = min([lowlink[v], preorder[w]])
+                    queue.pop()
+                    if lowlink[v] == preorder[v]:
+                        scc = {v}
+                        while scc_queue and preorder[scc_queue[-1]] > preorder[v]:
+                            k = scc_queue.pop()
+                            scc.add(k)
+                        scc_found.update(scc)
+                        yield scc
+                    else:
+                        scc_queue.append(v)
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def kosaraju_strongly_connected_components(G, source=None):
+    """Generate nodes in strongly connected components of graph.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        A directed graph.
+
+    Returns
+    -------
+    comp : generator of sets
+        A generator of sets of nodes, one for each strongly connected
+        component of G.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is undirected.
+
+    Examples
+    --------
+    Generate a sorted list of strongly connected components, largest first.
+
+    >>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
+    >>> nx.add_cycle(G, [10, 11, 12])
+    >>> [
+    ...     len(c)
+    ...     for c in sorted(
+    ...         nx.kosaraju_strongly_connected_components(G), key=len, reverse=True
+    ...     )
+    ... ]
+    [4, 3]
+
+    If you only want the largest component, it's more efficient to
+    use max instead of sort.
+
+    >>> largest = max(nx.kosaraju_strongly_connected_components(G), key=len)
+
+    See Also
+    --------
+    strongly_connected_components
+
+    Notes
+    -----
+    Uses Kosaraju's algorithm.
+
+    """
+    post = list(nx.dfs_postorder_nodes(G.reverse(copy=False), source=source))
+
+    seen = set()
+    while post:
+        r = post.pop()
+        if r in seen:
+            continue
+        c = nx.dfs_preorder_nodes(G, r)
+        new = {v for v in c if v not in seen}
+        seen.update(new)
+        yield new
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def number_strongly_connected_components(G):
+    """Returns number of strongly connected components in graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+       A directed graph.
+
+    Returns
+    -------
+    n : integer
+       Number of strongly connected components
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is undirected.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph(
+    ...     [(0, 1), (1, 2), (2, 0), (2, 3), (4, 5), (3, 4), (5, 6), (6, 3), (6, 7)]
+    ... )
+    >>> nx.number_strongly_connected_components(G)
+    3
+
+    See Also
+    --------
+    strongly_connected_components
+    number_connected_components
+    number_weakly_connected_components
+
+    Notes
+    -----
+    For directed graphs only.
+    """
+    return sum(1 for scc in strongly_connected_components(G))
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def is_strongly_connected(G):
+    """Test directed graph for strong connectivity.
+
+    A directed graph is strongly connected if and only if every vertex in
+    the graph is reachable from every other vertex.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+       A directed graph.
+
+    Returns
+    -------
+    connected : bool
+      True if the graph is strongly connected, False otherwise.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 0), (2, 4), (4, 2)])
+    >>> nx.is_strongly_connected(G)
+    True
+    >>> G.remove_edge(2, 3)
+    >>> nx.is_strongly_connected(G)
+    False
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is undirected.
+
+    See Also
+    --------
+    is_weakly_connected
+    is_semiconnected
+    is_connected
+    is_biconnected
+    strongly_connected_components
+
+    Notes
+    -----
+    For directed graphs only.
+    """
+    if len(G) == 0:
+        raise nx.NetworkXPointlessConcept(
+            """Connectivity is undefined for the null graph."""
+        )
+
+    return len(next(strongly_connected_components(G))) == len(G)
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable(returns_graph=True)
+def condensation(G, scc=None):
+    """Returns the condensation of G.
+
+    The condensation of G is the graph with each of the strongly connected
+    components contracted into a single node.
+
+    Parameters
+    ----------
+    G : NetworkX DiGraph
+       A directed graph.
+
+    scc:  list or generator (optional, default=None)
+       Strongly connected components. If provided, the elements in
+       `scc` must partition the nodes in `G`. If not provided, it will be
+       calculated as scc=nx.strongly_connected_components(G).
+
+    Returns
+    -------
+    C : NetworkX DiGraph
+       The condensation graph C of G.  The node labels are integers
+       corresponding to the index of the component in the list of
+       strongly connected components of G.  C has a graph attribute named
+       'mapping' with a dictionary mapping the original nodes to the
+       nodes in C to which they belong.  Each node in C also has a node
+       attribute 'members' with the set of original nodes in G that
+       form the SCC that the node in C represents.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is undirected.
+
+    Examples
+    --------
+    Contracting two sets of strongly connected nodes into two distinct SCCs
+    using the barbell graph.
+
+    >>> G = nx.barbell_graph(4, 0)
+    >>> G.remove_edge(3, 4)
+    >>> G = nx.DiGraph(G)
+    >>> H = nx.condensation(G)
+    >>> H.nodes.data()
+    NodeDataView({0: {'members': {0, 1, 2, 3}}, 1: {'members': {4, 5, 6, 7}}})
+    >>> H.graph["mapping"]
+    {0: 0, 1: 0, 2: 0, 3: 0, 4: 1, 5: 1, 6: 1, 7: 1}
+
+    Contracting a complete graph into one single SCC.
+
+    >>> G = nx.complete_graph(7, create_using=nx.DiGraph)
+    >>> H = nx.condensation(G)
+    >>> H.nodes
+    NodeView((0,))
+    >>> H.nodes.data()
+    NodeDataView({0: {'members': {0, 1, 2, 3, 4, 5, 6}}})
+
+    Notes
+    -----
+    After contracting all strongly connected components to a single node,
+    the resulting graph is a directed acyclic graph.
+
+    """
+    if scc is None:
+        scc = nx.strongly_connected_components(G)
+    mapping = {}
+    members = {}
+    C = nx.DiGraph()
+    # Add mapping dict as graph attribute
+    C.graph["mapping"] = mapping
+    if len(G) == 0:
+        return C
+    for i, component in enumerate(scc):
+        members[i] = component
+        mapping.update((n, i) for n in component)
+    number_of_components = i + 1
+    C.add_nodes_from(range(number_of_components))
+    C.add_edges_from(
+        (mapping[u], mapping[v]) for u, v in G.edges() if mapping[u] != mapping[v]
+    )
+    # Add a list of members (ie original nodes) to each node (ie scc) in C.
+    nx.set_node_attributes(C, members, "members")
+    return C
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_attracting.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_attracting.py
new file mode 100644
index 00000000..336c40dd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_attracting.py
@@ -0,0 +1,70 @@
+import pytest
+
+import networkx as nx
+from networkx import NetworkXNotImplemented
+
+
+class TestAttractingComponents:
+    @classmethod
+    def setup_class(cls):
+        cls.G1 = nx.DiGraph()
+        cls.G1.add_edges_from(
+            [
+                (5, 11),
+                (11, 2),
+                (11, 9),
+                (11, 10),
+                (7, 11),
+                (7, 8),
+                (8, 9),
+                (3, 8),
+                (3, 10),
+            ]
+        )
+        cls.G2 = nx.DiGraph()
+        cls.G2.add_edges_from([(0, 1), (0, 2), (1, 1), (1, 2), (2, 1)])
+
+        cls.G3 = nx.DiGraph()
+        cls.G3.add_edges_from([(0, 1), (1, 2), (2, 1), (0, 3), (3, 4), (4, 3)])
+
+        cls.G4 = nx.DiGraph()
+
+    def test_attracting_components(self):
+        ac = list(nx.attracting_components(self.G1))
+        assert {2} in ac
+        assert {9} in ac
+        assert {10} in ac
+
+        ac = list(nx.attracting_components(self.G2))
+        ac = [tuple(sorted(x)) for x in ac]
+        assert ac == [(1, 2)]
+
+        ac = list(nx.attracting_components(self.G3))
+        ac = [tuple(sorted(x)) for x in ac]
+        assert (1, 2) in ac
+        assert (3, 4) in ac
+        assert len(ac) == 2
+
+        ac = list(nx.attracting_components(self.G4))
+        assert ac == []
+
+    def test_number_attacting_components(self):
+        assert nx.number_attracting_components(self.G1) == 3
+        assert nx.number_attracting_components(self.G2) == 1
+        assert nx.number_attracting_components(self.G3) == 2
+        assert nx.number_attracting_components(self.G4) == 0
+
+    def test_is_attracting_component(self):
+        assert not nx.is_attracting_component(self.G1)
+        assert not nx.is_attracting_component(self.G2)
+        assert not nx.is_attracting_component(self.G3)
+        g2 = self.G3.subgraph([1, 2])
+        assert nx.is_attracting_component(g2)
+        assert not nx.is_attracting_component(self.G4)
+
+    def test_connected_raise(self):
+        G = nx.Graph()
+        with pytest.raises(NetworkXNotImplemented):
+            next(nx.attracting_components(G))
+        pytest.raises(NetworkXNotImplemented, nx.number_attracting_components, G)
+        pytest.raises(NetworkXNotImplemented, nx.is_attracting_component, G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_biconnected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_biconnected.py
new file mode 100644
index 00000000..19d2d883
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_biconnected.py
@@ -0,0 +1,248 @@
+import pytest
+
+import networkx as nx
+from networkx import NetworkXNotImplemented
+
+
+def assert_components_edges_equal(x, y):
+    sx = {frozenset(frozenset(e) for e in c) for c in x}
+    sy = {frozenset(frozenset(e) for e in c) for c in y}
+    assert sx == sy
+
+
+def assert_components_equal(x, y):
+    sx = {frozenset(c) for c in x}
+    sy = {frozenset(c) for c in y}
+    assert sx == sy
+
+
+def test_barbell():
+    G = nx.barbell_graph(8, 4)
+    nx.add_path(G, [7, 20, 21, 22])
+    nx.add_cycle(G, [22, 23, 24, 25])
+    pts = set(nx.articulation_points(G))
+    assert pts == {7, 8, 9, 10, 11, 12, 20, 21, 22}
+
+    answer = [
+        {12, 13, 14, 15, 16, 17, 18, 19},
+        {0, 1, 2, 3, 4, 5, 6, 7},
+        {22, 23, 24, 25},
+        {11, 12},
+        {10, 11},
+        {9, 10},
+        {8, 9},
+        {7, 8},
+        {21, 22},
+        {20, 21},
+        {7, 20},
+    ]
+    assert_components_equal(list(nx.biconnected_components(G)), answer)
+
+    G.add_edge(2, 17)
+    pts = set(nx.articulation_points(G))
+    assert pts == {7, 20, 21, 22}
+
+
+def test_articulation_points_repetitions():
+    G = nx.Graph()
+    G.add_edges_from([(0, 1), (1, 2), (1, 3)])
+    assert list(nx.articulation_points(G)) == [1]
+
+
+def test_articulation_points_cycle():
+    G = nx.cycle_graph(3)
+    nx.add_cycle(G, [1, 3, 4])
+    pts = set(nx.articulation_points(G))
+    assert pts == {1}
+
+
+def test_is_biconnected():
+    G = nx.cycle_graph(3)
+    assert nx.is_biconnected(G)
+    nx.add_cycle(G, [1, 3, 4])
+    assert not nx.is_biconnected(G)
+
+
+def test_empty_is_biconnected():
+    G = nx.empty_graph(5)
+    assert not nx.is_biconnected(G)
+    G.add_edge(0, 1)
+    assert not nx.is_biconnected(G)
+
+
+def test_biconnected_components_cycle():
+    G = nx.cycle_graph(3)
+    nx.add_cycle(G, [1, 3, 4])
+    answer = [{0, 1, 2}, {1, 3, 4}]
+    assert_components_equal(list(nx.biconnected_components(G)), answer)
+
+
+def test_biconnected_components1():
+    # graph example from
+    # https://web.archive.org/web/20121229123447/http://www.ibluemojo.com/school/articul_algorithm.html
+    edges = [
+        (0, 1),
+        (0, 5),
+        (0, 6),
+        (0, 14),
+        (1, 5),
+        (1, 6),
+        (1, 14),
+        (2, 4),
+        (2, 10),
+        (3, 4),
+        (3, 15),
+        (4, 6),
+        (4, 7),
+        (4, 10),
+        (5, 14),
+        (6, 14),
+        (7, 9),
+        (8, 9),
+        (8, 12),
+        (8, 13),
+        (10, 15),
+        (11, 12),
+        (11, 13),
+        (12, 13),
+    ]
+    G = nx.Graph(edges)
+    pts = set(nx.articulation_points(G))
+    assert pts == {4, 6, 7, 8, 9}
+    comps = list(nx.biconnected_component_edges(G))
+    answer = [
+        [(3, 4), (15, 3), (10, 15), (10, 4), (2, 10), (4, 2)],
+        [(13, 12), (13, 8), (11, 13), (12, 11), (8, 12)],
+        [(9, 8)],
+        [(7, 9)],
+        [(4, 7)],
+        [(6, 4)],
+        [(14, 0), (5, 1), (5, 0), (14, 5), (14, 1), (6, 14), (6, 0), (1, 6), (0, 1)],
+    ]
+    assert_components_edges_equal(comps, answer)
+
+
+def test_biconnected_components2():
+    G = nx.Graph()
+    nx.add_cycle(G, "ABC")
+    nx.add_cycle(G, "CDE")
+    nx.add_cycle(G, "FIJHG")
+    nx.add_cycle(G, "GIJ")
+    G.add_edge("E", "G")
+    comps = list(nx.biconnected_component_edges(G))
+    answer = [
+        [
+            tuple("GF"),
+            tuple("FI"),
+            tuple("IG"),
+            tuple("IJ"),
+            tuple("JG"),
+            tuple("JH"),
+            tuple("HG"),
+        ],
+        [tuple("EG")],
+        [tuple("CD"), tuple("DE"), tuple("CE")],
+        [tuple("AB"), tuple("BC"), tuple("AC")],
+    ]
+    assert_components_edges_equal(comps, answer)
+
+
+def test_biconnected_davis():
+    D = nx.davis_southern_women_graph()
+    bcc = list(nx.biconnected_components(D))[0]
+    assert set(D) == bcc  # All nodes in a giant bicomponent
+    # So no articulation points
+    assert len(list(nx.articulation_points(D))) == 0
+
+
+def test_biconnected_karate():
+    K = nx.karate_club_graph()
+    answer = [
+        {
+            0,
+            1,
+            2,
+            3,
+            7,
+            8,
+            9,
+            12,
+            13,
+            14,
+            15,
+            17,
+            18,
+            19,
+            20,
+            21,
+            22,
+            23,
+            24,
+            25,
+            26,
+            27,
+            28,
+            29,
+            30,
+            31,
+            32,
+            33,
+        },
+        {0, 4, 5, 6, 10, 16},
+        {0, 11},
+    ]
+    bcc = list(nx.biconnected_components(K))
+    assert_components_equal(bcc, answer)
+    assert set(nx.articulation_points(K)) == {0}
+
+
+def test_biconnected_eppstein():
+    # tests from http://www.ics.uci.edu/~eppstein/PADS/Biconnectivity.py
+    G1 = nx.Graph(
+        {
+            0: [1, 2, 5],
+            1: [0, 5],
+            2: [0, 3, 4],
+            3: [2, 4, 5, 6],
+            4: [2, 3, 5, 6],
+            5: [0, 1, 3, 4],
+            6: [3, 4],
+        }
+    )
+    G2 = nx.Graph(
+        {
+            0: [2, 5],
+            1: [3, 8],
+            2: [0, 3, 5],
+            3: [1, 2, 6, 8],
+            4: [7],
+            5: [0, 2],
+            6: [3, 8],
+            7: [4],
+            8: [1, 3, 6],
+        }
+    )
+    assert nx.is_biconnected(G1)
+    assert not nx.is_biconnected(G2)
+    answer_G2 = [{1, 3, 6, 8}, {0, 2, 5}, {2, 3}, {4, 7}]
+    bcc = list(nx.biconnected_components(G2))
+    assert_components_equal(bcc, answer_G2)
+
+
+def test_null_graph():
+    G = nx.Graph()
+    assert not nx.is_biconnected(G)
+    assert list(nx.biconnected_components(G)) == []
+    assert list(nx.biconnected_component_edges(G)) == []
+    assert list(nx.articulation_points(G)) == []
+
+
+def test_connected_raise():
+    DG = nx.DiGraph()
+    with pytest.raises(NetworkXNotImplemented):
+        next(nx.biconnected_components(DG))
+    with pytest.raises(NetworkXNotImplemented):
+        next(nx.biconnected_component_edges(DG))
+    with pytest.raises(NetworkXNotImplemented):
+        next(nx.articulation_points(DG))
+    pytest.raises(NetworkXNotImplemented, nx.is_biconnected, DG)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_connected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_connected.py
new file mode 100644
index 00000000..207214c1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_connected.py
@@ -0,0 +1,138 @@
+import pytest
+
+import networkx as nx
+from networkx import NetworkXNotImplemented
+from networkx import convert_node_labels_to_integers as cnlti
+from networkx.classes.tests import dispatch_interface
+
+
+class TestConnected:
+    @classmethod
+    def setup_class(cls):
+        G1 = cnlti(nx.grid_2d_graph(2, 2), first_label=0, ordering="sorted")
+        G2 = cnlti(nx.lollipop_graph(3, 3), first_label=4, ordering="sorted")
+        G3 = cnlti(nx.house_graph(), first_label=10, ordering="sorted")
+        cls.G = nx.union(G1, G2)
+        cls.G = nx.union(cls.G, G3)
+        cls.DG = nx.DiGraph([(1, 2), (1, 3), (2, 3)])
+        cls.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1)
+
+        cls.gc = []
+        G = nx.DiGraph()
+        G.add_edges_from(
+            [
+                (1, 2),
+                (2, 3),
+                (2, 8),
+                (3, 4),
+                (3, 7),
+                (4, 5),
+                (5, 3),
+                (5, 6),
+                (7, 4),
+                (7, 6),
+                (8, 1),
+                (8, 7),
+            ]
+        )
+        C = [[3, 4, 5, 7], [1, 2, 8], [6]]
+        cls.gc.append((G, C))
+
+        G = nx.DiGraph()
+        G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)])
+        C = [[2, 3, 4], [1]]
+        cls.gc.append((G, C))
+
+        G = nx.DiGraph()
+        G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)])
+        C = [[1, 2, 3]]
+        cls.gc.append((G, C))
+
+        # Eppstein's tests
+        G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []})
+        C = [[0], [1], [2], [3], [4], [5], [6]]
+        cls.gc.append((G, C))
+
+        G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]})
+        C = [[0, 1, 2], [3, 4]]
+        cls.gc.append((G, C))
+
+        G = nx.DiGraph()
+        C = []
+        cls.gc.append((G, C))
+
+    def test_connected_components(self):
+        # Test duplicated below
+        cc = nx.connected_components
+        G = self.G
+        C = {
+            frozenset([0, 1, 2, 3]),
+            frozenset([4, 5, 6, 7, 8, 9]),
+            frozenset([10, 11, 12, 13, 14]),
+        }
+        assert {frozenset(g) for g in cc(G)} == C
+
+    def test_connected_components_nx_loopback(self):
+        # This tests the @nx._dispatchable mechanism, treating nx.connected_components
+        # as if it were a re-implementation from another package.
+        # Test duplicated from above
+        cc = nx.connected_components
+        G = dispatch_interface.convert(self.G)
+        C = {
+            frozenset([0, 1, 2, 3]),
+            frozenset([4, 5, 6, 7, 8, 9]),
+            frozenset([10, 11, 12, 13, 14]),
+        }
+        if "nx_loopback" in nx.config.backends or not nx.config.backends:
+            # If `nx.config.backends` is empty, then `_dispatchable.__call__` takes a
+            # "fast path" and does not check graph inputs, so using an unknown backend
+            # here will still work.
+            assert {frozenset(g) for g in cc(G)} == C
+        else:
+            # This raises, because "nx_loopback" is not registered as a backend.
+            with pytest.raises(
+                ImportError, match="'nx_loopback' backend is not installed"
+            ):
+                cc(G)
+
+    def test_number_connected_components(self):
+        ncc = nx.number_connected_components
+        assert ncc(self.G) == 3
+
+    def test_number_connected_components2(self):
+        ncc = nx.number_connected_components
+        assert ncc(self.grid) == 1
+
+    def test_connected_components2(self):
+        cc = nx.connected_components
+        G = self.grid
+        C = {frozenset([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16])}
+        assert {frozenset(g) for g in cc(G)} == C
+
+    def test_node_connected_components(self):
+        ncc = nx.node_connected_component
+        G = self.grid
+        C = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}
+        assert ncc(G, 1) == C
+
+    def test_is_connected(self):
+        assert nx.is_connected(self.grid)
+        G = nx.Graph()
+        G.add_nodes_from([1, 2])
+        assert not nx.is_connected(G)
+
+    def test_connected_raise(self):
+        with pytest.raises(NetworkXNotImplemented):
+            next(nx.connected_components(self.DG))
+        pytest.raises(NetworkXNotImplemented, nx.number_connected_components, self.DG)
+        pytest.raises(NetworkXNotImplemented, nx.node_connected_component, self.DG, 1)
+        pytest.raises(NetworkXNotImplemented, nx.is_connected, self.DG)
+        pytest.raises(nx.NetworkXPointlessConcept, nx.is_connected, nx.Graph())
+
+    def test_connected_mutability(self):
+        G = self.grid
+        seen = set()
+        for component in nx.connected_components(G):
+            assert len(seen & component) == 0
+            seen.update(component)
+            component.clear()
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_semiconnected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_semiconnected.py
new file mode 100644
index 00000000..6376bbfb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_semiconnected.py
@@ -0,0 +1,55 @@
+from itertools import chain
+
+import pytest
+
+import networkx as nx
+
+
+class TestIsSemiconnected:
+    def test_undirected(self):
+        pytest.raises(nx.NetworkXNotImplemented, nx.is_semiconnected, nx.Graph())
+        pytest.raises(nx.NetworkXNotImplemented, nx.is_semiconnected, nx.MultiGraph())
+
+    def test_empty(self):
+        pytest.raises(nx.NetworkXPointlessConcept, nx.is_semiconnected, nx.DiGraph())
+        pytest.raises(
+            nx.NetworkXPointlessConcept, nx.is_semiconnected, nx.MultiDiGraph()
+        )
+
+    def test_single_node_graph(self):
+        G = nx.DiGraph()
+        G.add_node(0)
+        assert nx.is_semiconnected(G)
+
+    def test_path(self):
+        G = nx.path_graph(100, create_using=nx.DiGraph())
+        assert nx.is_semiconnected(G)
+        G.add_edge(100, 99)
+        assert not nx.is_semiconnected(G)
+
+    def test_cycle(self):
+        G = nx.cycle_graph(100, create_using=nx.DiGraph())
+        assert nx.is_semiconnected(G)
+        G = nx.path_graph(100, create_using=nx.DiGraph())
+        G.add_edge(0, 99)
+        assert nx.is_semiconnected(G)
+
+    def test_tree(self):
+        G = nx.DiGraph()
+        G.add_edges_from(
+            chain.from_iterable([(i, 2 * i + 1), (i, 2 * i + 2)] for i in range(100))
+        )
+        assert not nx.is_semiconnected(G)
+
+    def test_dumbbell(self):
+        G = nx.cycle_graph(100, create_using=nx.DiGraph())
+        G.add_edges_from((i + 100, (i + 1) % 100 + 100) for i in range(100))
+        assert not nx.is_semiconnected(G)  # G is disconnected.
+        G.add_edge(100, 99)
+        assert nx.is_semiconnected(G)
+
+    def test_alternating_path(self):
+        G = nx.DiGraph(
+            chain.from_iterable([(i, i - 1), (i, i + 1)] for i in range(0, 100, 2))
+        )
+        assert not nx.is_semiconnected(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_strongly_connected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_strongly_connected.py
new file mode 100644
index 00000000..27f40988
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_strongly_connected.py
@@ -0,0 +1,193 @@
+import pytest
+
+import networkx as nx
+from networkx import NetworkXNotImplemented
+
+
+class TestStronglyConnected:
+    @classmethod
+    def setup_class(cls):
+        cls.gc = []
+        G = nx.DiGraph()
+        G.add_edges_from(
+            [
+                (1, 2),
+                (2, 3),
+                (2, 8),
+                (3, 4),
+                (3, 7),
+                (4, 5),
+                (5, 3),
+                (5, 6),
+                (7, 4),
+                (7, 6),
+                (8, 1),
+                (8, 7),
+            ]
+        )
+        C = {frozenset([3, 4, 5, 7]), frozenset([1, 2, 8]), frozenset([6])}
+        cls.gc.append((G, C))
+
+        G = nx.DiGraph()
+        G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)])
+        C = {frozenset([2, 3, 4]), frozenset([1])}
+        cls.gc.append((G, C))
+
+        G = nx.DiGraph()
+        G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)])
+        C = {frozenset([1, 2, 3])}
+        cls.gc.append((G, C))
+
+        # Eppstein's tests
+        G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []})
+        C = {
+            frozenset([0]),
+            frozenset([1]),
+            frozenset([2]),
+            frozenset([3]),
+            frozenset([4]),
+            frozenset([5]),
+            frozenset([6]),
+        }
+        cls.gc.append((G, C))
+
+        G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]})
+        C = {frozenset([0, 1, 2]), frozenset([3, 4])}
+        cls.gc.append((G, C))
+
+    def test_tarjan(self):
+        scc = nx.strongly_connected_components
+        for G, C in self.gc:
+            assert {frozenset(g) for g in scc(G)} == C
+
+    def test_kosaraju(self):
+        scc = nx.kosaraju_strongly_connected_components
+        for G, C in self.gc:
+            assert {frozenset(g) for g in scc(G)} == C
+
+    def test_number_strongly_connected_components(self):
+        ncc = nx.number_strongly_connected_components
+        for G, C in self.gc:
+            assert ncc(G) == len(C)
+
+    def test_is_strongly_connected(self):
+        for G, C in self.gc:
+            if len(C) == 1:
+                assert nx.is_strongly_connected(G)
+            else:
+                assert not nx.is_strongly_connected(G)
+
+    def test_contract_scc1(self):
+        G = nx.DiGraph()
+        G.add_edges_from(
+            [
+                (1, 2),
+                (2, 3),
+                (2, 11),
+                (2, 12),
+                (3, 4),
+                (4, 3),
+                (4, 5),
+                (5, 6),
+                (6, 5),
+                (6, 7),
+                (7, 8),
+                (7, 9),
+                (7, 10),
+                (8, 9),
+                (9, 7),
+                (10, 6),
+                (11, 2),
+                (11, 4),
+                (11, 6),
+                (12, 6),
+                (12, 11),
+            ]
+        )
+        scc = list(nx.strongly_connected_components(G))
+        cG = nx.condensation(G, scc)
+        # DAG
+        assert nx.is_directed_acyclic_graph(cG)
+        # nodes
+        assert sorted(cG.nodes()) == [0, 1, 2, 3]
+        # edges
+        mapping = {}
+        for i, component in enumerate(scc):
+            for n in component:
+                mapping[n] = i
+        edge = (mapping[2], mapping[3])
+        assert cG.has_edge(*edge)
+        edge = (mapping[2], mapping[5])
+        assert cG.has_edge(*edge)
+        edge = (mapping[3], mapping[5])
+        assert cG.has_edge(*edge)
+
+    def test_contract_scc_isolate(self):
+        # Bug found and fixed in [1687].
+        G = nx.DiGraph()
+        G.add_edge(1, 2)
+        G.add_edge(2, 1)
+        scc = list(nx.strongly_connected_components(G))
+        cG = nx.condensation(G, scc)
+        assert list(cG.nodes()) == [0]
+        assert list(cG.edges()) == []
+
+    def test_contract_scc_edge(self):
+        G = nx.DiGraph()
+        G.add_edge(1, 2)
+        G.add_edge(2, 1)
+        G.add_edge(2, 3)
+        G.add_edge(3, 4)
+        G.add_edge(4, 3)
+        scc = list(nx.strongly_connected_components(G))
+        cG = nx.condensation(G, scc)
+        assert sorted(cG.nodes()) == [0, 1]
+        if 1 in scc[0]:
+            edge = (0, 1)
+        else:
+            edge = (1, 0)
+        assert list(cG.edges()) == [edge]
+
+    def test_condensation_mapping_and_members(self):
+        G, C = self.gc[1]
+        C = sorted(C, key=len, reverse=True)
+        cG = nx.condensation(G)
+        mapping = cG.graph["mapping"]
+        assert all(n in G for n in mapping)
+        assert all(0 == cN for n, cN in mapping.items() if n in C[0])
+        assert all(1 == cN for n, cN in mapping.items() if n in C[1])
+        for n, d in cG.nodes(data=True):
+            assert set(C[n]) == cG.nodes[n]["members"]
+
+    def test_null_graph(self):
+        G = nx.DiGraph()
+        assert list(nx.strongly_connected_components(G)) == []
+        assert list(nx.kosaraju_strongly_connected_components(G)) == []
+        assert len(nx.condensation(G)) == 0
+        pytest.raises(
+            nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph()
+        )
+
+    def test_connected_raise(self):
+        G = nx.Graph()
+        with pytest.raises(NetworkXNotImplemented):
+            next(nx.strongly_connected_components(G))
+        with pytest.raises(NetworkXNotImplemented):
+            next(nx.kosaraju_strongly_connected_components(G))
+        pytest.raises(NetworkXNotImplemented, nx.is_strongly_connected, G)
+        pytest.raises(NetworkXNotImplemented, nx.condensation, G)
+
+    strong_cc_methods = (
+        nx.strongly_connected_components,
+        nx.kosaraju_strongly_connected_components,
+    )
+
+    @pytest.mark.parametrize("get_components", strong_cc_methods)
+    def test_connected_mutability(self, get_components):
+        DG = nx.path_graph(5, create_using=nx.DiGraph)
+        G = nx.disjoint_union(DG, DG)
+        seen = set()
+        for component in get_components(G):
+            assert len(seen & component) == 0
+            seen.update(component)
+            component.clear()
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_weakly_connected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_weakly_connected.py
new file mode 100644
index 00000000..f0144789
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_weakly_connected.py
@@ -0,0 +1,96 @@
+import pytest
+
+import networkx as nx
+from networkx import NetworkXNotImplemented
+
+
+class TestWeaklyConnected:
+    @classmethod
+    def setup_class(cls):
+        cls.gc = []
+        G = nx.DiGraph()
+        G.add_edges_from(
+            [
+                (1, 2),
+                (2, 3),
+                (2, 8),
+                (3, 4),
+                (3, 7),
+                (4, 5),
+                (5, 3),
+                (5, 6),
+                (7, 4),
+                (7, 6),
+                (8, 1),
+                (8, 7),
+            ]
+        )
+        C = [[3, 4, 5, 7], [1, 2, 8], [6]]
+        cls.gc.append((G, C))
+
+        G = nx.DiGraph()
+        G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)])
+        C = [[2, 3, 4], [1]]
+        cls.gc.append((G, C))
+
+        G = nx.DiGraph()
+        G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)])
+        C = [[1, 2, 3]]
+        cls.gc.append((G, C))
+
+        # Eppstein's tests
+        G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []})
+        C = [[0], [1], [2], [3], [4], [5], [6]]
+        cls.gc.append((G, C))
+
+        G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]})
+        C = [[0, 1, 2], [3, 4]]
+        cls.gc.append((G, C))
+
+    def test_weakly_connected_components(self):
+        for G, C in self.gc:
+            U = G.to_undirected()
+            w = {frozenset(g) for g in nx.weakly_connected_components(G)}
+            c = {frozenset(g) for g in nx.connected_components(U)}
+            assert w == c
+
+    def test_number_weakly_connected_components(self):
+        for G, C in self.gc:
+            U = G.to_undirected()
+            w = nx.number_weakly_connected_components(G)
+            c = nx.number_connected_components(U)
+            assert w == c
+
+    def test_is_weakly_connected(self):
+        for G, C in self.gc:
+            U = G.to_undirected()
+            assert nx.is_weakly_connected(G) == nx.is_connected(U)
+
+    def test_null_graph(self):
+        G = nx.DiGraph()
+        assert list(nx.weakly_connected_components(G)) == []
+        assert nx.number_weakly_connected_components(G) == 0
+        with pytest.raises(nx.NetworkXPointlessConcept):
+            next(nx.is_weakly_connected(G))
+
+    def test_connected_raise(self):
+        G = nx.Graph()
+        with pytest.raises(NetworkXNotImplemented):
+            next(nx.weakly_connected_components(G))
+        pytest.raises(NetworkXNotImplemented, nx.number_weakly_connected_components, G)
+        pytest.raises(NetworkXNotImplemented, nx.is_weakly_connected, G)
+
+    def test_connected_mutability(self):
+        DG = nx.path_graph(5, create_using=nx.DiGraph)
+        G = nx.disjoint_union(DG, DG)
+        seen = set()
+        for component in nx.weakly_connected_components(G):
+            assert len(seen & component) == 0
+            seen.update(component)
+            component.clear()
+
+
+def test_is_weakly_connected_empty_graph_raises():
+    G = nx.DiGraph()
+    with pytest.raises(nx.NetworkXPointlessConcept, match="Connectivity is undefined"):
+        nx.is_weakly_connected(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/weakly_connected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/weakly_connected.py
new file mode 100644
index 00000000..ecfac50a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/weakly_connected.py
@@ -0,0 +1,197 @@
+"""Weakly connected components."""
+
+import networkx as nx
+from networkx.utils.decorators import not_implemented_for
+
+__all__ = [
+    "number_weakly_connected_components",
+    "weakly_connected_components",
+    "is_weakly_connected",
+]
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def weakly_connected_components(G):
+    """Generate weakly connected components of G.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A directed graph
+
+    Returns
+    -------
+    comp : generator of sets
+        A generator of sets of nodes, one for each weakly connected
+        component of G.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is undirected.
+
+    Examples
+    --------
+    Generate a sorted list of weakly connected components, largest first.
+
+    >>> G = nx.path_graph(4, create_using=nx.DiGraph())
+    >>> nx.add_path(G, [10, 11, 12])
+    >>> [
+    ...     len(c)
+    ...     for c in sorted(nx.weakly_connected_components(G), key=len, reverse=True)
+    ... ]
+    [4, 3]
+
+    If you only want the largest component, it's more efficient to
+    use max instead of sort:
+
+    >>> largest_cc = max(nx.weakly_connected_components(G), key=len)
+
+    See Also
+    --------
+    connected_components
+    strongly_connected_components
+
+    Notes
+    -----
+    For directed graphs only.
+
+    """
+    seen = set()
+    n = len(G)  # must be outside the loop to avoid performance hit with graph views
+    for v in G:
+        if v not in seen:
+            c = set(_plain_bfs(G, n, v))
+            seen.update(c)
+            yield c
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def number_weakly_connected_components(G):
+    """Returns the number of weakly connected components in G.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A directed graph.
+
+    Returns
+    -------
+    n : integer
+        Number of weakly connected components
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is undirected.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(0, 1), (2, 1), (3, 4)])
+    >>> nx.number_weakly_connected_components(G)
+    2
+
+    See Also
+    --------
+    weakly_connected_components
+    number_connected_components
+    number_strongly_connected_components
+
+    Notes
+    -----
+    For directed graphs only.
+
+    """
+    return sum(1 for wcc in weakly_connected_components(G))
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def is_weakly_connected(G):
+    """Test directed graph for weak connectivity.
+
+    A directed graph is weakly connected if and only if the graph
+    is connected when the direction of the edge between nodes is ignored.
+
+    Note that if a graph is strongly connected (i.e. the graph is connected
+    even when we account for directionality), it is by definition weakly
+    connected as well.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        A directed graph.
+
+    Returns
+    -------
+    connected : bool
+        True if the graph is weakly connected, False otherwise.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is undirected.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(0, 1), (2, 1)])
+    >>> G.add_node(3)
+    >>> nx.is_weakly_connected(G)  # node 3 is not connected to the graph
+    False
+    >>> G.add_edge(2, 3)
+    >>> nx.is_weakly_connected(G)
+    True
+
+    See Also
+    --------
+    is_strongly_connected
+    is_semiconnected
+    is_connected
+    is_biconnected
+    weakly_connected_components
+
+    Notes
+    -----
+    For directed graphs only.
+
+    """
+    if len(G) == 0:
+        raise nx.NetworkXPointlessConcept(
+            """Connectivity is undefined for the null graph."""
+        )
+
+    return len(next(weakly_connected_components(G))) == len(G)
+
+
+def _plain_bfs(G, n, source):
+    """A fast BFS node generator
+
+    The direction of the edge between nodes is ignored.
+
+    For directed graphs only.
+
+    """
+    Gsucc = G._succ
+    Gpred = G._pred
+    seen = {source}
+    nextlevel = [source]
+
+    yield source
+    while nextlevel:
+        thislevel = nextlevel
+        nextlevel = []
+        for v in thislevel:
+            for w in Gsucc[v]:
+                if w not in seen:
+                    seen.add(w)
+                    nextlevel.append(w)
+                    yield w
+            for w in Gpred[v]:
+                if w not in seen:
+                    seen.add(w)
+                    nextlevel.append(w)
+                    yield w
+            if len(seen) == n:
+                return
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__init__.py
new file mode 100644
index 00000000..d08a3606
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__init__.py
@@ -0,0 +1,11 @@
+"""Connectivity and cut algorithms"""
+
+from .connectivity import *
+from .cuts import *
+from .edge_augmentation import *
+from .edge_kcomponents import *
+from .disjoint_paths import *
+from .kcomponents import *
+from .kcutsets import *
+from .stoerwagner import *
+from .utils import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/connectivity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/connectivity.py
new file mode 100644
index 00000000..2f85c865
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/connectivity.py
@@ -0,0 +1,811 @@
+"""
+Flow based connectivity algorithms
+"""
+
+import itertools
+from operator import itemgetter
+
+import networkx as nx
+
+# Define the default maximum flow function to use in all flow based
+# connectivity algorithms.
+from networkx.algorithms.flow import (
+    boykov_kolmogorov,
+    build_residual_network,
+    dinitz,
+    edmonds_karp,
+    preflow_push,
+    shortest_augmenting_path,
+)
+
+default_flow_func = edmonds_karp
+
+from .utils import build_auxiliary_edge_connectivity, build_auxiliary_node_connectivity
+
+__all__ = [
+    "average_node_connectivity",
+    "local_node_connectivity",
+    "node_connectivity",
+    "local_edge_connectivity",
+    "edge_connectivity",
+    "all_pairs_node_connectivity",
+]
+
+
+@nx._dispatchable(graphs={"G": 0, "auxiliary?": 4}, preserve_graph_attrs={"auxiliary"})
+def local_node_connectivity(
+    G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None
+):
+    r"""Computes local node connectivity for nodes s and t.
+
+    Local node connectivity for two non adjacent nodes s and t is the
+    minimum number of nodes that must be removed (along with their incident
+    edges) to disconnect them.
+
+    This is a flow based implementation of node connectivity. We compute the
+    maximum flow on an auxiliary digraph built from the original input
+    graph (see below for details).
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph
+
+    s : node
+        Source node
+
+    t : node
+        Target node
+
+    flow_func : function
+        A function for computing the maximum flow among a pair of nodes.
+        The function has to accept at least three parameters: a Digraph,
+        a source node, and a target node. And return a residual network
+        that follows NetworkX conventions (see :meth:`maximum_flow` for
+        details). If flow_func is None, the default maximum flow function
+        (:meth:`edmonds_karp`) is used. See below for details. The choice
+        of the default function may change from version to version and
+        should not be relied on. Default value: None.
+
+    auxiliary : NetworkX DiGraph
+        Auxiliary digraph to compute flow based node connectivity. It has
+        to have a graph attribute called mapping with a dictionary mapping
+        node names in G and in the auxiliary digraph. If provided
+        it will be reused instead of recreated. Default value: None.
+
+    residual : NetworkX DiGraph
+        Residual network to compute maximum flow. If provided it will be
+        reused instead of recreated. Default value: None.
+
+    cutoff : integer, float, or None (default: None)
+        If specified, the maximum flow algorithm will terminate when the
+        flow value reaches or exceeds the cutoff. This only works for flows
+        that support the cutoff parameter (most do) and is ignored otherwise.
+
+    Returns
+    -------
+    K : integer
+        local node connectivity for nodes s and t
+
+    Examples
+    --------
+    This function is not imported in the base NetworkX namespace, so you
+    have to explicitly import it from the connectivity package:
+
+    >>> from networkx.algorithms.connectivity import local_node_connectivity
+
+    We use in this example the platonic icosahedral graph, which has node
+    connectivity 5.
+
+    >>> G = nx.icosahedral_graph()
+    >>> local_node_connectivity(G, 0, 6)
+    5
+
+    If you need to compute local connectivity on several pairs of
+    nodes in the same graph, it is recommended that you reuse the
+    data structures that NetworkX uses in the computation: the
+    auxiliary digraph for node connectivity, and the residual
+    network for the underlying maximum flow computation.
+
+    Example of how to compute local node connectivity among
+    all pairs of nodes of the platonic icosahedral graph reusing
+    the data structures.
+
+    >>> import itertools
+    >>> # You also have to explicitly import the function for
+    >>> # building the auxiliary digraph from the connectivity package
+    >>> from networkx.algorithms.connectivity import build_auxiliary_node_connectivity
+    >>> H = build_auxiliary_node_connectivity(G)
+    >>> # And the function for building the residual network from the
+    >>> # flow package
+    >>> from networkx.algorithms.flow import build_residual_network
+    >>> # Note that the auxiliary digraph has an edge attribute named capacity
+    >>> R = build_residual_network(H, "capacity")
+    >>> result = dict.fromkeys(G, dict())
+    >>> # Reuse the auxiliary digraph and the residual network by passing them
+    >>> # as parameters
+    >>> for u, v in itertools.combinations(G, 2):
+    ...     k = local_node_connectivity(G, u, v, auxiliary=H, residual=R)
+    ...     result[u][v] = k
+    >>> all(result[u][v] == 5 for u, v in itertools.combinations(G, 2))
+    True
+
+    You can also use alternative flow algorithms for computing node
+    connectivity. For instance, in dense networks the algorithm
+    :meth:`shortest_augmenting_path` will usually perform better than
+    the default :meth:`edmonds_karp` which is faster for sparse
+    networks with highly skewed degree distributions. Alternative flow
+    functions have to be explicitly imported from the flow package.
+
+    >>> from networkx.algorithms.flow import shortest_augmenting_path
+    >>> local_node_connectivity(G, 0, 6, flow_func=shortest_augmenting_path)
+    5
+
+    Notes
+    -----
+    This is a flow based implementation of node connectivity. We compute the
+    maximum flow using, by default, the :meth:`edmonds_karp` algorithm (see:
+    :meth:`maximum_flow`) on an auxiliary digraph built from the original
+    input graph:
+
+    For an undirected graph G having `n` nodes and `m` edges we derive a
+    directed graph H with `2n` nodes and `2m+n` arcs by replacing each
+    original node `v` with two nodes `v_A`, `v_B` linked by an (internal)
+    arc in H. Then for each edge (`u`, `v`) in G we add two arcs
+    (`u_B`, `v_A`) and (`v_B`, `u_A`) in H. Finally we set the attribute
+    capacity = 1 for each arc in H [1]_ .
+
+    For a directed graph G having `n` nodes and `m` arcs we derive a
+    directed graph H with `2n` nodes and `m+n` arcs by replacing each
+    original node `v` with two nodes `v_A`, `v_B` linked by an (internal)
+    arc (`v_A`, `v_B`) in H. Then for each arc (`u`, `v`) in G we add one arc
+    (`u_B`, `v_A`) in H. Finally we set the attribute capacity = 1 for
+    each arc in H.
+
+    This is equal to the local node connectivity because the value of
+    a maximum s-t-flow is equal to the capacity of a minimum s-t-cut.
+
+    See also
+    --------
+    :meth:`local_edge_connectivity`
+    :meth:`node_connectivity`
+    :meth:`minimum_node_cut`
+    :meth:`maximum_flow`
+    :meth:`edmonds_karp`
+    :meth:`preflow_push`
+    :meth:`shortest_augmenting_path`
+
+    References
+    ----------
+    .. [1] Kammer, Frank and Hanjo Taubig. Graph Connectivity. in Brandes and
+        Erlebach, 'Network Analysis: Methodological Foundations', Lecture
+        Notes in Computer Science, Volume 3418, Springer-Verlag, 2005.
+        http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf
+
+    """
+    if flow_func is None:
+        flow_func = default_flow_func
+
+    if auxiliary is None:
+        H = build_auxiliary_node_connectivity(G)
+    else:
+        H = auxiliary
+
+    mapping = H.graph.get("mapping", None)
+    if mapping is None:
+        raise nx.NetworkXError("Invalid auxiliary digraph.")
+
+    kwargs = {"flow_func": flow_func, "residual": residual}
+
+    if flow_func is not preflow_push:
+        kwargs["cutoff"] = cutoff
+
+    if flow_func is shortest_augmenting_path:
+        kwargs["two_phase"] = True
+
+    return nx.maximum_flow_value(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs)
+
+
+@nx._dispatchable
+def node_connectivity(G, s=None, t=None, flow_func=None):
+    r"""Returns node connectivity for a graph or digraph G.
+
+    Node connectivity is equal to the minimum number of nodes that
+    must be removed to disconnect G or render it trivial. If source
+    and target nodes are provided, this function returns the local node
+    connectivity: the minimum number of nodes that must be removed to break
+    all paths from source to target in G.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph
+
+    s : node
+        Source node. Optional. Default value: None.
+
+    t : node
+        Target node. Optional. Default value: None.
+
+    flow_func : function
+        A function for computing the maximum flow among a pair of nodes.
+        The function has to accept at least three parameters: a Digraph,
+        a source node, and a target node. And return a residual network
+        that follows NetworkX conventions (see :meth:`maximum_flow` for
+        details). If flow_func is None, the default maximum flow function
+        (:meth:`edmonds_karp`) is used. See below for details. The
+        choice of the default function may change from version
+        to version and should not be relied on. Default value: None.
+
+    Returns
+    -------
+    K : integer
+        Node connectivity of G, or local node connectivity if source
+        and target are provided.
+
+    Examples
+    --------
+    >>> # Platonic icosahedral graph is 5-node-connected
+    >>> G = nx.icosahedral_graph()
+    >>> nx.node_connectivity(G)
+    5
+
+    You can use alternative flow algorithms for the underlying maximum
+    flow computation. In dense networks the algorithm
+    :meth:`shortest_augmenting_path` will usually perform better
+    than the default :meth:`edmonds_karp`, which is faster for
+    sparse networks with highly skewed degree distributions. Alternative
+    flow functions have to be explicitly imported from the flow package.
+
+    >>> from networkx.algorithms.flow import shortest_augmenting_path
+    >>> nx.node_connectivity(G, flow_func=shortest_augmenting_path)
+    5
+
+    If you specify a pair of nodes (source and target) as parameters,
+    this function returns the value of local node connectivity.
+
+    >>> nx.node_connectivity(G, 3, 7)
+    5
+
+    If you need to perform several local computations among different
+    pairs of nodes on the same graph, it is recommended that you reuse
+    the data structures used in the maximum flow computations. See
+    :meth:`local_node_connectivity` for details.
+
+    Notes
+    -----
+    This is a flow based implementation of node connectivity. The
+    algorithm works by solving $O((n-\delta-1+\delta(\delta-1)/2))$
+    maximum flow problems on an auxiliary digraph. Where $\delta$
+    is the minimum degree of G. For details about the auxiliary
+    digraph and the computation of local node connectivity see
+    :meth:`local_node_connectivity`. This implementation is based
+    on algorithm 11 in [1]_.
+
+    See also
+    --------
+    :meth:`local_node_connectivity`
+    :meth:`edge_connectivity`
+    :meth:`maximum_flow`
+    :meth:`edmonds_karp`
+    :meth:`preflow_push`
+    :meth:`shortest_augmenting_path`
+
+    References
+    ----------
+    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
+        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
+
+    """
+    if (s is not None and t is None) or (s is None and t is not None):
+        raise nx.NetworkXError("Both source and target must be specified.")
+
+    # Local node connectivity
+    if s is not None and t is not None:
+        if s not in G:
+            raise nx.NetworkXError(f"node {s} not in graph")
+        if t not in G:
+            raise nx.NetworkXError(f"node {t} not in graph")
+        return local_node_connectivity(G, s, t, flow_func=flow_func)
+
+    # Global node connectivity
+    if G.is_directed():
+        if not nx.is_weakly_connected(G):
+            return 0
+        iter_func = itertools.permutations
+        # It is necessary to consider both predecessors
+        # and successors for directed graphs
+
+        def neighbors(v):
+            return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)])
+
+    else:
+        if not nx.is_connected(G):
+            return 0
+        iter_func = itertools.combinations
+        neighbors = G.neighbors
+
+    # Reuse the auxiliary digraph and the residual network
+    H = build_auxiliary_node_connectivity(G)
+    R = build_residual_network(H, "capacity")
+    kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R}
+
+    # Pick a node with minimum degree
+    # Node connectivity is bounded by degree.
+    v, K = min(G.degree(), key=itemgetter(1))
+    # compute local node connectivity with all its non-neighbors nodes
+    for w in set(G) - set(neighbors(v)) - {v}:
+        kwargs["cutoff"] = K
+        K = min(K, local_node_connectivity(G, v, w, **kwargs))
+    # Also for non adjacent pairs of neighbors of v
+    for x, y in iter_func(neighbors(v), 2):
+        if y in G[x]:
+            continue
+        kwargs["cutoff"] = K
+        K = min(K, local_node_connectivity(G, x, y, **kwargs))
+
+    return K
+
+
+@nx._dispatchable
+def average_node_connectivity(G, flow_func=None):
+    r"""Returns the average connectivity of a graph G.
+
+    The average connectivity `\bar{\kappa}` of a graph G is the average
+    of local node connectivity over all pairs of nodes of G [1]_ .
+
+    .. math::
+
+        \bar{\kappa}(G) = \frac{\sum_{u,v} \kappa_{G}(u,v)}{{n \choose 2}}
+
+    Parameters
+    ----------
+
+    G : NetworkX graph
+        Undirected graph
+
+    flow_func : function
+        A function for computing the maximum flow among a pair of nodes.
+        The function has to accept at least three parameters: a Digraph,
+        a source node, and a target node. And return a residual network
+        that follows NetworkX conventions (see :meth:`maximum_flow` for
+        details). If flow_func is None, the default maximum flow function
+        (:meth:`edmonds_karp`) is used. See :meth:`local_node_connectivity`
+        for details. The choice of the default function may change from
+        version to version and should not be relied on. Default value: None.
+
+    Returns
+    -------
+    K : float
+        Average node connectivity
+
+    See also
+    --------
+    :meth:`local_node_connectivity`
+    :meth:`node_connectivity`
+    :meth:`edge_connectivity`
+    :meth:`maximum_flow`
+    :meth:`edmonds_karp`
+    :meth:`preflow_push`
+    :meth:`shortest_augmenting_path`
+
+    References
+    ----------
+    .. [1]  Beineke, L., O. Oellermann, and R. Pippert (2002). The average
+            connectivity of a graph. Discrete mathematics 252(1-3), 31-45.
+            http://www.sciencedirect.com/science/article/pii/S0012365X01001807
+
+    """
+    if G.is_directed():
+        iter_func = itertools.permutations
+    else:
+        iter_func = itertools.combinations
+
+    # Reuse the auxiliary digraph and the residual network
+    H = build_auxiliary_node_connectivity(G)
+    R = build_residual_network(H, "capacity")
+    kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R}
+
+    num, den = 0, 0
+    for u, v in iter_func(G, 2):
+        num += local_node_connectivity(G, u, v, **kwargs)
+        den += 1
+
+    if den == 0:  # Null Graph
+        return 0
+    return num / den
+
+
+@nx._dispatchable
+def all_pairs_node_connectivity(G, nbunch=None, flow_func=None):
+    """Compute node connectivity between all pairs of nodes of G.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph
+
+    nbunch: container
+        Container of nodes. If provided node connectivity will be computed
+        only over pairs of nodes in nbunch.
+
+    flow_func : function
+        A function for computing the maximum flow among a pair of nodes.
+        The function has to accept at least three parameters: a Digraph,
+        a source node, and a target node. And return a residual network
+        that follows NetworkX conventions (see :meth:`maximum_flow` for
+        details). If flow_func is None, the default maximum flow function
+        (:meth:`edmonds_karp`) is used. See below for details. The
+        choice of the default function may change from version
+        to version and should not be relied on. Default value: None.
+
+    Returns
+    -------
+    all_pairs : dict
+        A dictionary with node connectivity between all pairs of nodes
+        in G, or in nbunch if provided.
+
+    See also
+    --------
+    :meth:`local_node_connectivity`
+    :meth:`edge_connectivity`
+    :meth:`local_edge_connectivity`
+    :meth:`maximum_flow`
+    :meth:`edmonds_karp`
+    :meth:`preflow_push`
+    :meth:`shortest_augmenting_path`
+
+    """
+    if nbunch is None:
+        nbunch = G
+    else:
+        nbunch = set(nbunch)
+
+    directed = G.is_directed()
+    if directed:
+        iter_func = itertools.permutations
+    else:
+        iter_func = itertools.combinations
+
+    all_pairs = {n: {} for n in nbunch}
+
+    # Reuse auxiliary digraph and residual network
+    H = build_auxiliary_node_connectivity(G)
+    mapping = H.graph["mapping"]
+    R = build_residual_network(H, "capacity")
+    kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R}
+
+    for u, v in iter_func(nbunch, 2):
+        K = local_node_connectivity(G, u, v, **kwargs)
+        all_pairs[u][v] = K
+        if not directed:
+            all_pairs[v][u] = K
+
+    return all_pairs
+
+
+@nx._dispatchable(graphs={"G": 0, "auxiliary?": 4})
+def local_edge_connectivity(
+    G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None
+):
+    r"""Returns local edge connectivity for nodes s and t in G.
+
+    Local edge connectivity for two nodes s and t is the minimum number
+    of edges that must be removed to disconnect them.
+
+    This is a flow based implementation of edge connectivity. We compute the
+    maximum flow on an auxiliary digraph built from the original
+    network (see below for details). This is equal to the local edge
+    connectivity because the value of a maximum s-t-flow is equal to the
+    capacity of a minimum s-t-cut (Ford and Fulkerson theorem) [1]_ .
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected or directed graph
+
+    s : node
+        Source node
+
+    t : node
+        Target node
+
+    flow_func : function
+        A function for computing the maximum flow among a pair of nodes.
+        The function has to accept at least three parameters: a Digraph,
+        a source node, and a target node. And return a residual network
+        that follows NetworkX conventions (see :meth:`maximum_flow` for
+        details). If flow_func is None, the default maximum flow function
+        (:meth:`edmonds_karp`) is used. See below for details. The
+        choice of the default function may change from version
+        to version and should not be relied on. Default value: None.
+
+    auxiliary : NetworkX DiGraph
+        Auxiliary digraph for computing flow based edge connectivity. If
+        provided it will be reused instead of recreated. Default value: None.
+
+    residual : NetworkX DiGraph
+        Residual network to compute maximum flow. If provided it will be
+        reused instead of recreated. Default value: None.
+
+    cutoff : integer, float, or None (default: None)
+        If specified, the maximum flow algorithm will terminate when the
+        flow value reaches or exceeds the cutoff. This only works for flows
+        that support the cutoff parameter (most do) and is ignored otherwise.
+
+    Returns
+    -------
+    K : integer
+        local edge connectivity for nodes s and t.
+
+    Examples
+    --------
+    This function is not imported in the base NetworkX namespace, so you
+    have to explicitly import it from the connectivity package:
+
+    >>> from networkx.algorithms.connectivity import local_edge_connectivity
+
+    We use in this example the platonic icosahedral graph, which has edge
+    connectivity 5.
+
+    >>> G = nx.icosahedral_graph()
+    >>> local_edge_connectivity(G, 0, 6)
+    5
+
+    If you need to compute local connectivity on several pairs of
+    nodes in the same graph, it is recommended that you reuse the
+    data structures that NetworkX uses in the computation: the
+    auxiliary digraph for edge connectivity, and the residual
+    network for the underlying maximum flow computation.
+
+    Example of how to compute local edge connectivity among
+    all pairs of nodes of the platonic icosahedral graph reusing
+    the data structures.
+
+    >>> import itertools
+    >>> # You also have to explicitly import the function for
+    >>> # building the auxiliary digraph from the connectivity package
+    >>> from networkx.algorithms.connectivity import build_auxiliary_edge_connectivity
+    >>> H = build_auxiliary_edge_connectivity(G)
+    >>> # And the function for building the residual network from the
+    >>> # flow package
+    >>> from networkx.algorithms.flow import build_residual_network
+    >>> # Note that the auxiliary digraph has an edge attribute named capacity
+    >>> R = build_residual_network(H, "capacity")
+    >>> result = dict.fromkeys(G, dict())
+    >>> # Reuse the auxiliary digraph and the residual network by passing them
+    >>> # as parameters
+    >>> for u, v in itertools.combinations(G, 2):
+    ...     k = local_edge_connectivity(G, u, v, auxiliary=H, residual=R)
+    ...     result[u][v] = k
+    >>> all(result[u][v] == 5 for u, v in itertools.combinations(G, 2))
+    True
+
+    You can also use alternative flow algorithms for computing edge
+    connectivity. For instance, in dense networks the algorithm
+    :meth:`shortest_augmenting_path` will usually perform better than
+    the default :meth:`edmonds_karp` which is faster for sparse
+    networks with highly skewed degree distributions. Alternative flow
+    functions have to be explicitly imported from the flow package.
+
+    >>> from networkx.algorithms.flow import shortest_augmenting_path
+    >>> local_edge_connectivity(G, 0, 6, flow_func=shortest_augmenting_path)
+    5
+
+    Notes
+    -----
+    This is a flow based implementation of edge connectivity. We compute the
+    maximum flow using, by default, the :meth:`edmonds_karp` algorithm on an
+    auxiliary digraph built from the original input graph:
+
+    If the input graph is undirected, we replace each edge (`u`,`v`) with
+    two reciprocal arcs (`u`, `v`) and (`v`, `u`) and then we set the attribute
+    'capacity' for each arc to 1. If the input graph is directed we simply
+    add the 'capacity' attribute. This is an implementation of algorithm 1
+    in [1]_.
+
+    The maximum flow in the auxiliary network is equal to the local edge
+    connectivity because the value of a maximum s-t-flow is equal to the
+    capacity of a minimum s-t-cut (Ford and Fulkerson theorem).
+
+    See also
+    --------
+    :meth:`edge_connectivity`
+    :meth:`local_node_connectivity`
+    :meth:`node_connectivity`
+    :meth:`maximum_flow`
+    :meth:`edmonds_karp`
+    :meth:`preflow_push`
+    :meth:`shortest_augmenting_path`
+
+    References
+    ----------
+    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
+        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
+
+    """
+    if flow_func is None:
+        flow_func = default_flow_func
+
+    if auxiliary is None:
+        H = build_auxiliary_edge_connectivity(G)
+    else:
+        H = auxiliary
+
+    kwargs = {"flow_func": flow_func, "residual": residual}
+
+    if flow_func is not preflow_push:
+        kwargs["cutoff"] = cutoff
+
+    if flow_func is shortest_augmenting_path:
+        kwargs["two_phase"] = True
+
+    return nx.maximum_flow_value(H, s, t, **kwargs)
+
+
@nx._dispatchable
def edge_connectivity(G, s=None, t=None, flow_func=None, cutoff=None):
    r"""Returns the edge connectivity of the graph or digraph G.

    The edge connectivity is equal to the minimum number of edges that
    must be removed to disconnect G or render it trivial. If source
    and target nodes are provided, this function returns the local edge
    connectivity: the minimum number of edges that must be removed to
    break all paths from source to target in G.

    Parameters
    ----------
    G : NetworkX graph
        Undirected or directed graph

    s : node
        Source node. Optional. Default value: None.

    t : node
        Target node. Optional. Default value: None.

    flow_func : function
        A function for computing the maximum flow among a pair of nodes.
        The function has to accept at least three parameters: a Digraph,
        a source node, and a target node. And return a residual network
        that follows NetworkX conventions (see :meth:`maximum_flow` for
        details). If flow_func is None, the default maximum flow function
        (:meth:`edmonds_karp`) is used. See below for details. The
        choice of the default function may change from version
        to version and should not be relied on. Default value: None.

    cutoff : integer, float, or None (default: None)
        If specified, the maximum flow algorithm will terminate when the
        flow value reaches or exceeds the cutoff. This only works for flows
        that support the cutoff parameter (most do) and is ignored otherwise.

    Returns
    -------
    K : integer
        Edge connectivity for G, or local edge connectivity if source
        and target were provided

    Raises
    ------
    NetworkXError
        If only one of ``s`` and ``t`` is given, or if either is not in G.

    Examples
    --------
    >>> # Platonic icosahedral graph is 5-edge-connected
    >>> G = nx.icosahedral_graph()
    >>> nx.edge_connectivity(G)
    5

    You can use alternative flow algorithms for the underlying
    maximum flow computation. In dense networks the algorithm
    :meth:`shortest_augmenting_path` will usually perform better
    than the default :meth:`edmonds_karp`, which is faster for
    sparse networks with highly skewed degree distributions.
    Alternative flow functions have to be explicitly imported
    from the flow package.

    >>> from networkx.algorithms.flow import shortest_augmenting_path
    >>> nx.edge_connectivity(G, flow_func=shortest_augmenting_path)
    5

    If you specify a pair of nodes (source and target) as parameters,
    this function returns the value of local edge connectivity.

    >>> nx.edge_connectivity(G, 3, 7)
    5

    If you need to perform several local computations among different
    pairs of nodes on the same graph, it is recommended that you reuse
    the data structures used in the maximum flow computations. See
    :meth:`local_edge_connectivity` for details.

    Notes
    -----
    This is a flow based implementation of global edge connectivity.
    For undirected graphs the algorithm works by finding a 'small'
    dominating set of nodes of G (see algorithm 7 in [1]_ ) and
    computing local maximum flow (see :meth:`local_edge_connectivity`)
    between an arbitrary node in the dominating set and the rest of
    nodes in it. This is an implementation of algorithm 6 in [1]_ .
    For directed graphs, the algorithm does n calls to the maximum
    flow function. This is an implementation of algorithm 8 in [1]_ .

    See also
    --------
    :meth:`local_edge_connectivity`
    :meth:`local_node_connectivity`
    :meth:`node_connectivity`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`
    :meth:`k_edge_components`
    :meth:`k_edge_subgraphs`

    References
    ----------
    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf

    """
    if (s is not None and t is None) or (s is None and t is not None):
        raise nx.NetworkXError("Both source and target must be specified.")

    # Local edge connectivity
    if s is not None and t is not None:
        if s not in G:
            raise nx.NetworkXError(f"node {s} not in graph")
        if t not in G:
            raise nx.NetworkXError(f"node {t} not in graph")
        return local_edge_connectivity(G, s, t, flow_func=flow_func, cutoff=cutoff)

    # Global edge connectivity
    # reuse auxiliary digraph and residual network
    H = build_auxiliary_edge_connectivity(G)
    R = build_residual_network(H, "capacity")
    kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R}

    if G.is_directed():
        # Algorithm 8 in [1]
        if not nx.is_weakly_connected(G):
            return 0

        # initial value for \lambda is minimum degree
        L = min(d for n, d in G.degree())
        nodes = list(G)
        n = len(nodes)

        if cutoff is not None:
            L = min(cutoff, L)

        for i in range(n):
            # The running minimum L also serves as a cutoff for the next
            # flow computation: no cut can be smaller than a cut we have
            # already found.
            kwargs["cutoff"] = L
            # Pair each node with its successor, wrapping from the last
            # node back to the first via modular indexing.  (The previous
            # try/except-IndexError idiom would also have silently masked
            # an IndexError raised inside the flow computation itself.)
            L = min(
                L, local_edge_connectivity(G, nodes[i], nodes[(i + 1) % n], **kwargs)
            )
        return L
    else:  # undirected
        # Algorithm 6 in [1]
        if not nx.is_connected(G):
            return 0

        # initial value for \lambda is minimum degree
        L = min(d for n, d in G.degree())

        if cutoff is not None:
            L = min(cutoff, L)

        # A dominating set is \lambda-covering
        # We need a dominating set with at least two nodes
        for node in G:
            D = nx.dominating_set(G, start_with=node)
            v = D.pop()
            if D:
                break
        else:
            # in complete graphs the dominating sets will always be of one node
            # thus we return min degree
            return L

        for w in D:
            kwargs["cutoff"] = L
            L = min(L, local_edge_connectivity(G, v, w, **kwargs))

        return L
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/cuts.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/cuts.py
new file mode 100644
index 00000000..27124e1b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/cuts.py
@@ -0,0 +1,612 @@
+"""
+Flow based cut algorithms
+"""
+
+import itertools
+
+import networkx as nx
+
+# Define the default maximum flow function to use in all flow based
+# cut algorithms.
+from networkx.algorithms.flow import build_residual_network, edmonds_karp
+
+default_flow_func = edmonds_karp
+
+from .utils import build_auxiliary_edge_connectivity, build_auxiliary_node_connectivity
+
+__all__ = [
+    "minimum_st_node_cut",
+    "minimum_node_cut",
+    "minimum_st_edge_cut",
+    "minimum_edge_cut",
+]
+
+
@nx._dispatchable(
    graphs={"G": 0, "auxiliary?": 4},
    preserve_edge_attrs={"auxiliary": {"capacity": float("inf")}},
    preserve_graph_attrs={"auxiliary"},
)
def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, residual=None):
    """Returns the edges of the cut-set of a minimum (s, t)-cut.

    This function returns the set of edges of minimum cardinality that,
    if removed, would destroy all paths among source and target in G.
    Edge weights are not considered. See :meth:`minimum_cut` for
    computing minimum cuts considering edge weights.

    Parameters
    ----------
    G : NetworkX graph

    s : node
        Source node for the flow.

    t : node
        Sink node for the flow.

    auxiliary : NetworkX DiGraph
        Auxiliary digraph to compute flow based node connectivity. It has
        to have a graph attribute called mapping with a dictionary mapping
        node names in G and in the auxiliary digraph. If provided
        it will be reused instead of recreated. Default value: None.

    flow_func : function
        A function for computing the maximum flow among a pair of nodes.
        The function has to accept at least three parameters: a Digraph,
        a source node, and a target node. And return a residual network
        that follows NetworkX conventions (see :meth:`maximum_flow` for
        details). If flow_func is None, the default maximum flow function
        (:meth:`edmonds_karp`) is used. See :meth:`node_connectivity` for
        details. The choice of the default function may change from version
        to version and should not be relied on. Default value: None.

    residual : NetworkX DiGraph
        Residual network to compute maximum flow. If provided it will be
        reused instead of recreated. Default value: None.

    Returns
    -------
    cutset : set
        Set of edges that, if removed from the graph, will disconnect it.

    See also
    --------
    :meth:`minimum_cut`
    :meth:`minimum_node_cut`
    :meth:`minimum_edge_cut`
    :meth:`stoer_wagner`
    :meth:`node_connectivity`
    :meth:`edge_connectivity`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    Examples
    --------
    This function is not imported in the base NetworkX namespace, so you
    have to explicitly import it from the connectivity package:

    >>> from networkx.algorithms.connectivity import minimum_st_edge_cut

    We use in this example the platonic icosahedral graph, which has edge
    connectivity 5.

    >>> G = nx.icosahedral_graph()
    >>> len(minimum_st_edge_cut(G, 0, 6))
    5

    If you need to compute local edge cuts on several pairs of
    nodes in the same graph, it is recommended that you reuse the
    data structures that NetworkX uses in the computation: the
    auxiliary digraph for edge connectivity, and the residual
    network for the underlying maximum flow computation.

    Example of how to compute local edge cuts among all pairs of
    nodes of the platonic icosahedral graph reusing the data
    structures.

    >>> import itertools
    >>> # You also have to explicitly import the function for
    >>> # building the auxiliary digraph from the connectivity package
    >>> from networkx.algorithms.connectivity import build_auxiliary_edge_connectivity
    >>> H = build_auxiliary_edge_connectivity(G)
    >>> # And the function for building the residual network from the
    >>> # flow package
    >>> from networkx.algorithms.flow import build_residual_network
    >>> # Note that the auxiliary digraph has an edge attribute named capacity
    >>> R = build_residual_network(H, "capacity")
    >>> result = {n: dict() for n in G}
    >>> # Reuse the auxiliary digraph and the residual network by passing them
    >>> # as parameters
    >>> for u, v in itertools.combinations(G, 2):
    ...     k = len(minimum_st_edge_cut(G, u, v, auxiliary=H, residual=R))
    ...     result[u][v] = k
    >>> all(result[u][v] == 5 for u, v in itertools.combinations(G, 2))
    True

    You can also use alternative flow algorithms for computing edge
    cuts. For instance, in dense networks the algorithm
    :meth:`shortest_augmenting_path` will usually perform better than
    the default :meth:`edmonds_karp` which is faster for sparse
    networks with highly skewed degree distributions. Alternative flow
    functions have to be explicitly imported from the flow package.

    >>> from networkx.algorithms.flow import shortest_augmenting_path
    >>> len(minimum_st_edge_cut(G, 0, 6, flow_func=shortest_augmenting_path))
    5

    """
    if flow_func is None:
        flow_func = default_flow_func

    if auxiliary is None:
        H = build_auxiliary_edge_connectivity(G)
    else:
        H = auxiliary

    kwargs = {"capacity": "capacity", "flow_func": flow_func, "residual": residual}

    # Max-flow/min-cut on the auxiliary digraph; the partition separates
    # the nodes reachable from s in the residual network from the rest.
    cut_value, partition = nx.minimum_cut(H, s, t, **kwargs)
    reachable, non_reachable = partition
    # Any edge in the original graph linking the two sets in the
    # partition is part of the edge cutset
    cutset = set()
    for u, nbrs in ((n, G[n]) for n in reachable):
        cutset.update((u, v) for v in nbrs if v in non_reachable)

    return cutset
+
+
@nx._dispatchable(
    graphs={"G": 0, "auxiliary?": 4},
    preserve_node_attrs={"auxiliary": {"id": None}},
    preserve_graph_attrs={"auxiliary"},
)
def minimum_st_node_cut(G, s, t, flow_func=None, auxiliary=None, residual=None):
    r"""Returns a set of nodes of minimum cardinality that disconnect source
    from target in G.

    This function returns the set of nodes of minimum cardinality that,
    if removed, would destroy all paths among source and target in G.

    Parameters
    ----------
    G : NetworkX graph

    s : node
        Source node.

    t : node
        Target node.

    flow_func : function
        A function for computing the maximum flow among a pair of nodes.
        The function has to accept at least three parameters: a Digraph,
        a source node, and a target node. And return a residual network
        that follows NetworkX conventions (see :meth:`maximum_flow` for
        details). If flow_func is None, the default maximum flow function
        (:meth:`edmonds_karp`) is used. See below for details. The choice
        of the default function may change from version to version and
        should not be relied on. Default value: None.

    auxiliary : NetworkX DiGraph
        Auxiliary digraph to compute flow based node connectivity. It has
        to have a graph attribute called mapping with a dictionary mapping
        node names in G and in the auxiliary digraph. If provided
        it will be reused instead of recreated. Default value: None.

    residual : NetworkX DiGraph
        Residual network to compute maximum flow. If provided it will be
        reused instead of recreated. Default value: None.

    Returns
    -------
    cutset : set
        Set of nodes that, if removed, would destroy all paths between
        source and target in G. The set is empty when ``s`` and ``t``
        are adjacent (no node removal can separate them).

    Raises
    ------
    NetworkXError
        If a user-provided auxiliary digraph lacks the ``mapping``
        graph attribute.

    Examples
    --------
    This function is not imported in the base NetworkX namespace, so you
    have to explicitly import it from the connectivity package:

    >>> from networkx.algorithms.connectivity import minimum_st_node_cut

    We use in this example the platonic icosahedral graph, which has node
    connectivity 5.

    >>> G = nx.icosahedral_graph()
    >>> len(minimum_st_node_cut(G, 0, 6))
    5

    If you need to compute local st cuts between several pairs of
    nodes in the same graph, it is recommended that you reuse the
    data structures that NetworkX uses in the computation: the
    auxiliary digraph for node connectivity and node cuts, and the
    residual network for the underlying maximum flow computation.

    Example of how to compute local st node cuts reusing the data
    structures:

    >>> # You also have to explicitly import the function for
    >>> # building the auxiliary digraph from the connectivity package
    >>> from networkx.algorithms.connectivity import build_auxiliary_node_connectivity
    >>> H = build_auxiliary_node_connectivity(G)
    >>> # And the function for building the residual network from the
    >>> # flow package
    >>> from networkx.algorithms.flow import build_residual_network
    >>> # Note that the auxiliary digraph has an edge attribute named capacity
    >>> R = build_residual_network(H, "capacity")
    >>> # Reuse the auxiliary digraph and the residual network by passing them
    >>> # as parameters
    >>> len(minimum_st_node_cut(G, 0, 6, auxiliary=H, residual=R))
    5

    You can also use alternative flow algorithms for computing minimum st
    node cuts. For instance, in dense networks the algorithm
    :meth:`shortest_augmenting_path` will usually perform better than
    the default :meth:`edmonds_karp` which is faster for sparse
    networks with highly skewed degree distributions. Alternative flow
    functions have to be explicitly imported from the flow package.

    >>> from networkx.algorithms.flow import shortest_augmenting_path
    >>> len(minimum_st_node_cut(G, 0, 6, flow_func=shortest_augmenting_path))
    5

    Notes
    -----
    This is a flow based implementation of minimum node cut. The algorithm
    is based in solving a number of maximum flow computations to determine
    the capacity of the minimum cut on an auxiliary directed network that
    corresponds to the minimum node cut of G. It handles both directed
    and undirected graphs. This implementation is based on algorithm 11
    in [1]_.

    See also
    --------
    :meth:`minimum_node_cut`
    :meth:`minimum_edge_cut`
    :meth:`stoer_wagner`
    :meth:`node_connectivity`
    :meth:`edge_connectivity`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    References
    ----------
    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf

    """
    if auxiliary is None:
        H = build_auxiliary_node_connectivity(G)
    else:
        H = auxiliary

    mapping = H.graph.get("mapping", None)
    if mapping is None:
        raise nx.NetworkXError("Invalid auxiliary digraph.")
    if G.has_edge(s, t) or G.has_edge(t, s):
        # Adjacent nodes cannot be disconnected by removing other nodes.
        # Return an empty *set* so the return type matches the documented
        # contract and every other code path (previously a dict literal
        # ``{}`` was returned here).
        return set()
    kwargs = {"flow_func": flow_func, "residual": residual, "auxiliary": H}

    # The edge cut in the auxiliary digraph corresponds to the node cut in the
    # original graph.
    edge_cut = minimum_st_edge_cut(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs)
    # Each node in the original graph maps to two nodes of the auxiliary graph
    node_cut = {H.nodes[node]["id"] for edge in edge_cut for node in edge}
    return node_cut - {s, t}
+
+
@nx._dispatchable
def minimum_node_cut(G, s=None, t=None, flow_func=None):
    r"""Returns a set of nodes of minimum cardinality that disconnects G.

    When both a source and a target are given, the result is a smallest
    set of nodes whose removal destroys every path from source to target
    in G. Without them, it is a smallest set of nodes whose removal
    disconnects G.

    Parameters
    ----------
    G : NetworkX graph

    s : node
        Source node. Optional. Default value: None.

    t : node
        Target node. Optional. Default value: None.

    flow_func : function
        A function computing the maximum flow between a pair of nodes.
        It must accept at least a digraph, a source node and a target
        node, and return a residual network following NetworkX
        conventions (see :meth:`maximum_flow` for details). When None,
        the default maximum flow function (:meth:`edmonds_karp`) is
        used; the choice of default may change between versions and
        should not be relied on. Default value: None.

    Returns
    -------
    cutset : set
        Nodes that, if removed, would disconnect G. If source and
        target nodes were provided, the set contains the nodes that,
        if removed, would destroy all paths between source and target.

    Raises
    ------
    NetworkXError
        If only one of ``s`` and ``t`` is given, if either is not in G,
        or if G is not connected (weakly connected for digraphs).

    Examples
    --------
    >>> # Platonic icosahedral graph has node connectivity 5
    >>> G = nx.icosahedral_graph()
    >>> node_cut = nx.minimum_node_cut(G)
    >>> len(node_cut)
    5

    Alternative flow algorithms may be used for the underlying maximum
    flow computation; for dense networks :meth:`shortest_augmenting_path`
    often beats the default :meth:`edmonds_karp`. Alternative flow
    functions must be imported explicitly from the flow package.

    >>> from networkx.algorithms.flow import shortest_augmenting_path
    >>> node_cut == nx.minimum_node_cut(G, flow_func=shortest_augmenting_path)
    True

    Passing a source and a target yields a local st node cut.

    >>> len(nx.minimum_node_cut(G, 3, 7))
    5

    For several local st cuts on the same graph, reuse the flow data
    structures as described in :meth:`minimum_st_node_cut`.

    Notes
    -----
    Flow based implementation following algorithm 11 in [1]_: a series
    of maximum flow problems is solved on an auxiliary directed network
    whose minimum edge cuts correspond to node cuts of G. Both directed
    and undirected graphs are handled.

    See also
    --------
    :meth:`minimum_st_node_cut`
    :meth:`minimum_cut`
    :meth:`minimum_edge_cut`
    :meth:`stoer_wagner`
    :meth:`node_connectivity`
    :meth:`edge_connectivity`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    References
    ----------
    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf

    """
    if (s is None) != (t is None):
        raise nx.NetworkXError("Both source and target must be specified.")

    # Local minimum node cut.
    if s is not None:
        for node in (s, t):
            if node not in G:
                raise nx.NetworkXError(f"node {node} not in graph")
        return minimum_st_node_cut(G, s, t, flow_func=flow_func)

    # Global minimum node cut.
    # Analog to the algorithm 11 for global node connectivity in [1].
    if G.is_directed():
        if not nx.is_weakly_connected(G):
            raise nx.NetworkXError("Input graph is not connected")
        pair_iter = itertools.permutations

        def nbrs(v):
            # In a digraph both in- and out-neighbors touch v.
            return itertools.chain(G.predecessors(v), G.successors(v))

    else:
        if not nx.is_connected(G):
            raise nx.NetworkXError("Input graph is not connected")
        pair_iter = itertools.combinations
        nbrs = G.neighbors

    # Build the auxiliary digraph and residual network once and reuse
    # them across every st computation below.
    aux = build_auxiliary_node_connectivity(G)
    res = build_residual_network(aux, "capacity")
    st_kwargs = {"flow_func": flow_func, "auxiliary": aux, "residual": res}

    # Anchor on a node of minimum degree; its whole neighborhood is a
    # valid (initial) cut.
    anchor = min(G, key=G.degree)
    best_cut = set(G[anchor])
    # Try to improve it with st cuts between the anchor and each of its
    # non-neighbors...
    for other in set(G) - set(nbrs(anchor)) - {anchor}:
        candidate = minimum_st_node_cut(G, anchor, other, **st_kwargs)
        if len(best_cut) >= len(candidate):
            best_cut = candidate
    # ...and between every non-adjacent pair of the anchor's neighbors.
    for x, y in pair_iter(nbrs(anchor), 2):
        if y in G[x]:
            continue
        candidate = minimum_st_node_cut(G, x, y, **st_kwargs)
        if len(best_cut) >= len(candidate):
            best_cut = candidate

    return best_cut
+
+
@nx._dispatchable
def minimum_edge_cut(G, s=None, t=None, flow_func=None):
    r"""Returns a set of edges of minimum cardinality that disconnects G.

    If source and target nodes are provided, this function returns the
    set of edges of minimum cardinality that, if removed, would break
    all paths among source and target in G. If not, it returns a set of
    edges of minimum cardinality that disconnects G.

    Parameters
    ----------
    G : NetworkX graph

    s : node
        Source node. Optional. Default value: None.

    t : node
        Target node. Optional. Default value: None.

    flow_func : function
        A function for computing the maximum flow among a pair of nodes.
        The function has to accept at least three parameters: a Digraph,
        a source node, and a target node. And return a residual network
        that follows NetworkX conventions (see :meth:`maximum_flow` for
        details). If flow_func is None, the default maximum flow function
        (:meth:`edmonds_karp`) is used. See below for details. The
        choice of the default function may change from version
        to version and should not be relied on. Default value: None.

    Returns
    -------
    cutset : set
        Set of edges that, if removed, would disconnect G. If source
        and target nodes are provided, the set contains the edges that
        if removed, would destroy all paths between source and target.

    Raises
    ------
    NetworkXError
        If only one of ``s`` and ``t`` is given, if either is not in G,
        or if G is not connected (weakly connected for digraphs).

    Examples
    --------
    >>> # Platonic icosahedral graph has edge connectivity 5
    >>> G = nx.icosahedral_graph()
    >>> len(nx.minimum_edge_cut(G))
    5

    You can use alternative flow algorithms for the underlying
    maximum flow computation. In dense networks the algorithm
    :meth:`shortest_augmenting_path` will usually perform better
    than the default :meth:`edmonds_karp`, which is faster for
    sparse networks with highly skewed degree distributions.
    Alternative flow functions have to be explicitly imported
    from the flow package.

    >>> from networkx.algorithms.flow import shortest_augmenting_path
    >>> len(nx.minimum_edge_cut(G, flow_func=shortest_augmenting_path))
    5

    If you specify a pair of nodes (source and target) as parameters,
    this function returns the value of local edge connectivity.

    >>> nx.edge_connectivity(G, 3, 7)
    5

    If you need to perform several local computations among different
    pairs of nodes on the same graph, it is recommended that you reuse
    the data structures used in the maximum flow computations. See
    :meth:`local_edge_connectivity` for details.

    Notes
    -----
    This is a flow based implementation of minimum edge cut. For
    undirected graphs the algorithm works by finding a 'small' dominating
    set of nodes of G (see algorithm 7 in [1]_) and computing the maximum
    flow between an arbitrary node in the dominating set and the rest of
    nodes in it. This is an implementation of algorithm 6 in [1]_. For
    directed graphs, the algorithm does n calls to the max flow function.
    The function raises an error if the directed graph is not weakly
    connected and returns an empty set if it is weakly connected.
    It is an implementation of algorithm 8 in [1]_.

    See also
    --------
    :meth:`minimum_st_edge_cut`
    :meth:`minimum_node_cut`
    :meth:`stoer_wagner`
    :meth:`node_connectivity`
    :meth:`edge_connectivity`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    References
    ----------
    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf

    """
    if (s is not None and t is None) or (s is None and t is not None):
        raise nx.NetworkXError("Both source and target must be specified.")

    # reuse auxiliary digraph and residual network
    H = build_auxiliary_edge_connectivity(G)
    R = build_residual_network(H, "capacity")
    kwargs = {"flow_func": flow_func, "residual": R, "auxiliary": H}

    # Local minimum edge cut if s and t are not None
    if s is not None and t is not None:
        if s not in G:
            raise nx.NetworkXError(f"node {s} not in graph")
        if t not in G:
            raise nx.NetworkXError(f"node {t} not in graph")
        return minimum_st_edge_cut(H, s, t, **kwargs)

    # Global minimum edge cut
    # Analog to the algorithm for global edge connectivity
    if G.is_directed():
        # Based on algorithm 8 in [1]
        if not nx.is_weakly_connected(G):
            raise nx.NetworkXError("Input graph is not connected")

        # Initial cutset is all edges of a node with minimum degree
        node = min(G, key=G.degree)
        min_cut = set(G.edges(node))
        nodes = list(G)
        n = len(nodes)
        for i in range(n):
            # Pair each node with its successor, wrapping from the last
            # node back to the first via modular indexing.  (The previous
            # try/except-IndexError idiom used exceptions as control flow
            # and could mask a genuine IndexError raised inside
            # minimum_st_edge_cut.)
            this_cut = minimum_st_edge_cut(H, nodes[i], nodes[(i + 1) % n], **kwargs)
            if len(this_cut) <= len(min_cut):
                min_cut = this_cut

        return min_cut

    else:  # undirected
        # Based on algorithm 6 in [1]
        if not nx.is_connected(G):
            raise nx.NetworkXError("Input graph is not connected")

        # Initial cutset is all edges of a node with minimum degree
        node = min(G, key=G.degree)
        min_cut = set(G.edges(node))
        # A dominating set is \lambda-covering
        # We need a dominating set with at least two nodes
        for node in G:
            D = nx.dominating_set(G, start_with=node)
            v = D.pop()
            if D:
                break
        else:
            # in complete graphs the dominating set will always be of one node
            # thus we return min_cut, which now contains the edges of a node
            # with minimum degree
            return min_cut
        for w in D:
            this_cut = minimum_st_edge_cut(H, v, w, **kwargs)
            if len(this_cut) <= len(min_cut):
                min_cut = this_cut

        return min_cut
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/disjoint_paths.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/disjoint_paths.py
new file mode 100644
index 00000000..00616492
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/disjoint_paths.py
@@ -0,0 +1,408 @@
+"""Flow based node and edge disjoint paths."""
+
+import networkx as nx
+
+# Define the default maximum flow function to use for the underlying
+# maximum flow computations
+from networkx.algorithms.flow import (
+    edmonds_karp,
+    preflow_push,
+    shortest_augmenting_path,
+)
+from networkx.exception import NetworkXNoPath
+
+default_flow_func = edmonds_karp
+from itertools import filterfalse as _filterfalse
+
+# Functions to build auxiliary data structures.
+from .utils import build_auxiliary_edge_connectivity, build_auxiliary_node_connectivity
+
+__all__ = ["edge_disjoint_paths", "node_disjoint_paths"]
+
+
+@nx._dispatchable(
+    graphs={"G": 0, "auxiliary?": 5},
+    preserve_edge_attrs={"auxiliary": {"capacity": float("inf")}},
+)
+def edge_disjoint_paths(
+    G, s, t, flow_func=None, cutoff=None, auxiliary=None, residual=None
+):
+    """Returns the edges disjoint paths between source and target.
+
+    Edge disjoint paths are paths that do not share any edge. The
+    number of edge disjoint paths between source and target is equal
+    to their edge connectivity.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    s : node
+        Source node for the flow.
+
+    t : node
+        Sink node for the flow.
+
+    flow_func : function
+        A function for computing the maximum flow among a pair of nodes.
+        The function has to accept at least three parameters: a Digraph,
+        a source node, and a target node. And return a residual network
+        that follows NetworkX conventions (see :meth:`maximum_flow` for
+        details). If flow_func is None, the default maximum flow function
+        (:meth:`edmonds_karp`) is used. The choice of the default function
+        may change from version to version and should not be relied on.
+        Default value: None.
+
+    cutoff : integer or None (default: None)
+        Maximum number of paths to yield. If specified, the maximum flow
+        algorithm will terminate when the flow value reaches or exceeds the
+        cutoff. This only works for flows that support the cutoff parameter
+        (most do) and is ignored otherwise.
+
+    auxiliary : NetworkX DiGraph
+        Auxiliary digraph to compute flow based edge connectivity. It has
+        to have a graph attribute called mapping with a dictionary mapping
+        node names in G and in the auxiliary digraph. If provided
+        it will be reused instead of recreated. Default value: None.
+
+    residual : NetworkX DiGraph
+        Residual network to compute maximum flow. If provided it will be
+        reused instead of recreated. Default value: None.
+
+    Returns
+    -------
+    paths : generator
+        A generator of edge independent paths.
+
+    Raises
+    ------
+    NetworkXNoPath
+        If there is no path between source and target.
+
+    NetworkXError
+        If source or target are not in the graph G.
+
+    See also
+    --------
+    :meth:`node_disjoint_paths`
+    :meth:`edge_connectivity`
+    :meth:`maximum_flow`
+    :meth:`edmonds_karp`
+    :meth:`preflow_push`
+    :meth:`shortest_augmenting_path`
+
+    Examples
+    --------
+    We use in this example the platonic icosahedral graph, which has node
+    edge connectivity 5, thus there are 5 edge disjoint paths between any
+    pair of nodes.
+
+    >>> G = nx.icosahedral_graph()
+    >>> len(list(nx.edge_disjoint_paths(G, 0, 6)))
+    5
+
+
+    If you need to compute edge disjoint paths on several pairs of
+    nodes in the same graph, it is recommended that you reuse the
+    data structures that NetworkX uses in the computation: the
+    auxiliary digraph for edge connectivity, and the residual
+    network for the underlying maximum flow computation.
+
+    Example of how to compute edge disjoint paths among all pairs of
+    nodes of the platonic icosahedral graph reusing the data
+    structures.
+
+    >>> import itertools
+    >>> # You also have to explicitly import the function for
+    >>> # building the auxiliary digraph from the connectivity package
+    >>> from networkx.algorithms.connectivity import build_auxiliary_edge_connectivity
+    >>> H = build_auxiliary_edge_connectivity(G)
+    >>> # And the function for building the residual network from the
+    >>> # flow package
+    >>> from networkx.algorithms.flow import build_residual_network
+    >>> # Note that the auxiliary digraph has an edge attribute named capacity
+    >>> R = build_residual_network(H, "capacity")
+    >>> result = {n: {} for n in G}
+    >>> # Reuse the auxiliary digraph and the residual network by passing them
+    >>> # as arguments
+    >>> for u, v in itertools.combinations(G, 2):
+    ...     k = len(list(nx.edge_disjoint_paths(G, u, v, auxiliary=H, residual=R)))
+    ...     result[u][v] = k
+    >>> all(result[u][v] == 5 for u, v in itertools.combinations(G, 2))
+    True
+
+    You can also use alternative flow algorithms for computing edge disjoint
+    paths. For instance, in dense networks the algorithm
+    :meth:`shortest_augmenting_path` will usually perform better than
+    the default :meth:`edmonds_karp` which is faster for sparse
+    networks with highly skewed degree distributions. Alternative flow
+    functions have to be explicitly imported from the flow package.
+
+    >>> from networkx.algorithms.flow import shortest_augmenting_path
+    >>> len(list(nx.edge_disjoint_paths(G, 0, 6, flow_func=shortest_augmenting_path)))
+    5
+
+    Notes
+    -----
+    This is a flow based implementation of edge disjoint paths. We compute
+    the maximum flow between source and target on an auxiliary directed
+    network. The saturated edges in the residual network after running the
+    maximum flow algorithm correspond to edge disjoint paths between source
+    and target in the original network. This function handles both directed
+    and undirected graphs, and can use all flow algorithms from NetworkX flow
+    package.
+
+    """
+    if s not in G:
+        raise nx.NetworkXError(f"node {s} not in graph")
+    if t not in G:
+        raise nx.NetworkXError(f"node {t} not in graph")
+
+    if flow_func is None:
+        flow_func = default_flow_func
+
+    if auxiliary is None:
+        H = build_auxiliary_edge_connectivity(G)
+    else:
+        H = auxiliary
+
+    # Maximum possible edge disjoint paths
+    possible = min(H.out_degree(s), H.in_degree(t))
+    if not possible:
+        raise NetworkXNoPath
+
+    if cutoff is None:
+        cutoff = possible
+    else:
+        cutoff = min(cutoff, possible)
+
+    # Compute maximum flow between source and target. Flow functions in
+    # NetworkX return a residual network.
+    kwargs = {
+        "capacity": "capacity",
+        "residual": residual,
+        "cutoff": cutoff,
+        "value_only": True,
+    }
+    if flow_func is preflow_push:
+        del kwargs["cutoff"]
+    if flow_func is shortest_augmenting_path:
+        kwargs["two_phase"] = True
+    R = flow_func(H, s, t, **kwargs)
+
+    if R.graph["flow_value"] == 0:
+        raise NetworkXNoPath
+
+    # Saturated edges in the residual network form the edge disjoint paths
+    # between source and target
+    cutset = [
+        (u, v)
+        for u, v, d in R.edges(data=True)
+        if d["capacity"] == d["flow"] and d["flow"] > 0
+    ]
+    # This is equivalent of what flow.utils.build_flow_dict returns, but
+    # only for the nodes with saturated edges and without reporting 0 flows.
+    flow_dict = {n: {} for edge in cutset for n in edge}
+    for u, v in cutset:
+        flow_dict[u][v] = 1
+
+    # Rebuild the edge disjoint paths from the flow dictionary.
+    paths_found = 0
+    for v in list(flow_dict[s]):
+        if paths_found >= cutoff:
+            # preflow_push does not support cutoff: we have to
+            # keep track of the paths founds and stop at cutoff.
+            break
+        path = [s]
+        if v == t:
+            path.append(v)
+            yield path
+            continue
+        u = v
+        while u != t:
+            path.append(u)
+            try:
+                u, _ = flow_dict[u].popitem()
+            except KeyError:
+                break
+        else:
+            path.append(t)
+            yield path
+            paths_found += 1
+
+
+@nx._dispatchable(
+    graphs={"G": 0, "auxiliary?": 5},
+    preserve_node_attrs={"auxiliary": {"id": None}},
+    preserve_graph_attrs={"auxiliary"},
+)
+def node_disjoint_paths(
+    G, s, t, flow_func=None, cutoff=None, auxiliary=None, residual=None
+):
+    r"""Computes node disjoint paths between source and target.
+
+    Node disjoint paths are paths that only share their first and last
+    nodes. The number of node independent paths between two nodes is
+    equal to their local node connectivity.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    s : node
+        Source node.
+
+    t : node
+        Target node.
+
+    flow_func : function
+        A function for computing the maximum flow among a pair of nodes.
+        The function has to accept at least three parameters: a Digraph,
+        a source node, and a target node. And return a residual network
+        that follows NetworkX conventions (see :meth:`maximum_flow` for
+        details). If flow_func is None, the default maximum flow function
+        (:meth:`edmonds_karp`) is used. See below for details. The choice
+        of the default function may change from version to version and
+        should not be relied on. Default value: None.
+
+    cutoff : integer or None (default: None)
+        Maximum number of paths to yield. If specified, the maximum flow
+        algorithm will terminate when the flow value reaches or exceeds the
+        cutoff. This only works for flows that support the cutoff parameter
+        (most do) and is ignored otherwise.
+
+    auxiliary : NetworkX DiGraph
+        Auxiliary digraph to compute flow based node connectivity. It has
+        to have a graph attribute called mapping with a dictionary mapping
+        node names in G and in the auxiliary digraph. If provided
+        it will be reused instead of recreated. Default value: None.
+
+    residual : NetworkX DiGraph
+        Residual network to compute maximum flow. If provided it will be
+        reused instead of recreated. Default value: None.
+
+    Returns
+    -------
+    paths : generator
+        Generator of node disjoint paths.
+
+    Raises
+    ------
+    NetworkXNoPath
+        If there is no path between source and target.
+
+    NetworkXError
+        If source or target are not in the graph G.
+
+    Examples
+    --------
+    We use in this example the platonic icosahedral graph, which has node
+    connectivity 5, thus there are 5 node disjoint paths between any pair
+    of non neighbor nodes.
+
+    >>> G = nx.icosahedral_graph()
+    >>> len(list(nx.node_disjoint_paths(G, 0, 6)))
+    5
+
+    If you need to compute node disjoint paths between several pairs of
+    nodes in the same graph, it is recommended that you reuse the
+    data structures that NetworkX uses in the computation: the
+    auxiliary digraph for node connectivity and node cuts, and the
+    residual network for the underlying maximum flow computation.
+
+    Example of how to compute node disjoint paths reusing the data
+    structures:
+
+    >>> # You also have to explicitly import the function for
+    >>> # building the auxiliary digraph from the connectivity package
+    >>> from networkx.algorithms.connectivity import build_auxiliary_node_connectivity
+    >>> H = build_auxiliary_node_connectivity(G)
+    >>> # And the function for building the residual network from the
+    >>> # flow package
+    >>> from networkx.algorithms.flow import build_residual_network
+    >>> # Note that the auxiliary digraph has an edge attribute named capacity
+    >>> R = build_residual_network(H, "capacity")
+    >>> # Reuse the auxiliary digraph and the residual network by passing them
+    >>> # as arguments
+    >>> len(list(nx.node_disjoint_paths(G, 0, 6, auxiliary=H, residual=R)))
+    5
+
+    You can also use alternative flow algorithms for computing node disjoint
+    paths. For instance, in dense networks the algorithm
+    :meth:`shortest_augmenting_path` will usually perform better than
+    the default :meth:`edmonds_karp` which is faster for sparse
+    networks with highly skewed degree distributions. Alternative flow
+    functions have to be explicitly imported from the flow package.
+
+    >>> from networkx.algorithms.flow import shortest_augmenting_path
+    >>> len(list(nx.node_disjoint_paths(G, 0, 6, flow_func=shortest_augmenting_path)))
+    5
+
+    Notes
+    -----
+    This is a flow based implementation of node disjoint paths. We compute
+    the maximum flow between source and target on an auxiliary directed
+    network. The saturated edges in the residual network after running the
+    maximum flow algorithm correspond to node disjoint paths between source
+    and target in the original network. This function handles both directed
+    and undirected graphs, and can use all flow algorithms from NetworkX flow
+    package.
+
+    See also
+    --------
+    :meth:`edge_disjoint_paths`
+    :meth:`node_connectivity`
+    :meth:`maximum_flow`
+    :meth:`edmonds_karp`
+    :meth:`preflow_push`
+    :meth:`shortest_augmenting_path`
+
+    """
+    if s not in G:
+        raise nx.NetworkXError(f"node {s} not in graph")
+    if t not in G:
+        raise nx.NetworkXError(f"node {t} not in graph")
+
+    if auxiliary is None:
+        H = build_auxiliary_node_connectivity(G)
+    else:
+        H = auxiliary
+
+    mapping = H.graph.get("mapping", None)
+    if mapping is None:
+        raise nx.NetworkXError("Invalid auxiliary digraph.")
+
+    # Maximum possible edge disjoint paths
+    possible = min(H.out_degree(f"{mapping[s]}B"), H.in_degree(f"{mapping[t]}A"))
+    if not possible:
+        raise NetworkXNoPath
+
+    if cutoff is None:
+        cutoff = possible
+    else:
+        cutoff = min(cutoff, possible)
+
+    kwargs = {
+        "flow_func": flow_func,
+        "residual": residual,
+        "auxiliary": H,
+        "cutoff": cutoff,
+    }
+
+    # The edge disjoint paths in the auxiliary digraph correspond to the node
+    # disjoint paths in the original graph.
+    paths_edges = edge_disjoint_paths(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs)
+    for path in paths_edges:
+        # Each node in the original graph maps to two nodes in auxiliary graph
+        yield list(_unique_everseen(H.nodes[node]["id"] for node in path))
+
+
+def _unique_everseen(iterable):
+    # Adapted from https://docs.python.org/3/library/itertools.html examples
+    "List unique elements, preserving order. Remember all elements ever seen."
+    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
+    seen = set()
+    seen_add = seen.add
+    for element in _filterfalse(seen.__contains__, iterable):
+        seen_add(element)
+        yield element
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_augmentation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_augmentation.py
new file mode 100644
index 00000000..278a8e36
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_augmentation.py
@@ -0,0 +1,1270 @@
+"""
+Algorithms for finding k-edge-augmentations
+
+A k-edge-augmentation is a set of edges, that once added to a graph, ensures
+that the graph is k-edge-connected; i.e. the graph cannot be disconnected
+unless k or more edges are removed.  Typically, the goal is to find the
+augmentation with minimum weight.  In general, it is not guaranteed that a
+k-edge-augmentation exists.
+
+See Also
+--------
+:mod:`edge_kcomponents` : algorithms for finding k-edge-connected components
+:mod:`connectivity` : algorithms for determining edge connectivity.
+"""
+
+import itertools as it
+import math
+from collections import defaultdict, namedtuple
+
+import networkx as nx
+from networkx.utils import not_implemented_for, py_random_state
+
+__all__ = ["k_edge_augmentation", "is_k_edge_connected", "is_locally_k_edge_connected"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def is_k_edge_connected(G, k):
+    """Tests to see if a graph is k-edge-connected.
+
+    Is it impossible to disconnect the graph by removing fewer than k edges?
+    If so, then G is k-edge-connected.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+       An undirected graph.
+
+    k : integer
+        edge connectivity to test for
+
+    Returns
+    -------
+    boolean
+        True if G is k-edge-connected.
+
+    See Also
+    --------
+    :func:`is_locally_k_edge_connected`
+
+    Examples
+    --------
+    >>> G = nx.barbell_graph(10, 0)
+    >>> nx.is_k_edge_connected(G, k=1)
+    True
+    >>> nx.is_k_edge_connected(G, k=2)
+    False
+    """
+    if k < 1:
+        raise ValueError(f"k must be positive, not {k}")
+    # First try to quickly determine if G is not k-edge-connected
+    if G.number_of_nodes() < k + 1:
+        return False
+    elif any(d < k for n, d in G.degree()):
+        return False
+    else:
+        # Otherwise perform the full check
+        if k == 1:
+            return nx.is_connected(G)
+        elif k == 2:
+            return nx.is_connected(G) and not nx.has_bridges(G)
+        else:
+            return nx.edge_connectivity(G, cutoff=k) >= k
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def is_locally_k_edge_connected(G, s, t, k):
+    """Tests to see if an edge in a graph is locally k-edge-connected.
+
+    Is it impossible to disconnect s and t by removing fewer than k edges?
+    If so, then s and t are locally k-edge-connected in G.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+       An undirected graph.
+
+    s : node
+        Source node
+
+    t : node
+        Target node
+
+    k : integer
+        local edge connectivity for nodes s and t
+
+    Returns
+    -------
+    boolean
+        True if s and t are locally k-edge-connected in G.
+
+    See Also
+    --------
+    :func:`is_k_edge_connected`
+
+    Examples
+    --------
+    >>> from networkx.algorithms.connectivity import is_locally_k_edge_connected
+    >>> G = nx.barbell_graph(10, 0)
+    >>> is_locally_k_edge_connected(G, 5, 15, k=1)
+    True
+    >>> is_locally_k_edge_connected(G, 5, 15, k=2)
+    False
+    >>> is_locally_k_edge_connected(G, 1, 5, k=2)
+    True
+    """
+    if k < 1:
+        raise ValueError(f"k must be positive, not {k}")
+
+    # First try to quickly determine s, t is not k-locally-edge-connected in G
+    if G.degree(s) < k or G.degree(t) < k:
+        return False
+    else:
+        # Otherwise perform the full check
+        if k == 1:
+            return nx.has_path(G, s, t)
+        else:
+            localk = nx.connectivity.local_edge_connectivity(G, s, t, cutoff=k)
+            return localk >= k
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def k_edge_augmentation(G, k, avail=None, weight=None, partial=False):
+    """Finds set of edges to k-edge-connect G.
+
+    Adding edges from the augmentation to G make it impossible to disconnect G
+    unless k or more edges are removed. This function uses the most efficient
+    function available (depending on the value of k and if the problem is
+    weighted or unweighted) to search for a minimum weight subset of available
+    edges that k-edge-connects G. In general, finding a k-edge-augmentation is
+    NP-hard, so solutions are not guaranteed to be minimal. Furthermore, a
+    k-edge-augmentation may not exist.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+       An undirected graph.
+
+    k : integer
+        Desired edge connectivity
+
+    avail : dict or a set of 2 or 3 tuples
+        The available edges that can be used in the augmentation.
+
+        If unspecified, then all edges in the complement of G are available.
+        Otherwise, each item is an available edge (with an optional weight).
+
+        In the unweighted case, each item is an edge ``(u, v)``.
+
+        In the weighted case, each item is a 3-tuple ``(u, v, d)`` or a dict
+        with items ``(u, v): d``.  The third item, ``d``, can be a dictionary
+        or a real number.  If ``d`` is a dictionary ``d[weight]``
+        correspondings to the weight.
+
+    weight : string
+        key to use to find weights if ``avail`` is a set of 3-tuples where the
+        third item in each tuple is a dictionary.
+
+    partial : boolean
+        If partial is True and no feasible k-edge-augmentation exists, then all
+        a partial k-edge-augmentation is generated. Adding the edges in a
+        partial augmentation to G, minimizes the number of k-edge-connected
+        components and maximizes the edge connectivity between those
+        components. For details, see :func:`partial_k_edge_augmentation`.
+
+    Yields
+    ------
+    edge : tuple
+        Edges that, once added to G, would cause G to become k-edge-connected.
+        If partial is False, an error is raised if this is not possible.
+        Otherwise, generated edges form a partial augmentation, which
+        k-edge-connects any part of G where it is possible, and maximally
+        connects the remaining parts.
+
+    Raises
+    ------
+    NetworkXUnfeasible
+        If partial is False and no k-edge-augmentation exists.
+
+    NetworkXNotImplemented
+        If the input graph is directed or a multigraph.
+
+    ValueError:
+        If k is less than 1
+
+    Notes
+    -----
+    When k=1 this returns an optimal solution.
+
+    When k=2 and ``avail`` is None, this returns an optimal solution.
+    Otherwise when k=2, this returns a 2-approximation of the optimal solution.
+
+    For k>3, this problem is NP-hard and this uses a randomized algorithm that
+        produces a feasible solution, but provides no guarantees on the
+        solution weight.
+
+    Examples
+    --------
+    >>> # Unweighted cases
+    >>> G = nx.path_graph((1, 2, 3, 4))
+    >>> G.add_node(5)
+    >>> sorted(nx.k_edge_augmentation(G, k=1))
+    [(1, 5)]
+    >>> sorted(nx.k_edge_augmentation(G, k=2))
+    [(1, 5), (5, 4)]
+    >>> sorted(nx.k_edge_augmentation(G, k=3))
+    [(1, 4), (1, 5), (2, 5), (3, 5), (4, 5)]
+    >>> complement = list(nx.k_edge_augmentation(G, k=5, partial=True))
+    >>> G.add_edges_from(complement)
+    >>> nx.edge_connectivity(G)
+    4
+
+    >>> # Weighted cases
+    >>> G = nx.path_graph((1, 2, 3, 4))
+    >>> G.add_node(5)
+    >>> # avail can be a tuple with a dict
+    >>> avail = [(1, 5, {"weight": 11}), (2, 5, {"weight": 10})]
+    >>> sorted(nx.k_edge_augmentation(G, k=1, avail=avail, weight="weight"))
+    [(2, 5)]
+    >>> # or avail can be a 3-tuple with a real number
+    >>> avail = [(1, 5, 11), (2, 5, 10), (4, 3, 1), (4, 5, 51)]
+    >>> sorted(nx.k_edge_augmentation(G, k=2, avail=avail))
+    [(1, 5), (2, 5), (4, 5)]
+    >>> # or avail can be a dict
+    >>> avail = {(1, 5): 11, (2, 5): 10, (4, 3): 1, (4, 5): 51}
+    >>> sorted(nx.k_edge_augmentation(G, k=2, avail=avail))
+    [(1, 5), (2, 5), (4, 5)]
+    >>> # If augmentation is infeasible, then a partial solution can be found
+    >>> avail = {(1, 5): 11}
+    >>> sorted(nx.k_edge_augmentation(G, k=2, avail=avail, partial=True))
+    [(1, 5)]
+    """
+    try:
+        if k <= 0:
+            raise ValueError(f"k must be a positive integer, not {k}")
+        elif G.number_of_nodes() < k + 1:
+            msg = f"impossible to {k} connect in graph with less than {k + 1} nodes"
+            raise nx.NetworkXUnfeasible(msg)
+        elif avail is not None and len(avail) == 0:
+            if not nx.is_k_edge_connected(G, k):
+                raise nx.NetworkXUnfeasible("no available edges")
+            aug_edges = []
+        elif k == 1:
+            aug_edges = one_edge_augmentation(
+                G, avail=avail, weight=weight, partial=partial
+            )
+        elif k == 2:
+            aug_edges = bridge_augmentation(G, avail=avail, weight=weight)
+        else:
+            # raise NotImplementedError(f'not implemented for k>2. k={k}')
+            aug_edges = greedy_k_edge_augmentation(
+                G, k=k, avail=avail, weight=weight, seed=0
+            )
+        # Do eager evaluation so we can catch any exceptions
+        # Before executing partial code.
+        yield from list(aug_edges)
+    except nx.NetworkXUnfeasible:
+        if partial:
+            # Return all available edges
+            if avail is None:
+                aug_edges = complement_edges(G)
+            else:
+                # If we can't k-edge-connect the entire graph, try to
+                # k-edge-connect as much as possible
+                aug_edges = partial_k_edge_augmentation(
+                    G, k=k, avail=avail, weight=weight
+                )
+            yield from aug_edges
+        else:
+            raise
+
+
+@nx._dispatchable
+def partial_k_edge_augmentation(G, k, avail, weight=None):
+    """Finds augmentation that k-edge-connects as much of the graph as possible.
+
+    When a k-edge-augmentation is not possible, we can still try to find a
+    small set of edges that partially k-edge-connects as much of the graph as
+    possible. All possible edges are generated between remaining parts.
+    This minimizes the number of k-edge-connected subgraphs in the resulting
+    graph and maximizes the edge connectivity between those subgraphs.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+       An undirected graph.
+
+    k : integer
+        Desired edge connectivity
+
+    avail : dict or a set of 2 or 3 tuples
+        For more details, see :func:`k_edge_augmentation`.
+
+    weight : string
+        key to use to find weights if ``avail`` is a set of 3-tuples.
+        For more details, see :func:`k_edge_augmentation`.
+
+    Yields
+    ------
+    edge : tuple
+        Edges in the partial augmentation of G. These edges k-edge-connect any
+        part of G where it is possible, and maximally connects the remaining
+        parts. In other words, all edges from avail are generated except for
+        those within subgraphs that have already become k-edge-connected.
+
+    Notes
+    -----
+    Construct H that augments G with all edges in avail.
+    Find the k-edge-subgraphs of H.
+    For each k-edge-subgraph, if the number of nodes is more than k, then find
+    the k-edge-augmentation of that graph and add it to the solution. Then add
+    all edges in avail between k-edge subgraphs to the solution.
+
+    See Also
+    --------
+    :func:`k_edge_augmentation`
+
+    Examples
+    --------
+    >>> G = nx.path_graph((1, 2, 3, 4, 5, 6, 7))
+    >>> G.add_node(8)
+    >>> avail = [(1, 3), (1, 4), (1, 5), (2, 4), (2, 5), (3, 5), (1, 8)]
+    >>> sorted(partial_k_edge_augmentation(G, k=2, avail=avail))
+    [(1, 5), (1, 8)]
+    """
+
+    def _edges_between_disjoint(H, only1, only2):
+        """finds edges between disjoint nodes"""
+        only1_adj = {u: set(H.adj[u]) for u in only1}
+        for u, neighbs in only1_adj.items():
+            # Find the neighbors of u in only1 that are also in only2
+            neighbs12 = neighbs.intersection(only2)
+            for v in neighbs12:
+                yield (u, v)
+
+    avail_uv, avail_w = _unpack_available_edges(avail, weight=weight, G=G)
+
+    # Find which parts of the graph can be k-edge-connected
+    H = G.copy()
+    H.add_edges_from(
+        (
+            (u, v, {"weight": w, "generator": (u, v)})
+            for (u, v), w in zip(avail, avail_w)
+        )
+    )
+    k_edge_subgraphs = list(nx.k_edge_subgraphs(H, k=k))
+
+    # Generate edges to k-edge-connect internal subgraphs
+    for nodes in k_edge_subgraphs:
+        if len(nodes) > 1:
+            # Get the k-edge-connected subgraph
+            C = H.subgraph(nodes).copy()
+            # Find the internal edges that were available
+            sub_avail = {
+                d["generator"]: d["weight"]
+                for (u, v, d) in C.edges(data=True)
+                if "generator" in d
+            }
+            # Remove potential augmenting edges
+            C.remove_edges_from(sub_avail.keys())
+            # Find a subset of these edges that makes the component
+            # k-edge-connected and ignore the rest
+            yield from nx.k_edge_augmentation(C, k=k, avail=sub_avail)
+
+    # Generate all edges between CCs that could not be k-edge-connected
+    for cc1, cc2 in it.combinations(k_edge_subgraphs, 2):
+        for u, v in _edges_between_disjoint(H, cc1, cc2):
+            d = H.get_edge_data(u, v)
+            edge = d.get("generator", None)
+            if edge is not None:
+                yield edge
+
+
+@not_implemented_for("multigraph")
+@not_implemented_for("directed")
+@nx._dispatchable
+def one_edge_augmentation(G, avail=None, weight=None, partial=False):
+    """Finds minimum weight set of edges to connect G.
+
+    Equivalent to :func:`k_edge_augmentation` when k=1. Adding the resulting
+    edges to G will make it 1-edge-connected. The solution is optimal for both
+    weighted and non-weighted variants.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+       An undirected graph.
+
+    avail : dict or a set of 2 or 3 tuples
+        For more details, see :func:`k_edge_augmentation`.
+
+    weight : string
+        key to use to find weights if ``avail`` is a set of 3-tuples.
+        For more details, see :func:`k_edge_augmentation`.
+
+    partial : boolean
+        If partial is True and no feasible k-edge-augmentation exists, then the
+        augmenting edges minimize the number of connected components.
+
+    Yields
+    ------
+    edge : tuple
+        Edges in the one-augmentation of G
+
+    Raises
+    ------
+    NetworkXUnfeasible
+        If partial is False and no one-edge-augmentation exists.
+
+    Notes
+    -----
+    Uses either :func:`unconstrained_one_edge_augmentation` or
+    :func:`weighted_one_edge_augmentation` depending on whether ``avail`` is
+    specified. Both algorithms are based on finding a minimum spanning tree.
+    As such both algorithms find optimal solutions and run in linear time.
+
+    See Also
+    --------
+    :func:`k_edge_augmentation`
+    """
+    if avail is None:
+        return unconstrained_one_edge_augmentation(G)
+    else:
+        return weighted_one_edge_augmentation(
+            G, avail=avail, weight=weight, partial=partial
+        )
+
+
@not_implemented_for("multigraph")
@not_implemented_for("directed")
@nx._dispatchable
def bridge_augmentation(G, avail=None, weight=None):
    """Finds a set of edges whose addition bridge-connects G.

    Equivalent to :func:`k_edge_augmentation` when k=2, and partial=False.
    Adding the resulting edges to G will make it 2-edge-connected.  If no
    constraints are specified the returned set of edges is minimum and
    optimal, otherwise the solution is approximated.

    Parameters
    ----------
    G : NetworkX graph
       An undirected graph.

    avail : dict or a set of 2 or 3 tuples
        For more details, see :func:`k_edge_augmentation`.

    weight : string
        key to use to find weights if ``avail`` is a set of 3-tuples.
        For more details, see :func:`k_edge_augmentation`.

    Yields
    ------
    edge : tuple
        Edges in the bridge-augmentation of G

    Raises
    ------
    NetworkXUnfeasible
        If no bridge-augmentation exists.

    Notes
    -----
    If there are no constraints the solution can be computed in linear time
    using :func:`unconstrained_bridge_augmentation`. Otherwise, the problem
    becomes NP-hard and the solution is approximated by
    :func:`weighted_bridge_augmentation`.

    See Also
    --------
    :func:`k_edge_augmentation`
    """
    # 2-edge-connectivity requires at least 3 nodes (a single edge between
    # two nodes is itself a bridge).
    if G.number_of_nodes() < 3:
        raise nx.NetworkXUnfeasible("impossible to bridge connect less than 3 nodes")
    # Unconstrained: exact linear-time algorithm. Constrained: approximation.
    if avail is None:
        return unconstrained_bridge_augmentation(G)
    else:
        return weighted_bridge_augmentation(G, avail, weight=weight)
+
+
+# --- Algorithms and Helpers ---
+
+
def _ordered(u, v):
    """Return the endpoints of an undirected edge in canonical sorted order."""
    if u < v:
        return (u, v)
    return (v, u)
+
+
def _unpack_available_edges(avail, weight=None, G=None):
    """Split ``avail`` into parallel lists of candidate edges and weights.

    ``avail`` may be a dict mapping edges to weights, or an iterable of
    2-tuples (implicit weight 1) and 3-tuples whose last item is either a
    numeric weight or an edge-attribute dict keyed by ``weight``.  If ``G``
    is given, candidate edges already present in ``G`` are filtered out.
    """
    key = "weight" if weight is None else weight

    if isinstance(avail, dict):
        avail_uv = list(avail.keys())
        avail_w = list(avail.values())
    else:

        def _weight_of(last):
            # A dict-like third element stores the weight under `key`; any
            # value that cannot be indexed by a string is the weight itself.
            try:
                return last[key]
            except TypeError:
                return last

        avail_uv = []
        avail_w = []
        for tup in avail:
            avail_uv.append(tup[0:2])
            avail_w.append(1 if len(tup) == 2 else _weight_of(tup[-1]))

    if G is not None:
        # Edges already in the graph contribute nothing; drop them.
        keep = [not G.has_edge(u, v) for u, v in avail_uv]
        avail_uv = list(it.compress(avail_uv, keep))
        avail_w = list(it.compress(avail_w, keep))
    return avail_uv, avail_w
+
+
MetaEdge = namedtuple("MetaEdge", ("meta_uv", "uv", "w"))


def _lightest_meta_edges(mapping, avail_uv, avail_w):
    """Maps available edges in the original graph to edges in the metagraph.

    Parameters
    ----------
    mapping : dict
        mapping produced by :func:`collapse`, that maps each node in the
        original graph to a node in the meta graph

    avail_uv : list
        list of edges

    avail_w : list
        list of edge weights

    Notes
    -----
    Edges whose endpoints fall in the same meta-node are ignored (those
    nodes are already in the same k-edge-connected component), and only the
    minimum weight edge bridging each pair of meta-nodes is yielded.

    Examples
    --------
    >>> # Each group represents a meta-node
    >>> groups = ([1, 2, 3], [4, 5], [6])
    >>> mapping = {n: meta_n for meta_n, ns in enumerate(groups) for n in ns}
    >>> avail_uv = [(1, 2), (3, 6), (1, 4), (5, 2), (6, 1), (2, 6), (3, 1)]
    >>> avail_w = [20, 99, 20, 15, 50, 99, 20]
    >>> sorted(_lightest_meta_edges(mapping, avail_uv, avail_w))
    [MetaEdge(meta_uv=(0, 1), uv=(5, 2), w=15), MetaEdge(meta_uv=(0, 2), uv=(6, 1), w=50)]
    """
    candidates = defaultdict(list)
    for w, (u, v) in zip(avail_w, avail_uv):
        mu, mv = mapping[u], mapping[v]
        # Canonical (sorted) order lets the meta-edge serve as a dict key.
        if mv < mu:
            mu, mv = mv, mu
        candidates[(mu, mv)].append((w, u, v))

    for (mu, mv), group in candidates.items():
        # A self meta-edge joins nodes already in one component; skip it.
        if mu == mv:
            continue
        # The lexicographic minimum picks the lightest edge in the group.
        w, u, v = min(group)
        yield MetaEdge((mu, mv), (u, v), w)
+
+
@nx._dispatchable
def unconstrained_one_edge_augmentation(G):
    """Finds the smallest set of edges to connect G.

    This is a variant of the unweighted MST problem.
    If G is not empty, a feasible solution always exists.

    Parameters
    ----------
    G : NetworkX graph
       An undirected graph.

    Yields
    ------
    edge : tuple
        Edges in the one-edge-augmentation of G

    See Also
    --------
    :func:`one_edge_augmentation`
    :func:`k_edge_augmentation`

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (2, 3), (4, 5)])
    >>> G.add_nodes_from([6, 7, 8])
    >>> sorted(unconstrained_one_edge_augmentation(G))
    [(1, 4), (4, 6), (6, 7), (7, 8)]
    """
    # Collapse every connected component of G into a single meta-node.
    ccs1 = list(nx.connected_components(G))
    C = collapse(G, ccs1)
    # Pick one representative original node per meta-node (the first node
    # encountered in the mapping's insertion order).
    representative = {}
    for node, meta_node in C.graph["mapping"].items():
        representative.setdefault(meta_node, node)
    # Chaining the meta-nodes into a path is a spanning tree of the
    # metagraph, i.e. the fewest edges needed to connect G.
    meta_nodes = list(C.nodes())
    for mu, mv in zip(meta_nodes, meta_nodes[1:]):
        yield (representative[mu], representative[mv])
+
+
@nx._dispatchable
def weighted_one_edge_augmentation(G, avail, weight=None, partial=False):
    """Finds the minimum weight set of edges to connect G if one exists.

    This is a variant of the weighted MST problem.

    Parameters
    ----------
    G : NetworkX graph
       An undirected graph.

    avail : dict or a set of 2 or 3 tuples
        For more details, see :func:`k_edge_augmentation`.

    weight : string
        key to use to find weights if ``avail`` is a set of 3-tuples.
        For more details, see :func:`k_edge_augmentation`.

    partial : boolean
        If partial is True and no feasible k-edge-augmentation exists, then the
        augmenting edges minimize the number of connected components.

    Yields
    ------
    edge : tuple
        Edges in the subset of avail chosen to connect G.

    See Also
    --------
    :func:`one_edge_augmentation`
    :func:`k_edge_augmentation`

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (2, 3), (4, 5)])
    >>> G.add_nodes_from([6, 7, 8])
    >>> # any edge not in avail has an implicit weight of infinity
    >>> avail = [(1, 3), (1, 5), (4, 7), (4, 8), (6, 1), (8, 1), (8, 2)]
    >>> sorted(weighted_one_edge_augmentation(G, avail))
    [(1, 5), (4, 7), (6, 1), (8, 1)]
    >>> # find another solution by giving large weights to edges in the
    >>> # previous solution (note some of the old edges must be used)
    >>> avail = [(1, 3), (1, 5, 99), (4, 7, 9), (6, 1, 99), (8, 1, 99), (8, 2)]
    >>> sorted(weighted_one_edge_augmentation(G, avail))
    [(1, 5), (4, 7), (6, 1), (8, 2)]
    """
    edges_uv, edge_weights = _unpack_available_edges(avail, weight=weight, G=G)
    # Each connected component of G becomes one node of a metagraph; an MST
    # of that metagraph then selects the cheapest set of connecting edges.
    C = collapse(G, nx.connected_components(G))
    node_to_meta = C.graph["mapping"]
    # Keep only the lightest candidate edge bridging each pair of meta-nodes
    # and remember which original edge generated each meta-edge.
    for (mu, mv), uv, w in _lightest_meta_edges(node_to_meta, edges_uv, edge_weights):
        C.add_edge(mu, mv, weight=w, generator=uv)
    # Find MST of the meta graph
    meta_mst = nx.minimum_spanning_tree(C)
    if not (partial or nx.is_connected(meta_mst)):
        raise nx.NetworkXUnfeasible("Not possible to connect G with available edges")
    # Map the chosen meta-edges back to the candidate edges that made them.
    for _, _, attrs in meta_mst.edges(data=True):
        if "generator" in attrs:
            yield attrs["generator"]
+
+
@nx._dispatchable
def unconstrained_bridge_augmentation(G):
    """Finds an optimal 2-edge-augmentation of G using the fewest edges.

    This is an implementation of the algorithm detailed in [1]_.
    The basic idea is to construct a meta-graph of bridge-ccs, connect leaf
    nodes of the trees to connect the entire graph, and finally connect the
    leafs of the tree in dfs-preorder to bridge connect the entire graph.

    Parameters
    ----------
    G : NetworkX graph
       An undirected graph.

    Yields
    ------
    edge : tuple
        Edges in the bridge augmentation of G

    Notes
    -----
    Input: a graph G.
    First find the bridge components of G and collapse each bridge-cc into a
    node of a metagraph graph C, which is guaranteed to be a forest of trees.

    C contains p "leafs" --- nodes with exactly one incident edge.
    C contains q "isolated nodes" --- nodes with no incident edges.

    Theorem: If p + q > 1, then at least :math:`ceil(p / 2) + q` edges are
        needed to bridge connect C. This algorithm achieves this min number.

    The method first adds enough edges to make G into a tree and then pairs
    leafs in a simple fashion.

    Let n be the number of trees in C. Let v(i) be an isolated vertex in the
    i-th tree if one exists, otherwise it is a pair of distinct leafs nodes
    in the i-th tree. Alternating edges from these sets (i.e.  adding edges
    A1 = [(v(i)[0], v(i + 1)[1]), v(i + 1)[0], v(i + 2)[1])...]) connects C
    into a tree T. This tree has p' = p + 2q - 2(n -1) leafs and no isolated
    vertices. A1 has n - 1 edges. The next step finds ceil(p' / 2) edges to
    biconnect any tree with p' leafs.

    Convert T into an arborescence T' by picking an arbitrary root node with
    degree >= 2 and directing all edges away from the root. Note the
    implementation implicitly constructs T'.

    The leafs of T are the nodes with no existing edges in T'.
    Order the leafs of T' by DFS preorder. Then break this list in half
    and add the zipped pairs to A2.

    The set A = A1 + A2 is the minimum augmentation in the metagraph.

    To convert this to edges in the original graph, each meta-node is mapped
    back to an arbitrary node inside its bridge component (minimum degree
    nodes are preferred).

    References
    ----------
    .. [1] Eswaran, Kapali P., and R. Endre Tarjan. (1975) Augmentation problems.
        http://epubs.siam.org/doi/abs/10.1137/0205044

    See Also
    --------
    :func:`bridge_augmentation`
    :func:`k_edge_augmentation`

    Examples
    --------
    >>> G = nx.path_graph((1, 2, 3, 4, 5, 6, 7))
    >>> sorted(unconstrained_bridge_augmentation(G))
    [(1, 7)]
    >>> G = nx.path_graph((1, 2, 3, 2, 4, 5, 6, 7))
    >>> sorted(unconstrained_bridge_augmentation(G))
    [(1, 3), (3, 7)]
    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2)])
    >>> G.add_node(4)
    >>> sorted(unconstrained_bridge_augmentation(G))
    [(1, 4), (4, 0)]
    """
    # -----
    # Mapping of terms from (Eswaran and Tarjan):
    #     G = G_0 - the input graph
    #     C = G_0' - the bridge condensation of G. (This is a forest of trees)
    #     A1 = A_1 - the edges to connect the forest into a tree
    #         leaf = pendant - a node with degree of 1

    #     alpha(v) = maps the node v in G to its meta-node in C
    #     beta(x) = maps the meta-node x in C to any node in the bridge
    #         component of G corresponding to x.

    # find the 2-edge-connected components of G
    bridge_ccs = list(nx.connectivity.bridge_components(G))
    # condense G into an forest C
    C = collapse(G, bridge_ccs)

    # Choose pairs of distinct leaf nodes in each tree. If this is not
    # possible then make a pair using the single isolated node in the tree.
    vset1 = [
        tuple(cc) * 2  # case1: an isolated node
        if len(cc) == 1
        else sorted(cc, key=C.degree)[0:2]  # case2: pair of leaf nodes
        for cc in nx.connected_components(C)
    ]
    if len(vset1) > 1:
        # Use this set to construct edges that connect C into a tree.
        nodes1 = [vs[0] for vs in vset1]
        nodes2 = [vs[1] for vs in vset1]
        A1 = list(zip(nodes1[1:], nodes2))
    else:
        A1 = []
    # Connect each tree in the forest to construct an arborescence
    T = C.copy()
    T.add_edges_from(A1)

    # If there are only two leaf nodes, we simply connect them.
    leafs = [n for n, d in T.degree() if d == 1]
    # NOTE: this must be a single if/elif/else chain. With independent `if`
    # statements the final `else` also ran for the one-leaf case, overwriting
    # A2 and pairing the lone leaf with itself.
    if len(leafs) == 1:
        A2 = []
    elif len(leafs) == 2:
        A2 = [tuple(leafs)]
    else:
        # Choose an arbitrary non-leaf root
        try:
            root = next(n for n, d in T.degree() if d > 1)
        except StopIteration:  # no nodes found with degree > 1
            return
        # order the leaves of C by (induced directed) preorder
        v2 = [n for n in nx.dfs_preorder_nodes(T, root) if T.degree(n) == 1]
        # connecting first half of the leafs in pre-order to the second
        # half will bridge connect the tree with the fewest edges.
        half = math.ceil(len(v2) / 2)
        A2 = list(zip(v2[:half], v2[-half:]))

    # collect the edges used to augment the original forest
    aug_tree_edges = A1 + A2

    # Construct the mapping (beta) from meta-nodes to regular nodes
    inverse = defaultdict(list)
    for k, v in C.graph["mapping"].items():
        inverse[v].append(k)
    # sort so we choose minimum degree nodes first
    inverse = {
        mu: sorted(mapped, key=lambda u: (G.degree(u), u))
        for mu, mapped in inverse.items()
    }

    # For each meta-edge, map back to an arbitrary pair in the original graph
    G2 = G.copy()
    for mu, mv in aug_tree_edges:
        # Find the first available edge that doesn't exist and return it
        for u, v in it.product(inverse[mu], inverse[mv]):
            if not G2.has_edge(u, v):
                G2.add_edge(u, v)
                yield u, v
                break
+                break
+
+
@nx._dispatchable
def weighted_bridge_augmentation(G, avail, weight=None):
    """Finds an approximate min-weight 2-edge-augmentation of G.

    This is an implementation of the approximation algorithm detailed in [1]_.
    It chooses a set of edges from avail to add to G that renders it
    2-edge-connected if such a subset exists.  This is done by finding a
    minimum spanning arborescence of a specially constructed metagraph.

    Parameters
    ----------
    G : NetworkX graph
       An undirected graph.

    avail : set of 2 or 3 tuples.
        candidate edges (with optional weights) to choose from

    weight : string
        key to use to find weights if avail is a set of 3-tuples where the
        third item in each tuple is a dictionary.

    Yields
    ------
    edge : tuple
        Edges in the subset of avail chosen to bridge augment G.

    Notes
    -----
    Finding a weighted 2-edge-augmentation is NP-hard.
    Any edge not in ``avail`` is considered to have a weight of infinity.
    The approximation factor is 2 if ``G`` is connected and 3 if it is not.
    Runs in :math:`O(m + n log(n))` time

    References
    ----------
    .. [1] Khuller, Samir, and Ramakrishna Thurimella. (1993) Approximation
        algorithms for graph augmentation.
        http://www.sciencedirect.com/science/article/pii/S0196677483710102

    See Also
    --------
    :func:`bridge_augmentation`
    :func:`k_edge_augmentation`

    Examples
    --------
    >>> G = nx.path_graph((1, 2, 3, 4))
    >>> # When the weights are equal, (1, 4) is the best
    >>> avail = [(1, 4, 1), (1, 3, 1), (2, 4, 1)]
    >>> sorted(weighted_bridge_augmentation(G, avail))
    [(1, 4)]
    >>> # Giving (1, 4) a high weight makes the two edge solution the best.
    >>> avail = [(1, 4, 1000), (1, 3, 1), (2, 4, 1)]
    >>> sorted(weighted_bridge_augmentation(G, avail))
    [(1, 3), (2, 4)]
    >>> # ------
    >>> G = nx.path_graph((1, 2, 3, 4))
    >>> G.add_node(5)
    >>> avail = [(1, 5, 11), (2, 5, 10), (4, 3, 1), (4, 5, 1)]
    >>> sorted(weighted_bridge_augmentation(G, avail=avail))
    [(1, 5), (4, 5)]
    >>> avail = [(1, 5, 11), (2, 5, 10), (4, 3, 1), (4, 5, 51)]
    >>> sorted(weighted_bridge_augmentation(G, avail=avail))
    [(1, 5), (2, 5), (4, 5)]
    """

    if weight is None:
        weight = "weight"

    # If input G is not connected the approximation factor increases to 3
    if not nx.is_connected(G):
        # First connect G (one-edge-augmentation), then bridge-augment the
        # connected result H.
        H = G.copy()
        connectors = list(one_edge_augmentation(H, avail=avail, weight=weight))
        H.add_edges_from(connectors)

        yield from connectors
    else:
        connectors = []
        H = G

    # Without any candidate edges, feasibility requires that H already has
    # no bridges.
    if len(avail) == 0:
        if nx.has_bridges(H):
            raise nx.NetworkXUnfeasible("no augmentation possible")

    # Discard candidate edges that already exist in H.
    avail_uv, avail_w = _unpack_available_edges(avail, weight=weight, G=H)

    # Collapse input into a metagraph. Meta nodes are bridge-ccs
    bridge_ccs = nx.connectivity.bridge_components(H)
    C = collapse(H, bridge_ccs)

    # Use the meta graph to shrink avail to a small feasible subset
    mapping = C.graph["mapping"]
    # Choose the minimum weight feasible edge in each group
    meta_to_wuv = {
        (mu, mv): (w, uv)
        for (mu, mv), uv, w in _lightest_meta_edges(mapping, avail_uv, avail_w)
    }

    # Mapping of terms from (Khuller and Thurimella):
    #     C         : G_0 = (V, E^0)
    #        This is the metagraph where each node is a 2-edge-cc in G.
    #        The edges in C represent bridges in the original graph.
    #     (mu, mv)  : E - E^0  # they group both avail and given edges in E
    #     T         : \Gamma
    #     D         : G^D = (V, E_D)

    #     The paper uses ancestor because children point to parents, which is
    #     contrary to networkx standards.  So, we actually need to run
    #     nx.least_common_ancestor on the reversed Tree.

    # Pick an arbitrary leaf from C as the root
    try:
        root = next(n for n, d in C.degree() if d == 1)
    except StopIteration:  # no nodes found with degree == 1
        return
    # Root C into a tree TR by directing all edges away from the root
    # Note in their paper T directs edges towards the root
    TR = nx.dfs_tree(C, root)

    # Add to D the directed edges of T and set their weight to zero
    # This indicates that it costs nothing to use edges that were given.
    D = nx.reverse(TR).copy()

    nx.set_edge_attributes(D, name="weight", values=0)

    # The LCA of mu and mv in T is the shared ancestor of mu and mv that is
    # located farthest from the root.
    lca_gen = nx.tree_all_pairs_lowest_common_ancestor(
        TR, root=root, pairs=meta_to_wuv.keys()
    )

    for (mu, mv), lca in lca_gen:
        w, uv = meta_to_wuv[(mu, mv)]
        if lca == mu:
            # If u is an ancestor of v in TR, then add edge u->v to D
            D.add_edge(lca, mv, weight=w, generator=uv)
        elif lca == mv:
            # If v is an ancestor of u in TR, then add edge v->u to D
            D.add_edge(lca, mu, weight=w, generator=uv)
        else:
            # If neither u nor v is a ancestor of the other in TR
            # let t = lca(TR, u, v) and add edges t->u and t->v
            # Track the original edge that GENERATED these edges.
            D.add_edge(lca, mu, weight=w, generator=uv)
            D.add_edge(lca, mv, weight=w, generator=uv)

    # Then compute a minimum rooted branching
    try:
        # Note the original edges must be directed towards to root for the
        # branching to give us a bridge-augmentation.
        A = _minimum_rooted_branching(D, root)
    except nx.NetworkXException as err:
        # If there is no branching then augmentation is not possible
        raise nx.NetworkXUnfeasible("no 2-edge-augmentation possible") from err

    # For each edge e, in the branching that did not belong to the directed
    # tree T, add the corresponding edge that **GENERATED** it (this is not
    # necessarily e itself!)

    # ensure the third case does not generate edges twice
    bridge_connectors = set()
    for mu, mv in A.edges():
        data = D.get_edge_data(mu, mv)
        if "generator" in data:
            # Add the avail edge that generated the branching edge.
            edge = data["generator"]
            bridge_connectors.add(edge)

    yield from bridge_connectors
+
+
def _minimum_rooted_branching(D, root):
    """Computes a minimum branching (rooted arborescence) of ``D`` at ``root``.

    A branching / arborescence of a rooted graph is a subgraph containing a
    directed path from the root to every other vertex; it is the directed
    analog of the minimum spanning tree problem.  Before the branching can
    be computed the graph must be rooted by removing every edge that points
    into ``root``.

    References
    ----------
    [1] Khuller, Samir (2002) Advanced Algorithms Lecture 24 Notes.
    https://web.archive.org/web/20121030033722/https://www.cs.umd.edu/class/spring2011/cmsc651/lec07.pdf
    """
    rooted = D.copy()
    # Deleting the in-edges of `root` makes it a genuine root.
    incoming = [(pred, root) for pred in D.predecessors(root)]
    rooted.remove_edges_from(incoming)
    return nx.minimum_spanning_arborescence(rooted)
+
+
@nx._dispatchable(returns_graph=True)
def collapse(G, grouped_nodes):
    """Collapses each group of nodes into a single node.

    This is similar to condensation, but works on undirected graphs.

    Parameters
    ----------
    G : NetworkX Graph

    grouped_nodes:  list or generator
       Grouping of nodes to collapse. The grouping must be disjoint.
       If grouped_nodes are strongly_connected_components then this is
       equivalent to :func:`condensation`.

    Returns
    -------
    C : NetworkX Graph
       The collapsed graph C of G with respect to the node grouping.  The node
       labels are integers corresponding to the index of the component in the
       list of grouped_nodes.  C has a graph attribute named 'mapping' with a
       dictionary mapping the original nodes to the nodes in C to which they
       belong.  Each node in C also has a node attribute 'members' with the set
       of original nodes in G that form the group that the node in C
       represents.  If G is empty, C is empty as well.

    Examples
    --------
    >>> # Collapses a graph using disjoint groups, but not necessarily connected
    >>> G = nx.Graph([(1, 0), (2, 3), (3, 1), (3, 4), (4, 5), (5, 6), (5, 7)])
    >>> G.add_node("A")
    >>> grouped_nodes = [{0, 1, 2, 3}, {5, 6, 7}]
    >>> C = collapse(G, grouped_nodes)
    >>> members = nx.get_node_attributes(C, "members")
    >>> sorted(members.keys())
    [0, 1, 2, 3]
    >>> member_values = set(map(frozenset, members.values()))
    >>> assert {0, 1, 2, 3} in member_values
    >>> assert {4} in member_values
    >>> assert {5, 6, 7} in member_values
    >>> assert {"A"} in member_values
    """
    mapping = {}
    members = {}
    C = G.__class__()
    # Seed with -1 so that `number_of_groups` comes out as 0 for an empty
    # graph and ungrouped nodes are numbered from 0 when `grouped_nodes` is
    # empty.  (Seeding with 0 produced a spurious node 0 that had no
    # 'members' attribute in both of those degenerate cases.)
    i = -1
    remaining = set(G.nodes())
    for i, group in enumerate(grouped_nodes):
        group = set(group)
        assert remaining.issuperset(
            group
        ), "grouped nodes must exist in G and be disjoint"
        remaining.difference_update(group)
        members[i] = group
        mapping.update((n, i) for n in group)
    # remaining nodes are in their own group
    for i, node in enumerate(remaining, start=i + 1):
        group = {node}
        members[i] = group
        mapping.update((n, i) for n in group)
    number_of_groups = i + 1
    C.add_nodes_from(range(number_of_groups))
    # Keep only edges that cross group boundaries; within-group edges vanish.
    C.add_edges_from(
        (mapping[u], mapping[v]) for u, v in G.edges() if mapping[u] != mapping[v]
    )
    # Add a list of members (ie original nodes) to each node (ie scc) in C.
    nx.set_node_attributes(C, name="members", values=members)
    # Add mapping dict as graph attribute
    C.graph["mapping"] = mapping
    return C
+
+
@nx._dispatchable
def complement_edges(G):
    """Returns only the edges in the complement of G

    Parameters
    ----------
    G : NetworkX Graph

    Yields
    ------
    edge : tuple
        Edges in the complement of G

    Examples
    --------
    >>> G = nx.path_graph((1, 2, 3, 4))
    >>> sorted(complement_edges(G))
    [(1, 3), (1, 4), (2, 4)]
    >>> G = nx.path_graph((1, 2, 3, 4), nx.DiGraph())
    >>> sorted(complement_edges(G))
    [(1, 3), (1, 4), (2, 1), (2, 4), (3, 1), (3, 2), (4, 1), (4, 2), (4, 3)]
    >>> G = nx.complete_graph(1000)
    >>> sorted(complement_edges(G))
    []
    """
    adj = G._adj  # local alias skips the attribute lookup on every test
    directed = G.is_directed()
    for u, v in it.combinations(G.nodes(), 2):
        # (u, v) is absent from G, so it belongs to the complement.
        if v not in adj[u]:
            yield (u, v)
        # Directed graphs must also consider the reverse orientation.
        if directed and u not in adj[v]:
            yield (v, u)
+
+
def _compat_shuffle(rng, items):
    """Shuffle ``items`` in place using ``rng.shuffle``.

    Legacy Python 2 compatibility shim; kept so existing callers keep
    working, but new code can call ``rng.shuffle`` directly.  The parameter
    was renamed from ``input`` to avoid shadowing the builtin.
    """
    rng.shuffle(items)
+
+
@not_implemented_for("multigraph")
@not_implemented_for("directed")
@py_random_state(4)
@nx._dispatchable
def greedy_k_edge_augmentation(G, k, avail=None, weight=None, seed=None):
    """Greedy algorithm for finding a k-edge-augmentation

    Parameters
    ----------
    G : NetworkX graph
       An undirected graph.

    k : integer
        Desired edge connectivity

    avail : dict or a set of 2 or 3 tuples
        For more details, see :func:`k_edge_augmentation`.

    weight : string
        key to use to find weights if ``avail`` is a set of 3-tuples.
        For more details, see :func:`k_edge_augmentation`.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Yields
    ------
    edge : tuple
        Edges in the greedy augmentation of G

    Notes
    -----
    The algorithm is simple. Edges are incrementally added between parts of the
    graph that are not yet locally k-edge-connected. Then edges from the
    augmenting set are pruned as long as k-edge-connectivity is not broken.

    This algorithm is greedy and does not provide optimality guarantees. It
    exists only to provide :func:`k_edge_augmentation` with the ability to
    generate a feasible solution for arbitrary k.

    See Also
    --------
    :func:`k_edge_augmentation`

    Examples
    --------
    >>> G = nx.path_graph((1, 2, 3, 4, 5, 6, 7))
    >>> sorted(greedy_k_edge_augmentation(G, k=2))
    [(1, 7)]
    >>> sorted(greedy_k_edge_augmentation(G, k=1, avail=[]))
    []
    >>> G = nx.path_graph((1, 2, 3, 4, 5, 6, 7))
    >>> avail = {(u, v): 1 for (u, v) in complement_edges(G)}
    >>> # randomized pruning process can produce different solutions
    >>> sorted(greedy_k_edge_augmentation(G, k=4, avail=avail, seed=2))
    [(1, 3), (1, 4), (1, 5), (1, 6), (1, 7), (2, 4), (2, 6), (3, 7), (5, 7)]
    >>> sorted(greedy_k_edge_augmentation(G, k=4, avail=avail, seed=3))
    [(1, 3), (1, 5), (1, 6), (2, 4), (2, 6), (3, 7), (4, 7), (5, 7)]
    """
    # Result set
    aug_edges = []

    # Nothing to do if G already satisfies the connectivity requirement.
    done = is_k_edge_connected(G, k)
    if done:
        return
    if avail is None:
        # all edges are available
        avail_uv = list(complement_edges(G))
        avail_w = [1] * len(avail_uv)
    else:
        # Get the unique set of unweighted edges
        avail_uv, avail_w = _unpack_available_edges(avail, weight=weight, G=G)

    # Greedy: order lightest edges. Use degree sum to tie-break
    tiebreaker = [sum(map(G.degree, uv)) for uv in avail_uv]
    avail_wduv = sorted(zip(avail_w, tiebreaker, avail_uv))
    avail_uv = [uv for w, d, uv in avail_wduv]

    # Incrementally add edges in until we are k-connected
    H = G.copy()
    for u, v in avail_uv:
        done = False
        if not is_locally_k_edge_connected(H, u, v, k=k):
            # Only add edges in parts that are not yet locally k-edge-connected
            aug_edges.append((u, v))
            H.add_edge(u, v)
            # Did adding this edge help?
            # (Global connectivity requires min degree >= k, so the cheap
            # degree test safely gates the expensive global check.)
            if H.degree(u) >= k and H.degree(v) >= k:
                done = is_k_edge_connected(H, k)
        if done:
            break

    # Check for feasibility
    if not done:
        raise nx.NetworkXUnfeasible("not able to k-edge-connect with available edges")

    # Randomized attempt to reduce the size of the solution
    _compat_shuffle(seed, aug_edges)
    # Iterate over a copy because aug_edges is mutated during the loop.
    for u, v in list(aug_edges):
        # Don't remove if we know it would break connectivity
        if H.degree(u) <= k or H.degree(v) <= k:
            continue
        H.remove_edge(u, v)
        aug_edges.remove((u, v))
        if not is_k_edge_connected(H, k=k):
            # If removing this edge breaks feasibility, undo
            H.add_edge(u, v)
            aug_edges.append((u, v))

    # Generate results
    yield from aug_edges
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_kcomponents.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_kcomponents.py
new file mode 100644
index 00000000..96886f2b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_kcomponents.py
@@ -0,0 +1,592 @@
+"""
+Algorithms for finding k-edge-connected components and subgraphs.
+
+A k-edge-connected component (k-edge-cc) is a maximal set of nodes in G, such
+that all pairs of node have an edge-connectivity of at least k.
+
+A k-edge-connected subgraph (k-edge-subgraph) is a maximal set of nodes in G,
+such that the subgraph of G defined by the nodes has an edge-connectivity at
+least k.
+"""
+
+import itertools as it
+from functools import partial
+
+import networkx as nx
+from networkx.utils import arbitrary_element, not_implemented_for
+
+__all__ = [
+    "k_edge_components",
+    "k_edge_subgraphs",
+    "bridge_components",
+    "EdgeComponentAuxGraph",
+]
+
+
@not_implemented_for("multigraph")
@nx._dispatchable
def k_edge_components(G, k):
    """Generates nodes in each maximal k-edge-connected component in G.

    Parameters
    ----------
    G : NetworkX graph

    k : Integer
        Desired edge connectivity

    Returns
    -------
    k_edge_components : a generator of k-edge-ccs. Each set of returned nodes
       will have k-edge-connectivity in the graph G.

    See Also
    --------
    :func:`local_edge_connectivity`
    :func:`k_edge_subgraphs` : similar to this function, but the subgraph
        defined by the nodes must also have k-edge-connectivity.
    :func:`k_components` : similar to this function, but uses node-connectivity
        instead of edge-connectivity

    Raises
    ------
    NetworkXNotImplemented
        If the input graph is a multigraph.

    ValueError:
        If k is less than 1

    Notes
    -----
    The cheapest applicable algorithm is selected based on `k` and whether
    `G` is directed:

    * k=1 reduces to (strongly) connected components.
    * k=2 on an undirected graph uses the bridge-based decomposition
      from _[1] (chain decomposition).
    * Every other case falls back to the auxiliary-graph algorithm of _[2].

    Examples
    --------
    >>> import itertools as it
    >>> from networkx.utils import pairwise
    >>> paths = [
    ...     (1, 2, 4, 3, 1, 4),
    ...     (5, 6, 7, 8, 5, 7, 8, 6),
    ... ]
    >>> G = nx.Graph()
    >>> G.add_nodes_from(it.chain(*paths))
    >>> G.add_edges_from(it.chain(*[pairwise(path) for path in paths]))
    >>> # note this returns {1, 4} unlike k_edge_subgraphs
    >>> sorted(map(sorted, nx.k_edge_components(G, k=3)))
    [[1, 4], [2], [3], [5, 6, 7, 8]]

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Bridge_%28graph_theory%29
    .. [2] Wang, Tianhao, et al. (2015) A simple algorithm for finding all
        k-edge-connected components.
        http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264
    """
    if k < 1:
        raise ValueError("k cannot be less than 1")
    directed = G.is_directed()
    if k == 1:
        # Level one is plain (strongly) connected components.
        if directed:
            return nx.strongly_connected_components(G)
        return nx.connected_components(G)
    if k == 2 and not directed:
        # Undirected k=2 has a fast bridge-based algorithm.
        return bridge_components(G)
    # General case (any k, directed or undirected).
    # TODO: investigate https://arxiv.org/abs/1412.6466 for directed k=2
    aux_graph = EdgeComponentAuxGraph.construct(G)
    return aux_graph.k_edge_components(k)
+
+
@not_implemented_for("multigraph")
@nx._dispatchable
def k_edge_subgraphs(G, k):
    """Generates nodes in each maximal k-edge-connected subgraph in G.

    Parameters
    ----------
    G : NetworkX graph

    k : Integer
        Desired edge connectivity

    Returns
    -------
    k_edge_subgraphs : a generator of k-edge-subgraphs
        Each k-edge-subgraph is a maximal set of nodes that defines a subgraph
        of G that is k-edge-connected.

    See Also
    --------
    :func:`edge_connectivity`
    :func:`k_edge_components` : similar to this function, but nodes only
        need to have k-edge-connectivity within the graph G and the subgraphs
        might not be k-edge-connected.

    Raises
    ------
    NetworkXNotImplemented
        If the input graph is a multigraph.

    ValueError:
        If k is less than 1

    Notes
    -----
    Attempts to use the most efficient implementation available based on k.
    If k=1, or k=2 and the graph is undirected, then this simply calls
    `k_edge_components`.  Otherwise the algorithm from _[1] is used.

    Examples
    --------
    >>> import itertools as it
    >>> from networkx.utils import pairwise
    >>> paths = [
    ...     (1, 2, 4, 3, 1, 4),
    ...     (5, 6, 7, 8, 5, 7, 8, 6),
    ... ]
    >>> G = nx.Graph()
    >>> G.add_nodes_from(it.chain(*paths))
    >>> G.add_edges_from(it.chain(*[pairwise(path) for path in paths]))
    >>> # note this does not return {1, 4} unlike k_edge_components
    >>> sorted(map(sorted, nx.k_edge_subgraphs(G, k=3)))
    [[1], [2], [3], [4], [5, 6, 7, 8]]

    References
    ----------
    .. [1] Zhou, Liu, et al. (2012) Finding maximal k-edge-connected subgraphs
        from a large graph.  ACM International Conference on Extending Database
        Technology 2012 480-–491.
        https://openproceedings.org/2012/conf/edbt/ZhouLYLCL12.pdf
    """
    if k < 1:
        raise ValueError("k cannot be less than 1")
    # For directed graphs k-edge-ccs and k-edge-subgraphs coincide when
    # k == 1; for undirected graphs they coincide up to k == 2.
    component_threshold = 1 if G.is_directed() else 2
    if k <= component_threshold:
        return k_edge_components(G, k)
    return _k_edge_subgraphs_nodes(G, k)
+
+
def _k_edge_subgraphs_nodes(G, k):
    """Yield the node set of each maximal k-edge-connected subgraph.

    Thin generator wrapper that lets ``k_edge_subgraphs`` return node sets
    instead of graph instances.
    """
    for subgraph in general_k_edge_subgraphs(G, k):
        # Iterating a graph yields its nodes, so set(subgraph) is its node set.
        yield set(subgraph)
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def bridge_components(G):
    """Finds all bridge-connected components G.

    Parameters
    ----------
    G : NetworkX undirected graph

    Returns
    -------
    bridge_components : a generator of 2-edge-connected components


    See Also
    --------
    :func:`k_edge_subgraphs` : this function is a special case for an
        undirected graph where k=2.
    :func:`biconnected_components` : similar to this function, but is defined
        using 2-node-connectivity instead of 2-edge-connectivity.

    Raises
    ------
    NetworkXNotImplemented
        If the input graph is directed or a multigraph.

    Notes
    -----
    Bridge-connected components are also known as 2-edge-connected components.

    Examples
    --------
    >>> # The barbell graph with parameter zero has a single bridge
    >>> G = nx.barbell_graph(5, 0)
    >>> from networkx.algorithms.connectivity.edge_kcomponents import bridge_components
    >>> sorted(map(sorted, bridge_components(G)))
    [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]
    """
    # Deleting every bridge leaves exactly the 2-edge-connected pieces,
    # which are then the ordinary connected components of the remainder.
    stripped = G.copy()
    stripped.remove_edges_from(nx.bridges(G))
    yield from nx.connected_components(stripped)
+
+
class EdgeComponentAuxGraph:
    r"""A simple algorithm to find all k-edge-connected components in a graph.

    Constructing the auxiliary graph (which may take some time) allows for the
    k-edge-ccs to be found in linear time for arbitrary k.

    Notes
    -----
    This implementation is based on [1]_. The idea is to construct an auxiliary
    graph from which the k-edge-ccs can be extracted in linear time. The
    auxiliary graph is constructed in $O(|V|\cdot F)$ operations, where F is the
    complexity of max flow. Querying the components takes an additional $O(|V|)$
    operations. This algorithm can be slow for large graphs, but it handles an
    arbitrary k and works for both directed and undirected inputs.

    The undirected case for k=1 is exactly connected components.
    The undirected case for k=2 is exactly bridge connected components.
    The directed case for k=1 is exactly strongly connected components.

    References
    ----------
    .. [1] Wang, Tianhao, et al. (2015) A simple algorithm for finding all
        k-edge-connected components.
        http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264

    Examples
    --------
    >>> import itertools as it
    >>> from networkx.utils import pairwise
    >>> from networkx.algorithms.connectivity import EdgeComponentAuxGraph
    >>> # Build an interesting graph with multiple levels of k-edge-ccs
    >>> paths = [
    ...     (1, 2, 3, 4, 1, 3, 4, 2),  # a 3-edge-cc (a 4 clique)
    ...     (5, 6, 7, 5),  # a 2-edge-cc (a 3 clique)
    ...     (1, 5),  # combine first two ccs into a 1-edge-cc
    ...     (0,),  # add an additional disconnected 1-edge-cc
    ... ]
    >>> G = nx.Graph()
    >>> G.add_nodes_from(it.chain(*paths))
    >>> G.add_edges_from(it.chain(*[pairwise(path) for path in paths]))
    >>> # Constructing the AuxGraph takes about O(n ** 4)
    >>> aux_graph = EdgeComponentAuxGraph.construct(G)
    >>> # Once constructed, querying takes O(n)
    >>> sorted(map(sorted, aux_graph.k_edge_components(k=1)))
    [[0], [1, 2, 3, 4, 5, 6, 7]]
    >>> sorted(map(sorted, aux_graph.k_edge_components(k=2)))
    [[0], [1, 2, 3, 4], [5, 6, 7]]
    >>> sorted(map(sorted, aux_graph.k_edge_components(k=3)))
    [[0], [1, 2, 3, 4], [5], [6], [7]]
    >>> sorted(map(sorted, aux_graph.k_edge_components(k=4)))
    [[0], [1], [2], [3], [4], [5], [6], [7]]

    The auxiliary graph is primarily used for k-edge-ccs but it
    can also speed up the queries of k-edge-subgraphs by refining the
    search space.

    >>> import itertools as it
    >>> from networkx.utils import pairwise
    >>> from networkx.algorithms.connectivity import EdgeComponentAuxGraph
    >>> paths = [
    ...     (1, 2, 4, 3, 1, 4),
    ... ]
    >>> G = nx.Graph()
    >>> G.add_nodes_from(it.chain(*paths))
    >>> G.add_edges_from(it.chain(*[pairwise(path) for path in paths]))
    >>> aux_graph = EdgeComponentAuxGraph.construct(G)
    >>> sorted(map(sorted, aux_graph.k_edge_subgraphs(k=3)))
    [[1], [2], [3], [4]]
    >>> sorted(map(sorted, aux_graph.k_edge_components(k=3)))
    [[1, 4], [2], [3]]
    """

    # @not_implemented_for('multigraph')  # TODO: fix decor for classmethods
    @classmethod
    def construct(EdgeComponentAuxGraph, G):
        """Builds an auxiliary graph encoding edge-connectivity between nodes.

        Notes
        -----
        Given G=(V, E), initialize an empty auxiliary graph A.
        Choose an arbitrary source node s.  Initialize a set N of available
        nodes (that can be used as the sink). The algorithm picks an
        arbitrary node t from N - {s}, and then computes the minimum st-cut
        (S, T) with value w. If G is directed the minimum of the st-cut or
        the ts-cut is used instead. Then, the edge (s, t) is added to the
        auxiliary graph with weight w. The algorithm is applied recursively
        first using S as the available nodes and s as the source, and then
        using T and t. Recursion stops when the source is the only available
        node.

        Parameters
        ----------
        G : NetworkX graph
        """
        # workaround for classmethod decorator
        not_implemented_for("multigraph")(lambda G: G)(G)

        def _build_aux_tree(H, A, root, root_avail):
            # Iterative (explicit-stack) formulation of the recursive
            # construction described above.  A recursive version would
            # recurse once per partition split, so its depth can grow
            # linearly with |V| and overflow the interpreter's recursion
            # limit on large graphs.
            stack = [(root, root_avail)]
            while stack:
                source, avail = stack.pop()
                # Stop splitting once the source is the only available node.
                if avail == {source}:
                    continue
                # pick an arbitrary node as the sink
                sink = arbitrary_element(avail - {source})
                # find the minimum cut and its weight
                value, (S, T) = nx.minimum_cut(H, source, sink)
                if H.is_directed():
                    # check if the reverse direction has a smaller cut
                    value_, (T_, S_) = nx.minimum_cut(H, sink, source)
                    if value_ < value:
                        value, S, T = value_, S_, T_
                # add edge with weight of cut to the aux graph
                A.add_edge(source, sink, weight=value)
                # process both sides of the cut until all nodes are used
                stack.append((source, avail.intersection(S)))
                stack.append((sink, avail.intersection(T)))

        # Copy input to ensure all edges have unit capacity
        H = G.__class__()
        H.add_nodes_from(G.nodes())
        H.add_edges_from(G.edges(), capacity=1)

        # A is the auxiliary graph to be constructed
        # It is a weighted undirected tree
        A = nx.Graph()

        # Pick an arbitrary node as the source
        if H.number_of_nodes() > 0:
            source = arbitrary_element(H.nodes())
            # Initialize a set of elements that can be chosen as the sink
            avail = set(H.nodes())

            # This constructs A
            _build_aux_tree(H, A, source, avail)

        # This class is a container the holds the auxiliary graph A and
        # provides access the k_edge_components function.
        self = EdgeComponentAuxGraph()
        self.A = A
        self.H = H
        return self

    def k_edge_components(self, k):
        """Queries the auxiliary graph for k-edge-connected components.

        Parameters
        ----------
        k : Integer
            Desired edge connectivity

        Returns
        -------
        k_edge_components : a generator of k-edge-ccs

        Notes
        -----
        Given the auxiliary graph, the k-edge-connected components can be
        determined in linear time by removing all edges with weights less than
        k from the auxiliary graph.  The resulting connected components are the
        k-edge-ccs in the original graph.
        """
        if k < 1:
            raise ValueError("k cannot be less than 1")
        A = self.A
        # "traverse the auxiliary graph A and delete all edges with weights less
        # than k"
        aux_weights = nx.get_edge_attributes(A, "weight")
        # Create a relevant graph with the auxiliary edges with weights >= k
        R = nx.Graph()
        R.add_nodes_from(A.nodes())
        R.add_edges_from(e for e, w in aux_weights.items() if w >= k)

        # Return the nodes that are k-edge-connected in the original graph
        yield from nx.connected_components(R)

    def k_edge_subgraphs(self, k):
        """Queries the auxiliary graph for k-edge-connected subgraphs.

        Parameters
        ----------
        k : Integer
            Desired edge connectivity

        Returns
        -------
        k_edge_subgraphs : a generator of k-edge-subgraphs

        Notes
        -----
        Refines the k-edge-ccs into k-edge-subgraphs. The running time is more
        than $O(|V|)$.

        For single values of k it is faster to use `nx.k_edge_subgraphs`.
        But for multiple values of k, it can be faster to build AuxGraph and
        then use this method.
        """
        if k < 1:
            raise ValueError("k cannot be less than 1")
        H = self.H
        A = self.A
        # "traverse the auxiliary graph A and delete all edges with weights less
        # than k"
        aux_weights = nx.get_edge_attributes(A, "weight")
        # Create a relevant graph with the auxiliary edges with weights >= k
        R = nx.Graph()
        R.add_nodes_from(A.nodes())
        R.add_edges_from(e for e, w in aux_weights.items() if w >= k)

        # Return the components whose subgraphs are k-edge-connected
        for cc in nx.connected_components(R):
            if len(cc) < k:
                # Early return optimization: a subgraph with fewer than k
                # nodes cannot be k-edge-connected, so each node is trivial.
                for node in cc:
                    yield {node}
            else:
                # Call subgraph solution to refine the results
                C = H.subgraph(cc)
                yield from k_edge_subgraphs(C, k)
+
+
+def _low_degree_nodes(G, k, nbunch=None):
+    """Helper for finding nodes with degree less than k."""
+    # Nodes with degree less than k cannot be k-edge-connected.
+    if G.is_directed():
+        # Consider both in and out degree in the directed case
+        seen = set()
+        for node, degree in G.out_degree(nbunch):
+            if degree < k:
+                seen.add(node)
+                yield node
+        for node, degree in G.in_degree(nbunch):
+            if node not in seen and degree < k:
+                seen.add(node)
+                yield node
+    else:
+        # Only the degree matters in the undirected case
+        for node, degree in G.degree(nbunch):
+            if degree < k:
+                yield node
+
+
def _high_degree_components(G, k):
    """Helper for filtering components that can't be k-edge-connected.

    Removes and generates each node with degree less than k.  Then generates
    remaining components where all nodes have degree at least k.
    """
    # Repeatedly strip low-degree nodes; each removal can lower the degree
    # of its neighbors, so only those neighbors need to be rescanned.
    H = G.copy()
    doomed = set(_low_degree_nodes(H, k))
    while doomed:
        # Collect the neighbors of the nodes about to be removed (computed
        # before removal, while the edges still exist).
        frontier = set(it.chain.from_iterable(map(H.neighbors, doomed)))
        frontier -= doomed
        H.remove_nodes_from(doomed)
        for node in doomed:
            yield {node}
        doomed = set(_low_degree_nodes(H, k, frontier))

    # Note: remaining connected components may not be k-edge-connected
    if G.is_directed():
        yield from nx.strongly_connected_components(H)
    else:
        yield from nx.connected_components(H)
+
+
@nx._dispatchable(returns_graph=True)
def general_k_edge_subgraphs(G, k):
    """General algorithm to find all maximal k-edge-connected subgraphs in `G`.

    Parameters
    ----------
    G : nx.Graph
       Graph in which all maximal k-edge-connected subgraphs will be found.

    k : int

    Yields
    ------
    k_edge_subgraphs : Graph instances that are k-edge-subgraphs
        Each k-edge-subgraph contains a maximal set of nodes that defines a
        subgraph of `G` that is k-edge-connected.

    Notes
    -----
    Implementation of the basic algorithm from [1]_.  The basic idea is to find
    a global minimum cut of the graph. If the cut value is at least k, then the
    graph is a k-edge-connected subgraph and can be added to the results.
    Otherwise, the cut is used to split the graph in two and the procedure is
    applied recursively. If the graph is just a single node, then it is also
    added to the results. At the end, each result is either guaranteed to be
    a single node or a subgraph of G that is k-edge-connected.

    This implementation contains optimizations for reducing the number of calls
    to max-flow, but there are other optimizations in [1]_ that could be
    implemented.

    References
    ----------
    .. [1] Zhou, Liu, et al. (2012) Finding maximal k-edge-connected subgraphs
        from a large graph.  ACM International Conference on Extending Database
        Technology 2012 480-–491.
        https://openproceedings.org/2012/conf/edbt/ZhouLYLCL12.pdf

    Examples
    --------
    >>> import itertools as it
    >>> from networkx.utils import pairwise
    >>> paths = [
    ...     (11, 12, 13, 14, 11, 13, 14, 12),  # a 4-clique
    ...     (21, 22, 23, 24, 21, 23, 24, 22),  # another 4-clique
    ...     # connect the cliques with high degree but low connectivity
    ...     (50, 13),
    ...     (12, 50, 22),
    ...     (13, 102, 23),
    ...     (14, 101, 24),
    ... ]
    >>> G = nx.Graph(it.chain(*[pairwise(path) for path in paths]))
    >>> sorted(len(k_sg) for k_sg in nx.connectivity.general_k_edge_subgraphs(G, k=3))
    [1, 1, 1, 4, 4]
    """
    if k < 1:
        raise ValueError("k cannot be less than 1")

    # Node pruning optimization (incorporates early return)
    # find_ccs is either connected_components/strongly_connected_components
    find_ccs = partial(_high_degree_components, k=k)

    # Quick return optimization: with fewer than k nodes, no subgraph can be
    # k-edge-connected, so every node is a trivial result on its own.
    if G.number_of_nodes() < k:
        for node in G.nodes():
            yield G.subgraph([node]).copy()
        return

    # Intermediate results
    R0 = {G.subgraph(cc).copy() for cc in find_ccs(G)}
    # Subdivide CCs in the intermediate results until they are k-conn
    while R0:
        G1 = R0.pop()
        if G1.number_of_nodes() == 1:
            yield G1
        else:
            # Find a global minimum cut
            cut_edges = nx.minimum_edge_cut(G1)
            cut_value = len(cut_edges)
            if cut_value < k:
                # G1 is not k-edge-connected, so subdivide it
                G1.remove_edges_from(cut_edges)
                for cc in find_ccs(G1):
                    R0.add(G1.subgraph(cc).copy())
            else:
                # Otherwise we found a k-edge-connected subgraph
                yield G1
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcomponents.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcomponents.py
new file mode 100644
index 00000000..e2f1ba28
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcomponents.py
@@ -0,0 +1,223 @@
+"""
+Moody and White algorithm for k-components
+"""
+
+from collections import defaultdict
+from itertools import combinations
+from operator import itemgetter
+
+import networkx as nx
+
+# Define the default maximum flow function.
+from networkx.algorithms.flow import edmonds_karp
+from networkx.utils import not_implemented_for
+
+default_flow_func = edmonds_karp
+
+__all__ = ["k_components"]
+
+
@not_implemented_for("directed")
@nx._dispatchable
def k_components(G, flow_func=None):
    r"""Returns the k-component structure of a graph G.

    A `k`-component is a maximal subgraph of a graph G that has, at least,
    node connectivity `k`: we need to remove at least `k` nodes to break it
    into more components. `k`-components have an inherent hierarchical
    structure because they are nested in terms of connectivity: a connected
    graph can contain several 2-components, each of which can contain
    one or more 3-components, and so forth.

    Parameters
    ----------
    G : NetworkX graph

    flow_func : function
        Function to perform the underlying flow computations. Default value
        :meth:`edmonds_karp`. This function performs better in sparse graphs with
        right tailed degree distributions. :meth:`shortest_augmenting_path` will
        perform better in denser graphs.

    Returns
    -------
    k_components : dict
        Dictionary with all connectivity levels `k` in the input Graph as keys
        and a list of sets of nodes that form a k-component of level `k` as
        values.

    Raises
    ------
    NetworkXNotImplemented
        If the input graph is directed.

    Examples
    --------
    >>> # Petersen graph has 10 nodes and it is triconnected, thus all
    >>> # nodes are in a single component on all three connectivity levels
    >>> G = nx.petersen_graph()
    >>> k_components = nx.k_components(G)

    Notes
    -----
    Moody and White [1]_ (appendix A) provide an algorithm for identifying
    k-components in a graph, which is based on Kanevsky's algorithm [2]_
    for finding all minimum-size node cut-sets of a graph (implemented in
    :meth:`all_node_cuts` function):

        1. Compute node connectivity, k, of the input graph G.

        2. Identify all k-cutsets at the current level of connectivity using
           Kanevsky's algorithm.

        3. Generate new graph components based on the removal of
           these cutsets. Nodes in a cutset belong to both sides
           of the induced cut.

        4. If the graph is neither complete nor trivial, return to 1;
           else end.

    This implementation also uses some heuristics (see [3]_ for details)
    to speed up the computation.

    See also
    --------
    node_connectivity
    all_node_cuts
    biconnected_components : special case of this function when k=2
    k_edge_components : similar to this function, but uses edge-connectivity
        instead of node-connectivity

    References
    ----------
    .. [1]  Moody, J. and D. White (2003). Social cohesion and embeddedness:
            A hierarchical conception of social groups.
            American Sociological Review 68(1), 103--28.
            http://www2.asanet.org/journals/ASRFeb03MoodyWhite.pdf

    .. [2]  Kanevsky, A. (1993). Finding all minimum-size separating vertex
            sets in a graph. Networks 23(6), 533--541.
            http://onlinelibrary.wiley.com/doi/10.1002/net.3230230604/abstract

    .. [3]  Torrents, J. and F. Ferraro (2015). Structural Cohesion:
            Visualization and Heuristics for Fast Computation.
            https://arxiv.org/pdf/1503.04476v1

    """
    # Dictionary with connectivity level (k) as keys and a list of
    # sets of nodes that form a k-component as values. Note that
    # k-components can overlap (but only k - 1 nodes).
    k_components = defaultdict(list)
    # Define default flow function
    if flow_func is None:
        flow_func = default_flow_func
    # Bicomponents as a base to check for higher order k-components
    for component in nx.connected_components(G):
        # isolated nodes have connectivity 0
        comp = set(component)
        if len(comp) > 1:
            k_components[1].append(comp)
    bicomponents = [G.subgraph(c) for c in nx.biconnected_components(G)]
    for bicomponent in bicomponents:
        bicomp = set(bicomponent)
        # avoid considering dyads as bicomponents
        if len(bicomp) > 2:
            k_components[2].append(bicomp)
    # Search each non-trivial bicomponent for higher-order k-components.
    for B in bicomponents:
        if len(B) <= 2:
            continue
        k = nx.node_connectivity(B, flow_func=flow_func)
        if k > 2:
            # The whole bicomponent is itself a k-component for this k.
            k_components[k].append(set(B))
        # Perform cuts in a DFS like order.
        cuts = list(nx.all_node_cuts(B, k=k, flow_func=flow_func))
        # The stack holds (connectivity of the parent graph, generator of
        # candidate partitions); keeping generators (not materialized lists)
        # on the stack yields a lazy depth-first traversal of partitions.
        stack = [(k, _generate_partition(B, cuts, k))]
        while stack:
            (parent_k, partition) = stack[-1]
            try:
                nodes = next(partition)
                C = B.subgraph(nodes)
                this_k = nx.node_connectivity(C, flow_func=flow_func)
                if this_k > parent_k and this_k > 2:
                    # Only record when connectivity strictly increased;
                    # levels 1 and 2 were already handled above.
                    k_components[this_k].append(set(C))
                cuts = list(nx.all_node_cuts(C, k=this_k, flow_func=flow_func))
                if cuts:
                    # Descend into this partition's own sub-partitions.
                    stack.append((this_k, _generate_partition(C, cuts, this_k)))
            except StopIteration:
                # Current partition generator exhausted: backtrack.
                stack.pop()

    # This is necessary because k-components may only be reported at their
    # maximum k level. But we want to return a dictionary in which keys are
    # connectivity levels and values list of sets of components, without
    # skipping any connectivity level. Also, it's possible that subsets of
    # an already detected k-component appear at a level k. Checking for this
    # in the while loop above penalizes the common case. Thus we also have to
    # _consolidate all connectivity levels in _reconstruct_k_components.
    return _reconstruct_k_components(k_components)
+
+
def _consolidate(sets, k):
    """Merge sets that share k or more elements.

    See: http://rosettacode.org/wiki/Set_consolidation

    The iterative python implementation posted there is
    faster than this because of the overhead of building a
    Graph and calling nx.connected_components, but it's not
    clear for us if we can use it in NetworkX because there
    is no licence for the code.

    """
    # Index the sets, connect indices whose sets overlap in >= k elements,
    # then each connected component of indices is one merged set.
    indexed = dict(enumerate(sets))
    overlap = nx.Graph()
    overlap.add_nodes_from(indexed)
    overlap.add_edges_from(
        (i, j)
        for i, j in combinations(indexed, 2)
        if len(indexed[i] & indexed[j]) >= k
    )
    for group in nx.connected_components(overlap):
        merged = set()
        for idx in group:
            merged |= indexed[idx]
        yield merged
+
+
def _generate_partition(G, cuts, k):
    """Yield candidate partitions of G induced by removing the cutsets.

    Cut nodes are attached to every side of the cut they neighbor, and the
    resulting candidate components are consolidated at level k + 1.
    """
    cut_nodes = {node for cut in cuts for node in cut}
    # Core nodes: degree above k and not part of any cutset.
    core = {n, for_ in ()} if False else {n for n, d in G.degree() if d > k} - cut_nodes
    H = G.subgraph(core)
    candidates = []
    for cc in nx.connected_components(H):
        part = set(cc)
        # A cut node joins every component it has a neighbor in.
        for cut in cuts:
            for node in cut:
                if any(nbr in cc for nbr in G[node]):
                    part.add(node)
        if len(part) < G.order():
            candidates.append(part)
    yield from _consolidate(candidates, k + 1)
+
+
def _reconstruct_k_components(k_comps):
    """Fill in every connectivity level from max(k) down to 1.

    Components detected only at their maximum level are propagated down so
    that no level is skipped, consolidating overlapping sets at each level.
    """
    result = {}
    max_k = max(k_comps)
    for k in range(max_k, 0, -1):
        if k == max_k:
            result[k] = list(_consolidate(k_comps[k], k))
            continue
        if k not in k_comps:
            # Nothing recorded at this level: everything at level k + 1 is
            # also (at least) k-connected.
            result[k] = list(_consolidate(result[k + 1], k))
            continue
        nodes_at_k = set.union(*k_comps[k])
        # Carry down any higher-level component not fully covered here.
        extra = [c for c in result[k + 1] if any(n not in nodes_at_k for n in c)]
        result[k] = list(_consolidate(k_comps[k] + extra, k))
    return result
+
+
def build_k_number_dict(kcomps):
    """Map each node to the highest connectivity level it appears in.

    Levels are visited in ascending order, so a node in several levels ends
    up with the largest k that contains it.
    """
    node_to_k = {}
    for k in sorted(kcomps):
        for comp in kcomps[k]:
            node_to_k.update(dict.fromkeys(comp, k))
    return node_to_k
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcutsets.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcutsets.py
new file mode 100644
index 00000000..de26f4c5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcutsets.py
@@ -0,0 +1,235 @@
+"""
+Kanevsky all minimum node k cutsets algorithm.
+"""
+
+import copy
+from collections import defaultdict
+from itertools import combinations
+from operator import itemgetter
+
+import networkx as nx
+from networkx.algorithms.flow import (
+    build_residual_network,
+    edmonds_karp,
+    shortest_augmenting_path,
+)
+
+from .utils import build_auxiliary_node_connectivity
+
+default_flow_func = edmonds_karp
+
+
+__all__ = ["all_node_cuts"]
+
+
+@nx._dispatchable
+def all_node_cuts(G, k=None, flow_func=None):
+    r"""Returns all minimum k cutsets of an undirected graph G.
+
+    This implementation is based on Kanevsky's algorithm [1]_ for finding all
+    minimum-size node cut-sets of an undirected graph G; ie the set (or sets)
+    of nodes of cardinality equal to the node connectivity of G. Thus if
+    removed, would break G into two or more connected components.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph
+
+    k : Integer
+        Node connectivity of the input graph. If k is None, then it is
+        computed. Default value: None.
+
+    flow_func : function
+        Function to perform the underlying flow computations. Default value is
+        :func:`~networkx.algorithms.flow.edmonds_karp`. This function performs
+        better in sparse graphs with right tailed degree distributions.
+        :func:`~networkx.algorithms.flow.shortest_augmenting_path` will
+        perform better in denser graphs.
+
+
+    Returns
+    -------
+    cuts : a generator of node cutsets
+        Each node cutset has cardinality equal to the node connectivity of
+        the input graph.
+
+    Examples
+    --------
+    >>> # A two-dimensional grid graph has 4 cutsets of cardinality 2
+    >>> G = nx.grid_2d_graph(5, 5)
+    >>> cutsets = list(nx.all_node_cuts(G))
+    >>> len(cutsets)
+    4
+    >>> all(2 == len(cutset) for cutset in cutsets)
+    True
+    >>> nx.node_connectivity(G)
+    2
+
+    Notes
+    -----
+    This implementation is based on the sequential algorithm for finding all
+    minimum-size separating vertex sets in a graph [1]_. The main idea is to
+    compute minimum cuts using local maximum flow computations among a set
+    of nodes of highest degree and all other non-adjacent nodes in the Graph.
+    Once we find a minimum cut, we add an edge between the high degree
+    node and the target node of the local maximum flow computation to make
+    sure that we will not find that minimum cut again.
+
+    See also
+    --------
+    node_connectivity
+    edmonds_karp
+    shortest_augmenting_path
+
+    References
+    ----------
+    .. [1]  Kanevsky, A. (1993). Finding all minimum-size separating vertex
+            sets in a graph. Networks 23(6), 533--541.
+            http://onlinelibrary.wiley.com/doi/10.1002/net.3230230604/abstract
+
+    """
+    if not nx.is_connected(G):
+        raise nx.NetworkXError("Input graph is disconnected.")
+
+    # Address some corner cases first.
+    # For complete Graphs
+
+    if nx.density(G) == 1:
+        yield from ()
+        return
+
+    # Initialize data structures.
+    # Keep track of the cuts already computed so we do not repeat them.
+    seen = []
+    # Even-Tarjan reduction is what we call auxiliary digraph
+    # for node connectivity.
+    H = build_auxiliary_node_connectivity(G)
+    H_nodes = H.nodes  # for speed
+    mapping = H.graph["mapping"]
+    # Keep a copy of original predecessors, H will be modified later.
+    # Shallow copy is enough.
+    original_H_pred = copy.copy(H._pred)
+    R = build_residual_network(H, "capacity")
+    kwargs = {"capacity": "capacity", "residual": R}
+    # Define default flow function
+    if flow_func is None:
+        flow_func = default_flow_func
+    if flow_func is shortest_augmenting_path:
+        kwargs["two_phase"] = True
+    # Begin the actual algorithm
+    # step 1: Find node connectivity k of G
+    if k is None:
+        k = nx.node_connectivity(G, flow_func=flow_func)
+    # step 2:
+    # Find k nodes with top degree, call it X:
+    X = {n for n, d in sorted(G.degree(), key=itemgetter(1), reverse=True)[:k]}
+    # Check if X is a k-node-cutset
+    if _is_separating_set(G, X):
+        seen.append(X)
+        yield X
+
+    for x in X:
+        # step 3: Compute local connectivity flow of x with all other
+        # non adjacent nodes in G
+        non_adjacent = set(G) - {x} - set(G[x])
+        for v in non_adjacent:
+            # step 4: compute maximum flow in an Even-Tarjan reduction H of G
+            # and step 5: build the associated residual network R
+            R = flow_func(H, f"{mapping[x]}B", f"{mapping[v]}A", **kwargs)
+            flow_value = R.graph["flow_value"]
+
+            if flow_value == k:
+                # Find the nodes incident to the flow.
+                E1 = flowed_edges = [
+                    (u, w) for (u, w, d) in R.edges(data=True) if d["flow"] != 0
+                ]
+                VE1 = incident_nodes = {n for edge in E1 for n in edge}
+                # Remove saturated edges form the residual network.
+                # Note that reversed edges are introduced with capacity 0
+                # in the residual graph and they need to be removed too.
+                saturated_edges = [
+                    (u, w, d)
+                    for (u, w, d) in R.edges(data=True)
+                    if d["capacity"] == d["flow"] or d["capacity"] == 0
+                ]
+                R.remove_edges_from(saturated_edges)
+                R_closure = nx.transitive_closure(R)
+                # step 6: shrink the strongly connected components of
+                # residual flow network R and call it L.
+                L = nx.condensation(R)
+                cmap = L.graph["mapping"]
+                inv_cmap = defaultdict(list)
+                for n, scc in cmap.items():
+                    inv_cmap[scc].append(n)
+                # Find the incident nodes in the condensed graph.
+                VE1 = {cmap[n] for n in VE1}
+                # step 7: Compute all antichains of L;
+                # they map to closed sets in H.
+                # Any edge in H that links a closed set is part of a cutset.
+                for antichain in nx.antichains(L):
+                    # Only antichains that are subsets of incident nodes counts.
+                    # Lemma 8 in reference.
+                    if not set(antichain).issubset(VE1):
+                        continue
+                    # Nodes in an antichain of the condensation graph of
+                    # the residual network map to a closed set of nodes that
+                    # define a node partition of the auxiliary digraph H
+                    # through taking all of antichain's predecessors in the
+                    # transitive closure.
+                    S = set()
+                    for scc in antichain:
+                        S.update(inv_cmap[scc])
+                    S_ancestors = set()
+                    for n in S:
+                        S_ancestors.update(R_closure._pred[n])
+                    S.update(S_ancestors)
+                    if f"{mapping[x]}B" not in S or f"{mapping[v]}A" in S:
+                        continue
+                    # Find the cutset that links the node partition (S,~S) in H
+                    cutset = set()
+                    for u in S:
+                        cutset.update((u, w) for w in original_H_pred[u] if w not in S)
+                    # The edges in H that form the cutset are internal edges
+                    # (ie edges that represent a node of the original graph G)
+                    if any(H_nodes[u]["id"] != H_nodes[w]["id"] for u, w in cutset):
+                        continue
+                    node_cut = {H_nodes[u]["id"] for u, _ in cutset}
+
+                    if len(node_cut) == k:
+                        # The cut is invalid if it includes internal edges of
+                        # end nodes. The other half of Lemma 8 in ref.
+                        if x in node_cut or v in node_cut:
+                            continue
+                        if node_cut not in seen:
+                            yield node_cut
+                            seen.append(node_cut)
+
+                # Add an edge (x, v) to make sure that we do not
+                # find this cutset again. This is equivalent
+                # of adding the edge in the input graph
+                # G.add_edge(x, v) and then regenerate H and R:
+                # Add edges to the auxiliary digraph.
+                # See build_residual_network for convention we used
+                # in residual graphs.
+                H.add_edge(f"{mapping[x]}B", f"{mapping[v]}A", capacity=1)
+                H.add_edge(f"{mapping[v]}B", f"{mapping[x]}A", capacity=1)
+                # Add edges to the residual network.
+                R.add_edge(f"{mapping[x]}B", f"{mapping[v]}A", capacity=1)
+                R.add_edge(f"{mapping[v]}A", f"{mapping[x]}B", capacity=0)
+                R.add_edge(f"{mapping[v]}B", f"{mapping[x]}A", capacity=1)
+                R.add_edge(f"{mapping[x]}A", f"{mapping[v]}B", capacity=0)
+
+                # Add again the saturated edges to reuse the residual network
+                R.add_edges_from(saturated_edges)
+
+
+def _is_separating_set(G, cut):
+    """Assumes that the input graph is connected"""
+    if len(cut) == len(G) - 1:
+        return True
+
+    H = nx.restricted_view(G, cut, [])
+    if nx.is_connected(H):
+        return False
+    return True
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/stoerwagner.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/stoerwagner.py
new file mode 100644
index 00000000..29604b14
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/stoerwagner.py
@@ -0,0 +1,152 @@
+"""
+Stoer-Wagner minimum cut algorithm.
+"""
+
+from itertools import islice
+
+import networkx as nx
+
+from ...utils import BinaryHeap, arbitrary_element, not_implemented_for
+
+__all__ = ["stoer_wagner"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs="weight")
+def stoer_wagner(G, weight="weight", heap=BinaryHeap):
+    r"""Returns the weighted minimum edge cut using the Stoer-Wagner algorithm.
+
+    Determine the minimum edge cut of a connected graph using the
+    Stoer-Wagner algorithm. In weighted cases, all weights must be
+    nonnegative.
+
+    The running time of the algorithm depends on the type of heaps used:
+
+    ============== =============================================
+    Type of heap   Running time
+    ============== =============================================
+    Binary heap    $O(n (m + n) \log n)$
+    Fibonacci heap $O(nm + n^2 \log n)$
+    Pairing heap   $O(2^{2 \sqrt{\log \log n}} nm + n^2 \log n)$
+    ============== =============================================
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Edges of the graph are expected to have an attribute named by the
+        weight parameter below. If this attribute is not present, the edge is
+        considered to have unit weight.
+
+    weight : string
+        Name of the weight attribute of the edges. If the attribute is not
+        present, unit weight is assumed. Default value: 'weight'.
+
+    heap : class
+        Type of heap to be used in the algorithm. It should be a subclass of
+        :class:`MinHeap` or implement a compatible interface.
+
+        If a stock heap implementation is to be used, :class:`BinaryHeap` is
+        recommended over :class:`PairingHeap` for Python implementations without
+        optimized attribute accesses (e.g., CPython) despite a slower
+        asymptotic running time. For Python implementations with optimized
+        attribute accesses (e.g., PyPy), :class:`PairingHeap` provides better
+        performance. Default value: :class:`BinaryHeap`.
+
+    Returns
+    -------
+    cut_value : integer or float
+        The sum of weights of edges in a minimum cut.
+
+    partition : pair of node lists
+        A partitioning of the nodes that defines a minimum cut.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the graph is directed or a multigraph.
+
+    NetworkXError
+        If the graph has less than two nodes, is not connected or has a
+        negative-weighted edge.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> G.add_edge("x", "a", weight=3)
+    >>> G.add_edge("x", "b", weight=1)
+    >>> G.add_edge("a", "c", weight=3)
+    >>> G.add_edge("b", "c", weight=5)
+    >>> G.add_edge("b", "d", weight=4)
+    >>> G.add_edge("d", "e", weight=2)
+    >>> G.add_edge("c", "y", weight=2)
+    >>> G.add_edge("e", "y", weight=3)
+    >>> cut_value, partition = nx.stoer_wagner(G)
+    >>> cut_value
+    4
+    """
+    n = len(G)
+    if n < 2:
+        raise nx.NetworkXError("graph has less than two nodes.")
+    if not nx.is_connected(G):
+        raise nx.NetworkXError("graph is not connected.")
+
+    # Make a copy of the graph for internal use.
+    G = nx.Graph(
+        (u, v, {"weight": e.get(weight, 1)}) for u, v, e in G.edges(data=True) if u != v
+    )
+    G.__networkx_cache__ = None  # Disable caching
+
+    for u, v, e in G.edges(data=True):
+        if e["weight"] < 0:
+            raise nx.NetworkXError("graph has a negative-weighted edge.")
+
+    cut_value = float("inf")
+    nodes = set(G)
+    contractions = []  # contracted node pairs
+
+    # Repeatedly pick a pair of nodes to contract until only one node is left.
+    for i in range(n - 1):
+        # Pick an arbitrary node u and create a set A = {u}.
+        u = arbitrary_element(G)
+        A = {u}
+        # Repeatedly pick the node "most tightly connected" to A and add it to
+        # A. The tightness of connectivity of a node not in A is defined by the
+        # of edges connecting it to nodes in A.
+        h = heap()  # min-heap emulating a max-heap
+        for v, e in G[u].items():
+            h.insert(v, -e["weight"])
+        # Repeat until all but one node has been added to A.
+        for j in range(n - i - 2):
+            u = h.pop()[0]
+            A.add(u)
+            for v, e in G[u].items():
+                if v not in A:
+                    h.insert(v, h.get(v, 0) - e["weight"])
+        # A and the remaining node v define a "cut of the phase". There is a
+        # minimum cut of the original graph that is also a cut of the phase.
+        # Due to contractions in earlier phases, v may in fact represent
+        # multiple nodes in the original graph.
+        v, w = h.min()
+        w = -w
+        if w < cut_value:
+            cut_value = w
+            best_phase = i
+        # Contract v and the last node added to A.
+        contractions.append((u, v))
+        for w, e in G[v].items():
+            if w != u:
+                if w not in G[u]:
+                    G.add_edge(u, w, weight=e["weight"])
+                else:
+                    G[u][w]["weight"] += e["weight"]
+        G.remove_node(v)
+
+    # Recover the optimal partitioning from the contractions.
+    G = nx.Graph(islice(contractions, best_phase))
+    v = contractions[best_phase][1]
+    G.add_node(v)
+    reachable = set(nx.single_source_shortest_path_length(G, v))
+    partition = (list(reachable), list(nodes - reachable))
+
+    return cut_value, partition
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_connectivity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_connectivity.py
new file mode 100644
index 00000000..7aef2477
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_connectivity.py
@@ -0,0 +1,421 @@
+import itertools
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms import flow
+from networkx.algorithms.connectivity import (
+    local_edge_connectivity,
+    local_node_connectivity,
+)
+
+flow_funcs = [
+    flow.boykov_kolmogorov,
+    flow.dinitz,
+    flow.edmonds_karp,
+    flow.preflow_push,
+    flow.shortest_augmenting_path,
+]
+
+
+# helper functions for tests
+
+
+def _generate_no_biconnected(max_attempts=50):
+    attempts = 0
+    while True:
+        G = nx.fast_gnp_random_graph(100, 0.0575, seed=42)
+        if nx.is_connected(G) and not nx.is_biconnected(G):
+            attempts = 0
+            yield G
+        else:
+            if attempts >= max_attempts:
+                msg = f"Tried {max_attempts} times: no suitable Graph."
+                raise Exception(msg)
+            else:
+                attempts += 1
+
+
+def test_average_connectivity():
+    # figure 1 from:
+    # Beineke, L., O. Oellermann, and R. Pippert (2002). The average
+    # connectivity of a graph. Discrete mathematics 252(1-3), 31-45
+    # http://www.sciencedirect.com/science/article/pii/S0012365X01001807
+    G1 = nx.path_graph(3)
+    G1.add_edges_from([(1, 3), (1, 4)])
+    G2 = nx.path_graph(3)
+    G2.add_edges_from([(1, 3), (1, 4), (0, 3), (0, 4), (3, 4)])
+    G3 = nx.Graph()
+    for flow_func in flow_funcs:
+        kwargs = {"flow_func": flow_func}
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        assert nx.average_node_connectivity(G1, **kwargs) == 1, errmsg
+        assert nx.average_node_connectivity(G2, **kwargs) == 2.2, errmsg
+        assert nx.average_node_connectivity(G3, **kwargs) == 0, errmsg
+
+
+def test_average_connectivity_directed():
+    G = nx.DiGraph([(1, 3), (1, 4), (1, 5)])
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        assert nx.average_node_connectivity(G) == 0.25, errmsg
+
+
+def test_articulation_points():
+    Ggen = _generate_no_biconnected()
+    for flow_func in flow_funcs:
+        for i in range(3):
+            G = next(Ggen)
+            errmsg = f"Assertion failed in function: {flow_func.__name__}"
+            assert nx.node_connectivity(G, flow_func=flow_func) == 1, errmsg
+
+
+def test_brandes_erlebach():
+    # Figure 1 chapter 7: Connectivity
+    # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf
+    G = nx.Graph()
+    G.add_edges_from(
+        [
+            (1, 2),
+            (1, 3),
+            (1, 4),
+            (1, 5),
+            (2, 3),
+            (2, 6),
+            (3, 4),
+            (3, 6),
+            (4, 6),
+            (4, 7),
+            (5, 7),
+            (6, 8),
+            (6, 9),
+            (7, 8),
+            (7, 10),
+            (8, 11),
+            (9, 10),
+            (9, 11),
+            (10, 11),
+        ]
+    )
+    for flow_func in flow_funcs:
+        kwargs = {"flow_func": flow_func}
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        assert 3 == local_edge_connectivity(G, 1, 11, **kwargs), errmsg
+        assert 3 == nx.edge_connectivity(G, 1, 11, **kwargs), errmsg
+        assert 2 == local_node_connectivity(G, 1, 11, **kwargs), errmsg
+        assert 2 == nx.node_connectivity(G, 1, 11, **kwargs), errmsg
+        assert 2 == nx.edge_connectivity(G, **kwargs), errmsg
+        assert 2 == nx.node_connectivity(G, **kwargs), errmsg
+        if flow_func is flow.preflow_push:
+            assert 3 == nx.edge_connectivity(G, 1, 11, cutoff=2, **kwargs), errmsg
+        else:
+            assert 2 == nx.edge_connectivity(G, 1, 11, cutoff=2, **kwargs), errmsg
+
+
+def test_white_harary_1():
+    # Figure 1b white and harary (2001)
+    # https://doi.org/10.1111/0081-1750.00098
+    # A graph with high adhesion (edge connectivity) and low cohesion
+    # (vertex connectivity)
+    G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4))
+    G.remove_node(7)
+    for i in range(4, 7):
+        G.add_edge(0, i)
+    G = nx.disjoint_union(G, nx.complete_graph(4))
+    G.remove_node(G.order() - 1)
+    for i in range(7, 10):
+        G.add_edge(0, i)
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        assert 1 == nx.node_connectivity(G, flow_func=flow_func), errmsg
+        assert 3 == nx.edge_connectivity(G, flow_func=flow_func), errmsg
+
+
+def test_white_harary_2():
+    # Figure 8 white and harary (2001)
+    # https://doi.org/10.1111/0081-1750.00098
+    G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4))
+    G.add_edge(0, 4)
+    # kappa <= lambda <= delta
+    assert 3 == min(nx.core_number(G).values())
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        assert 1 == nx.node_connectivity(G, flow_func=flow_func), errmsg
+        assert 1 == nx.edge_connectivity(G, flow_func=flow_func), errmsg
+
+
+def test_complete_graphs():
+    for n in range(5, 20, 5):
+        for flow_func in flow_funcs:
+            G = nx.complete_graph(n)
+            errmsg = f"Assertion failed in function: {flow_func.__name__}"
+            assert n - 1 == nx.node_connectivity(G, flow_func=flow_func), errmsg
+            assert n - 1 == nx.node_connectivity(
+                G.to_directed(), flow_func=flow_func
+            ), errmsg
+            assert n - 1 == nx.edge_connectivity(G, flow_func=flow_func), errmsg
+            assert n - 1 == nx.edge_connectivity(
+                G.to_directed(), flow_func=flow_func
+            ), errmsg
+
+
+def test_empty_graphs():
+    for k in range(5, 25, 5):
+        G = nx.empty_graph(k)
+        for flow_func in flow_funcs:
+            errmsg = f"Assertion failed in function: {flow_func.__name__}"
+            assert 0 == nx.node_connectivity(G, flow_func=flow_func), errmsg
+            assert 0 == nx.edge_connectivity(G, flow_func=flow_func), errmsg
+
+
+def test_petersen():
+    G = nx.petersen_graph()
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        assert 3 == nx.node_connectivity(G, flow_func=flow_func), errmsg
+        assert 3 == nx.edge_connectivity(G, flow_func=flow_func), errmsg
+
+
+def test_tutte():
+    G = nx.tutte_graph()
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        assert 3 == nx.node_connectivity(G, flow_func=flow_func), errmsg
+        assert 3 == nx.edge_connectivity(G, flow_func=flow_func), errmsg
+
+
+def test_dodecahedral():
+    G = nx.dodecahedral_graph()
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        assert 3 == nx.node_connectivity(G, flow_func=flow_func), errmsg
+        assert 3 == nx.edge_connectivity(G, flow_func=flow_func), errmsg
+
+
+def test_octahedral():
+    G = nx.octahedral_graph()
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        assert 4 == nx.node_connectivity(G, flow_func=flow_func), errmsg
+        assert 4 == nx.edge_connectivity(G, flow_func=flow_func), errmsg
+
+
+def test_icosahedral():
+    G = nx.icosahedral_graph()
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        assert 5 == nx.node_connectivity(G, flow_func=flow_func), errmsg
+        assert 5 == nx.edge_connectivity(G, flow_func=flow_func), errmsg
+
+
+def test_missing_source():
+    G = nx.path_graph(4)
+    for flow_func in flow_funcs:
+        pytest.raises(
+            nx.NetworkXError, nx.node_connectivity, G, 10, 1, flow_func=flow_func
+        )
+
+
+def test_missing_target():
+    G = nx.path_graph(4)
+    for flow_func in flow_funcs:
+        pytest.raises(
+            nx.NetworkXError, nx.node_connectivity, G, 1, 10, flow_func=flow_func
+        )
+
+
+def test_edge_missing_source():
+    G = nx.path_graph(4)
+    for flow_func in flow_funcs:
+        pytest.raises(
+            nx.NetworkXError, nx.edge_connectivity, G, 10, 1, flow_func=flow_func
+        )
+
+
+def test_edge_missing_target():
+    G = nx.path_graph(4)
+    for flow_func in flow_funcs:
+        pytest.raises(
+            nx.NetworkXError, nx.edge_connectivity, G, 1, 10, flow_func=flow_func
+        )
+
+
+def test_not_weakly_connected():
+    G = nx.DiGraph()
+    nx.add_path(G, [1, 2, 3])
+    nx.add_path(G, [4, 5])
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        assert nx.node_connectivity(G) == 0, errmsg
+        assert nx.edge_connectivity(G) == 0, errmsg
+
+
+def test_not_connected():
+    G = nx.Graph()
+    nx.add_path(G, [1, 2, 3])
+    nx.add_path(G, [4, 5])
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        assert nx.node_connectivity(G) == 0, errmsg
+        assert nx.edge_connectivity(G) == 0, errmsg
+
+
+def test_directed_edge_connectivity():
+    G = nx.cycle_graph(10, create_using=nx.DiGraph())  # only one direction
+    D = nx.cycle_graph(10).to_directed()  # 2 reciprocal edges
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        assert 1 == nx.edge_connectivity(G, flow_func=flow_func), errmsg
+        assert 1 == local_edge_connectivity(G, 1, 4, flow_func=flow_func), errmsg
+        assert 1 == nx.edge_connectivity(G, 1, 4, flow_func=flow_func), errmsg
+        assert 2 == nx.edge_connectivity(D, flow_func=flow_func), errmsg
+        assert 2 == local_edge_connectivity(D, 1, 4, flow_func=flow_func), errmsg
+        assert 2 == nx.edge_connectivity(D, 1, 4, flow_func=flow_func), errmsg
+
+
+def test_cutoff():
+    G = nx.complete_graph(5)
+    for local_func in [local_edge_connectivity, local_node_connectivity]:
+        for flow_func in flow_funcs:
+            if flow_func is flow.preflow_push:
+                # cutoff is not supported by preflow_push
+                continue
+            for cutoff in [3, 2, 1]:
+                result = local_func(G, 0, 4, flow_func=flow_func, cutoff=cutoff)
+                assert cutoff == result, f"cutoff error in {flow_func.__name__}"
+
+
+def test_invalid_auxiliary():
+    G = nx.complete_graph(5)
+    pytest.raises(nx.NetworkXError, local_node_connectivity, G, 0, 3, auxiliary=G)
+
+
+def test_interface_only_source():
+    G = nx.complete_graph(5)
+    for interface_func in [nx.node_connectivity, nx.edge_connectivity]:
+        pytest.raises(nx.NetworkXError, interface_func, G, s=0)
+
+
+def test_interface_only_target():
+    G = nx.complete_graph(5)
+    for interface_func in [nx.node_connectivity, nx.edge_connectivity]:
+        pytest.raises(nx.NetworkXError, interface_func, G, t=3)
+
+
+def test_edge_connectivity_flow_vs_stoer_wagner():
+    graph_funcs = [nx.icosahedral_graph, nx.octahedral_graph, nx.dodecahedral_graph]
+    for graph_func in graph_funcs:
+        G = graph_func()
+        assert nx.stoer_wagner(G)[0] == nx.edge_connectivity(G)
+
+
+class TestAllPairsNodeConnectivity:
+    @classmethod
+    def setup_class(cls):
+        cls.path = nx.path_graph(7)
+        cls.directed_path = nx.path_graph(7, create_using=nx.DiGraph())
+        cls.cycle = nx.cycle_graph(7)
+        cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph())
+        cls.gnp = nx.gnp_random_graph(30, 0.1, seed=42)
+        cls.directed_gnp = nx.gnp_random_graph(30, 0.1, directed=True, seed=42)
+        cls.K20 = nx.complete_graph(20)
+        cls.K10 = nx.complete_graph(10)
+        cls.K5 = nx.complete_graph(5)
+        cls.G_list = [
+            cls.path,
+            cls.directed_path,
+            cls.cycle,
+            cls.directed_cycle,
+            cls.gnp,
+            cls.directed_gnp,
+            cls.K10,
+            cls.K5,
+            cls.K20,
+        ]
+
+    def test_cycles(self):
+        K_undir = nx.all_pairs_node_connectivity(self.cycle)
+        for source in K_undir:
+            for target, k in K_undir[source].items():
+                assert k == 2
+        K_dir = nx.all_pairs_node_connectivity(self.directed_cycle)
+        for source in K_dir:
+            for target, k in K_dir[source].items():
+                assert k == 1
+
+    def test_complete(self):
+        for G in [self.K10, self.K5, self.K20]:
+            K = nx.all_pairs_node_connectivity(G)
+            for source in K:
+                for target, k in K[source].items():
+                    assert k == len(G) - 1
+
+    def test_paths(self):
+        K_undir = nx.all_pairs_node_connectivity(self.path)
+        for source in K_undir:
+            for target, k in K_undir[source].items():
+                assert k == 1
+        K_dir = nx.all_pairs_node_connectivity(self.directed_path)
+        for source in K_dir:
+            for target, k in K_dir[source].items():
+                if source < target:
+                    assert k == 1
+                else:
+                    assert k == 0
+
+    def test_all_pairs_connectivity_nbunch(self):
+        G = nx.complete_graph(5)
+        nbunch = [0, 2, 3]
+        C = nx.all_pairs_node_connectivity(G, nbunch=nbunch)
+        assert len(C) == len(nbunch)
+
+    def test_all_pairs_connectivity_icosahedral(self):
+        G = nx.icosahedral_graph()
+        C = nx.all_pairs_node_connectivity(G)
+        assert all(5 == C[u][v] for u, v in itertools.combinations(G, 2))
+
+    def test_all_pairs_connectivity(self):
+        G = nx.Graph()
+        nodes = [0, 1, 2, 3]
+        nx.add_path(G, nodes)
+        A = {n: {} for n in G}
+        for u, v in itertools.combinations(nodes, 2):
+            A[u][v] = A[v][u] = nx.node_connectivity(G, u, v)
+        C = nx.all_pairs_node_connectivity(G)
+        assert sorted((k, sorted(v)) for k, v in A.items()) == sorted(
+            (k, sorted(v)) for k, v in C.items()
+        )
+
+    def test_all_pairs_connectivity_directed(self):
+        G = nx.DiGraph()
+        nodes = [0, 1, 2, 3]
+        nx.add_path(G, nodes)
+        A = {n: {} for n in G}
+        for u, v in itertools.permutations(nodes, 2):
+            A[u][v] = nx.node_connectivity(G, u, v)
+        C = nx.all_pairs_node_connectivity(G)
+        assert sorted((k, sorted(v)) for k, v in A.items()) == sorted(
+            (k, sorted(v)) for k, v in C.items()
+        )
+
+    def test_all_pairs_connectivity_nbunch_combinations(self):
+        G = nx.complete_graph(5)
+        nbunch = [0, 2, 3]
+        A = {n: {} for n in nbunch}
+        for u, v in itertools.combinations(nbunch, 2):
+            A[u][v] = A[v][u] = nx.node_connectivity(G, u, v)
+        C = nx.all_pairs_node_connectivity(G, nbunch=nbunch)
+        assert sorted((k, sorted(v)) for k, v in A.items()) == sorted(
+            (k, sorted(v)) for k, v in C.items()
+        )
+
+    def test_all_pairs_connectivity_nbunch_iter(self):
+        G = nx.complete_graph(5)
+        nbunch = [0, 2, 3]
+        A = {n: {} for n in nbunch}
+        for u, v in itertools.combinations(nbunch, 2):
+            A[u][v] = A[v][u] = nx.node_connectivity(G, u, v)
+        C = nx.all_pairs_node_connectivity(G, nbunch=iter(nbunch))
+        assert sorted((k, sorted(v)) for k, v in A.items()) == sorted(
+            (k, sorted(v)) for k, v in C.items()
+        )
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_cuts.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_cuts.py
new file mode 100644
index 00000000..7a485be3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_cuts.py
@@ -0,0 +1,309 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms import flow
+from networkx.algorithms.connectivity import minimum_st_edge_cut, minimum_st_node_cut
+from networkx.utils import arbitrary_element
+
+flow_funcs = [
+    flow.boykov_kolmogorov,
+    flow.dinitz,
+    flow.edmonds_karp,
+    flow.preflow_push,
+    flow.shortest_augmenting_path,
+]
+
+# Tests for node and edge cutsets
+
+
+def _generate_no_biconnected(max_attempts=50):
+    attempts = 0
+    while True:
+        G = nx.fast_gnp_random_graph(100, 0.0575, seed=42)
+        if nx.is_connected(G) and not nx.is_biconnected(G):
+            attempts = 0
+            yield G
+        else:
+            if attempts >= max_attempts:
+                msg = f"Tried {attempts} times: no suitable Graph."
+                raise Exception(msg)
+            else:
+                attempts += 1
+
+
+def test_articulation_points():
+    Ggen = _generate_no_biconnected()
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        for i in range(1):  # change 1 to 3 or more for more realizations.
+            G = next(Ggen)
+            cut = nx.minimum_node_cut(G, flow_func=flow_func)
+            assert len(cut) == 1, errmsg
+            assert cut.pop() in set(nx.articulation_points(G)), errmsg
+
+
+def test_brandes_erlebach_book():
+    # Figure 1 chapter 7: Connectivity
+    # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf
+    G = nx.Graph()
+    G.add_edges_from(
+        [
+            (1, 2),
+            (1, 3),
+            (1, 4),
+            (1, 5),
+            (2, 3),
+            (2, 6),
+            (3, 4),
+            (3, 6),
+            (4, 6),
+            (4, 7),
+            (5, 7),
+            (6, 8),
+            (6, 9),
+            (7, 8),
+            (7, 10),
+            (8, 11),
+            (9, 10),
+            (9, 11),
+            (10, 11),
+        ]
+    )
+    for flow_func in flow_funcs:
+        kwargs = {"flow_func": flow_func}
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        # edge cutsets
+        assert 3 == len(nx.minimum_edge_cut(G, 1, 11, **kwargs)), errmsg
+        edge_cut = nx.minimum_edge_cut(G, **kwargs)
+        # Node 5 has only two edges
+        assert 2 == len(edge_cut), errmsg
+        H = G.copy()
+        H.remove_edges_from(edge_cut)
+        assert not nx.is_connected(H), errmsg
+        # node cuts
+        assert {6, 7} == minimum_st_node_cut(G, 1, 11, **kwargs), errmsg
+        assert {6, 7} == nx.minimum_node_cut(G, 1, 11, **kwargs), errmsg
+        node_cut = nx.minimum_node_cut(G, **kwargs)
+        assert 2 == len(node_cut), errmsg
+        H = G.copy()
+        H.remove_nodes_from(node_cut)
+        assert not nx.is_connected(H), errmsg
+
+
+def test_white_harary_paper():
+    # Figure 1b white and harary (2001)
+    # https://doi.org/10.1111/0081-1750.00098
+    # A graph with high adhesion (edge connectivity) and low cohesion
+    # (node connectivity)
+    G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4))
+    G.remove_node(7)
+    for i in range(4, 7):
+        G.add_edge(0, i)
+    G = nx.disjoint_union(G, nx.complete_graph(4))
+    G.remove_node(G.order() - 1)
+    for i in range(7, 10):
+        G.add_edge(0, i)
+    for flow_func in flow_funcs:
+        kwargs = {"flow_func": flow_func}
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        # edge cuts
+        edge_cut = nx.minimum_edge_cut(G, **kwargs)
+        assert 3 == len(edge_cut), errmsg
+        H = G.copy()
+        H.remove_edges_from(edge_cut)
+        assert not nx.is_connected(H), errmsg
+        # node cuts
+        node_cut = nx.minimum_node_cut(G, **kwargs)
+        assert {0} == node_cut, errmsg
+        H = G.copy()
+        H.remove_nodes_from(node_cut)
+        assert not nx.is_connected(H), errmsg
+
+
+def test_petersen_cutset():
+    G = nx.petersen_graph()
+    for flow_func in flow_funcs:
+        kwargs = {"flow_func": flow_func}
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        # edge cuts
+        edge_cut = nx.minimum_edge_cut(G, **kwargs)
+        assert 3 == len(edge_cut), errmsg
+        H = G.copy()
+        H.remove_edges_from(edge_cut)
+        assert not nx.is_connected(H), errmsg
+        # node cuts
+        node_cut = nx.minimum_node_cut(G, **kwargs)
+        assert 3 == len(node_cut), errmsg
+        H = G.copy()
+        H.remove_nodes_from(node_cut)
+        assert not nx.is_connected(H), errmsg
+
+
+def test_octahedral_cutset():
+    G = nx.octahedral_graph()
+    for flow_func in flow_funcs:
+        kwargs = {"flow_func": flow_func}
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        # edge cuts
+        edge_cut = nx.minimum_edge_cut(G, **kwargs)
+        assert 4 == len(edge_cut), errmsg
+        H = G.copy()
+        H.remove_edges_from(edge_cut)
+        assert not nx.is_connected(H), errmsg
+        # node cuts
+        node_cut = nx.minimum_node_cut(G, **kwargs)
+        assert 4 == len(node_cut), errmsg
+        H = G.copy()
+        H.remove_nodes_from(node_cut)
+        assert not nx.is_connected(H), errmsg
+
+
+def test_icosahedral_cutset():
+    G = nx.icosahedral_graph()
+    for flow_func in flow_funcs:
+        kwargs = {"flow_func": flow_func}
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        # edge cuts
+        edge_cut = nx.minimum_edge_cut(G, **kwargs)
+        assert 5 == len(edge_cut), errmsg
+        H = G.copy()
+        H.remove_edges_from(edge_cut)
+        assert not nx.is_connected(H), errmsg
+        # node cuts
+        node_cut = nx.minimum_node_cut(G, **kwargs)
+        assert 5 == len(node_cut), errmsg
+        H = G.copy()
+        H.remove_nodes_from(node_cut)
+        assert not nx.is_connected(H), errmsg
+
+
+def test_node_cutset_exception():
+    G = nx.Graph()
+    G.add_edges_from([(1, 2), (3, 4)])
+    for flow_func in flow_funcs:
+        pytest.raises(nx.NetworkXError, nx.minimum_node_cut, G, flow_func=flow_func)
+
+
+def test_node_cutset_random_graphs():
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        for i in range(3):
+            G = nx.fast_gnp_random_graph(50, 0.25, seed=42)
+            if not nx.is_connected(G):
+                ccs = iter(nx.connected_components(G))
+                start = arbitrary_element(next(ccs))
+                G.add_edges_from((start, arbitrary_element(c)) for c in ccs)
+            cutset = nx.minimum_node_cut(G, flow_func=flow_func)
+            assert nx.node_connectivity(G) == len(cutset), errmsg
+            G.remove_nodes_from(cutset)
+            assert not nx.is_connected(G), errmsg
+
+
+def test_edge_cutset_random_graphs():
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        for i in range(3):
+            G = nx.fast_gnp_random_graph(50, 0.25, seed=42)
+            if not nx.is_connected(G):
+                ccs = iter(nx.connected_components(G))
+                start = arbitrary_element(next(ccs))
+                G.add_edges_from((start, arbitrary_element(c)) for c in ccs)
+            cutset = nx.minimum_edge_cut(G, flow_func=flow_func)
+            assert nx.edge_connectivity(G) == len(cutset), errmsg
+            G.remove_edges_from(cutset)
+            assert not nx.is_connected(G), errmsg
+
+
+def test_empty_graphs():
+    G = nx.Graph()
+    D = nx.DiGraph()
+    for interface_func in [nx.minimum_node_cut, nx.minimum_edge_cut]:
+        for flow_func in flow_funcs:
+            pytest.raises(
+                nx.NetworkXPointlessConcept, interface_func, G, flow_func=flow_func
+            )
+            pytest.raises(
+                nx.NetworkXPointlessConcept, interface_func, D, flow_func=flow_func
+            )
+
+
+def test_unbounded():
+    G = nx.complete_graph(5)
+    for flow_func in flow_funcs:
+        assert 4 == len(minimum_st_edge_cut(G, 1, 4, flow_func=flow_func))
+
+
+def test_missing_source():
+    G = nx.path_graph(4)
+    for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]:
+        for flow_func in flow_funcs:
+            pytest.raises(
+                nx.NetworkXError, interface_func, G, 10, 1, flow_func=flow_func
+            )
+
+
+def test_missing_target():
+    G = nx.path_graph(4)
+    for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]:
+        for flow_func in flow_funcs:
+            pytest.raises(
+                nx.NetworkXError, interface_func, G, 1, 10, flow_func=flow_func
+            )
+
+
+def test_not_weakly_connected():
+    G = nx.DiGraph()
+    nx.add_path(G, [1, 2, 3])
+    nx.add_path(G, [4, 5])
+    for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]:
+        for flow_func in flow_funcs:
+            pytest.raises(nx.NetworkXError, interface_func, G, flow_func=flow_func)
+
+
+def test_not_connected():
+    G = nx.Graph()
+    nx.add_path(G, [1, 2, 3])
+    nx.add_path(G, [4, 5])
+    for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]:
+        for flow_func in flow_funcs:
+            pytest.raises(nx.NetworkXError, interface_func, G, flow_func=flow_func)
+
+
+def tests_min_cut_complete():
+    G = nx.complete_graph(5)
+    for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]:
+        for flow_func in flow_funcs:
+            assert 4 == len(interface_func(G, flow_func=flow_func))
+
+
+def tests_min_cut_complete_directed():
+    G = nx.complete_graph(5)
+    G = G.to_directed()
+    for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]:
+        for flow_func in flow_funcs:
+            assert 4 == len(interface_func(G, flow_func=flow_func))
+
+
+def tests_minimum_st_node_cut():
+    G = nx.Graph()
+    G.add_nodes_from([0, 1, 2, 3, 7, 8, 11, 12])
+    G.add_edges_from([(7, 11), (1, 11), (1, 12), (12, 8), (0, 1)])
+    nodelist = minimum_st_node_cut(G, 7, 11)
+    assert nodelist == {}
+
+
+def test_invalid_auxiliary():
+    G = nx.complete_graph(5)
+    pytest.raises(nx.NetworkXError, minimum_st_node_cut, G, 0, 3, auxiliary=G)
+
+
+def test_interface_only_source():
+    G = nx.complete_graph(5)
+    for interface_func in [nx.minimum_node_cut, nx.minimum_edge_cut]:
+        pytest.raises(nx.NetworkXError, interface_func, G, s=0)
+
+
+def test_interface_only_target():
+    G = nx.complete_graph(5)
+    for interface_func in [nx.minimum_node_cut, nx.minimum_edge_cut]:
+        pytest.raises(nx.NetworkXError, interface_func, G, t=3)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_disjoint_paths.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_disjoint_paths.py
new file mode 100644
index 00000000..0c0fad9f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_disjoint_paths.py
@@ -0,0 +1,249 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms import flow
+from networkx.utils import pairwise
+
+flow_funcs = [
+    flow.boykov_kolmogorov,
+    flow.edmonds_karp,
+    flow.dinitz,
+    flow.preflow_push,
+    flow.shortest_augmenting_path,
+]
+
+
+def is_path(G, path):
+    return all(v in G[u] for u, v in pairwise(path))
+
+
+def are_edge_disjoint_paths(G, paths):
+    if not paths:
+        return False
+    for path in paths:
+        assert is_path(G, path)
+    paths_edges = [list(pairwise(p)) for p in paths]
+    num_of_edges = sum(len(e) for e in paths_edges)
+    num_unique_edges = len(set.union(*[set(es) for es in paths_edges]))
+    if num_of_edges == num_unique_edges:
+        return True
+    return False
+
+
+def are_node_disjoint_paths(G, paths):
+    if not paths:
+        return False
+    for path in paths:
+        assert is_path(G, path)
+    # first and last nodes are source and target
+    st = {paths[0][0], paths[0][-1]}
+    num_of_nodes = len([n for path in paths for n in path if n not in st])
+    num_unique_nodes = len({n for path in paths for n in path if n not in st})
+    if num_of_nodes == num_unique_nodes:
+        return True
+    return False
+
+
+def test_graph_from_pr_2053():
+    G = nx.Graph()
+    G.add_edges_from(
+        [
+            ("A", "B"),
+            ("A", "D"),
+            ("A", "F"),
+            ("A", "G"),
+            ("B", "C"),
+            ("B", "D"),
+            ("B", "G"),
+            ("C", "D"),
+            ("C", "E"),
+            ("C", "Z"),
+            ("D", "E"),
+            ("D", "F"),
+            ("E", "F"),
+            ("E", "Z"),
+            ("F", "Z"),
+            ("G", "Z"),
+        ]
+    )
+    for flow_func in flow_funcs:
+        kwargs = {"flow_func": flow_func}
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        # edge disjoint paths
+        edge_paths = list(nx.edge_disjoint_paths(G, "A", "Z", **kwargs))
+        assert are_edge_disjoint_paths(G, edge_paths), errmsg
+        assert nx.edge_connectivity(G, "A", "Z") == len(edge_paths), errmsg
+        # node disjoint paths
+        node_paths = list(nx.node_disjoint_paths(G, "A", "Z", **kwargs))
+        assert are_node_disjoint_paths(G, node_paths), errmsg
+        assert nx.node_connectivity(G, "A", "Z") == len(node_paths), errmsg
+
+
+def test_florentine_families():
+    G = nx.florentine_families_graph()
+    for flow_func in flow_funcs:
+        kwargs = {"flow_func": flow_func}
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        # edge disjoint paths
+        edge_dpaths = list(nx.edge_disjoint_paths(G, "Medici", "Strozzi", **kwargs))
+        assert are_edge_disjoint_paths(G, edge_dpaths), errmsg
+        assert nx.edge_connectivity(G, "Medici", "Strozzi") == len(edge_dpaths), errmsg
+        # node disjoint paths
+        node_dpaths = list(nx.node_disjoint_paths(G, "Medici", "Strozzi", **kwargs))
+        assert are_node_disjoint_paths(G, node_dpaths), errmsg
+        assert nx.node_connectivity(G, "Medici", "Strozzi") == len(node_dpaths), errmsg
+
+
+def test_karate():
+    G = nx.karate_club_graph()
+    for flow_func in flow_funcs:
+        kwargs = {"flow_func": flow_func}
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        # edge disjoint paths
+        edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 33, **kwargs))
+        assert are_edge_disjoint_paths(G, edge_dpaths), errmsg
+        assert nx.edge_connectivity(G, 0, 33) == len(edge_dpaths), errmsg
+        # node disjoint paths
+        node_dpaths = list(nx.node_disjoint_paths(G, 0, 33, **kwargs))
+        assert are_node_disjoint_paths(G, node_dpaths), errmsg
+        assert nx.node_connectivity(G, 0, 33) == len(node_dpaths), errmsg
+
+
+def test_petersen_disjoint_paths():
+    G = nx.petersen_graph()
+    for flow_func in flow_funcs:
+        kwargs = {"flow_func": flow_func}
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        # edge disjoint paths
+        edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 6, **kwargs))
+        assert are_edge_disjoint_paths(G, edge_dpaths), errmsg
+        assert 3 == len(edge_dpaths), errmsg
+        # node disjoint paths
+        node_dpaths = list(nx.node_disjoint_paths(G, 0, 6, **kwargs))
+        assert are_node_disjoint_paths(G, node_dpaths), errmsg
+        assert 3 == len(node_dpaths), errmsg
+
+
+def test_octahedral_disjoint_paths():
+    G = nx.octahedral_graph()
+    for flow_func in flow_funcs:
+        kwargs = {"flow_func": flow_func}
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        # edge disjoint paths
+        edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 5, **kwargs))
+        assert are_edge_disjoint_paths(G, edge_dpaths), errmsg
+        assert 4 == len(edge_dpaths), errmsg
+        # node disjoint paths
+        node_dpaths = list(nx.node_disjoint_paths(G, 0, 5, **kwargs))
+        assert are_node_disjoint_paths(G, node_dpaths), errmsg
+        assert 4 == len(node_dpaths), errmsg
+
+
+def test_icosahedral_disjoint_paths():
+    G = nx.icosahedral_graph()
+    for flow_func in flow_funcs:
+        kwargs = {"flow_func": flow_func}
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        # edge disjoint paths
+        edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 6, **kwargs))
+        assert are_edge_disjoint_paths(G, edge_dpaths), errmsg
+        assert 5 == len(edge_dpaths), errmsg
+        # node disjoint paths
+        node_dpaths = list(nx.node_disjoint_paths(G, 0, 6, **kwargs))
+        assert are_node_disjoint_paths(G, node_dpaths), errmsg
+        assert 5 == len(node_dpaths), errmsg
+
+
+def test_cutoff_disjoint_paths():
+    G = nx.icosahedral_graph()
+    for flow_func in flow_funcs:
+        kwargs = {"flow_func": flow_func}
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        for cutoff in [2, 4]:
+            kwargs["cutoff"] = cutoff
+            # edge disjoint paths
+            edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 6, **kwargs))
+            assert are_edge_disjoint_paths(G, edge_dpaths), errmsg
+            assert cutoff == len(edge_dpaths), errmsg
+            # node disjoint paths
+            node_dpaths = list(nx.node_disjoint_paths(G, 0, 6, **kwargs))
+            assert are_node_disjoint_paths(G, node_dpaths), errmsg
+            assert cutoff == len(node_dpaths), errmsg
+
+
+def test_missing_source_edge_paths():
+    with pytest.raises(nx.NetworkXError):
+        G = nx.path_graph(4)
+        list(nx.edge_disjoint_paths(G, 10, 1))
+
+
+def test_missing_source_node_paths():
+    with pytest.raises(nx.NetworkXError):
+        G = nx.path_graph(4)
+        list(nx.node_disjoint_paths(G, 10, 1))
+
+
+def test_missing_target_edge_paths():
+    with pytest.raises(nx.NetworkXError):
+        G = nx.path_graph(4)
+        list(nx.edge_disjoint_paths(G, 1, 10))
+
+
+def test_missing_target_node_paths():
+    with pytest.raises(nx.NetworkXError):
+        G = nx.path_graph(4)
+        list(nx.node_disjoint_paths(G, 1, 10))
+
+
+def test_not_weakly_connected_edges():
+    with pytest.raises(nx.NetworkXNoPath):
+        G = nx.DiGraph()
+        nx.add_path(G, [1, 2, 3])
+        nx.add_path(G, [4, 5])
+        list(nx.edge_disjoint_paths(G, 1, 5))
+
+
+def test_not_weakly_connected_nodes():
+    with pytest.raises(nx.NetworkXNoPath):
+        G = nx.DiGraph()
+        nx.add_path(G, [1, 2, 3])
+        nx.add_path(G, [4, 5])
+        list(nx.node_disjoint_paths(G, 1, 5))
+
+
+def test_not_connected_edges():
+    with pytest.raises(nx.NetworkXNoPath):
+        G = nx.Graph()
+        nx.add_path(G, [1, 2, 3])
+        nx.add_path(G, [4, 5])
+        list(nx.edge_disjoint_paths(G, 1, 5))
+
+
+def test_not_connected_nodes():
+    with pytest.raises(nx.NetworkXNoPath):
+        G = nx.Graph()
+        nx.add_path(G, [1, 2, 3])
+        nx.add_path(G, [4, 5])
+        list(nx.node_disjoint_paths(G, 1, 5))
+
+
+def test_isolated_edges():
+    with pytest.raises(nx.NetworkXNoPath):
+        G = nx.Graph()
+        G.add_node(1)
+        nx.add_path(G, [4, 5])
+        list(nx.edge_disjoint_paths(G, 1, 5))
+
+
+def test_isolated_nodes():
+    with pytest.raises(nx.NetworkXNoPath):
+        G = nx.Graph()
+        G.add_node(1)
+        nx.add_path(G, [4, 5])
+        list(nx.node_disjoint_paths(G, 1, 5))
+
+
+def test_invalid_auxiliary():
+    with pytest.raises(nx.NetworkXError):
+        G = nx.complete_graph(5)
+        list(nx.node_disjoint_paths(G, 0, 3, auxiliary=G))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_augmentation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_augmentation.py
new file mode 100644
index 00000000..e1d92d99
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_augmentation.py
@@ -0,0 +1,502 @@
+import itertools as it
+import random
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms.connectivity import k_edge_augmentation
+from networkx.algorithms.connectivity.edge_augmentation import (
+    _unpack_available_edges,
+    collapse,
+    complement_edges,
+    is_k_edge_connected,
+    is_locally_k_edge_connected,
+)
+from networkx.utils import pairwise
+
+# This should be set to the largest k for which an efficient algorithm is
+# explicitly defined.
+MAX_EFFICIENT_K = 2
+
+
+def tarjan_bridge_graph():
+    # graph from tarjan paper
+    # RE Tarjan - "A note on finding the bridges of a graph"
+    # Information Processing Letters, 1974 - Elsevier
+    # doi:10.1016/0020-0190(74)90003-9.
+    # define 2-connected components and bridges
+    ccs = [
+        (1, 2, 4, 3, 1, 4),
+        (5, 6, 7, 5),
+        (8, 9, 10, 8),
+        (17, 18, 16, 15, 17),
+        (11, 12, 14, 13, 11, 14),
+    ]
+    bridges = [(4, 8), (3, 5), (3, 17)]
+    G = nx.Graph(it.chain(*(pairwise(path) for path in ccs + bridges)))
+    return G
+
+
+def test_weight_key():
+    G = nx.Graph()
+    G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9])
+    G.add_edges_from([(3, 8), (1, 2), (2, 3)])
+    impossible = {(3, 6), (3, 9)}
+    rng = random.Random(0)
+    avail_uv = list(set(complement_edges(G)) - impossible)
+    avail = [(u, v, {"cost": rng.random()}) for u, v in avail_uv]
+
+    _augment_and_check(G, k=1)
+    _augment_and_check(G, k=1, avail=avail_uv)
+    _augment_and_check(G, k=1, avail=avail, weight="cost")
+
+    _check_augmentations(G, avail, weight="cost")
+
+
+def test_is_locally_k_edge_connected_exceptions():
+    pytest.raises(nx.NetworkXNotImplemented, is_k_edge_connected, nx.DiGraph(), k=0)
+    pytest.raises(nx.NetworkXNotImplemented, is_k_edge_connected, nx.MultiGraph(), k=0)
+    pytest.raises(ValueError, is_k_edge_connected, nx.Graph(), k=0)
+
+
+def test_is_k_edge_connected():
+    G = nx.barbell_graph(10, 0)
+    assert is_k_edge_connected(G, k=1)
+    assert not is_k_edge_connected(G, k=2)
+
+    G = nx.Graph()
+    G.add_nodes_from([5, 15])
+    assert not is_k_edge_connected(G, k=1)
+    assert not is_k_edge_connected(G, k=2)
+
+    G = nx.complete_graph(5)
+    assert is_k_edge_connected(G, k=1)
+    assert is_k_edge_connected(G, k=2)
+    assert is_k_edge_connected(G, k=3)
+    assert is_k_edge_connected(G, k=4)
+
+    G = nx.compose(nx.complete_graph([0, 1, 2]), nx.complete_graph([3, 4, 5]))
+    assert not is_k_edge_connected(G, k=1)
+    assert not is_k_edge_connected(G, k=2)
+    assert not is_k_edge_connected(G, k=3)
+
+
+def test_is_k_edge_connected_exceptions():
+    pytest.raises(
+        nx.NetworkXNotImplemented, is_locally_k_edge_connected, nx.DiGraph(), 1, 2, k=0
+    )
+    pytest.raises(
+        nx.NetworkXNotImplemented,
+        is_locally_k_edge_connected,
+        nx.MultiGraph(),
+        1,
+        2,
+        k=0,
+    )
+    pytest.raises(ValueError, is_locally_k_edge_connected, nx.Graph(), 1, 2, k=0)
+
+
+def test_is_locally_k_edge_connected():
+    G = nx.barbell_graph(10, 0)
+    assert is_locally_k_edge_connected(G, 5, 15, k=1)
+    assert not is_locally_k_edge_connected(G, 5, 15, k=2)
+
+    G = nx.Graph()
+    G.add_nodes_from([5, 15])
+    assert not is_locally_k_edge_connected(G, 5, 15, k=2)
+
+
+def test_null_graph():
+    G = nx.Graph()
+    _check_augmentations(G, max_k=MAX_EFFICIENT_K + 2)
+
+
+def test_cliques():
+    for n in range(1, 10):
+        G = nx.complete_graph(n)
+        _check_augmentations(G, max_k=MAX_EFFICIENT_K + 2)
+
+
+def test_clique_and_node():
+    for n in range(1, 10):
+        G = nx.complete_graph(n)
+        G.add_node(n + 1)
+        _check_augmentations(G, max_k=MAX_EFFICIENT_K + 2)
+
+
+def test_point_graph():
+    G = nx.Graph()
+    G.add_node(1)
+    _check_augmentations(G, max_k=MAX_EFFICIENT_K + 2)
+
+
+def test_edgeless_graph():
+    G = nx.Graph()
+    G.add_nodes_from([1, 2, 3, 4])
+    _check_augmentations(G)
+
+
+def test_invalid_k():
+    G = nx.Graph()
+    pytest.raises(ValueError, list, k_edge_augmentation(G, k=-1))
+    pytest.raises(ValueError, list, k_edge_augmentation(G, k=0))
+
+
+def test_unfeasible():
+    G = tarjan_bridge_graph()
+    pytest.raises(nx.NetworkXUnfeasible, list, k_edge_augmentation(G, k=1, avail=[]))
+
+    pytest.raises(nx.NetworkXUnfeasible, list, k_edge_augmentation(G, k=2, avail=[]))
+
+    pytest.raises(
+        nx.NetworkXUnfeasible, list, k_edge_augmentation(G, k=2, avail=[(7, 9)])
+    )
+
+    # partial solutions should not error if real solutions are infeasible
+    aug_edges = list(k_edge_augmentation(G, k=2, avail=[(7, 9)], partial=True))
+    assert aug_edges == [(7, 9)]
+
+    _check_augmentations(G, avail=[], max_k=MAX_EFFICIENT_K + 2)
+
+    _check_augmentations(G, avail=[(7, 9)], max_k=MAX_EFFICIENT_K + 2)
+
+
+def test_tarjan():
+    G = tarjan_bridge_graph()
+
+    aug_edges = set(_augment_and_check(G, k=2)[0])
+    print(f"aug_edges = {aug_edges!r}")
+    # can't assert edge exactly equality due to non-determinant edge order
+    # but we do know the size of the solution must be 3
+    assert len(aug_edges) == 3
+
+    avail = [
+        (9, 7),
+        (8, 5),
+        (2, 10),
+        (6, 13),
+        (11, 18),
+        (1, 17),
+        (2, 3),
+        (16, 17),
+        (18, 14),
+        (15, 14),
+    ]
+    aug_edges = set(_augment_and_check(G, avail=avail, k=2)[0])
+
+    # Can't assert exact length since approximation depends on the order of a
+    # dict traversal.
+    assert len(aug_edges) <= 3 * 2
+
+    _check_augmentations(G, avail)
+
+
+def test_configuration():
+    # seeds = [2718183590, 2470619828, 1694705158, 3001036531, 2401251497]
+    seeds = [1001, 1002, 1003, 1004]
+    for seed in seeds:
+        deg_seq = nx.random_powerlaw_tree_sequence(20, seed=seed, tries=5000)
+        G = nx.Graph(nx.configuration_model(deg_seq, seed=seed))
+        G.remove_edges_from(nx.selfloop_edges(G))
+        _check_augmentations(G)
+
+
+def test_shell():
+    # seeds = [2057382236, 3331169846, 1840105863, 476020778, 2247498425]
+    seeds = [18]
+    for seed in seeds:
+        constructor = [(12, 70, 0.8), (15, 40, 0.6)]
+        G = nx.random_shell_graph(constructor, seed=seed)
+        _check_augmentations(G)
+
+
+def test_karate():
+    G = nx.karate_club_graph()
+    _check_augmentations(G)
+
+
+def test_star():
+    G = nx.star_graph(3)
+    _check_augmentations(G)
+
+    G = nx.star_graph(5)
+    _check_augmentations(G)
+
+    G = nx.star_graph(10)
+    _check_augmentations(G)
+
+
+def test_barbell():
+    G = nx.barbell_graph(5, 0)
+    _check_augmentations(G)
+
+    G = nx.barbell_graph(5, 2)
+    _check_augmentations(G)
+
+    G = nx.barbell_graph(5, 3)
+    _check_augmentations(G)
+
+    G = nx.barbell_graph(5, 4)
+    _check_augmentations(G)
+
+
+def test_bridge():
+    G = nx.Graph([(2393, 2257), (2393, 2685), (2685, 2257), (1758, 2257)])
+    _check_augmentations(G)
+
+
+def test_gnp_augmentation():
+    rng = random.Random(0)
+    G = nx.gnp_random_graph(30, 0.005, seed=0)
+    # Randomly make edges available
+    avail = {
+        (u, v): 1 + rng.random() for u, v in complement_edges(G) if rng.random() < 0.25
+    }
+    _check_augmentations(G, avail)
+
+
+def _assert_solution_properties(G, aug_edges, avail_dict=None):
+    """Checks that aug_edges are consistently formatted"""
+    if avail_dict is not None:
+        assert all(
+            e in avail_dict for e in aug_edges
+        ), "when avail is specified aug-edges should be in avail"
+
+    unique_aug = set(map(tuple, map(sorted, aug_edges)))
+    unique_aug = list(map(tuple, map(sorted, aug_edges)))
+    assert len(aug_edges) == len(unique_aug), "edges should be unique"
+
+    assert not any(u == v for u, v in unique_aug), "should be no self-edges"
+
+    assert not any(
+        G.has_edge(u, v) for u, v in unique_aug
+    ), "aug edges and G.edges should be disjoint"
+
+
+def _augment_and_check(
+    G, k, avail=None, weight=None, verbose=False, orig_k=None, max_aug_k=None
+):
+    """
+    Does one specific augmentation and checks for properties of the result
+    """
+    if orig_k is None:
+        try:
+            orig_k = nx.edge_connectivity(G)
+        except nx.NetworkXPointlessConcept:
+            orig_k = 0
+    info = {}
+    try:
+        if avail is not None:
+            # ensure avail is in dict form
+            avail_dict = dict(zip(*_unpack_available_edges(avail, weight=weight)))
+        else:
+            avail_dict = None
+        try:
+            # Find the augmentation if possible
+            generator = nx.k_edge_augmentation(G, k=k, weight=weight, avail=avail)
+            assert not isinstance(generator, list), "should always return an iter"
+            aug_edges = []
+            for edge in generator:
+                aug_edges.append(edge)
+        except nx.NetworkXUnfeasible:
+            infeasible = True
+            info["infeasible"] = True
+            assert len(aug_edges) == 0, "should not generate anything if unfeasible"
+
+            if avail is None:
+                n_nodes = G.number_of_nodes()
+                assert n_nodes <= k, (
+                    "unconstrained cases are only unfeasible if |V| <= k. "
+                    f"Got |V|={n_nodes} and k={k}"
+                )
+            else:
+                if max_aug_k is None:
+                    G_aug_all = G.copy()
+                    G_aug_all.add_edges_from(avail_dict.keys())
+                    try:
+                        max_aug_k = nx.edge_connectivity(G_aug_all)
+                    except nx.NetworkXPointlessConcept:
+                        max_aug_k = 0
+
+                assert max_aug_k < k, (
+                    "avail should only be unfeasible if using all edges "
+                    "does not achieve k-edge-connectivity"
+                )
+
+            # Test for a partial solution
+            partial_edges = list(
+                nx.k_edge_augmentation(G, k=k, weight=weight, partial=True, avail=avail)
+            )
+
+            info["n_partial_edges"] = len(partial_edges)
+
+            if avail_dict is None:
+                assert set(partial_edges) == set(
+                    complement_edges(G)
+                ), "unweighted partial solutions should be the complement"
+            elif len(avail_dict) > 0:
+                H = G.copy()
+
+                # Find the partial / full augmented connectivity
+                H.add_edges_from(partial_edges)
+                partial_conn = nx.edge_connectivity(H)
+
+                H.add_edges_from(set(avail_dict.keys()))
+                full_conn = nx.edge_connectivity(H)
+
+                # Full connectivity should be no better than our partial
+                # solution.
+                assert (
+                    partial_conn == full_conn
+                ), "adding more edges should not increase k-conn"
+
+            # Find the new edge-connectivity after adding the augmenting edges
+            aug_edges = partial_edges
+        else:
+            infeasible = False
+
+        # Find the weight of the augmentation
+        num_edges = len(aug_edges)
+        if avail is not None:
+            total_weight = sum(avail_dict[e] for e in aug_edges)
+        else:
+            total_weight = num_edges
+
+        info["total_weight"] = total_weight
+        info["num_edges"] = num_edges
+
+        # Find the new edge-connectivity after adding the augmenting edges
+        G_aug = G.copy()
+        G_aug.add_edges_from(aug_edges)
+        try:
+            aug_k = nx.edge_connectivity(G_aug)
+        except nx.NetworkXPointlessConcept:
+            aug_k = 0
+        info["aug_k"] = aug_k
+
+        # Do checks
+        if not infeasible and orig_k < k:
+            assert info["aug_k"] >= k, f"connectivity should increase to k={k} or more"
+
+        assert info["aug_k"] >= orig_k, "augmenting should never reduce connectivity"
+
+        _assert_solution_properties(G, aug_edges, avail_dict)
+
+    except Exception:
+        info["failed"] = True
+        print(f"edges = {list(G.edges())}")
+        print(f"nodes = {list(G.nodes())}")
+        print(f"aug_edges = {list(aug_edges)}")
+        print(f"info  = {info}")
+        raise
+    else:
+        if verbose:
+            print(f"info  = {info}")
+
+    if infeasible:
+        aug_edges = None
+    return aug_edges, info
+
+
+def _check_augmentations(G, avail=None, max_k=None, weight=None, verbose=False):
+    """Helper to check weighted/unweighted cases with multiple values of k"""
+    # Using all available edges, find the maximum edge-connectivity
+    try:
+        orig_k = nx.edge_connectivity(G)
+    except nx.NetworkXPointlessConcept:
+        orig_k = 0
+
+    if avail is not None:
+        all_aug_edges = _unpack_available_edges(avail, weight=weight)[0]
+        G_aug_all = G.copy()
+        G_aug_all.add_edges_from(all_aug_edges)
+        try:
+            max_aug_k = nx.edge_connectivity(G_aug_all)
+        except nx.NetworkXPointlessConcept:
+            max_aug_k = 0
+    else:
+        max_aug_k = G.number_of_nodes() - 1
+
+    if max_k is None:
+        max_k = min(4, max_aug_k)
+
+    avail_uniform = {e: 1 for e in complement_edges(G)}
+
+    if verbose:
+        print("\n=== CHECK_AUGMENTATION ===")
+        print(f"G.number_of_nodes = {G.number_of_nodes()!r}")
+        print(f"G.number_of_edges = {G.number_of_edges()!r}")
+        print(f"max_k = {max_k!r}")
+        print(f"max_aug_k = {max_aug_k!r}")
+        print(f"orig_k = {orig_k!r}")
+
+    # check augmentation for multiple values of k
+    for k in range(1, max_k + 1):
+        if verbose:
+            print("---------------")
+            print(f"Checking k = {k}")
+
+        # Check the unweighted version
+        if verbose:
+            print("unweighted case")
+        aug_edges1, info1 = _augment_and_check(G, k=k, verbose=verbose, orig_k=orig_k)
+
+        # Check that the weighted version with all available edges and uniform
+        # weights gives a similar solution to the unweighted case.
+        if verbose:
+            print("weighted uniform case")
+        aug_edges2, info2 = _augment_and_check(
+            G,
+            k=k,
+            avail=avail_uniform,
+            verbose=verbose,
+            orig_k=orig_k,
+            max_aug_k=G.number_of_nodes() - 1,
+        )
+
+        # Check the weighted version
+        if avail is not None:
+            if verbose:
+                print("weighted case")
+            aug_edges3, info3 = _augment_and_check(
+                G,
+                k=k,
+                avail=avail,
+                weight=weight,
+                verbose=verbose,
+                max_aug_k=max_aug_k,
+                orig_k=orig_k,
+            )
+
+        if aug_edges1 is not None:
+            # Check approximation ratios
+            if k == 1:
+                # when k=1, both solutions should be optimal
+                assert info2["total_weight"] == info1["total_weight"]
+            if k == 2:
+                # when k=2, the weighted version is an approximation
+                if orig_k == 0:
+                    # the approximation ratio is 3 if G is not connected
+                    assert info2["total_weight"] <= info1["total_weight"] * 3
+                else:
+                    # the approximation ratio is 2 if G is was connected
+                    assert info2["total_weight"] <= info1["total_weight"] * 2
+                _check_unconstrained_bridge_property(G, info1)
+
+
+def _check_unconstrained_bridge_property(G, info1):
+    # Check Theorem 5 from Eswaran and Tarjan. (1975) Augmentation problems
+    import math
+
+    bridge_ccs = list(nx.connectivity.bridge_components(G))
+    # condense G into an forest C
+    C = collapse(G, bridge_ccs)
+
+    p = len([n for n, d in C.degree() if d == 1])  # leafs
+    q = len([n for n, d in C.degree() if d == 0])  # isolated
+    if p + q > 1:
+        size_target = math.ceil(p / 2) + q
+        size_aug = info1["num_edges"]
+        assert (
+            size_aug == size_target
+        ), "augmentation size is different from what theory predicts"
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py
new file mode 100644
index 00000000..4a1f681a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py
@@ -0,0 +1,488 @@
+import itertools as it
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms.connectivity import EdgeComponentAuxGraph, bridge_components
+from networkx.algorithms.connectivity.edge_kcomponents import general_k_edge_subgraphs
+from networkx.utils import pairwise
+
+# ----------------
+# Helper functions
+# ----------------
+
+
+def fset(list_of_sets):
+    """allows == to be used for list of sets"""
+    return set(map(frozenset, list_of_sets))
+
+
+def _assert_subgraph_edge_connectivity(G, ccs_subgraph, k):
+    """
+    tests properties of k-edge-connected subgraphs
+
+    the actual edge connectivity should be no less than k unless the cc is a
+    single node.
+    """
+    for cc in ccs_subgraph:
+        C = G.subgraph(cc)
+        if len(cc) > 1:
+            connectivity = nx.edge_connectivity(C)
+            assert connectivity >= k
+
+
+def _memo_connectivity(G, u, v, memo):
+    edge = (u, v)
+    if edge in memo:
+        return memo[edge]
+    if not G.is_directed():
+        redge = (v, u)
+        if redge in memo:
+            return memo[redge]
+    memo[edge] = nx.edge_connectivity(G, *edge)
+    return memo[edge]
+
+
+def _all_pairs_connectivity(G, cc, k, memo):
+    # Brute force check
+    for u, v in it.combinations(cc, 2):
+        # Use a memoization dict to save on computation
+        connectivity = _memo_connectivity(G, u, v, memo)
+        if G.is_directed():
+            connectivity = min(connectivity, _memo_connectivity(G, v, u, memo))
+        assert connectivity >= k
+
+
+def _assert_local_cc_edge_connectivity(G, ccs_local, k, memo):
+    """
+    tests properties of k-edge-connected components
+
+    the local edge connectivity between each pair of nodes in the original
+    graph should be no less than k unless the cc is a single node.
+    """
+    for cc in ccs_local:
+        if len(cc) > 1:
+            # Strategy for testing a bit faster: If the subgraph has high edge
+            # connectivity then it must have local connectivity
+            C = G.subgraph(cc)
+            connectivity = nx.edge_connectivity(C)
+            if connectivity < k:
+                # Otherwise do the brute force (with memoization) check
+                _all_pairs_connectivity(G, cc, k, memo)
+
+
+# Helper function
+def _check_edge_connectivity(G):
+    """
+    Helper - generates all k-edge-components using the aux graph.  Checks the
+    both local and subgraph edge connectivity of each cc. Also checks that
+    alternate methods of computing the k-edge-ccs generate the same result.
+    """
+    # Construct the auxiliary graph that can be used to make each k-cc or k-sub
+    aux_graph = EdgeComponentAuxGraph.construct(G)
+
+    # memoize the local connectivity in this graph
+    memo = {}
+
+    for k in it.count(1):
+        # Test "local" k-edge-components and k-edge-subgraphs
+        ccs_local = fset(aux_graph.k_edge_components(k))
+        ccs_subgraph = fset(aux_graph.k_edge_subgraphs(k))
+
+        # Check connectivity properties that should be guaranteed by the
+        # algorithms.
+        _assert_local_cc_edge_connectivity(G, ccs_local, k, memo)
+        _assert_subgraph_edge_connectivity(G, ccs_subgraph, k)
+
+        if k == 1 or k == 2 and not G.is_directed():
+            assert (
+                ccs_local == ccs_subgraph
+            ), "Subgraphs and components should be the same when k == 1 or (k == 2 and not G.directed())"
+
+        if G.is_directed():
+            # Test special case methods are the same as the aux graph
+            if k == 1:
+                alt_sccs = fset(nx.strongly_connected_components(G))
+                assert alt_sccs == ccs_local, "k=1 failed alt"
+                assert alt_sccs == ccs_subgraph, "k=1 failed alt"
+        else:
+            # Test special case methods are the same as the aux graph
+            if k == 1:
+                alt_ccs = fset(nx.connected_components(G))
+                assert alt_ccs == ccs_local, "k=1 failed alt"
+                assert alt_ccs == ccs_subgraph, "k=1 failed alt"
+            elif k == 2:
+                alt_bridge_ccs = fset(bridge_components(G))
+                assert alt_bridge_ccs == ccs_local, "k=2 failed alt"
+                assert alt_bridge_ccs == ccs_subgraph, "k=2 failed alt"
+            # if new methods for k == 3 or k == 4 are implemented add them here
+
+        # Check the general subgraph method works by itself
+        alt_subgraph_ccs = fset(
+            [set(C.nodes()) for C in general_k_edge_subgraphs(G, k=k)]
+        )
+        assert alt_subgraph_ccs == ccs_subgraph, "alt subgraph method failed"
+
+        # Stop once k is larger than all special case methods
+        # and we cannot break down ccs any further.
+        if k > 2 and all(len(cc) == 1 for cc in ccs_local):
+            break
+
+
+# ----------------
+# Misc tests
+# ----------------
+
+
+def test_zero_k_exception():
+    G = nx.Graph()
+    # functions that return generators error immediately
+    pytest.raises(ValueError, nx.k_edge_components, G, k=0)
+    pytest.raises(ValueError, nx.k_edge_subgraphs, G, k=0)
+
+    # actual generators only error when you get the first item
+    aux_graph = EdgeComponentAuxGraph.construct(G)
+    pytest.raises(ValueError, list, aux_graph.k_edge_components(k=0))
+    pytest.raises(ValueError, list, aux_graph.k_edge_subgraphs(k=0))
+
+    pytest.raises(ValueError, list, general_k_edge_subgraphs(G, k=0))
+
+
+def test_empty_input():
+    G = nx.Graph()
+    assert [] == list(nx.k_edge_components(G, k=5))
+    assert [] == list(nx.k_edge_subgraphs(G, k=5))
+
+    G = nx.DiGraph()
+    assert [] == list(nx.k_edge_components(G, k=5))
+    assert [] == list(nx.k_edge_subgraphs(G, k=5))
+
+
+def test_not_implemented():
+    G = nx.MultiGraph()
+    pytest.raises(nx.NetworkXNotImplemented, EdgeComponentAuxGraph.construct, G)
+    pytest.raises(nx.NetworkXNotImplemented, nx.k_edge_components, G, k=2)
+    pytest.raises(nx.NetworkXNotImplemented, nx.k_edge_subgraphs, G, k=2)
+    with pytest.raises(nx.NetworkXNotImplemented):
+        next(bridge_components(G))
+    with pytest.raises(nx.NetworkXNotImplemented):
+        next(bridge_components(nx.DiGraph()))
+
+
+def test_general_k_edge_subgraph_quick_return():
+    # tests quick return optimization
+    G = nx.Graph()
+    G.add_node(0)
+    subgraphs = list(general_k_edge_subgraphs(G, k=1))
+    assert len(subgraphs) == 1
+    for subgraph in subgraphs:
+        assert subgraph.number_of_nodes() == 1
+
+    G.add_node(1)
+    subgraphs = list(general_k_edge_subgraphs(G, k=1))
+    assert len(subgraphs) == 2
+    for subgraph in subgraphs:
+        assert subgraph.number_of_nodes() == 1
+
+
+# ----------------
+# Undirected tests
+# ----------------
+
+
+def test_random_gnp():
+    # seeds = [1550709854, 1309423156, 4208992358, 2785630813, 1915069929]
+    seeds = [12, 13]
+
+    for seed in seeds:
+        G = nx.gnp_random_graph(20, 0.2, seed=seed)
+        _check_edge_connectivity(G)
+
+
+def test_configuration():
+    # seeds = [2718183590, 2470619828, 1694705158, 3001036531, 2401251497]
+    seeds = [14, 15]
+    for seed in seeds:
+        deg_seq = nx.random_powerlaw_tree_sequence(20, seed=seed, tries=5000)
+        G = nx.Graph(nx.configuration_model(deg_seq, seed=seed))
+        G.remove_edges_from(nx.selfloop_edges(G))
+        _check_edge_connectivity(G)
+
+
+def test_shell():
+    # seeds = [2057382236, 3331169846, 1840105863, 476020778, 2247498425]
+    seeds = [20]
+    for seed in seeds:
+        constructor = [(12, 70, 0.8), (15, 40, 0.6)]
+        G = nx.random_shell_graph(constructor, seed=seed)
+        _check_edge_connectivity(G)
+
+
+def test_karate():
+    G = nx.karate_club_graph()
+    _check_edge_connectivity(G)
+
+
+def test_tarjan_bridge():
+    # graph from tarjan paper
+    # RE Tarjan - "A note on finding the bridges of a graph"
+    # Information Processing Letters, 1974 - Elsevier
+    # doi:10.1016/0020-0190(74)90003-9.
+    # define 2-connected components and bridges
+    ccs = [
+        (1, 2, 4, 3, 1, 4),
+        (5, 6, 7, 5),
+        (8, 9, 10, 8),
+        (17, 18, 16, 15, 17),
+        (11, 12, 14, 13, 11, 14),
+    ]
+    bridges = [(4, 8), (3, 5), (3, 17)]
+    G = nx.Graph(it.chain(*(pairwise(path) for path in ccs + bridges)))
+    _check_edge_connectivity(G)
+
+
+def test_bridge_cc():
+    # define 2-connected components and bridges
+    cc2 = [(1, 2, 4, 3, 1, 4), (8, 9, 10, 8), (11, 12, 13, 11)]
+    bridges = [(4, 8), (3, 5), (20, 21), (22, 23, 24)]
+    G = nx.Graph(it.chain(*(pairwise(path) for path in cc2 + bridges)))
+    bridge_ccs = fset(bridge_components(G))
+    target_ccs = fset(
+        [{1, 2, 3, 4}, {5}, {8, 9, 10}, {11, 12, 13}, {20}, {21}, {22}, {23}, {24}]
+    )
+    assert bridge_ccs == target_ccs
+    _check_edge_connectivity(G)
+
+
+def test_undirected_aux_graph():
+    # Graph similar to the one in
+    # http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264
+    a, b, c, d, e, f, g, h, i = "abcdefghi"
+    paths = [
+        (a, d, b, f, c),
+        (a, e, b),
+        (a, e, b, c, g, b, a),
+        (c, b),
+        (f, g, f),
+        (h, i),
+    ]
+    G = nx.Graph(it.chain(*[pairwise(path) for path in paths]))
+    aux_graph = EdgeComponentAuxGraph.construct(G)
+
+    components_1 = fset(aux_graph.k_edge_subgraphs(k=1))
+    target_1 = fset([{a, b, c, d, e, f, g}, {h, i}])
+    assert target_1 == components_1
+
+    # Check that the undirected case for k=1 agrees with CCs
+    alt_1 = fset(nx.k_edge_subgraphs(G, k=1))
+    assert alt_1 == components_1
+
+    components_2 = fset(aux_graph.k_edge_subgraphs(k=2))
+    target_2 = fset([{a, b, c, d, e, f, g}, {h}, {i}])
+    assert target_2 == components_2
+
+    # Check that the undirected case for k=2 agrees with bridge components
+    alt_2 = fset(nx.k_edge_subgraphs(G, k=2))
+    assert alt_2 == components_2
+
+    components_3 = fset(aux_graph.k_edge_subgraphs(k=3))
+    target_3 = fset([{a}, {b, c, f, g}, {d}, {e}, {h}, {i}])
+    assert target_3 == components_3
+
+    components_4 = fset(aux_graph.k_edge_subgraphs(k=4))
+    target_4 = fset([{a}, {b}, {c}, {d}, {e}, {f}, {g}, {h}, {i}])
+    assert target_4 == components_4
+
+    _check_edge_connectivity(G)
+
+
+def test_local_subgraph_difference():
+    paths = [
+        (11, 12, 13, 14, 11, 13, 14, 12),  # first 4-clique
+        (21, 22, 23, 24, 21, 23, 24, 22),  # second 4-clique
+        # paths connecting each node of the 4 cliques
+        (11, 101, 21),
+        (12, 102, 22),
+        (13, 103, 23),
+        (14, 104, 24),
+    ]
+    G = nx.Graph(it.chain(*[pairwise(path) for path in paths]))
+    aux_graph = EdgeComponentAuxGraph.construct(G)
+
+    # Each clique is returned separately in k-edge-subgraphs
+    subgraph_ccs = fset(aux_graph.k_edge_subgraphs(3))
+    subgraph_target = fset(
+        [{101}, {102}, {103}, {104}, {21, 22, 23, 24}, {11, 12, 13, 14}]
+    )
+    assert subgraph_ccs == subgraph_target
+
+    # But in k-edge-ccs they are returned together
+    # because they are locally 3-edge-connected
+    local_ccs = fset(aux_graph.k_edge_components(3))
+    local_target = fset([{101}, {102}, {103}, {104}, {11, 12, 13, 14, 21, 22, 23, 24}])
+    assert local_ccs == local_target
+
+
+def test_local_subgraph_difference_directed():
+    dipaths = [(1, 2, 3, 4, 1), (1, 3, 1)]
+    G = nx.DiGraph(it.chain(*[pairwise(path) for path in dipaths]))
+
+    assert fset(nx.k_edge_components(G, k=1)) == fset(nx.k_edge_subgraphs(G, k=1))
+
+    # Unlike undirected graphs, when k=2, for directed graphs there is a case
+    # where the k-edge-ccs are not the same as the k-edge-subgraphs.
+    # (in directed graphs ccs and subgraphs are the same when k=2)
+    assert fset(nx.k_edge_components(G, k=2)) != fset(nx.k_edge_subgraphs(G, k=2))
+
+    assert fset(nx.k_edge_components(G, k=3)) == fset(nx.k_edge_subgraphs(G, k=3))
+
+    _check_edge_connectivity(G)
+
+
+def test_triangles():
+    paths = [
+        (11, 12, 13, 11),  # first 3-clique
+        (21, 22, 23, 21),  # second 3-clique
+        (11, 21),  # connected by an edge
+    ]
+    G = nx.Graph(it.chain(*[pairwise(path) for path in paths]))
+
+    # subgraph and ccs are the same in all cases here
+    assert fset(nx.k_edge_components(G, k=1)) == fset(nx.k_edge_subgraphs(G, k=1))
+
+    assert fset(nx.k_edge_components(G, k=2)) == fset(nx.k_edge_subgraphs(G, k=2))
+
+    assert fset(nx.k_edge_components(G, k=3)) == fset(nx.k_edge_subgraphs(G, k=3))
+
+    _check_edge_connectivity(G)
+
+
+def test_four_clique():
+    paths = [
+        (11, 12, 13, 14, 11, 13, 14, 12),  # first 4-clique
+        (21, 22, 23, 24, 21, 23, 24, 22),  # second 4-clique
+        # paths connecting the 4 cliques such that they are
+        # 3-connected in G, but not in the subgraph.
+        # Case where the nodes bridging them do not have degree less than 3.
+        (100, 13),
+        (12, 100, 22),
+        (13, 200, 23),
+        (14, 300, 24),
+    ]
+    G = nx.Graph(it.chain(*[pairwise(path) for path in paths]))
+
+    # The subgraphs and ccs are different for k=3
+    local_ccs = fset(nx.k_edge_components(G, k=3))
+    subgraphs = fset(nx.k_edge_subgraphs(G, k=3))
+    assert local_ccs != subgraphs
+
+    # The cliques ares in the same cc
+    clique1 = frozenset(paths[0])
+    clique2 = frozenset(paths[1])
+    assert clique1.union(clique2).union({100}) in local_ccs
+
+    # but different subgraphs
+    assert clique1 in subgraphs
+    assert clique2 in subgraphs
+
+    assert G.degree(100) == 3
+
+    _check_edge_connectivity(G)
+
+
+def test_five_clique():
+    # Make a graph that can be disconnected less than 4 edges, but no node has
+    # degree less than 4.
+    G = nx.disjoint_union(nx.complete_graph(5), nx.complete_graph(5))
+    paths = [
+        # add aux-connections
+        (1, 100, 6),
+        (2, 100, 7),
+        (3, 200, 8),
+        (4, 200, 100),
+    ]
+    G.add_edges_from(it.chain(*[pairwise(path) for path in paths]))
+    assert min(dict(nx.degree(G)).values()) == 4
+
+    # For k=3 they are the same
+    assert fset(nx.k_edge_components(G, k=3)) == fset(nx.k_edge_subgraphs(G, k=3))
+
+    # For k=4 they are the different
+    # the aux nodes are in the same CC as clique 1 but no the same subgraph
+    assert fset(nx.k_edge_components(G, k=4)) != fset(nx.k_edge_subgraphs(G, k=4))
+
+    # For k=5 they are not the same
+    assert fset(nx.k_edge_components(G, k=5)) != fset(nx.k_edge_subgraphs(G, k=5))
+
+    # For k=6 they are the same
+    assert fset(nx.k_edge_components(G, k=6)) == fset(nx.k_edge_subgraphs(G, k=6))
+    _check_edge_connectivity(G)
+
+
+# ----------------
+# Undirected tests
+# ----------------
+
+
+def test_directed_aux_graph():
+    # Graph similar to the one in
+    # http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264
+    a, b, c, d, e, f, g, h, i = "abcdefghi"
+    dipaths = [
+        (a, d, b, f, c),
+        (a, e, b),
+        (a, e, b, c, g, b, a),
+        (c, b),
+        (f, g, f),
+        (h, i),
+    ]
+    G = nx.DiGraph(it.chain(*[pairwise(path) for path in dipaths]))
+    aux_graph = EdgeComponentAuxGraph.construct(G)
+
+    components_1 = fset(aux_graph.k_edge_subgraphs(k=1))
+    target_1 = fset([{a, b, c, d, e, f, g}, {h}, {i}])
+    assert target_1 == components_1
+
+    # Check that the directed case for k=1 agrees with SCCs
+    alt_1 = fset(nx.strongly_connected_components(G))
+    assert alt_1 == components_1
+
+    components_2 = fset(aux_graph.k_edge_subgraphs(k=2))
+    target_2 = fset([{i}, {e}, {d}, {b, c, f, g}, {h}, {a}])
+    assert target_2 == components_2
+
+    components_3 = fset(aux_graph.k_edge_subgraphs(k=3))
+    target_3 = fset([{a}, {b}, {c}, {d}, {e}, {f}, {g}, {h}, {i}])
+    assert target_3 == components_3
+
+
+def test_random_gnp_directed():
+    # seeds = [3894723670, 500186844, 267231174, 2181982262, 1116750056]
+    seeds = [21]
+    for seed in seeds:
+        G = nx.gnp_random_graph(20, 0.2, directed=True, seed=seed)
+        _check_edge_connectivity(G)
+
+
+def test_configuration_directed():
+    # seeds = [671221681, 2403749451, 124433910, 672335939, 1193127215]
+    seeds = [67]
+    for seed in seeds:
+        deg_seq = nx.random_powerlaw_tree_sequence(20, seed=seed, tries=5000)
+        G = nx.DiGraph(nx.configuration_model(deg_seq, seed=seed))
+        G.remove_edges_from(nx.selfloop_edges(G))
+        _check_edge_connectivity(G)
+
+
+def test_shell_directed():
+    # seeds = [3134027055, 4079264063, 1350769518, 1405643020, 530038094]
+    seeds = [31]
+    for seed in seeds:
+        constructor = [(12, 70, 0.8), (15, 40, 0.6)]
+        G = nx.random_shell_graph(constructor, seed=seed).to_directed()
+        _check_edge_connectivity(G)
+
+
+def test_karate_directed():
+    G = nx.karate_club_graph().to_directed()
+    _check_edge_connectivity(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcomponents.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcomponents.py
new file mode 100644
index 00000000..f4436acd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcomponents.py
@@ -0,0 +1,296 @@
+# Test for Moody and White k-components algorithm
+import pytest
+
+import networkx as nx
+from networkx.algorithms.connectivity.kcomponents import (
+    _consolidate,
+    build_k_number_dict,
+)
+
+##
+# A nice synthetic graph
+##
+
+
+def torrents_and_ferraro_graph():
+    # Graph from https://arxiv.org/pdf/1503.04476v1 p.26
+    G = nx.convert_node_labels_to_integers(
+        nx.grid_graph([5, 5]), label_attribute="labels"
+    )
+    rlabels = nx.get_node_attributes(G, "labels")
+    labels = {v: k for k, v in rlabels.items()}
+
+    for nodes in [(labels[(0, 4)], labels[(1, 4)]), (labels[(3, 4)], labels[(4, 4)])]:
+        new_node = G.order() + 1
+        # Petersen graph is triconnected
+        P = nx.petersen_graph()
+        G = nx.disjoint_union(G, P)
+        # Add two edges between the grid and P
+        G.add_edge(new_node + 1, nodes[0])
+        G.add_edge(new_node, nodes[1])
+        # K5 is 4-connected
+        K = nx.complete_graph(5)
+        G = nx.disjoint_union(G, K)
+        # Add three edges between P and K5
+        G.add_edge(new_node + 2, new_node + 11)
+        G.add_edge(new_node + 3, new_node + 12)
+        G.add_edge(new_node + 4, new_node + 13)
+        # Add another K5 sharing a node
+        G = nx.disjoint_union(G, K)
+        nbrs = G[new_node + 10]
+        G.remove_node(new_node + 10)
+        for nbr in nbrs:
+            G.add_edge(new_node + 17, nbr)
+        # This edge makes the graph biconnected; it's
+        # needed because K5s share only one node.
+        G.add_edge(new_node + 16, new_node + 8)
+
+    for nodes in [(labels[(0, 0)], labels[(1, 0)]), (labels[(3, 0)], labels[(4, 0)])]:
+        new_node = G.order() + 1
+        # Petersen graph is triconnected
+        P = nx.petersen_graph()
+        G = nx.disjoint_union(G, P)
+        # Add two edges between the grid and P
+        G.add_edge(new_node + 1, nodes[0])
+        G.add_edge(new_node, nodes[1])
+        # K5 is 4-connected
+        K = nx.complete_graph(5)
+        G = nx.disjoint_union(G, K)
+        # Add three edges between P and K5
+        G.add_edge(new_node + 2, new_node + 11)
+        G.add_edge(new_node + 3, new_node + 12)
+        G.add_edge(new_node + 4, new_node + 13)
+        # Add another K5 sharing two nodes
+        G = nx.disjoint_union(G, K)
+        nbrs = G[new_node + 10]
+        G.remove_node(new_node + 10)
+        for nbr in nbrs:
+            G.add_edge(new_node + 17, nbr)
+        nbrs2 = G[new_node + 9]
+        G.remove_node(new_node + 9)
+        for nbr in nbrs2:
+            G.add_edge(new_node + 18, nbr)
+    return G
+
+
+def test_directed():
+    with pytest.raises(nx.NetworkXNotImplemented):
+        G = nx.gnp_random_graph(10, 0.2, directed=True, seed=42)
+        nx.k_components(G)
+
+
+# Helper function
+def _check_connectivity(G, k_components):
+    for k, components in k_components.items():
+        if k < 3:
+            continue
+        # check that k-components have node connectivity >= k.
+        for component in components:
+            C = G.subgraph(component)
+            K = nx.node_connectivity(C)
+            assert K >= k
+
+
+@pytest.mark.slow
+def test_torrents_and_ferraro_graph():
+    G = torrents_and_ferraro_graph()
+    result = nx.k_components(G)
+    _check_connectivity(G, result)
+
+    # In this example graph there are 8 3-components, 4 with 15 nodes
+    # and 4 with 5 nodes.
+    assert len(result[3]) == 8
+    assert len([c for c in result[3] if len(c) == 15]) == 4
+    assert len([c for c in result[3] if len(c) == 5]) == 4
+    # There are also 8 4-components all with 5 nodes.
+    assert len(result[4]) == 8
+    assert all(len(c) == 5 for c in result[4])
+
+
+@pytest.mark.slow
+def test_random_gnp():
+    G = nx.gnp_random_graph(50, 0.2, seed=42)
+    result = nx.k_components(G)
+    _check_connectivity(G, result)
+
+
+@pytest.mark.slow
+def test_shell():
+    constructor = [(20, 80, 0.8), (80, 180, 0.6)]
+    G = nx.random_shell_graph(constructor, seed=42)
+    result = nx.k_components(G)
+    _check_connectivity(G, result)
+
+
+def test_configuration():
+    deg_seq = nx.random_powerlaw_tree_sequence(100, tries=5, seed=72)
+    G = nx.Graph(nx.configuration_model(deg_seq))
+    G.remove_edges_from(nx.selfloop_edges(G))
+    result = nx.k_components(G)
+    _check_connectivity(G, result)
+
+
+def test_karate():
+    G = nx.karate_club_graph()
+    result = nx.k_components(G)
+    _check_connectivity(G, result)
+
+
+def test_karate_component_number():
+    karate_k_num = {
+        0: 4,
+        1: 4,
+        2: 4,
+        3: 4,
+        4: 3,
+        5: 3,
+        6: 3,
+        7: 4,
+        8: 4,
+        9: 2,
+        10: 3,
+        11: 1,
+        12: 2,
+        13: 4,
+        14: 2,
+        15: 2,
+        16: 2,
+        17: 2,
+        18: 2,
+        19: 3,
+        20: 2,
+        21: 2,
+        22: 2,
+        23: 3,
+        24: 3,
+        25: 3,
+        26: 2,
+        27: 3,
+        28: 3,
+        29: 3,
+        30: 4,
+        31: 3,
+        32: 4,
+        33: 4,
+    }
+    G = nx.karate_club_graph()
+    k_components = nx.k_components(G)
+    k_num = build_k_number_dict(k_components)
+    assert karate_k_num == k_num
+
+
+def test_davis_southern_women():
+    G = nx.davis_southern_women_graph()
+    result = nx.k_components(G)
+    _check_connectivity(G, result)
+
+
+def test_davis_southern_women_detail_3_and_4():
+    solution = {
+        3: [
+            {
+                "Nora Fayette",
+                "E10",
+                "Myra Liddel",
+                "E12",
+                "E14",
+                "Frances Anderson",
+                "Evelyn Jefferson",
+                "Ruth DeSand",
+                "Helen Lloyd",
+                "Eleanor Nye",
+                "E9",
+                "E8",
+                "E5",
+                "E4",
+                "E7",
+                "E6",
+                "E1",
+                "Verne Sanderson",
+                "E3",
+                "E2",
+                "Theresa Anderson",
+                "Pearl Oglethorpe",
+                "Katherina Rogers",
+                "Brenda Rogers",
+                "E13",
+                "Charlotte McDowd",
+                "Sylvia Avondale",
+                "Laura Mandeville",
+            }
+        ],
+        4: [
+            {
+                "Nora Fayette",
+                "E10",
+                "Verne Sanderson",
+                "E12",
+                "Frances Anderson",
+                "Evelyn Jefferson",
+                "Ruth DeSand",
+                "Helen Lloyd",
+                "Eleanor Nye",
+                "E9",
+                "E8",
+                "E5",
+                "E4",
+                "E7",
+                "E6",
+                "Myra Liddel",
+                "E3",
+                "Theresa Anderson",
+                "Katherina Rogers",
+                "Brenda Rogers",
+                "Charlotte McDowd",
+                "Sylvia Avondale",
+                "Laura Mandeville",
+            }
+        ],
+    }
+    G = nx.davis_southern_women_graph()
+    result = nx.k_components(G)
+    for k, components in result.items():
+        if k < 3:
+            continue
+        assert len(components) == len(solution[k])
+        for component in components:
+            assert component in solution[k]
+
+
+def test_set_consolidation_rosettacode():
+    # Tests from http://rosettacode.org/wiki/Set_consolidation
+    def list_of_sets_equal(result, solution):
+        assert {frozenset(s) for s in result} == {frozenset(s) for s in solution}
+
+    question = [{"A", "B"}, {"C", "D"}]
+    solution = [{"A", "B"}, {"C", "D"}]
+    list_of_sets_equal(_consolidate(question, 1), solution)
+    question = [{"A", "B"}, {"B", "C"}]
+    solution = [{"A", "B", "C"}]
+    list_of_sets_equal(_consolidate(question, 1), solution)
+    question = [{"A", "B"}, {"C", "D"}, {"D", "B"}]
+    solution = [{"A", "C", "B", "D"}]
+    list_of_sets_equal(_consolidate(question, 1), solution)
+    question = [{"H", "I", "K"}, {"A", "B"}, {"C", "D"}, {"D", "B"}, {"F", "G", "H"}]
+    solution = [{"A", "C", "B", "D"}, {"G", "F", "I", "H", "K"}]
+    list_of_sets_equal(_consolidate(question, 1), solution)
+    question = [
+        {"A", "H"},
+        {"H", "I", "K"},
+        {"A", "B"},
+        {"C", "D"},
+        {"D", "B"},
+        {"F", "G", "H"},
+    ]
+    solution = [{"A", "C", "B", "D", "G", "F", "I", "H", "K"}]
+    list_of_sets_equal(_consolidate(question, 1), solution)
+    question = [
+        {"H", "I", "K"},
+        {"A", "B"},
+        {"C", "D"},
+        {"D", "B"},
+        {"F", "G", "H"},
+        {"A", "H"},
+    ]
+    solution = [{"A", "C", "B", "D", "G", "F", "I", "H", "K"}]
+    list_of_sets_equal(_consolidate(question, 1), solution)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcutsets.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcutsets.py
new file mode 100644
index 00000000..4b4b5494
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcutsets.py
@@ -0,0 +1,273 @@
+# Jordi Torrents
+# Test for k-cutsets
+import itertools
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms import flow
+from networkx.algorithms.connectivity.kcutsets import _is_separating_set
+
+MAX_CUTSETS_TO_TEST = 4  # originally 100. cut to decrease testing time
+
+flow_funcs = [
+    flow.boykov_kolmogorov,
+    flow.dinitz,
+    flow.edmonds_karp,
+    flow.preflow_push,
+    flow.shortest_augmenting_path,
+]
+
+
+##
+# Some nice synthetic graphs
+##
+def graph_example_1():
+    G = nx.convert_node_labels_to_integers(
+        nx.grid_graph([5, 5]), label_attribute="labels"
+    )
+    rlabels = nx.get_node_attributes(G, "labels")
+    labels = {v: k for k, v in rlabels.items()}
+
+    for nodes in [
+        (labels[(0, 0)], labels[(1, 0)]),
+        (labels[(0, 4)], labels[(1, 4)]),
+        (labels[(3, 0)], labels[(4, 0)]),
+        (labels[(3, 4)], labels[(4, 4)]),
+    ]:
+        new_node = G.order() + 1
+        # Petersen graph is triconnected
+        P = nx.petersen_graph()
+        G = nx.disjoint_union(G, P)
+        # Add two edges between the grid and P
+        G.add_edge(new_node + 1, nodes[0])
+        G.add_edge(new_node, nodes[1])
+        # K5 is 4-connected
+        K = nx.complete_graph(5)
+        G = nx.disjoint_union(G, K)
+        # Add three edges between P and K5
+        G.add_edge(new_node + 2, new_node + 11)
+        G.add_edge(new_node + 3, new_node + 12)
+        G.add_edge(new_node + 4, new_node + 13)
+        # Add another K5 sharing a node
+        G = nx.disjoint_union(G, K)
+        nbrs = G[new_node + 10]
+        G.remove_node(new_node + 10)
+        for nbr in nbrs:
+            G.add_edge(new_node + 17, nbr)
+        G.add_edge(new_node + 16, new_node + 5)
+    return G
+
+
+def torrents_and_ferraro_graph():
+    G = nx.convert_node_labels_to_integers(
+        nx.grid_graph([5, 5]), label_attribute="labels"
+    )
+    rlabels = nx.get_node_attributes(G, "labels")
+    labels = {v: k for k, v in rlabels.items()}
+
+    for nodes in [(labels[(0, 4)], labels[(1, 4)]), (labels[(3, 4)], labels[(4, 4)])]:
+        new_node = G.order() + 1
+        # Petersen graph is triconnected
+        P = nx.petersen_graph()
+        G = nx.disjoint_union(G, P)
+        # Add two edges between the grid and P
+        G.add_edge(new_node + 1, nodes[0])
+        G.add_edge(new_node, nodes[1])
+        # K5 is 4-connected
+        K = nx.complete_graph(5)
+        G = nx.disjoint_union(G, K)
+        # Add three edges between P and K5
+        G.add_edge(new_node + 2, new_node + 11)
+        G.add_edge(new_node + 3, new_node + 12)
+        G.add_edge(new_node + 4, new_node + 13)
+        # Add another K5 sharing a node
+        G = nx.disjoint_union(G, K)
+        nbrs = G[new_node + 10]
+        G.remove_node(new_node + 10)
+        for nbr in nbrs:
+            G.add_edge(new_node + 17, nbr)
+        # Commenting this out makes the graph not biconnected !!
+        # This stupid mistake made one reviewer very angry :P
+        G.add_edge(new_node + 16, new_node + 8)
+
+    for nodes in [(labels[(0, 0)], labels[(1, 0)]), (labels[(3, 0)], labels[(4, 0)])]:
+        new_node = G.order() + 1
+        # Petersen graph is triconnected
+        P = nx.petersen_graph()
+        G = nx.disjoint_union(G, P)
+        # Add two edges between the grid and P
+        G.add_edge(new_node + 1, nodes[0])
+        G.add_edge(new_node, nodes[1])
+        # K5 is 4-connected
+        K = nx.complete_graph(5)
+        G = nx.disjoint_union(G, K)
+        # Add three edges between P and K5
+        G.add_edge(new_node + 2, new_node + 11)
+        G.add_edge(new_node + 3, new_node + 12)
+        G.add_edge(new_node + 4, new_node + 13)
+        # Add another K5 sharing two nodes
+        G = nx.disjoint_union(G, K)
+        nbrs = G[new_node + 10]
+        G.remove_node(new_node + 10)
+        for nbr in nbrs:
+            G.add_edge(new_node + 17, nbr)
+        nbrs2 = G[new_node + 9]
+        G.remove_node(new_node + 9)
+        for nbr in nbrs2:
+            G.add_edge(new_node + 18, nbr)
+    return G
+
+
+# Helper function
+def _check_separating_sets(G):
+    for cc in nx.connected_components(G):
+        if len(cc) < 3:
+            continue
+        Gc = G.subgraph(cc)
+        node_conn = nx.node_connectivity(Gc)
+        all_cuts = nx.all_node_cuts(Gc)
+        # Only test a limited number of cut sets to reduce test time.
+        for cut in itertools.islice(all_cuts, MAX_CUTSETS_TO_TEST):
+            assert node_conn == len(cut)
+            assert not nx.is_connected(nx.restricted_view(G, cut, []))
+
+
+@pytest.mark.slow
+def test_torrents_and_ferraro_graph():
+    G = torrents_and_ferraro_graph()
+    _check_separating_sets(G)
+
+
+def test_example_1():
+    G = graph_example_1()
+    _check_separating_sets(G)
+
+
+def test_random_gnp():
+    G = nx.gnp_random_graph(100, 0.1, seed=42)
+    _check_separating_sets(G)
+
+
+def test_shell():
+    constructor = [(20, 80, 0.8), (80, 180, 0.6)]
+    G = nx.random_shell_graph(constructor, seed=42)
+    _check_separating_sets(G)
+
+
+def test_configuration():
+    deg_seq = nx.random_powerlaw_tree_sequence(100, tries=5, seed=72)
+    G = nx.Graph(nx.configuration_model(deg_seq))
+    G.remove_edges_from(nx.selfloop_edges(G))
+    _check_separating_sets(G)
+
+
+def test_karate():
+    G = nx.karate_club_graph()
+    _check_separating_sets(G)
+
+
+def _generate_no_biconnected(max_attempts=50):
+    attempts = 0
+    while True:
+        G = nx.fast_gnp_random_graph(100, 0.0575, seed=42)
+        if nx.is_connected(G) and not nx.is_biconnected(G):
+            attempts = 0
+            yield G
+        else:
+            if attempts >= max_attempts:
+                msg = f"Tried {attempts} times: no suitable Graph."
+                raise Exception(msg)
+            else:
+                attempts += 1
+
+
+def test_articulation_points():
+    Ggen = _generate_no_biconnected()
+    for i in range(1):  # change 1 to 3 or more for more realizations.
+        G = next(Ggen)
+        articulation_points = [{a} for a in nx.articulation_points(G)]
+        for cut in nx.all_node_cuts(G):
+            assert cut in articulation_points
+
+
+def test_grid_2d_graph():
+    # All minimum node cuts of a 2d grid
+    # are the four pairs of nodes that are
+    # neighbors of the four corner nodes.
+    G = nx.grid_2d_graph(5, 5)
+    solution = [{(0, 1), (1, 0)}, {(3, 0), (4, 1)}, {(3, 4), (4, 3)}, {(0, 3), (1, 4)}]
+    for cut in nx.all_node_cuts(G):
+        assert cut in solution
+
+
+def test_disconnected_graph():
+    G = nx.fast_gnp_random_graph(100, 0.01, seed=42)
+    cuts = nx.all_node_cuts(G)
+    pytest.raises(nx.NetworkXError, next, cuts)
+
+
+@pytest.mark.slow
+def test_alternative_flow_functions():
+    graphs = [nx.grid_2d_graph(4, 4), nx.cycle_graph(5)]
+    for G in graphs:
+        node_conn = nx.node_connectivity(G)
+        for flow_func in flow_funcs:
+            all_cuts = nx.all_node_cuts(G, flow_func=flow_func)
+            # Only test a limited number of cut sets to reduce test time.
+            for cut in itertools.islice(all_cuts, MAX_CUTSETS_TO_TEST):
+                assert node_conn == len(cut)
+                assert not nx.is_connected(nx.restricted_view(G, cut, []))
+
+
+def test_is_separating_set_complete_graph():
+    G = nx.complete_graph(5)
+    assert _is_separating_set(G, {0, 1, 2, 3})
+
+
+def test_is_separating_set():
+    for i in [5, 10, 15]:
+        G = nx.star_graph(i)
+        max_degree_node = max(G, key=G.degree)
+        assert _is_separating_set(G, {max_degree_node})
+
+
+def test_non_repeated_cuts():
+    # The algorithm was repeating the cut {0, 1} for the giant biconnected
+    # component of the Karate club graph.
+    K = nx.karate_club_graph()
+    bcc = max(list(nx.biconnected_components(K)), key=len)
+    G = K.subgraph(bcc)
+    solution = [{32, 33}, {2, 33}, {0, 3}, {0, 1}, {29, 33}]
+    cuts = list(nx.all_node_cuts(G))
+    if len(solution) != len(cuts):
+        print(f"Solution: {solution}")
+        print(f"Result: {cuts}")
+    assert len(solution) == len(cuts)
+    for cut in cuts:
+        assert cut in solution
+
+
+def test_cycle_graph():
+    G = nx.cycle_graph(5)
+    solution = [{0, 2}, {0, 3}, {1, 3}, {1, 4}, {2, 4}]
+    cuts = list(nx.all_node_cuts(G))
+    assert len(solution) == len(cuts)
+    for cut in cuts:
+        assert cut in solution
+
+
+def test_complete_graph():
+    G = nx.complete_graph(5)
+    assert nx.node_connectivity(G) == 4
+    assert list(nx.all_node_cuts(G)) == []
+
+
+def test_all_node_cuts_simple_case():
+    G = nx.complete_graph(5)
+    G.remove_edges_from([(0, 1), (3, 4)])
+    expected = [{0, 1, 2}, {2, 3, 4}]
+    actual = list(nx.all_node_cuts(G))
+    assert len(actual) == len(expected)
+    for cut in actual:
+        assert cut in expected
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_stoer_wagner.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_stoer_wagner.py
new file mode 100644
index 00000000..2b9e2bab
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_stoer_wagner.py
@@ -0,0 +1,102 @@
+from itertools import chain
+
+import pytest
+
+import networkx as nx
+
+
+def _check_partition(G, cut_value, partition, weight):
+    assert isinstance(partition, tuple)
+    assert len(partition) == 2
+    assert isinstance(partition[0], list)
+    assert isinstance(partition[1], list)
+    assert len(partition[0]) > 0
+    assert len(partition[1]) > 0
+    assert sum(map(len, partition)) == len(G)
+    assert set(chain.from_iterable(partition)) == set(G)
+    partition = tuple(map(set, partition))
+    w = 0
+    for u, v, e in G.edges(data=True):
+        if (u in partition[0]) == (v in partition[1]):
+            w += e.get(weight, 1)
+    assert w == cut_value
+
+
+def _test_stoer_wagner(G, answer, weight="weight"):
+    cut_value, partition = nx.stoer_wagner(G, weight, heap=nx.utils.PairingHeap)
+    assert cut_value == answer
+    _check_partition(G, cut_value, partition, weight)
+    cut_value, partition = nx.stoer_wagner(G, weight, heap=nx.utils.BinaryHeap)
+    assert cut_value == answer
+    _check_partition(G, cut_value, partition, weight)
+
+
+def test_graph1():
+    G = nx.Graph()
+    G.add_edge("x", "a", weight=3)
+    G.add_edge("x", "b", weight=1)
+    G.add_edge("a", "c", weight=3)
+    G.add_edge("b", "c", weight=5)
+    G.add_edge("b", "d", weight=4)
+    G.add_edge("d", "e", weight=2)
+    G.add_edge("c", "y", weight=2)
+    G.add_edge("e", "y", weight=3)
+    _test_stoer_wagner(G, 4)
+
+
+def test_graph2():
+    G = nx.Graph()
+    G.add_edge("x", "a")
+    G.add_edge("x", "b")
+    G.add_edge("a", "c")
+    G.add_edge("b", "c")
+    G.add_edge("b", "d")
+    G.add_edge("d", "e")
+    G.add_edge("c", "y")
+    G.add_edge("e", "y")
+    _test_stoer_wagner(G, 2)
+
+
+def test_graph3():
+    # Source:
+    # Stoer, M. and Wagner, F. (1997). "A simple min-cut algorithm". Journal of
+    # the ACM 44 (4), 585-591.
+    G = nx.Graph()
+    G.add_edge(1, 2, weight=2)
+    G.add_edge(1, 5, weight=3)
+    G.add_edge(2, 3, weight=3)
+    G.add_edge(2, 5, weight=2)
+    G.add_edge(2, 6, weight=2)
+    G.add_edge(3, 4, weight=4)
+    G.add_edge(3, 7, weight=2)
+    G.add_edge(4, 7, weight=2)
+    G.add_edge(4, 8, weight=2)
+    G.add_edge(5, 6, weight=3)
+    G.add_edge(6, 7, weight=1)
+    G.add_edge(7, 8, weight=3)
+    _test_stoer_wagner(G, 4)
+
+
+def test_weight_name():
+    G = nx.Graph()
+    G.add_edge(1, 2, weight=1, cost=8)
+    G.add_edge(1, 3, cost=2)
+    G.add_edge(2, 3, cost=4)
+    _test_stoer_wagner(G, 6, weight="cost")
+
+
+def test_exceptions():
+    G = nx.Graph()
+    pytest.raises(nx.NetworkXError, nx.stoer_wagner, G)
+    G.add_node(1)
+    pytest.raises(nx.NetworkXError, nx.stoer_wagner, G)
+    G.add_node(2)
+    pytest.raises(nx.NetworkXError, nx.stoer_wagner, G)
+    G.add_edge(1, 2, weight=-2)
+    pytest.raises(nx.NetworkXError, nx.stoer_wagner, G)
+    G = nx.DiGraph()
+    pytest.raises(nx.NetworkXNotImplemented, nx.stoer_wagner, G)
+    G = nx.MultiGraph()
+    pytest.raises(nx.NetworkXNotImplemented, nx.stoer_wagner, G)
+    G = nx.MultiDiGraph()
+    pytest.raises(nx.NetworkXNotImplemented, nx.stoer_wagner, G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/utils.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/utils.py
new file mode 100644
index 00000000..7bf99945
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/utils.py
@@ -0,0 +1,88 @@
+"""
+Utilities for connectivity package
+"""
+
+import networkx as nx
+
+__all__ = ["build_auxiliary_node_connectivity", "build_auxiliary_edge_connectivity"]
+
+
+@nx._dispatchable(returns_graph=True)
+def build_auxiliary_node_connectivity(G):
+    r"""Creates a directed graph D from an undirected graph G to compute flow
+    based node connectivity.
+
+    For an undirected graph G having `n` nodes and `m` edges we derive a
+    directed graph D with `2n` nodes and `2m+n` arcs by replacing each
+    original node `v` with two nodes `vA`, `vB` linked by an (internal)
+    arc in D. Then for each edge (`u`, `v`) in G we add two arcs (`uB`, `vA`)
+    and (`vB`, `uA`) in D. Finally we set the attribute capacity = 1 for each
+    arc in D [1]_.
+
+    For a directed graph having `n` nodes and `m` arcs we derive a
+    directed graph D with `2n` nodes and `m+n` arcs by replacing each
+    original node `v` with two nodes `vA`, `vB` linked by an (internal)
+    arc (`vA`, `vB`) in D. Then for each arc (`u`, `v`) in G we add one
+    arc (`uB`, `vA`) in D. Finally we set the attribute capacity = 1 for
+    each arc in D.
+
+    A dictionary with a mapping between nodes in the original graph and the
+    auxiliary digraph is stored as a graph attribute: D.graph['mapping'].
+
+    References
+    ----------
+    .. [1] Kammer, Frank and Hanjo Taubig. Graph Connectivity. in Brandes and
+        Erlebach, 'Network Analysis: Methodological Foundations', Lecture
+        Notes in Computer Science, Volume 3418, Springer-Verlag, 2005.
+        https://doi.org/10.1007/978-3-540-31955-9_7
+
+    """
+    directed = G.is_directed()
+
+    mapping = {}
+    H = nx.DiGraph()
+
+    for i, node in enumerate(G):
+        mapping[node] = i
+        H.add_node(f"{i}A", id=node)
+        H.add_node(f"{i}B", id=node)
+        H.add_edge(f"{i}A", f"{i}B", capacity=1)
+
+    edges = []
+    for source, target in G.edges():
+        edges.append((f"{mapping[source]}B", f"{mapping[target]}A"))
+        if not directed:
+            edges.append((f"{mapping[target]}B", f"{mapping[source]}A"))
+    H.add_edges_from(edges, capacity=1)
+
+    # Store mapping as graph attribute
+    H.graph["mapping"] = mapping
+    return H
+
+
+@nx._dispatchable(returns_graph=True)
+def build_auxiliary_edge_connectivity(G):
+    """Auxiliary digraph for computing flow based edge connectivity
+
+    If the input graph is undirected, we replace each edge (`u`,`v`) with
+    two reciprocal arcs (`u`, `v`) and (`v`, `u`) and then we set the attribute
+    'capacity' for each arc to 1. If the input graph is directed we simply
+    add the 'capacity' attribute. Part of algorithm 1 in [1]_ .
+
+    References
+    ----------
+    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. (this is a
+        chapter, look for the reference of the book).
+        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
+    """
+    if G.is_directed():
+        H = nx.DiGraph()
+        H.add_nodes_from(G.nodes())
+        H.add_edges_from(G.edges(), capacity=1)
+        return H
+    else:
+        H = nx.DiGraph()
+        H.add_nodes_from(G.nodes())
+        for source, target in G.edges():
+            H.add_edges_from([(source, target), (target, source)], capacity=1)
+        return H
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/core.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/core.py
new file mode 100644
index 00000000..6acfb499
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/core.py
@@ -0,0 +1,649 @@
+"""
+Find the k-cores of a graph.
+
+The k-core is found by recursively pruning nodes with degrees less than k.
+
+See the following references for details:
+
+An O(m) Algorithm for Cores Decomposition of Networks
+Vladimir Batagelj and Matjaz Zaversnik, 2003.
+https://arxiv.org/abs/cs.DS/0310049
+
+Generalized Cores
+Vladimir Batagelj and Matjaz Zaversnik, 2002.
+https://arxiv.org/pdf/cs/0202039
+
+For directed graphs a more general notion is that of D-cores which
+looks at (k, l) restrictions on (in, out) degree. The (k, k) D-core
+is the k-core.
+
+D-cores: Measuring Collaboration of Directed Graphs Based on Degeneracy
+Christos Giatsidis, Dimitrios M. Thilikos, Michalis Vazirgiannis, ICDM 2011.
+http://www.graphdegeneracy.org/dcores_ICDM_2011.pdf
+
+Multi-scale structure and topological anomaly detection via a new network \
+statistic: The onion decomposition
+L. Hébert-Dufresne, J. A. Grochow, and A. Allard
+Scientific Reports 6, 31708 (2016)
+http://doi.org/10.1038/srep31708
+
+"""
+
+import networkx as nx
+
+__all__ = [
+    "core_number",
+    "k_core",
+    "k_shell",
+    "k_crust",
+    "k_corona",
+    "k_truss",
+    "onion_layers",
+]
+
+
+@nx.utils.not_implemented_for("multigraph")
+@nx._dispatchable
+def core_number(G):
+    """Returns the core number for each node.
+
+    A k-core is a maximal subgraph that contains nodes of degree k or more.
+
+    The core number of a node is the largest value k of a k-core containing
+    that node.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+       An undirected or directed graph
+
+    Returns
+    -------
+    core_number : dictionary
+       A dictionary keyed by node to the core number.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is a multigraph or contains self loops.
+
+    Notes
+    -----
+    For directed graphs the node degree is defined to be the
+    in-degree + out-degree.
+
+    Examples
+    --------
+    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
+    >>> H = nx.havel_hakimi_graph(degrees)
+    >>> nx.core_number(H)
+    {0: 1, 1: 2, 2: 2, 3: 2, 4: 1, 5: 2, 6: 0}
+    >>> G = nx.DiGraph()
+    >>> G.add_edges_from([(1, 2), (2, 1), (2, 3), (2, 4), (3, 4), (4, 3)])
+    >>> nx.core_number(G)
+    {1: 2, 2: 2, 3: 2, 4: 2}
+
+    References
+    ----------
+    .. [1] An O(m) Algorithm for Cores Decomposition of Networks
+       Vladimir Batagelj and Matjaz Zaversnik, 2003.
+       https://arxiv.org/abs/cs.DS/0310049
+    """
+    if nx.number_of_selfloops(G) > 0:
+        msg = (
+            "Input graph has self loops which is not permitted; "
+            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
+        )
+        raise nx.NetworkXNotImplemented(msg)
+    degrees = dict(G.degree())
+    # Sort nodes by degree.
+    nodes = sorted(degrees, key=degrees.get)
+    bin_boundaries = [0]
+    curr_degree = 0
+    for i, v in enumerate(nodes):
+        if degrees[v] > curr_degree:
+            bin_boundaries.extend([i] * (degrees[v] - curr_degree))
+            curr_degree = degrees[v]
+    node_pos = {v: pos for pos, v in enumerate(nodes)}
+    # The initial guess for the core number of a node is its degree.
+    core = degrees
+    nbrs = {v: list(nx.all_neighbors(G, v)) for v in G}
+    for v in nodes:
+        for u in nbrs[v]:
+            if core[u] > core[v]:
+                nbrs[u].remove(v)
+                pos = node_pos[u]
+                bin_start = bin_boundaries[core[u]]
+                node_pos[u] = bin_start
+                node_pos[nodes[bin_start]] = pos
+                nodes[bin_start], nodes[pos] = nodes[pos], nodes[bin_start]
+                bin_boundaries[core[u]] += 1
+                core[u] -= 1
+    return core
+
+
+def _core_subgraph(G, k_filter, k=None, core=None):
+    """Returns the subgraph induced by nodes passing filter `k_filter`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+       The graph or directed graph to process
+    k_filter : filter function
+       This function filters the nodes chosen. It takes three inputs:
+       A node of G, the filter's cutoff, and the core dict of the graph.
+       The function should return a Boolean value.
+    k : int, optional
+      The order of the core. If not specified use the max core number.
+      This value is used as the cutoff for the filter.
+    core : dict, optional
+      Precomputed core numbers keyed by node for the graph `G`.
+      If not specified, the core numbers will be computed from `G`.
+
+    """
+    if core is None:
+        core = core_number(G)
+    if k is None:
+        k = max(core.values())
+    nodes = (v for v in core if k_filter(v, k, core))
+    return G.subgraph(nodes).copy()
+
+
+@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
+def k_core(G, k=None, core_number=None):
+    """Returns the k-core of G.
+
+    A k-core is a maximal subgraph that contains nodes of degree `k` or more.
+
+    .. deprecated:: 3.3
+       `k_core` will not accept `MultiGraph` objects in version 3.5.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+      A graph or directed graph
+    k : int, optional
+      The order of the core. If not specified return the main core.
+    core_number : dictionary, optional
+      Precomputed core numbers for the graph G.
+
+    Returns
+    -------
+    G : NetworkX graph
+      The k-core subgraph
+
+    Raises
+    ------
+    NetworkXNotImplemented
+      The k-core is not defined for multigraphs or graphs with self loops.
+
+    Notes
+    -----
+    The main core is the core with `k` as the largest core_number.
+
+    For directed graphs the node degree is defined to be the
+    in-degree + out-degree.
+
+    Graph, node, and edge attributes are copied to the subgraph.
+
+    Examples
+    --------
+    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
+    >>> H = nx.havel_hakimi_graph(degrees)
+    >>> H.degree
+    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
+    >>> nx.k_core(H).nodes
+    NodeView((1, 2, 3, 5))
+
+    See Also
+    --------
+    core_number
+
+    References
+    ----------
+    .. [1] An O(m) Algorithm for Cores Decomposition of Networks
+       Vladimir Batagelj and Matjaz Zaversnik,  2003.
+       https://arxiv.org/abs/cs.DS/0310049
+    """
+
+    import warnings
+
+    if G.is_multigraph():
+        warnings.warn(
+            (
+                "\n\n`k_core` will not accept `MultiGraph` objects in version 3.5.\n"
+                "Convert it to an undirected graph instead, using::\n\n"
+                "\tG = nx.Graph(G)\n"
+            ),
+            category=DeprecationWarning,
+            stacklevel=5,
+        )
+
+    def k_filter(v, k, c):
+        return c[v] >= k
+
+    return _core_subgraph(G, k_filter, k, core_number)
+
+
+@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
+def k_shell(G, k=None, core_number=None):
+    """Returns the k-shell of G.
+
+    The k-shell is the subgraph induced by nodes with core number k.
+    That is, nodes in the k-core that are not in the (k+1)-core.
+
+    .. deprecated:: 3.3
+       `k_shell` will not accept `MultiGraph` objects in version 3.5.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+      A graph or directed graph.
+    k : int, optional
+      The order of the shell. If not specified return the outer shell.
+    core_number : dictionary, optional
+      Precomputed core numbers for the graph G.
+
+
+    Returns
+    -------
+    G : NetworkX graph
+       The k-shell subgraph
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        The k-shell is not implemented for multigraphs or graphs with self loops.
+
+    Notes
+    -----
+    This is similar to k_corona but in that case only neighbors in the
+    k-core are considered.
+
+    For directed graphs the node degree is defined to be the
+    in-degree + out-degree.
+
+    Graph, node, and edge attributes are copied to the subgraph.
+
+    Examples
+    --------
+    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
+    >>> H = nx.havel_hakimi_graph(degrees)
+    >>> H.degree
+    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
+    >>> nx.k_shell(H, k=1).nodes
+    NodeView((0, 4))
+
+    See Also
+    --------
+    core_number
+    k_corona
+
+
+    References
+    ----------
+    .. [1] A model of Internet topology using k-shell decomposition
+       Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt,
+       and Eran Shir, PNAS  July 3, 2007   vol. 104  no. 27  11150-11154
+       http://www.pnas.org/content/104/27/11150.full
+    """
+
+    import warnings
+
+    if G.is_multigraph():
+        warnings.warn(
+            (
+                "\n\n`k_shell` will not accept `MultiGraph` objects in version 3.5.\n"
+                "Convert it to an undirected graph instead, using::\n\n"
+                "\tG = nx.Graph(G)\n"
+            ),
+            category=DeprecationWarning,
+            stacklevel=5,
+        )
+
+    def k_filter(v, k, c):
+        return c[v] == k
+
+    return _core_subgraph(G, k_filter, k, core_number)
+
+
+@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
+def k_crust(G, k=None, core_number=None):
+    """Returns the k-crust of G.
+
+    The k-crust is the graph G with the edges of the k-core removed
+    and isolated nodes found after the removal of edges are also removed.
+
+    .. deprecated:: 3.3
+       `k_crust` will not accept `MultiGraph` objects in version 3.5.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+       A graph or directed graph.
+    k : int, optional
+      The order of the shell. If not specified return the main crust.
+    core_number : dictionary, optional
+      Precomputed core numbers for the graph G.
+
+    Returns
+    -------
+    G : NetworkX graph
+       The k-crust subgraph
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        The k-crust is not implemented for multigraphs or graphs with self loops.
+
+    Notes
+    -----
+    This definition of k-crust is different than the definition in [1]_.
+    The k-crust in [1]_ is equivalent to the k+1 crust of this algorithm.
+
+    For directed graphs the node degree is defined to be the
+    in-degree + out-degree.
+
+    Graph, node, and edge attributes are copied to the subgraph.
+
+    Examples
+    --------
+    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
+    >>> H = nx.havel_hakimi_graph(degrees)
+    >>> H.degree
+    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
+    >>> nx.k_crust(H, k=1).nodes
+    NodeView((0, 4, 6))
+
+    See Also
+    --------
+    core_number
+
+    References
+    ----------
+    .. [1] A model of Internet topology using k-shell decomposition
+       Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt,
+       and Eran Shir, PNAS  July 3, 2007   vol. 104  no. 27  11150-11154
+       http://www.pnas.org/content/104/27/11150.full
+    """
+
+    import warnings
+
+    if G.is_multigraph():
+        warnings.warn(
+            (
+                "\n\n`k_crust` will not accept `MultiGraph` objects in version 3.5.\n"
+                "Convert it to an undirected graph instead, using::\n\n"
+                "\tG = nx.Graph(G)\n"
+            ),
+            category=DeprecationWarning,
+            stacklevel=5,
+        )
+
+    # Default for k is one less than in _core_subgraph, so just inline.
+    #    Filter is c[v] <= k
+    if core_number is None:
+        core_number = nx.core_number(G)
+    if k is None:
+        k = max(core_number.values()) - 1
+    nodes = (v for v in core_number if core_number[v] <= k)
+    return G.subgraph(nodes).copy()
+
+
+@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
+def k_corona(G, k, core_number=None):
+    """Returns the k-corona of G.
+
+    The k-corona is the subgraph of nodes in the k-core which have
+    exactly k neighbors in the k-core.
+
+    .. deprecated:: 3.3
+       `k_corona` will not accept `MultiGraph` objects in version 3.5.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+       A graph or directed graph
+    k : int
+       The order of the corona.
+    core_number : dictionary, optional
+       Precomputed core numbers for the graph G.
+
+    Returns
+    -------
+    G : NetworkX graph
+       The k-corona subgraph
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        The k-corona is not defined for multigraphs or graphs with self loops.
+
+    Notes
+    -----
+    For directed graphs the node degree is defined to be the
+    in-degree + out-degree.
+
+    Graph, node, and edge attributes are copied to the subgraph.
+
+    Examples
+    --------
+    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
+    >>> H = nx.havel_hakimi_graph(degrees)
+    >>> H.degree
+    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
+    >>> nx.k_corona(H, k=2).nodes
+    NodeView((1, 2, 3, 5))
+
+    See Also
+    --------
+    core_number
+
+    References
+    ----------
+    .. [1]  k -core (bootstrap) percolation on complex networks:
+       Critical phenomena and nonlocal effects,
+       A. V. Goltsev, S. N. Dorogovtsev, and J. F. F. Mendes,
+       Phys. Rev. E 73, 056101 (2006)
+       http://link.aps.org/doi/10.1103/PhysRevE.73.056101
+    """
+
+    import warnings
+
+    if G.is_multigraph():
+        warnings.warn(
+            (
+                "\n\n`k_corona` will not accept `MultiGraph` objects in version 3.5.\n"
+                "Convert it to an undirected graph instead, using::\n\n"
+                "\tG = nx.Graph(G)\n"
+            ),
+            category=DeprecationWarning,
+            stacklevel=5,
+        )
+
+    def func(v, k, c):
+        return c[v] == k and k == sum(1 for w in G[v] if c[w] >= k)
+
+    return _core_subgraph(G, func, k, core_number)
+
+
+@nx.utils.not_implemented_for("directed")
+@nx.utils.not_implemented_for("multigraph")
+@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
+def k_truss(G, k):
+    """Returns the k-truss of `G`.
+
+    The k-truss is the maximal induced subgraph of `G` which contains at least
+    three vertices where every edge is incident to at least `k-2` triangles.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+      An undirected graph
+    k : int
+      The order of the truss
+
+    Returns
+    -------
+    H : NetworkX graph
+      The k-truss subgraph
+
+    Raises
+    ------
+    NetworkXNotImplemented
+      If `G` is a multigraph or directed graph or if it contains self loops.
+
+    Notes
+    -----
+    A k-clique is a (k-2)-truss and a k-truss is a (k+1)-core.
+
+    Graph, node, and edge attributes are copied to the subgraph.
+
+    K-trusses were originally defined in [2]_, which states that the k-truss
+    is the maximal induced subgraph where each edge belongs to at least
+    `k-2` triangles. A more recent paper, [1]_, uses a slightly different
+    definition requiring that each edge belongs to at least `k` triangles.
+    This implementation uses the original definition of `k-2` triangles.
+
+    Examples
+    --------
+    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
+    >>> H = nx.havel_hakimi_graph(degrees)
+    >>> H.degree
+    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
+    >>> nx.k_truss(H, k=2).nodes
+    NodeView((0, 1, 2, 3, 4, 5))
+
+    References
+    ----------
+    .. [1] Bounds and Algorithms for k-truss. Paul Burkhardt, Vance Faber,
+       David G. Harris, 2018. https://arxiv.org/abs/1806.05523v2
+    .. [2] Trusses: Cohesive Subgraphs for Social Network Analysis. Jonathan
+       Cohen, 2005.
+    """
+    if nx.number_of_selfloops(G) > 0:
+        msg = (
+            "Input graph has self loops which is not permitted; "
+            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
+        )
+        raise nx.NetworkXNotImplemented(msg)
+
+    H = G.copy()
+
+    n_dropped = 1
+    while n_dropped > 0:
+        n_dropped = 0
+        to_drop = []
+        seen = set()
+        for u in H:
+            nbrs_u = set(H[u])
+            seen.add(u)
+            new_nbrs = [v for v in nbrs_u if v not in seen]
+            for v in new_nbrs:
+                if len(nbrs_u & set(H[v])) < (k - 2):
+                    to_drop.append((u, v))
+        H.remove_edges_from(to_drop)
+        n_dropped = len(to_drop)
+        H.remove_nodes_from(list(nx.isolates(H)))
+
+    return H
+
+
@nx.utils.not_implemented_for("multigraph")
@nx.utils.not_implemented_for("directed")
@nx._dispatchable
def onion_layers(G):
    """Returns the layer of each vertex in an onion decomposition of the graph.

    The onion decomposition refines the k-core decomposition by providing
    information on the internal organization of each k-shell. It is usually
    used alongside the `core numbers`.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph without self loops.

    Returns
    -------
    od_layers : dictionary
        A dictionary keyed by node to the onion layer. The layers are
        contiguous integers starting at 1.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a multigraph or directed graph or if it contains self loops.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.onion_layers(H)
    {6: 1, 0: 2, 4: 3, 1: 4, 2: 4, 3: 4, 5: 4}

    See Also
    --------
    core_number

    References
    ----------
    .. [1] Multi-scale structure and topological anomaly detection via a new
       network statistic: The onion decomposition
       L. Hébert-Dufresne, J. A. Grochow, and A. Allard
       Scientific Reports 6, 31708 (2016)
       http://doi.org/10.1038/srep31708
    .. [2] Percolation and the effective structure of complex networks
       A. Allard and L. Hébert-Dufresne
       Physical Review X 9, 011023 (2019)
       http://doi.org/10.1103/PhysRevX.9.011023
    """
    if nx.number_of_selfloops(G) > 0:
        msg = (
            "Input graph contains self loops which is not permitted; "
            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
        )
        raise nx.NetworkXNotImplemented(msg)
    # Node -> onion layer, filled in peeling order.
    od_layers = {}
    # Mutable adjacency lists, pruned as nodes are peeled off.
    adjacency = {node: list(nx.all_neighbors(G, node)) for node in G}
    # Remaining (effective) degree of every unprocessed node.
    remaining_degree = dict(G.degree())
    current_core = 1
    current_layer = 1
    # Degree-0 nodes, if any, constitute layer 1 by themselves.
    isolated = list(nx.isolates(G))
    if len(isolated) > 0:
        for node in isolated:
            od_layers[node] = current_layer
            remaining_degree.pop(node)
        current_layer = 2
    # Peel one layer per pass until every node has been assigned.
    while len(remaining_degree) > 0:
        # Process nodes from lowest to highest effective degree.
        ordered = sorted(remaining_degree, key=remaining_degree.get)
        # The core rises to the minimum effective degree when it exceeds it.
        lowest_degree = remaining_degree[ordered[0]]
        if lowest_degree > current_core:
            current_core = lowest_degree
        # Every node at or below the current core is peeled in this layer.
        # (The list is degree-sorted, so this matches a scan-until-greater.)
        layer_nodes = [n for n in ordered if remaining_degree[n] <= current_core]
        # Assign the layer and update neighbors' effective degrees.
        for node in layer_nodes:
            od_layers[node] = current_layer
            for nbr in adjacency[node]:
                adjacency[nbr].remove(node)
                remaining_degree[nbr] = remaining_degree[nbr] - 1
            remaining_degree.pop(node)
        current_layer = current_layer + 1
    return od_layers
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/covering.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/covering.py
new file mode 100644
index 00000000..a0e15dd3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/covering.py
@@ -0,0 +1,142 @@
+"""Functions related to graph covers."""
+
+from functools import partial
+from itertools import chain
+
+import networkx as nx
+from networkx.utils import arbitrary_element, not_implemented_for
+
+__all__ = ["min_edge_cover", "is_edge_cover"]
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def min_edge_cover(G, matching_algorithm=None):
    """Returns the min cardinality edge cover of the graph as a set of edges.

    A smallest edge cover is obtained by computing a maximum matching and
    then greedily extending it until every node is covered; this function
    follows that process.  The matching step is pluggable via
    `matching_algorithm`.  The result normally contains one 2-tuple per
    edge; when a bipartite matching algorithm is supplied, both
    orientations `(u, v)` and `(v, u)` are included for each edge.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    matching_algorithm : function
        A function that returns a maximum cardinality matching for `G`.
        The function must take one input, the graph `G`, and return
        either a set of edges (with only one direction for the pair of nodes)
        or a dictionary mapping each node to its mate. If not specified,
        :func:`~networkx.algorithms.matching.max_weight_matching` is used.
        Common bipartite matching functions include
        :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`
        or
        :func:`~networkx.algorithms.bipartite.matching.eppstein_matching`.

    Returns
    -------
    min_cover : set

        A set of the edges in a minimum edge cover in the form of tuples.
        It contains only one of the equivalent 2-tuples `(u, v)` and `(v, u)`
        for each edge. If a bipartite method is used to compute the matching,
        the returned set contains both the 2-tuples `(u, v)` and `(v, u)`
        for each edge of a minimum edge cover.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> sorted(nx.min_edge_cover(G))
    [(2, 1), (3, 0)]

    Notes
    -----
    An edge cover of a graph is a set of edges such that every node of
    the graph is incident to at least one edge of the set.
    The minimum edge cover is an edge covering of smallest cardinality.

    Due to its implementation, the worst-case running time of this algorithm
    is bounded by the worst-case running time of the function
    ``matching_algorithm``.

    Minimum edge cover for `G` can also be found using the `min_edge_covering`
    function in :mod:`networkx.algorithms.bipartite.covering` which is
    simply this function with a default matching algorithm of
    :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`
    """
    if len(G) == 0:
        return set()
    if nx.number_of_isolates(G) > 0:
        # An isolated node can never be covered, so no edge cover exists.
        raise nx.NetworkXException(
            "Graph has a node with no edge incident on it, so no edge cover exists."
        )
    if matching_algorithm is None:
        matching_algorithm = partial(nx.max_weight_matching, maxcardinality=True)
    matching = matching_algorithm(G)
    # The cover starts as a superset of the maximum matching.
    try:
        # Bipartite matching algorithms return a node -> mate dict; flatten
        # it to pairs and remember to mirror every edge added afterwards.
        cover = set(matching.items())
        mirror_edges = True
    except AttributeError:
        cover = matching
        mirror_edges = False
    # Greedily attach each still-uncovered node to one of its neighbors.
    covered = {node for edge in cover for node in edge}
    for node in set(G) - covered:
        # Since `node` is uncovered, every neighbor must already be covered
        # (two adjacent uncovered nodes would have been matched), so any
        # incident edge extends the cover.  (Holds for simple graphs only.)
        mate = arbitrary_element(G[node])
        cover.add((mate, node))
        if mirror_edges:
            cover.add((node, mate))
    return cover
+
+
@not_implemented_for("directed")
@nx._dispatchable
def is_edge_cover(G, cover):
    """Decides whether a set of edges is a valid edge cover of the graph.

    A set of edges is an edge cover exactly when every node of the graph
    is an endpoint of at least one edge in the set, so the check reduces
    to collecting the covered endpoints and testing each node against them.

    Parameters
    ----------
    G : NetworkX graph
        An undirected bipartite graph.

    cover : set
        Set of edges to be checked.

    Returns
    -------
    bool
        Whether the set of edges is a valid edge cover of the graph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> cover = {(2, 1), (3, 0)}
    >>> nx.is_edge_cover(G, cover)
    True

    Notes
    -----
    An edge cover of a graph is a set of edges such that every node of
    the graph is incident to at least one edge of the set.
    """
    covered_nodes = {node for edge in cover for node in edge}
    return all(node in covered_nodes for node in G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/cuts.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/cuts.py
new file mode 100644
index 00000000..e9514312
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/cuts.py
@@ -0,0 +1,398 @@
+"""Functions for finding and evaluating cuts in a graph."""
+
+from itertools import chain
+
+import networkx as nx
+
+__all__ = [
+    "boundary_expansion",
+    "conductance",
+    "cut_size",
+    "edge_expansion",
+    "mixing_expansion",
+    "node_expansion",
+    "normalized_cut_size",
+    "volume",
+]
+
+
+# TODO STILL NEED TO UPDATE ALL THE DOCUMENTATION!
+
+
@nx._dispatchable(edge_attrs="weight")
def cut_size(G, S, T=None, weight=None):
    """Returns the size of the cut between two sets of nodes.

    A *cut* is a partition of the nodes of a graph into two sets. The
    *cut size* is the sum of the weights of the edges "between" the two
    sets of nodes.

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`. If not specified, this is taken to
        be the set complement of `S`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        Total weight of all edges from nodes in set `S` to nodes in
        set `T` (and, in the case of directed graphs, all edges from
        nodes in `T` to nodes in `S`).

    Examples
    --------
    In the graph with two cliques joined by a single edge, the natural
    bipartition of the graph into two blocks, one for each clique,
    yields a cut of weight one::

        >>> G = nx.barbell_graph(3, 0)
        >>> S = {0, 1, 2}
        >>> T = {3, 4, 5}
        >>> nx.cut_size(G, S, T)
        1

    Each parallel edge in a multigraph is counted when determining the
    cut size::

        >>> G = nx.MultiGraph(["ab", "ab"])
        >>> S = {"a"}
        >>> T = {"b"}
        >>> nx.cut_size(G, S, T)
        2

    Notes
    -----
    In a multigraph, the cut size is the total weight of edges including
    multiplicity.

    """
    boundary = nx.edge_boundary(G, S, T, data=weight, default=1)
    if G.is_directed():
        # For digraphs, crossing edges of both orientations contribute.
        boundary = chain(boundary, nx.edge_boundary(G, T, S, data=weight, default=1))
    return sum(w for _, _, w in boundary)
+
+
@nx._dispatchable(edge_attrs="weight")
def volume(G, S, weight=None):
    """Returns the volume of a set of nodes.

    The *volume* of a set *S* is the sum of the (out-)degrees of nodes
    in *S* (taking into account parallel edges in multigraphs). [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The volume of the set of nodes represented by `S` in the graph
        `G`.

    See also
    --------
    conductance
    cut_size
    edge_expansion
    edge_boundary
    normalized_cut_size

    References
    ----------
    .. [1] David Gleich.
           *Hierarchical Directed Spectral Graph Partitioning*.
           <https://www.cs.purdue.edu/homes/dgleich/publications/Gleich%202005%20-%20hierarchical%20directed%20spectral.pdf>

    """
    # Out-degree for directed graphs, plain degree otherwise.
    if G.is_directed():
        degree_view = G.out_degree
    else:
        degree_view = G.degree
    return sum(deg for _, deg in degree_view(S, weight=weight))
+
+
@nx._dispatchable(edge_attrs="weight")
def normalized_cut_size(G, S, T=None, weight=None):
    """Returns the normalized size of the cut between two sets of nodes.

    The *normalized cut size* is the cut size times the sum of the
    reciprocal sizes of the volumes of the two sets. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`. If not specified, this is taken to
        be the set complement of `S`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The normalized cut size between the two sets `S` and `T`.

    Notes
    -----
    In a multigraph, the cut size is the total weight of edges including
    multiplicity.

    See also
    --------
    conductance
    cut_size
    edge_expansion
    volume

    References
    ----------
    .. [1] David Gleich.
           *Hierarchical Directed Spectral Graph Partitioning*.
           <https://www.cs.purdue.edu/homes/dgleich/publications/Gleich%202005%20-%20hierarchical%20directed%20spectral.pdf>

    """
    if T is None:
        T = set(G) - set(S)
    cut_weight = cut_size(G, S, T=T, weight=weight)
    vol_S = volume(G, S, weight=weight)
    vol_T = volume(G, T, weight=weight)
    return cut_weight * ((1 / vol_S) + (1 / vol_T))
+
+
@nx._dispatchable(edge_attrs="weight")
def conductance(G, S, T=None, weight=None):
    """Returns the conductance of two sets of nodes.

    The *conductance* is the quotient of the cut size and the smaller of
    the volumes of the two sets. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`. If not specified, this is taken to
        be the set complement of `S`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The conductance between the two sets `S` and `T`.

    See also
    --------
    cut_size
    edge_expansion
    normalized_cut_size
    volume

    References
    ----------
    .. [1] David Gleich.
           *Hierarchical Directed Spectral Graph Partitioning*.
           <https://www.cs.purdue.edu/homes/dgleich/publications/Gleich%202005%20-%20hierarchical%20directed%20spectral.pdf>

    """
    if T is None:
        T = set(G) - set(S)
    cut_weight = cut_size(G, S, T, weight=weight)
    return cut_weight / min(volume(G, S, weight=weight), volume(G, T, weight=weight))
+
+
@nx._dispatchable(edge_attrs="weight")
def edge_expansion(G, S, T=None, weight=None):
    """Returns the edge expansion between two node sets.

    The *edge expansion* is the quotient of the cut size and the smaller
    of the cardinalities of the two sets. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`. If not specified, this is taken to
        be the set complement of `S`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The edge expansion between the two sets `S` and `T`.

    See also
    --------
    boundary_expansion
    mixing_expansion
    node_expansion

    References
    ----------
    .. [1] Fan Chung.
           *Spectral Graph Theory*.
           (CBMS Regional Conference Series in Mathematics, No. 92),
           American Mathematical Society, 1997, ISBN 0-8218-0315-8
           <http://www.math.ucsd.edu/~fan/research/revised.html>

    """
    if T is None:
        T = set(G) - set(S)
    cut_weight = cut_size(G, S, T=T, weight=weight)
    return cut_weight / min(len(S), len(T))
+
+
@nx._dispatchable(edge_attrs="weight")
def mixing_expansion(G, S, T=None, weight=None):
    """Returns the mixing expansion between two node sets.

    The *mixing expansion* is the quotient of the cut size and twice the
    number of edges in the graph. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`. If not specified, the set
        complement of `S` is used (via `cut_size`).

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The mixing expansion between the two sets `S` and `T`.

    See also
    --------
    boundary_expansion
    edge_expansion
    node_expansion

    References
    ----------
    .. [1] Vadhan, Salil P.
           "Pseudorandomness."
           *Foundations and Trends
           in Theoretical Computer Science* 7.1–3 (2011): 1–336.
           <https://doi.org/10.1561/0400000010>

    """
    cut_weight = cut_size(G, S, T=T, weight=weight)
    return cut_weight / (2 * G.number_of_edges())
+
+
+# TODO What is the generalization to two arguments, S and T? Does the
+# denominator become `min(len(S), len(T))`?
@nx._dispatchable
def node_expansion(G, S):
    """Returns the node expansion of the set `S`.

    The *node expansion* is the quotient of the size of the node
    boundary of *S* and the cardinality of *S*. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    Returns
    -------
    number
        The node expansion of the set `S`.

    See also
    --------
    boundary_expansion
    edge_expansion
    mixing_expansion

    References
    ----------
    .. [1] Vadhan, Salil P.
           "Pseudorandomness."
           *Foundations and Trends
           in Theoretical Computer Science* 7.1–3 (2011): 1–336.
           <https://doi.org/10.1561/0400000010>

    """
    # Union of the neighborhoods of all nodes in S.
    neighborhood = {nbr for node in S for nbr in G.neighbors(node)}
    return len(neighborhood) / len(S)
+
+
+# TODO What is the generalization to two arguments, S and T? Does the
+# denominator become `min(len(S), len(T))`?
@nx._dispatchable
def boundary_expansion(G, S):
    """Returns the boundary expansion of the set `S`.

    The *boundary expansion* is the quotient of the size
    of the node boundary and the cardinality of *S*. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    Returns
    -------
    number
        The boundary expansion of the set `S`.

    See also
    --------
    edge_expansion
    mixing_expansion
    node_expansion

    References
    ----------
    .. [1] Vadhan, Salil P.
           "Pseudorandomness."
           *Foundations and Trends in Theoretical Computer Science*
           7.1–3 (2011): 1–336.
           <https://doi.org/10.1561/0400000010>

    """
    boundary = nx.node_boundary(G, S)
    return len(boundary) / len(S)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/cycles.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/cycles.py
new file mode 100644
index 00000000..975462a7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/cycles.py
@@ -0,0 +1,1230 @@
+"""
+========================
+Cycle finding algorithms
+========================
+"""
+
+from collections import Counter, defaultdict
+from itertools import combinations, product
+from math import inf
+
+import networkx as nx
+from networkx.utils import not_implemented_for, pairwise
+
+__all__ = [
+    "cycle_basis",
+    "simple_cycles",
+    "recursive_simple_cycles",
+    "find_cycle",
+    "minimum_cycle_basis",
+    "chordless_cycles",
+    "girth",
+]
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def cycle_basis(G, root=None):
    """Returns a list of cycles which form a basis for cycles of G.

    A basis for cycles of a network is a minimal collection of
    cycles such that any cycle in the network can be written
    as a sum of cycles in the basis.  Here summation of cycles
    is defined as "exclusive or" of the edges. Cycle bases are
    useful, e.g. when deriving equations for electric circuits
    using Kirchhoff's Laws.

    Parameters
    ----------
    G : NetworkX Graph
    root : node, optional
       Specify starting node for basis.

    Returns
    -------
    A list of cycle lists.  Each cycle list is a list of nodes
    which forms a cycle (loop) in G.

    Examples
    --------
    >>> G = nx.Graph()
    >>> nx.add_cycle(G, [0, 1, 2, 3])
    >>> nx.add_cycle(G, [0, 3, 4, 5])
    >>> nx.cycle_basis(G, 0)
    [[3, 4, 5, 0], [1, 2, 3, 0]]

    Notes
    -----
    This is adapted from algorithm CACM 491 [1]_.

    References
    ----------
    .. [1] Paton, K. An algorithm for finding a fundamental set of
       cycles of a graph. Comm. ACM 12, 9 (Sept 1969), 514-518.

    See Also
    --------
    simple_cycles
    minimum_cycle_basis
    """
    # Insertion-ordered "set" of nodes not yet claimed by a spanning tree.
    unexplored = dict.fromkeys(G)
    cycles = []
    while unexplored:  # one spanning tree per connected component
        if root is None:
            root = unexplored.popitem()[0]
        stack = [root]
        pred = {root: root}
        used = {root: set()}
        while stack:  # depth-first walk; last-in order keeps cycles short
            node = stack.pop()
            node_used = used[node]
            for nbr in G[node]:
                if nbr not in used:
                    # Tree edge: record the parent and keep exploring.
                    pred[nbr] = node
                    stack.append(nbr)
                    used[nbr] = {node}
                elif nbr == node:
                    # A self loop is a length-1 cycle.
                    cycles.append([node])
                elif nbr not in node_used:
                    # Non-tree edge closes a cycle: trace predecessors of
                    # `node` back until a node already reached from `nbr`.
                    nbr_used = used[nbr]
                    cycle = [nbr, node]
                    parent = pred[node]
                    while parent not in nbr_used:
                        cycle.append(parent)
                        parent = pred[parent]
                    cycle.append(parent)
                    cycles.append(cycle)
                    used[nbr].add(node)
        # Drop every node of this tree from the unexplored pool.
        for node in pred:
            unexplored.pop(node, None)
        root = None
    return cycles
+
+
@nx._dispatchable
def simple_cycles(G, length_bound=None):
    """Find simple cycles (elementary circuits) of a graph.

    A "simple cycle", or "elementary circuit", is a closed path where
    no node appears twice.  In a directed graph, two simple cycles are distinct
    if they are not cyclic permutations of each other.  In an undirected graph,
    two simple cycles are distinct if they are not cyclic permutations of each
    other nor of the other's reversal.

    Optionally, the cycles are bounded in length.  In the unbounded case, we use
    a nonrecursive, iterator/generator version of Johnson's algorithm [1]_.  In
    the bounded case, we use a version of the algorithm of Gupta and
    Suzumura [2]_. There may be better algorithms for some cases [3]_ [4]_ [5]_.

    The algorithms of Johnson, and Gupta and Suzumura, are enhanced by some
    well-known preprocessing techniques.  When `G` is directed, we restrict our
    attention to strongly connected components of `G`, generate all simple cycles
    containing a certain node, remove that node, and further decompose the
    remainder into strongly connected components.  When `G` is undirected, we
    restrict our attention to biconnected components, generate all simple cycles
    containing a particular edge, remove that edge, and further decompose the
    remainder into biconnected components.

    Note that multigraphs are supported by this function -- and in undirected
    multigraphs, a pair of parallel edges is considered a cycle of length 2.
    Likewise, self-loops are considered to be cycles of length 1.  We define
    cycles as sequences of nodes; so the presence of loops and parallel edges
    does not change the number of simple cycles in a graph.

    Parameters
    ----------
    G : NetworkX Graph
       A networkx graph. Undirected, directed, and multigraphs are all supported.

    length_bound : int or None, optional (default=None)
       If `length_bound` is an int, generate all simple cycles of `G` with length at
       most `length_bound`.  Otherwise, generate all simple cycles of `G`.

    Yields
    ------
    list of nodes
       Each cycle is represented by a list of nodes along the cycle.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)])
    >>> sorted(nx.simple_cycles(G))
    [[0], [0, 1, 2], [0, 2], [1, 2], [2]]

    To filter the cycles so that they don't include certain nodes or edges,
    copy your graph and eliminate those nodes or edges before calling.
    For example, to exclude self-loops from the above example:

    >>> H = G.copy()
    >>> H.remove_edges_from(nx.selfloop_edges(G))
    >>> sorted(nx.simple_cycles(H))
    [[0, 1, 2], [0, 2], [1, 2]]

    Notes
    -----
    When `length_bound` is None, the time complexity is $O((n+e)(c+1))$ for $n$
    nodes, $e$ edges and $c$ simple circuits.  Otherwise, when ``length_bound > 1``,
    the time complexity is $O((c+n)(k-1)d^k)$ where $d$ is the average degree of
    the nodes of `G` and $k$ = `length_bound`.

    Raises
    ------
    ValueError
        when ``length_bound < 0``.

    References
    ----------
    .. [1] Finding all the elementary circuits of a directed graph.
       D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
       https://doi.org/10.1137/0204007
    .. [2] Finding All Bounded-Length Simple Cycles in a Directed Graph
       A. Gupta and T. Suzumura https://arxiv.org/abs/2105.10094
    .. [3] Enumerating the cycles of a digraph: a new preprocessing strategy.
       G. Loizou and P. Thanish, Information Sciences, v. 27, 163-182, 1982.
    .. [4] A search strategy for the elementary cycles of a directed graph.
       J.L. Szwarcfiter and P.E. Lauer, BIT NUMERICAL MATHEMATICS,
       v. 16, no. 2, 192-204, 1976.
    .. [5] Optimal Listing of Cycles and st-Paths in Undirected Graphs
        R. Ferreira and R. Grossi and A. Marino and N. Pisanti and R. Rizzi and
        G. Sacomoto https://arxiv.org/abs/1205.2766

    See Also
    --------
    cycle_basis
    chordless_cycles
    """

    if length_bound is not None:
        if length_bound == 0:
            return
        elif length_bound < 0:
            raise ValueError("length bound must be non-negative")

    directed = G.is_directed()
    # Self-loops are cycles of length 1; yield them up front.
    yield from ([v] for v, Gv in G.adj.items() if v in Gv)

    if length_bound is not None and length_bound == 1:
        return

    if G.is_multigraph() and not directed:
        # In an undirected multigraph, each pair of parallel edges forms a
        # 2-cycle; the `visited` set makes each unordered pair appear once.
        visited = set()
        for u, Gu in G.adj.items():
            multiplicity = ((v, len(Guv)) for v, Guv in Gu.items() if v in visited)
            yield from ([u, v] for v, m in multiplicity if m > 1)
            visited.add(u)

    # explicitly filter out loops; implicitly filter out parallel edges
    if directed:
        G = nx.DiGraph((u, v) for u, Gu in G.adj.items() for v in Gu if v != u)
    else:
        G = nx.Graph((u, v) for u, Gu in G.adj.items() for v in Gu if v != u)

    # this case is not strictly necessary but improves performance
    if length_bound is not None and length_bound == 2:
        if directed:
            # Directed 2-cycles are pairs of mutual edges (u, v) and (v, u).
            visited = set()
            for u, Gu in G.adj.items():
                yield from (
                    [v, u] for v in visited.intersection(Gu) if G.has_edge(v, u)
                )
                visited.add(u)
        return

    # General case: delegate to the component-decomposing searches, which
    # mutate the (rebuilt, private) copy of G as they go.
    if directed:
        yield from _directed_cycle_search(G, length_bound)
    else:
        yield from _undirected_cycle_search(G, length_bound)
+
+
def _directed_cycle_search(G, length_bound):
    """A dispatch function for `simple_cycles` for directed graphs.

    We generate all cycles of G through binary partition.

        1. Pick a node v in G which belongs to at least one cycle
            a. Generate all cycles of G which contain the node v.
            b. Recursively generate all cycles of G \\ v.

    This is accomplished through the following:

        1. Compute the strongly connected components SCC of G.
        2. Select and remove a strongly connected component C from SCC.
           Select a node v of C.
        3. For each simple cycle P containing v in G[C], yield P.
        4. Add the strongly connected components of G[C] \\ v to SCC.

    If the parameter length_bound is not None, then step 3 will be limited to
    simple cycles of length at most length_bound.

    Note: this function mutates `G` (removes nodes), so the caller must pass
    a private copy.

    Parameters
    ----------
    G : NetworkX DiGraph
       A directed graph

    length_bound : int or None
       If length_bound is an int, generate all simple cycles of G with length at most length_bound.
       Otherwise, generate all simple cycles of G.

    Yields
    ------
    list of nodes
       Each cycle is represented by a list of nodes along the cycle.
    """

    scc = nx.strongly_connected_components
    # Only components with at least two nodes can hold a (loop-free) cycle.
    components = [c for c in scc(G) if len(c) >= 2]
    while components:
        c = components.pop()
        # Gc is a view, so removals from G below are reflected in it.
        Gc = G.subgraph(c)
        v = next(iter(c))
        if length_bound is None:
            yield from _johnson_cycle_search(Gc, [v])
        else:
            yield from _bounded_cycle_search(Gc, [v], length_bound)
        # delete v after searching G, to make sure we can find v
        G.remove_node(v)
        components.extend(c for c in scc(Gc) if len(c) >= 2)
+
+
def _undirected_cycle_search(G, length_bound):
    """A dispatch function for `simple_cycles` for undirected graphs.

    We generate all cycles of G through binary partition.

        1. Pick an edge (u, v) in G which belongs to at least one cycle
            a. Generate all cycles of G which contain the edge (u, v)
            b. Recursively generate all cycles of G \\ (u, v)

    This is accomplished through the following:

        1. Compute the biconnected components BCC of G.
        2. Select and remove a biconnected component C from BCC.  Select a
           non-tree edge (u, v) of a depth-first search of G[C].
        3. For each (v -> u) path P remaining in G[C] \\ (u, v), yield P.
        4. Add the biconnected components of G[C] \\ (u, v) to BCC.

    If the parameter length_bound is not None, then step 3 will be limited to
    simple paths of length at most length_bound.

    Note: this function mutates `G` (removes edges), so the caller must pass
    a private copy.

    Parameters
    ----------
    G : NetworkX Graph
       An undirected graph

    length_bound : int or None
       If length_bound is an int, generate all simple cycles of G with length at most length_bound.
       Otherwise, generate all simple cycles of G.

    Yields
    ------
    list of nodes
       Each cycle is represented by a list of nodes along the cycle.
    """

    bcc = nx.biconnected_components
    # A biconnected component must span at least three nodes to contain a
    # simple cycle here (parallel edges were handled by the caller).
    pending = [component for component in bcc(G) if len(component) >= 3]
    while pending:
        component = pending.pop()
        # Subgraph view: edge removals from G below are reflected in it.
        Gc = G.subgraph(component)
        uv = list(next(iter(Gc.edges)))
        # delete (u, v) before searching G, to avoid fake 3-cycles [u, v, u]
        G.remove_edge(*uv)
        if length_bound is None:
            yield from _johnson_cycle_search(Gc, uv)
        else:
            yield from _bounded_cycle_search(Gc, uv, length_bound)
        pending.extend(component for component in bcc(Gc) if len(component) >= 3)
+
+
+class _NeighborhoodCache(dict):
+    """Very lightweight graph wrapper which caches neighborhoods as list.
+
+    This dict subclass uses the __missing__ functionality to query graphs for
+    their neighborhoods, and store the result as a list.  This is used to avoid
+    the performance penalty incurred by subgraph views.
+    """
+
+    def __init__(self, G):
+        self.G = G
+
+    def __missing__(self, v):
+        Gv = self[v] = list(self.G[v])
+        return Gv
+
+
def _johnson_cycle_search(G, path):
    """The main loop of the cycle-enumeration algorithm of Johnson.

    Parameters
    ----------
    G : NetworkX Graph or DiGraph
       A graph

    path : list
       A cycle prefix.  All cycles generated will begin with this prefix.
       NOTE: this list is mutated in place during the search and restored
       to its original contents on completion.

    Yields
    ------
    list of nodes
       Each cycle is represented by a list of nodes along the cycle.

    References
    ----------
        .. [1] Finding all the elementary circuits of a directed graph.
       D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
       https://doi.org/10.1137/0204007

    """

    G = _NeighborhoodCache(G)  # cache neighbor lists to avoid subgraph-view overhead
    blocked = set(path)  # nodes that may not extend the current path
    B = defaultdict(set)  # graph portions that yield no elementary circuit
    start = path[0]
    # stack holds one neighbor-iterator per node on the search path;
    # closed[i] records whether some cycle was completed at or below depth i.
    stack = [iter(G[path[-1]])]
    closed = [False]
    while stack:
        nbrs = stack[-1]
        for w in nbrs:
            if w == start:
                # Edge back to the start of the prefix: a cycle is complete.
                yield path[:]
                closed[-1] = True
            elif w not in blocked:
                # Deepen the search through w.
                path.append(w)
                closed.append(False)
                stack.append(iter(G[w]))
                blocked.add(w)
                break
        else:  # no more nbrs
            stack.pop()
            v = path.pop()
            if closed.pop():
                # A cycle passed through v: unblock v now, and transitively
                # unblock every node whose unblocking was deferred to v via B.
                if closed:
                    closed[-1] = True
                unblock_stack = {v}
                while unblock_stack:
                    u = unblock_stack.pop()
                    if u in blocked:
                        blocked.remove(u)
                        unblock_stack.update(B[u])
                        B[u].clear()
            else:
                # No cycle through v: keep v blocked until one of its
                # successors is unblocked (Johnson's B-list bookkeeping).
                for w in G[v]:
                    B[w].add(v)
+
+
def _bounded_cycle_search(G, path, length_bound):
    """The main loop of the cycle-enumeration algorithm of Gupta and Suzumura.

    Parameters
    ----------
    G : NetworkX Graph or DiGraph
       A graph

    path : list
       A cycle prefix.  All cycles generated will begin with this prefix.
       NOTE: this list is mutated in place during the search.

    length_bound: int
        A length bound.  All cycles generated will have length at most length_bound.

    Yields
    ------
    list of nodes
       Each cycle is represented by a list of nodes along the cycle.

    References
    ----------
    .. [1] Finding All Bounded-Length Simple Cycles in a Directed Graph
       A. Gupta and T. Suzumura https://arxiv.org/abs/2105.10094

    """
    G = _NeighborhoodCache(G)  # cache neighbor lists to avoid subgraph-view overhead
    # lock[v] is the depth at which v joined the path.  Unlike Johnson's
    # blocked set, a locked node may still be revisited by a *shorter* path.
    lock = {v: 0 for v in path}
    B = defaultdict(set)  # deferred relaxations, analogous to Johnson's B-lists
    start = path[0]
    stack = [iter(G[path[-1]])]
    # blen[-1] tracks the smallest remaining budget at which a cycle was
    # completed below the current depth; length_bound means "none found yet".
    blen = [length_bound]
    while stack:
        nbrs = stack[-1]
        for w in nbrs:
            if w == start:
                # Edge back to the start of the prefix: a cycle is complete.
                yield path[:]
                blen[-1] = 1
            elif len(path) < lock.get(w, length_bound):
                # w is unlocked, or was locked at a greater depth: extend.
                path.append(w)
                blen.append(length_bound)
                lock[w] = len(path)
                stack.append(iter(G[w]))
                break
        else:
            # Exhausted the tip's neighbors: backtrack, propagating the budget.
            stack.pop()
            v = path.pop()
            bl = blen.pop()
            if blen:
                blen[-1] = min(blen[-1], bl)
            if bl < length_bound:
                # A cycle was found through v; relax locks so that shorter
                # prefixes may pass through v (and, transitively, through the
                # nodes recorded in B).
                relax_stack = [(bl, v)]
                while relax_stack:
                    bl, u = relax_stack.pop()
                    if lock.get(u, length_bound) < length_bound - bl + 1:
                        lock[u] = length_bound - bl + 1
                        relax_stack.extend((bl + 1, w) for w in B[u].difference(path))
            else:
                # No cycle through v at this budget: defer relaxation to its
                # successors.
                for w in G[v]:
                    B[w].add(v)
+
+
@nx._dispatchable
def chordless_cycles(G, length_bound=None):
    """Find simple chordless cycles of a graph.

    A `simple cycle` is a closed path where no node appears twice.  In a simple
    cycle, a `chord` is an additional edge between two nodes in the cycle.  A
    `chordless cycle` is a simple cycle without chords.  Said differently, a
    chordless cycle is a cycle C in a graph G where the number of edges in the
    induced graph G[C] is equal to the length of `C`.

    Note that some care must be taken in the case that G is not a simple graph
    nor a simple digraph.  Some authors limit the definition of chordless cycles
    to have a prescribed minimum length; we do not.

        1. We interpret self-loops to be chordless cycles, except in multigraphs
           with multiple loops in parallel.  Likewise, in a chordless cycle of
           length greater than 1, there can be no nodes with self-loops.

        2. We interpret directed two-cycles to be chordless cycles, except in
           multi-digraphs when any edge in a two-cycle has a parallel copy.

        3. We interpret parallel pairs of undirected edges as two-cycles, except
           when a third (or more) parallel edge exists between the two nodes.

        4. Generalizing the above, edges with parallel clones may not occur in
           chordless cycles.

    In a directed graph, two chordless cycles are distinct if they are not
    cyclic permutations of each other.  In an undirected graph, two chordless
    cycles are distinct if they are not cyclic permutations of each other nor of
    the other's reversal.

    Optionally, the cycles are bounded in length.

    We use an algorithm strongly inspired by that of Dias et al [1]_.  It has
    been modified in the following ways:

        1. Recursion is avoided, per Python's limitations

        2. The labeling function is not necessary, because the starting paths
            are chosen (and deleted from the host graph) to prevent multiple
            occurrences of the same path

        3. The search is optionally bounded at a specified length

        4. Support for directed graphs is provided by extending cycles along
            forward edges, and blocking nodes along forward and reverse edges

        5. Support for multigraphs is provided by omitting digons from the set
            of forward edges

    Parameters
    ----------
    G : NetworkX DiGraph
       A directed graph

    length_bound : int or None, optional (default=None)
       If length_bound is an int, generate all simple cycles of G with length at
       most length_bound.  Otherwise, generate all simple cycles of G.

    Yields
    ------
    list of nodes
       Each cycle is represented by a list of nodes along the cycle.

    Examples
    --------
    >>> sorted(list(nx.chordless_cycles(nx.complete_graph(4))))
    [[1, 0, 2], [1, 0, 3], [2, 0, 3], [2, 1, 3]]

    Notes
    -----
    When length_bound is None, and the graph is simple, the time complexity is
    $O((n+e)(c+1))$ for $n$ nodes, $e$ edges and $c$ chordless cycles.

    Raises
    ------
    ValueError
        when length_bound < 0.

    References
    ----------
    .. [1] Efficient enumeration of chordless cycles
       E. Dias and D. Castonguay and H. Longo and W.A.R. Jradi
       https://arxiv.org/abs/1309.1051

    See Also
    --------
    simple_cycles
    """

    if length_bound is not None:
        if length_bound == 0:
            return
        elif length_bound < 0:
            raise ValueError("length bound must be non-negative")

    directed = G.is_directed()
    multigraph = G.is_multigraph()

    # Yield self-loops, the chordless cycles of length 1.  In a multigraph, a
    # node with two or more parallel self-loops does not qualify (case 1 in
    # the docstring), so require loop multiplicity exactly 1.
    if multigraph:
        yield from ([v] for v, Gv in G.adj.items() if len(Gv.get(v, ())) == 1)
    else:
        yield from ([v] for v, Gv in G.adj.items() if v in Gv)

    if length_bound is not None and length_bound == 1:
        return

    # Nodes with loops cannot belong to longer cycles.  Let's delete them here.
    # also, we implicitly reduce the multiplicity of edges down to 1 in the case
    # of multiedges.
    # F holds the "forward" edges along which cycles are extended; B (in the
    # directed case) holds the undirected blocking edges used to detect chords.
    if directed:
        F = nx.DiGraph((u, v) for u, Gu in G.adj.items() if u not in Gu for v in Gu)
        B = F.to_undirected(as_view=False)
    else:
        F = nx.Graph((u, v) for u, Gu in G.adj.items() if u not in Gu for v in Gu)
        B = None

    # If we're given a multigraph, we have a few cases to consider with parallel
    # edges.
    #
    # 1. If we have 2 or more edges in parallel between the nodes (u, v), we
    #    must not construct longer cycles along (u, v).
    # 2. If G is not directed, then a pair of parallel edges between (u, v) is a
    #    chordless cycle unless there exists a third (or more) parallel edge.
    # 3. If G is directed, then parallel edges do not form cycles, but do
    #    preclude back-edges from forming cycles (handled in the next section),
    #    Thus, if an edge (u, v) is duplicated and the reverse (v, u) is also
    #    present, then we remove both from F.
    #
    # In directed graphs, we need to consider both directions that edges can
    # take, so iterate over all edges (u, v) and possibly (v, u).  In undirected
    # graphs, we need to be a little careful to only consider every edge once,
    # so we use a "visited" set to emulate node-order comparisons.

    if multigraph:
        if not directed:
            B = F.copy()
            visited = set()
        for u, Gu in G.adj.items():
            if directed:
                multiplicity = ((v, len(Guv)) for v, Guv in Gu.items())
                for v, m in multiplicity:
                    if m > 1:
                        F.remove_edges_from(((u, v), (v, u)))
            else:
                multiplicity = ((v, len(Guv)) for v, Guv in Gu.items() if v in visited)
                for v, m in multiplicity:
                    if m == 2:
                        # Exactly two parallel edges form a chordless 2-cycle.
                        yield [u, v]
                    if m > 1:
                        F.remove_edge(u, v)
                visited.add(u)

    # If we're given a directed graphs, we need to think about digons.  If we
    # have two edges (u, v) and (v, u), then that's a two-cycle.  If either edge
    # was duplicated above, then we removed both from F.  So, any digons we find
    # here are chordless.  After finding digons, we remove their edges from F
    # to avoid traversing them in the search for chordless cycles.
    if directed:
        for u, Fu in F.adj.items():
            digons = [[u, v] for v in Fu if F.has_edge(v, u)]
            yield from digons
            F.remove_edges_from(digons)
            F.remove_edges_from(e[::-1] for e in digons)

    if length_bound is not None and length_bound == 2:
        return

    # Now, we prepare to search for cycles.  We have removed all cycles of
    # lengths 1 and 2, so F is a simple graph or simple digraph.  We repeatedly
    # separate digraphs into their strongly connected components, and undirected
    # graphs into their biconnected components.  For each component, we pick a
    # node v, search for chordless cycles based at each "stem" (u, v, w), and
    # then remove v from that component before separating the graph again.
    if directed:
        separate = nx.strongly_connected_components

        # Directed stems look like (u -> v -> w), so we use the product of
        # predecessors of v with successors of v.
        def stems(C, v):
            for u, w in product(C.pred[v], C.succ[v]):
                if not G.has_edge(u, w):  # omit stems with acyclic chords
                    yield [u, v, w], F.has_edge(w, u)

    else:
        separate = nx.biconnected_components

        # Undirected stems look like (u ~ v ~ w), but we must not also search
        # (w ~ v ~ u), so we use combinations of v's neighbors of length 2.
        def stems(C, v):
            yield from (([u, v, w], F.has_edge(w, u)) for u, w in combinations(C[v], 2))

    components = [c for c in separate(F) if len(c) > 2]
    while components:
        c = components.pop()
        v = next(iter(c))
        Fc = F.subgraph(c)
        # Neighborhood caches for this component, built lazily on the first
        # stem that is not already a triangle.
        Fcc = Bcc = None
        for S, is_triangle in stems(Fc, v):
            if is_triangle:
                yield S
            else:
                if Fcc is None:
                    Fcc = _NeighborhoodCache(Fc)
                    Bcc = Fcc if B is None else _NeighborhoodCache(B.subgraph(c))
                yield from _chordless_cycle_search(Fcc, Bcc, S, length_bound)

        components.extend(c for c in separate(F.subgraph(c - {v})) if len(c) > 2)
+
+
def _chordless_cycle_search(F, B, path, length_bound):
    """The main loop for chordless cycle enumeration.

    This algorithm is strongly inspired by that of Dias et al [1]_.  It has been
    modified in the following ways:

        1. Recursion is avoided, per Python's limitations

        2. The labeling function is not necessary, because the starting paths
            are chosen (and deleted from the host graph) to prevent multiple
            occurrences of the same path

        3. The search is optionally bounded at a specified length

        4. Support for directed graphs is provided by extending cycles along
            forward edges, and blocking nodes along forward and reverse edges

        5. Support for multigraphs is provided by omitting digons from the set
            of forward edges

    Parameters
    ----------
    F : _NeighborhoodCache
       A graph of forward edges to follow in constructing cycles

    B : _NeighborhoodCache
       A graph of blocking edges to prevent the production of chordless cycles

    path : list
       A cycle prefix.  All cycles generated will begin with this prefix.

    length_bound : int
       A length bound.  All cycles generated will have length at most length_bound.


    Yields
    ------
    list of nodes
       Each cycle is represented by a list of nodes along the cycle.

    References
    ----------
    .. [1] Efficient enumeration of chordless cycles
       E. Dias and D. Castonguay and H. Longo and W.A.R. Jradi
       https://arxiv.org/abs/1309.1051

    """
    # blocked[v] counts how many nodes on the current path block v; a node may
    # extend the path only while its count is exactly 1 (blocked solely via
    # the path's tip), which is what keeps the produced cycles chordless.
    blocked = defaultdict(int)
    target = path[0]
    blocked[path[1]] = 1
    for w in path[1:]:
        for v in B[w]:
            blocked[v] += 1

    stack = [iter(F[path[2]])]
    while stack:
        nbrs = stack[-1]
        for w in nbrs:
            # Extend through w only if w is blocked exactly once and the
            # length bound (if any) still permits another node.
            if blocked[w] == 1 and (length_bound is None or len(path) < length_bound):
                Fw = F[w]
                if target in Fw:
                    # A forward edge (w, target) closes a chordless cycle.
                    yield path + [w]
                else:
                    Bw = B[w]
                    if target in Bw:
                        # A blocking edge back to the target would be a chord;
                        # abandon this extension.
                        continue
                    for v in Bw:
                        blocked[v] += 1
                    path.append(w)
                    stack.append(iter(Fw))
                    break
        else:
            # Exhausted the tip's neighbors: backtrack and undo its blocks.
            stack.pop()
            for v in B[path.pop()]:
                blocked[v] -= 1
+
+
@not_implemented_for("undirected")
@nx._dispatchable(mutates_input=True)
def recursive_simple_cycles(G):
    """Find simple cycles (elementary circuits) of a directed graph.

    A `simple cycle`, or `elementary circuit`, is a closed path where
    no node appears twice. Two elementary circuits are distinct if they
    are not cyclic permutations of each other.

    This version uses a recursive algorithm to build a list of cycles.
    You should probably use the iterator version called simple_cycles().
    Warning: This recursive version uses lots of RAM!
    It appears in NetworkX for pedagogical value.

    Parameters
    ----------
    G : NetworkX DiGraph
       A directed graph

    Returns
    -------
    A list of cycles, where each cycle is represented by a list of nodes
    along the cycle.

    Example:

    >>> edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]
    >>> G = nx.DiGraph(edges)
    >>> nx.recursive_simple_cycles(G)
    [[0], [2], [0, 1, 2], [0, 2], [1, 2]]

    Notes
    -----
    The implementation follows pp. 79-80 in [1]_.

    The time complexity is $O((n+e)(c+1))$ for $n$ nodes, $e$ edges and $c$
    elementary circuits.

    Note that self-loop edges are removed from `G` in the process
    (the function is registered as mutating its input).

    References
    ----------
    .. [1] Finding all the elementary circuits of a directed graph.
       D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
       https://doi.org/10.1137/0204007

    See Also
    --------
    simple_cycles, cycle_basis
    """

    # Jon Olav Vik, 2010-08-09
    def _unblock(thisnode):
        """Recursively unblock and remove nodes from B[thisnode]."""
        if blocked[thisnode]:
            blocked[thisnode] = False
            while B[thisnode]:
                _unblock(B[thisnode].pop())

    def circuit(thisnode, startnode, component):
        # Recursively extend the path from thisnode, recording any circuit
        # that returns to startnode.  Returns True if a circuit was closed
        # anywhere below this call.
        closed = False  # set to True if elementary path is closed
        path.append(thisnode)
        blocked[thisnode] = True
        for nextnode in component[thisnode]:  # direct successors of thisnode
            if nextnode == startnode:
                result.append(path[:])
                closed = True
            elif not blocked[nextnode]:
                if circuit(nextnode, startnode, component):
                    closed = True
        if closed:
            _unblock(thisnode)
        else:
            # No circuit through thisnode: defer its unblocking to successors.
            for nextnode in component[thisnode]:
                if thisnode not in B[nextnode]:  # TODO: use set for speedup?
                    B[nextnode].append(thisnode)
        path.pop()  # remove thisnode from path
        return closed

    path = []  # stack of nodes in current path
    blocked = defaultdict(bool)  # vertex: blocked from search?
    B = defaultdict(list)  # graph portions that yield no elementary circuit
    result = []  # list to accumulate the circuits found

    # Johnson's algorithm excludes self-loop edges like (v, v).
    # To be backward compatible, we record those cycles in advance
    # and then remove them from the graph before the search.
    for v in G:
        if G.has_edge(v, v):
            result.append([v])
            G.remove_edge(v, v)

    # Johnson's algorithm requires some ordering of the nodes.
    # They might not be sortable so we assign an arbitrary ordering.
    ordering = dict(zip(G, range(len(G))))
    for s in ordering:
        # Build the subgraph induced by s and following nodes in the ordering
        subgraph = G.subgraph(node for node in G if ordering[node] >= ordering[s])
        # Find the strongly connected component in the subgraph
        # that contains the least node according to the ordering
        strongcomp = nx.strongly_connected_components(subgraph)
        mincomp = min(strongcomp, key=lambda ns: min(ordering[n] for n in ns))
        component = G.subgraph(mincomp)
        if len(component) > 1:
            # smallest node in the component according to the ordering
            startnode = min(component, key=ordering.__getitem__)
            for node in component:
                blocked[node] = False
                B[node][:] = []
            # circuit() accumulates into `result`; its return value is unused.
            dummy = circuit(startnode, startnode, component)
    return result
+
+
@nx._dispatchable
def find_cycle(G, source=None, orientation=None):
    """Returns a cycle found via depth-first traversal.

    The cycle is a list of edges indicating the cyclic path.
    Orientation of directed edges is controlled by `orientation`.

    Parameters
    ----------
    G : graph
        A directed/undirected graph/multigraph.

    source : node, list of nodes
        The node from which the traversal begins. If None, then a source
        is chosen arbitrarily and repeatedly until all edges from each node in
        the graph are searched.

    orientation : None | 'original' | 'reverse' | 'ignore' (default: None)
        For directed graphs and directed multigraphs, edge traversals need not
        respect the original orientation of the edges.
        When set to 'reverse' every edge is traversed in the reverse direction.
        When set to 'ignore', every edge is treated as undirected.
        When set to 'original', every edge is treated as directed.
        In all three cases, the yielded edge tuples add a last entry to
        indicate the direction in which that edge was traversed.
        If orientation is None, the yielded edge has no direction indicated.
        The direction is respected, but not reported.

    Returns
    -------
    edges : directed edges
        A list of directed edges indicating the path taken for the loop.
        If no cycle is found, then an exception is raised.
        For graphs, an edge is of the form `(u, v)` where `u` and `v`
        are the tail and head of the edge as determined by the traversal.
        For multigraphs, an edge is of the form `(u, v, key)`, where `key` is
        the key of the edge. When the graph is directed, then `u` and `v`
        are always in the order of the actual directed edge.
        If orientation is not None then the edge tuple is extended to include
        the direction of traversal ('forward' or 'reverse') on that edge.

    Raises
    ------
    NetworkXNoCycle
        If no cycle was found.

    Examples
    --------
    In this example, we construct a DAG and find, in the first call, that there
    are no directed cycles, and so an exception is raised. In the second call,
    we ignore edge orientations and find that there is an undirected cycle.
    Note that the second call finds a directed cycle while effectively
    traversing an undirected graph, and so, we found an "undirected cycle".
    This means that this DAG structure does not form a directed tree (which
    is also known as a polytree).

    >>> G = nx.DiGraph([(0, 1), (0, 2), (1, 2)])
    >>> nx.find_cycle(G, orientation="original")
    Traceback (most recent call last):
        ...
    networkx.exception.NetworkXNoCycle: No cycle found.
    >>> list(nx.find_cycle(G, orientation="ignore"))
    [(0, 1, 'forward'), (1, 2, 'forward'), (0, 2, 'reverse')]

    See Also
    --------
    simple_cycles
    """
    # tailhead() maps a raw edge tuple from edge_dfs to its effective
    # (tail, head) pair under the requested orientation.
    if not G.is_directed() or orientation in (None, "original"):

        def tailhead(edge):
            return edge[:2]

    elif orientation == "reverse":

        def tailhead(edge):
            return edge[1], edge[0]

    elif orientation == "ignore":

        def tailhead(edge):
            # edge_dfs appends the traversal direction as the last entry.
            if edge[-1] == "reverse":
                return edge[1], edge[0]
            return edge[:2]

    explored = set()
    cycle = []
    final_node = None
    for start_node in G.nbunch_iter(source):
        if start_node in explored:
            # No loop is possible.
            continue

        edges = []
        # All nodes seen in this iteration of edge_dfs
        seen = {start_node}
        # Nodes in active path.
        active_nodes = {start_node}
        previous_head = None

        for edge in nx.edge_dfs(G, start_node, orientation):
            # Determine if this edge is a continuation of the active path.
            tail, head = tailhead(edge)
            if head in explored:
                # Then we've already explored it. No loop is possible.
                continue
            if previous_head is not None and tail != previous_head:
                # This edge results from backtracking.
                # Pop until we get a node whose head equals the current tail.
                # So for example, we might have:
                #  (0, 1), (1, 2), (2, 3), (1, 4)
                # which must become:
                #  (0, 1), (1, 4)
                while True:
                    try:
                        popped_edge = edges.pop()
                    except IndexError:
                        # Walked all the way back to the start; restart the
                        # active path from the current tail.
                        edges = []
                        active_nodes = {tail}
                        break
                    else:
                        popped_head = tailhead(popped_edge)[1]
                        active_nodes.remove(popped_head)

                    if edges:
                        last_head = tailhead(edges[-1])[1]
                        if tail == last_head:
                            break
            edges.append(edge)

            if head in active_nodes:
                # We have a loop!
                cycle.extend(edges)
                final_node = head
                break
            else:
                seen.add(head)
                active_nodes.add(head)
                previous_head = head

        if cycle:
            break
        else:
            explored.update(seen)

    else:
        # The for-loop ran to completion without finding a cycle.
        assert len(cycle) == 0
        raise nx.exception.NetworkXNoCycle("No cycle found.")

    # We now have a list of edges which ends on a cycle.
    # So we need to remove from the beginning edges that are not relevant.

    for i, edge in enumerate(cycle):
        tail, head = tailhead(edge)
        if tail == final_node:
            break

    return cycle[i:]
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def minimum_cycle_basis(G, weight=None):
    """Returns a minimum weight cycle basis for G

    A minimum weight cycle basis is a cycle basis whose cycles have the
    smallest possible total weight (total length, for unweighted graphs).

    Parameters
    ----------
    G : NetworkX Graph
    weight: string
        name of the edge attribute to use for edge weights

    Returns
    -------
    A list of cycle lists.  Each cycle list is a list of nodes
    which forms a cycle (loop) in G. Note that the nodes are not
    necessarily returned in the order in which they appear in the cycle

    Examples
    --------
    >>> G = nx.Graph()
    >>> nx.add_cycle(G, [0, 1, 2, 3])
    >>> nx.add_cycle(G, [0, 3, 4, 5])
    >>> nx.minimum_cycle_basis(G)
    [[5, 4, 3, 0], [3, 2, 1, 0]]

    References:
        [1] Kavitha, Telikepalli, et al. "An O(m^2n) Algorithm for
        Minimum Cycle Basis of Graphs."
        http://link.springer.com/article/10.1007/s00453-007-9064-z
        [2] de Pina, J. 1995. Applications of shortest path methods.
        Ph.D. thesis, University of Amsterdam, Netherlands

    See Also
    --------
    simple_cycles, cycle_basis
    """
    # A cycle basis decomposes over connected components, so solve each
    # component independently and concatenate the results.
    basis = []
    for component in nx.connected_components(G):
        basis.extend(_min_cycle_basis(G.subgraph(component), weight))
    return basis
+
+
def _min_cycle_basis(G, weight):
    """Return a minimum weight cycle basis of the connected graph G.

    Implements the de Pina scheme as presented by Kavitha et al.: maintain a
    list of edge-set "witness" vectors orthogonal (over GF(2)) to the cycles
    found so far, and repeatedly extract the minimum-weight cycle that is
    non-orthogonal to the current witness.
    """
    cb = []
    # We  extract the edges not in a spanning tree. We do not really need a
    # *minimum* spanning tree. That is why we call the next function with
    # weight=None. Depending on implementation, it may be faster as well
    tree_edges = list(nx.minimum_spanning_edges(G, weight=None, data=False))
    # Chords are compared in both orientations, since G is undirected.
    chords = G.edges - tree_edges - {(v, u) for u, v in tree_edges}

    # We maintain a set of vectors orthogonal to sofar found cycles
    set_orth = [{edge} for edge in chords]
    while set_orth:
        base = set_orth.pop()
        # kth cycle is "parallel" to kth vector in set_orth
        cycle_edges = _min_cycle(G, base, weight)
        cb.append([v for u, v in cycle_edges])

        # now update set_orth so that k+1,k+2... th elements are
        # orthogonal to the newly found cycle, as per [p. 336, 1]
        # (each remaining vector with odd overlap is replaced by its
        # symmetric difference with `base`, computed orientation-insensitively)
        set_orth = [
            (
                {e for e in orth if e not in base if e[::-1] not in base}
                | {e for e in base if e not in orth if e[::-1] not in orth}
            )
            if sum((e in orth or e[::-1] in orth) for e in cycle_edges) % 2
            else orth
            for orth in set_orth
        ]
    return cb
+
+
def _min_cycle(G, orth, weight):
    """
    Computes the minimum weight cycle in G,
    orthogonal to the vector orth as per [p. 338, 1]
    Use (u, 1) to indicate the lifted copy of u (denoted u' in paper).

    Returns the cycle as a list of edges.
    """
    Gi = nx.Graph()

    # Add 2 copies of each edge in G to Gi.
    # If edge is in orth, add cross edge; otherwise in-plane edge
    # (a shortest n -> (n, 1) path then crosses between the two copies an odd
    # number of times, i.e. uses an odd number of edges from orth)
    for u, v, wt in G.edges(data=weight, default=1):
        if (u, v) in orth or (v, u) in orth:
            Gi.add_edges_from([(u, (v, 1)), ((u, 1), v)], Gi_weight=wt)
        else:
            Gi.add_edges_from([(u, v), ((u, 1), (v, 1))], Gi_weight=wt)

    # find the shortest length in Gi between n and (n, 1) for each n
    # Note: Use "Gi_weight" for name of weight attribute
    spl = nx.shortest_path_length
    lift = {n: spl(Gi, source=n, target=(n, 1), weight="Gi_weight") for n in G}

    # Now compute that short path in Gi, which translates to a cycle in G
    start = min(lift, key=lift.get)
    end = (start, 1)
    min_path_i = nx.shortest_path(Gi, source=start, target=end, weight="Gi_weight")

    # Now we obtain the actual path, re-map nodes in Gi to those in G
    min_path = [n if n in G else n[0] for n in min_path_i]

    # Now remove the edges that occur two times
    # two passes: flag which edges get kept, then build it
    # (an edge used an even number of times cancels out of the cycle)
    edgelist = list(pairwise(min_path))
    edgeset = set()
    for e in edgelist:
        if e in edgeset:
            edgeset.remove(e)
        elif e[::-1] in edgeset:
            edgeset.remove(e[::-1])
        else:
            edgeset.add(e)

    min_edgelist = []
    for e in edgelist:
        if e in edgeset:
            min_edgelist.append(e)
            edgeset.remove(e)
        elif e[::-1] in edgeset:
            min_edgelist.append(e[::-1])
            edgeset.remove(e[::-1])

    return min_edgelist
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def girth(G):
    """Returns the girth of the graph.

    The girth of a graph is the length of its shortest cycle, or infinity if
    the graph is acyclic. The algorithm follows the description given on the
    Wikipedia page [1]_, and runs in time O(mn) on a graph with m edges and n
    nodes.

    Parameters
    ----------
    G : NetworkX Graph

    Returns
    -------
    int or math.inf

    Examples
    --------
    All examples below (except P_5) can easily be checked using Wikipedia,
    which has a page for each of these famous graphs.

    >>> nx.girth(nx.chvatal_graph())
    4
    >>> nx.girth(nx.tutte_graph())
    4
    >>> nx.girth(nx.petersen_graph())
    5
    >>> nx.girth(nx.heawood_graph())
    6
    >>> nx.girth(nx.pappus_graph())
    6
    >>> nx.girth(nx.path_graph(5))
    inf

    References
    ----------
    .. [1] `Wikipedia: Girth <https://en.wikipedia.org/wiki/Girth_(graph_theory)>`_

    """
    girth = depth_limit = inf
    tree_edge = nx.algorithms.traversal.breadth_first_search.TREE_EDGE
    level_edge = nx.algorithms.traversal.breadth_first_search.LEVEL_EDGE
    for n in G:
        # run a BFS from source n, keeping track of distances; since we want
        # the shortest cycle, no need to explore beyond the current minimum length
        depth = {n: 0}
        for u, v, label in nx.bfs_labeled_edges(G, n):
            du = depth[u]
            if du > depth_limit:
                break
            if label is tree_edge:
                depth[v] = du + 1
            else:
                # if (u, v) is a level edge, the length is du + du + 1 (odd)
                # otherwise, it's a forward edge; length is du + (du + 1) + 1 (even)
                delta = label is level_edge
                length = du + du + 2 - delta
                if length < girth:
                    # Record the new shortest cycle and shrink the search
                    # radius accordingly for subsequent BFS runs.
                    girth = length
                    depth_limit = du - delta

    return girth
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/d_separation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/d_separation.py
new file mode 100644
index 00000000..a688eca4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/d_separation.py
@@ -0,0 +1,722 @@
+"""
+Algorithm for testing d-separation in DAGs.
+
+*d-separation* is a test for conditional independence in probability
+distributions that can be factorized using DAGs.  It is a purely
+graphical test that uses the underlying graph and makes no reference
+to the actual distribution parameters.  See [1]_ for a formal
+definition.
+
+The implementation is based on the conceptually simple linear time
+algorithm presented in [2]_.  Refer to [3]_, [4]_ for a couple of
+alternative algorithms.
+
+The functional interface in NetworkX consists of three functions:
+
+- `find_minimal_d_separator` returns a minimal d-separator set ``z``.
+  That is, removing any node or nodes from it makes it no longer a d-separator.
+- `is_d_separator` checks if a given set is a d-separator.
+- `is_minimal_d_separator` checks if a given set is a minimal d-separator.
+
+D-separators
+------------
+
+Here, we provide a brief overview of d-separation and related concepts that
+are relevant for understanding it:
+
+The ideas of d-separation and d-connection relate to paths being open or blocked.
+
+- A "path" is a sequence of nodes connected in order by edges. Unlike for most
+  graph theory analysis, the direction of the edges is ignored. Thus the path
+  can be thought of as a traditional path on the undirected version of the graph.
+- A "candidate d-separator" ``z`` is a set of nodes being considered as
+  possibly blocking all paths between two prescribed sets ``x`` and ``y`` of nodes.
+  We refer to each node in the candidate d-separator as "known".
+- A "collider" node on a path is a node that is a successor of its two neighbor
+  nodes on the path. That is, ``c`` is a collider if the edge directions
+  along the path look like ``... u -> c <- v ...``.
+- If a collider node or any of its descendants are "known", the collider
+  is called an "open collider". Otherwise it is a "blocking collider".
+- Any path can be "blocked" in two ways. If the path contains a "known" node
+  that is not a collider, the path is blocked. Also, if the path contains a
+  collider that is not a "known" node, the path is blocked.
+- A path is "open" if it is not blocked. That is, it is open if every node is
+  either an open collider or not a "known". Said another way, every
+  "known" in the path is a collider and every collider is open (has a
+  "known" as an inclusive descendant). The concept of "open path" is meant to
+  demonstrate a probabilistic conditional dependence between two nodes given
+  prescribed knowledge ("known" nodes).
+- Two sets ``x`` and ``y`` of nodes are "d-separated" by a set of nodes ``z``
+  if all paths between nodes in ``x`` and nodes in ``y`` are blocked. That is,
+  if there are no open paths from any node in ``x`` to any node in ``y``.
+  Such a set ``z`` is a "d-separator" of ``x`` and ``y``.
+- A "minimal d-separator" is a d-separator ``z`` for which no node or subset
+  of nodes can be removed with it still being a d-separator.
+
+The d-separator blocks some paths between ``x`` and ``y`` but opens others.
+Nodes in the d-separator block paths if the nodes are not colliders.
+But if a collider or its descendant nodes are in the d-separation set, the
+colliders are open, allowing a path through that collider.
+
+Illustration of D-separation with examples
+------------------------------------------
+
+A pair of two nodes, ``u`` and ``v``, are d-connected if there is a path
+from ``u`` to ``v`` that is not blocked. That means, there is an open
+path from ``u`` to ``v``.
+
+For example, if the d-separating set is the empty set, then the following paths are
+open between ``u`` and ``v``:
+
+- u <- n -> v
+- u -> w -> ... -> n -> v
+
+If, on the other hand, ``n`` is in the d-separating set, then ``n`` blocks
+those paths between ``u`` and ``v``.
+
+Colliders block a path if they and their descendants are not included
+in the d-separating set. An example of a path that is blocked when the
+d-separating set is empty is:
+
+- u -> w -> ... -> n <- v
+
+The node ``n`` is a collider in this path and is not in the d-separating set.
+So ``n`` blocks this path. However, if ``n`` or a descendant of ``n`` is
+included in the d-separating set, then the path through the collider
+at ``n`` (... -> n <- ...) is "open".
+
+D-separation is concerned with blocking all paths between nodes from ``x`` to ``y``.
+A d-separating set between ``x`` and ``y`` is one where all paths are blocked.
+
+D-separation and its applications in probability
+------------------------------------------------
+
+D-separation is commonly used in probabilistic causal-graph models. D-separation
+connects the idea of probabilistic "dependence" with separation in a graph. If
+one assumes the causal Markov condition [5]_, (every node is conditionally
+independent of its non-descendants, given its parents) then d-separation implies
+conditional independence in probability distributions.
+Symmetrically, d-connection implies dependence.
+
+The intuition is as follows. The edges on a causal graph indicate which nodes
+influence the outcome of other nodes directly. An edge from u to v
+implies that the outcome of event ``u`` influences the probabilities for
+the outcome of event ``v``. Certainly knowing ``u`` changes predictions for ``v``.
+But also knowing ``v`` changes predictions for ``u``. The outcomes are dependent.
+Furthermore, an edge from ``v`` to ``w`` would mean that ``w`` and ``v`` are dependent
+and thus that ``u`` could indirectly influence ``w``.
+
+Without any knowledge about the system (candidate d-separating set is empty)
+a causal graph ``u -> v -> w`` allows all three nodes to be dependent. But
+if we know the outcome of ``v``, the conditional probabilities of outcomes for
+``u`` and ``w`` are independent of each other. That is, once we know the outcome
+for ``v``, the probabilities for ``w`` do not depend on the outcome for ``u``.
+This is the idea behind ``v`` blocking the path if it is "known" (in the candidate
+d-separating set).
+
+The same argument works whether the direction of the edges are both
+left-going and when both arrows head out from the middle. Having a "known"
+node on a path blocks the collider-free path because those relationships
+make the conditional probabilities independent.
+
+The direction of the causal edges does impact dependence precisely in the
+case of a collider e.g. ``u -> v <- w``. In that situation, both ``u`` and ``w``
+influence ``v``. But they do not directly influence each other. So without any
+knowledge of any outcomes, ``u`` and ``w`` are independent. That is the idea behind
+colliders blocking the path. But, if ``v`` is known, the conditional probabilities
+of ``u`` and ``w`` can be dependent. This is the heart of Berkson's Paradox [6]_.
+For example, suppose ``u`` and ``w`` are boolean events (they either happen or do not)
+and ``v`` represents the outcome "at least one of ``u`` and ``w`` occur". Then knowing
+``v`` is true makes the conditional probabilities of ``u`` and ``w`` dependent.
+Essentially, knowing that at least one of them is true raises the probability of
+each. But further knowledge that ``w`` is true (or false) changes the conditional
+probability of ``u`` to either the original value or 1. So the conditional
+probability of ``u`` depends on the outcome of ``w`` even though there is no
+causal relationship between them. When a collider is known, dependence can
+occur across paths through that collider. This is the reason open colliders
+do not block paths.
+
+Furthermore, even if ``v`` is not "known", if one of its descendants is "known"
+we can use that information to know more about ``v`` which again makes
+``u`` and ``w`` potentially dependent. Suppose the chance of ``n`` occurring
+is much higher when ``v`` occurs ("at least one of ``u`` and ``w`` occur").
+Then if we know ``n`` occurred, it is more likely that ``v`` occurred and that
+makes the chance of ``u`` and ``w`` dependent. This is the idea behind why
+a collider does not block a path if any descendant of the collider is "known".
+
+When two sets of nodes ``x`` and ``y`` are d-separated by a set ``z``,
+it means that given the outcomes of the nodes in ``z``, the probabilities
+of outcomes of the nodes in ``x`` are independent of the outcomes of the
+nodes in ``y`` and vice versa.
+
+Examples
+--------
+A Hidden Markov Model with 5 observed states and 5 hidden states
+where the hidden states have causal relationships resulting in
+a path results in the following causal network. We check that
+early states along the path are separated from late state in
+the path by the d-separator of the middle hidden state.
+Thus if we condition on the middle hidden state, the early
+state probabilities are independent of the late state outcomes.
+
+>>> G = nx.DiGraph()
+>>> G.add_edges_from(
+...     [
+...         ("H1", "H2"),
+...         ("H2", "H3"),
+...         ("H3", "H4"),
+...         ("H4", "H5"),
+...         ("H1", "O1"),
+...         ("H2", "O2"),
+...         ("H3", "O3"),
+...         ("H4", "O4"),
+...         ("H5", "O5"),
+...     ]
+... )
+>>> x, y, z = ({"H1", "O1"}, {"H5", "O5"}, {"H3"})
+>>> nx.is_d_separator(G, x, y, z)
+True
+>>> nx.is_minimal_d_separator(G, x, y, z)
+True
+>>> nx.is_minimal_d_separator(G, x, y, z | {"O3"})
+False
+>>> z = nx.find_minimal_d_separator(G, x | y, {"O2", "O3", "O4"})
+>>> z == {"H2", "H4"}
+True
+
+If no minimal_d_separator exists, `None` is returned
+
+>>> other_z = nx.find_minimal_d_separator(G, x | y, {"H2", "H3"})
+>>> other_z is None
+True
+
+
+References
+----------
+
+.. [1] Pearl, J.  (2009).  Causality.  Cambridge: Cambridge University Press.
+
+.. [2] Darwiche, A.  (2009).  Modeling and reasoning with Bayesian networks.
+   Cambridge: Cambridge University Press.
+
+.. [3] Shachter, Ross D. "Bayes-ball: The rational pastime (for
+   determining irrelevance and requisite information in belief networks
+   and influence diagrams)." In Proceedings of the Fourteenth Conference
+   on Uncertainty in Artificial Intelligence (UAI), (pp. 480–487). 1998.
+
+.. [4] Koller, D., & Friedman, N. (2009).
+   Probabilistic graphical models: principles and techniques. The MIT Press.
+
+.. [5] https://en.wikipedia.org/wiki/Causal_Markov_condition
+
+.. [6] https://en.wikipedia.org/wiki/Berkson%27s_paradox
+
+"""
+
+from collections import deque
+from itertools import chain
+
+import networkx as nx
+from networkx.utils import UnionFind, not_implemented_for
+
+__all__ = [
+    "is_d_separator",
+    "is_minimal_d_separator",
+    "find_minimal_d_separator",
+    "d_separated",
+    "minimal_d_separator",
+]
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def is_d_separator(G, x, y, z):
    """Return whether node sets `x` and `y` are d-separated by `z`.

    Parameters
    ----------
    G : nx.DiGraph
        A NetworkX DAG.

    x : node or set of nodes
        First node or set of nodes in `G`.

    y : node or set of nodes
        Second node or set of nodes in `G`.

    z : node or set of nodes
        Potential separator (set of conditioning nodes in `G`). Can be empty set.

    Returns
    -------
    b : bool
        A boolean that is true if `x` is d-separated from `y` given `z` in `G`.

    Raises
    ------
    NetworkXError
        The *d-separation* test is commonly used on disjoint sets of
        nodes in acyclic directed graphs.  Accordingly, the algorithm
        raises a :exc:`NetworkXError` if the node sets are not
        disjoint or if the input graph is not a DAG.

    NodeNotFound
        If any of the input nodes are not found in the graph,
        a :exc:`NodeNotFound` exception is raised

    Notes
    -----
    A d-separating set in a DAG is a set of nodes that
    blocks all paths between the two sets. Nodes in `z`
    block a path if they are part of the path and are not a collider,
    or a descendant of a collider. Also colliders that are not in `z`
    block a path. A collider structure along a path
    is ``... -> c <- ...`` where ``c`` is the collider node.

    https://en.wikipedia.org/wiki/Bayesian_network#d-separation
    """
    # Normalize bare nodes to singleton sets. A TypeError raised by the set
    # operations below means an argument was neither a node nor a set of nodes.
    try:
        x = {x} if x in G else x
        y = {y} if y in G else y
        z = {z} if z in G else z

        intersection = x & y or x & z or y & z
        if intersection:
            raise nx.NetworkXError(
                f"The sets are not disjoint, with intersection {intersection}"
            )

        set_v = x | y | z
        if set_v - G.nodes:
            raise nx.NodeNotFound(f"The node(s) {set_v - G.nodes} are not found in G")
    except TypeError:
        raise nx.NodeNotFound("One of x, y, or z is not a node or a set of nodes in G")

    if not nx.is_directed_acyclic_graph(G):
        raise nx.NetworkXError("graph should be directed acyclic")

    # The traversal below explores nodes reachable from `x` along open paths,
    # tracking the direction of the last edge used: a node is queued "forward"
    # when entered along a -> edge and "backward" when entered along a <- edge.
    # Reaching any node of `y` proves d-connection; exhausting both queues
    # without doing so proves d-separation.

    # contains -> and <-> edges from starting node T
    forward_deque = deque([])
    forward_visited = set()

    # contains <- and - edges from starting node T
    backward_deque = deque(x)
    backward_visited = set()

    # Union of all ancestors of the nodes in x, together with z and x themselves;
    # used below to decide whether a collider reached along a -> edge is open.
    ancestors_or_z = set().union(*[nx.ancestors(G, node) for node in x]) | z | x

    while forward_deque or backward_deque:
        if backward_deque:
            node = backward_deque.popleft()
            backward_visited.add(node)
            if node in y:
                return False
            if node in z:
                # A non-collider in z blocks the path in this direction.
                continue

            # add <- edges to backward deque
            backward_deque.extend(G.pred[node].keys() - backward_visited)
            # add -> edges to forward deque
            forward_deque.extend(G.succ[node].keys() - forward_visited)

        if forward_deque:
            node = forward_deque.popleft()
            forward_visited.add(node)
            if node in y:
                return False

            # Consider if -> node <- is opened due to ancestor of node in z
            if node in ancestors_or_z:
                # add <- edges to backward deque
                backward_deque.extend(G.pred[node].keys() - backward_visited)
            if node not in z:
                # add -> edges to forward deque
                forward_deque.extend(G.succ[node].keys() - forward_visited)

    return True
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def find_minimal_d_separator(G, x, y, *, included=None, restricted=None):
    """Returns a minimal d-separating set between `x` and `y` if possible

    A d-separating set in a DAG is a set of nodes that blocks all
    paths between the two sets of nodes, `x` and `y`. This function
    constructs a d-separating set that is "minimal", meaning no nodes can
    be removed without it losing the d-separating property for `x` and `y`.
    If no d-separating sets exist for `x` and `y`, this returns `None`.

    In a DAG there may be more than one minimal d-separator between two
    sets of nodes. Minimal d-separators are not always unique. This function
    returns one minimal d-separator, or `None` if no d-separator exists.

    Uses the algorithm presented in [1]_. The complexity of the algorithm
    is :math:`O(m)`, where :math:`m` stands for the number of edges in
    the subgraph of G consisting of only the ancestors of `x` and `y`.
    For full details, see [1]_.

    Parameters
    ----------
    G : graph
        A networkx DAG.
    x : set | node
        A node or set of nodes in the graph.
    y : set | node
        A node or set of nodes in the graph.
    included : set | node | None
        A node or set of nodes which must be included in the found separating set,
        default is None, which means the empty set.
    restricted : set | node | None
        Restricted node or set of nodes to consider. Only these nodes can be in
        the found separating set, default is None meaning all nodes in ``G``.

    Returns
    -------
    z : set | None
        The minimal d-separating set, if at least one d-separating set exists,
        otherwise None.

    Raises
    ------
    NetworkXError
        Raises a :exc:`NetworkXError` if the input graph is not a DAG
        or if node sets `x`, `y`, and `included` are not disjoint.

    NodeNotFound
        If any of the input nodes are not found in the graph,
        a :exc:`NodeNotFound` exception is raised.

    References
    ----------
    .. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding
        minimal d-separators in linear time and applications." In
        Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020.
    """
    if not nx.is_directed_acyclic_graph(G):
        raise nx.NetworkXError("graph should be directed acyclic")

    # Normalize bare-node arguments to singleton sets and apply the defaults
    # for the keyword-only constraints. A TypeError from the set operations
    # below means an argument was neither a node nor a set of nodes.
    try:
        x = {x} if x in G else x
        y = {y} if y in G else y

        if included is None:
            included = set()
        elif included in G:
            included = {included}

        if restricted is None:
            restricted = set(G)
        elif restricted in G:
            restricted = {restricted}

        set_y = x | y | included | restricted
        if set_y - G.nodes:
            raise nx.NodeNotFound(f"The node(s) {set_y - G.nodes} are not found in G")
    except TypeError:
        raise nx.NodeNotFound(
            "One of x, y, included or restricted is not a node or set of nodes in G"
        )

    if not included <= restricted:
        raise nx.NetworkXError(
            f"Included nodes {included} must be in restricted nodes {restricted}"
        )

    intersection = x & y or x & included or y & included
    if intersection:
        raise nx.NetworkXError(
            f"The sets x, y, included are not disjoint. Overlap: {intersection}"
        )

    # Ancestral closure of x | y | included: these nodes plus all their
    # ancestors. The algorithm in [1] only ever needs to look inside it.
    nodeset = x | y | included
    ancestors_x_y_included = nodeset.union(*[nx.ancestors(G, node) for node in nodeset])

    # Largest candidate separator permitted by `restricted`: restricted nodes
    # within the ancestral closure, excluding x and y themselves.
    z_init = restricted & (ancestors_x_y_included - (x | y))

    # If y is still d-connected to x when conditioning on this largest
    # candidate, no d-separator satisfying the constraints exists.
    x_closure = _reachable(G, x, ancestors_x_y_included, z_init)
    if x_closure & y:
        return None

    # Prune the candidate to nodes reachable from x (keeping the mandatory
    # `included` nodes), then likewise by reachability from y; the result is
    # a minimal d-separator per criteria (a)-(c) of [1].
    z_updated = z_init & (x_closure | included)
    y_closure = _reachable(G, y, ancestors_x_y_included, z_updated)
    return z_updated & (y_closure | included)
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def is_minimal_d_separator(G, x, y, z, *, included=None, restricted=None):
    """Determine if `z` is a minimal d-separator for `x` and `y`.

    A d-separator, `z`, in a DAG is a set of nodes that blocks
    all paths from nodes in set `x` to nodes in set `y`.
    A minimal d-separator is a d-separator `z` such that removing
    any subset of nodes makes it no longer a d-separator.

    Note: This function checks whether `z` is a d-separator AND is
    minimal. One can use the function `is_d_separator` to only check if
    `z` is a d-separator. See examples below.

    Parameters
    ----------
    G : nx.DiGraph
        A NetworkX DAG.
    x : node | set
        A node or set of nodes in the graph.
    y : node | set
        A node or set of nodes in the graph.
    z : node | set
        The node or set of nodes to check if it is a minimal d-separating set.
        The function :func:`is_d_separator` is called inside this function
        to verify that `z` is in fact a d-separator.
    included : set | node | None
        A node or set of nodes which must be included in the found separating set,
        default is ``None``, which means the empty set.
    restricted : set | node | None
        Restricted node or set of nodes to consider. Only these nodes can be in
        the found separating set, default is ``None`` meaning all nodes in ``G``.

    Returns
    -------
    bool
        Whether or not the set `z` is a minimal d-separator subject to
        `restricted` nodes and `included` node constraints.

    Examples
    --------
    >>> G = nx.path_graph([0, 1, 2, 3], create_using=nx.DiGraph)
    >>> G.add_node(4)
    >>> nx.is_minimal_d_separator(G, 0, 2, {1})
    True
    >>> # since {1} is the minimal d-separator, {1, 3, 4} is not minimal
    >>> nx.is_minimal_d_separator(G, 0, 2, {1, 3, 4})
    False
    >>> # alternatively, if we only want to check that {1, 3, 4} is a d-separator
    >>> nx.is_d_separator(G, 0, 2, {1, 3, 4})
    True

    Raises
    ------
    NetworkXError
        Raises a :exc:`NetworkXError` if the input graph is not a DAG.

    NodeNotFound
        If any of the input nodes are not found in the graph,
        a :exc:`NodeNotFound` exception is raised.

    References
    ----------
    .. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding
        minimal d-separators in linear time and applications." In
        Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020.

    Notes
    -----
    This function works on verifying that a set is minimal and
    d-separating between two nodes. Uses criterion (a), (b), (c) on
    page 4 of [1]_. a) closure(`x`) and `y` are disjoint. b) `z` contains
    all nodes from `included` and is contained in the `restricted`
    nodes and in the union of ancestors of `x`, `y`, and `included`.
    c) the nodes in `z` not in `included` are contained in both
    closure(x) and closure(y). The closure of a set is the set of nodes
    connected to the set by a directed path in G.

    The complexity is :math:`O(m)`, where :math:`m` stands for the
    number of edges in the subgraph of G consisting of only the
    ancestors of `x` and `y`.

    For full details, see [1]_.
    """
    if not nx.is_directed_acyclic_graph(G):
        raise nx.NetworkXError("graph should be directed acyclic")

    # Normalize bare-node arguments to singleton sets and apply defaults.
    # A TypeError from the set operations below means an argument was
    # neither a node nor a set of nodes.
    try:
        x = {x} if x in G else x
        y = {y} if y in G else y
        z = {z} if z in G else z

        if included is None:
            included = set()
        elif included in G:
            included = {included}

        if restricted is None:
            restricted = set(G)
        elif restricted in G:
            restricted = {restricted}

        set_y = x | y | included | restricted
        if set_y - G.nodes:
            raise nx.NodeNotFound(f"The node(s) {set_y - G.nodes} are not found in G")
    except TypeError:
        raise nx.NodeNotFound(
            "One of x, y, z, included or restricted is not a node or set of nodes in G"
        )

    if not included <= z:
        # BUG FIX: the message previously interpolated `x` where the proposed
        # separating set `z` was meant.
        raise nx.NetworkXError(
            f"Included nodes {included} must be in proposed separating set z {z}"
        )
    if not z <= restricted:
        raise nx.NetworkXError(
            f"Separating set {z} must be contained in restricted set {restricted}"
        )

    intersection = x.intersection(y) or x.intersection(z) or y.intersection(z)
    if intersection:
        raise nx.NetworkXError(
            f"The sets are not disjoint, with intersection {intersection}"
        )

    # Ancestral closure of x | y | included (the nodes plus all ancestors).
    nodeset = x | y | included
    ancestors_x_y_included = nodeset.union(*[nx.ancestors(G, n) for n in nodeset])

    # criterion (a) -- check that z is actually a separator
    x_closure = _reachable(G, x, ancestors_x_y_included, z)
    if x_closure & y:
        return False

    # criterion (b) -- basic constraint; included and restricted already checked above
    if not (z <= ancestors_x_y_included):
        return False

    # criterion (c) -- check that z is minimal
    y_closure = _reachable(G, y, ancestors_x_y_included, z)
    if not ((z - included) <= (x_closure & y_closure)):
        return False
    return True
+
+
@not_implemented_for("undirected")
def _reachable(G, x, a, z):
    """Modified Bayes-Ball algorithm for finding d-connected nodes.

    Find all nodes in `a` that are d-connected to those in `x` by
    those in `z`. This is an implementation of the function
    `REACHABLE` in [1]_ (which is itself a modification of the
    Bayes-Ball algorithm [2]_) when restricted to DAGs.

    Parameters
    ----------
    G : nx.DiGraph
        A NetworkX DAG.
    x : node | set
        A node in the DAG, or a set of nodes.
    a : node | set
        A (set of) node(s) in the DAG containing the ancestors of `x`.
    z : node | set
        The node or set of nodes conditioned on when checking d-connectedness.

    Returns
    -------
    w : set
        The closure of `x` in `a` with respect to d-connectedness
        given `z`.

    References
    ----------
    .. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding
        minimal d-separators in linear time and applications." In
        Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020.

    .. [2] Shachter, Ross D. "Bayes-ball: The rational pastime
       (for determining irrelevance and requisite information in
       belief networks and influence diagrams)." In Proceedings of the
       Fourteenth Conference on Uncertainty in Artificial Intelligence
       (UAI), (pp. 480–487). 1998.
    """

    def _pass(e, v, f, n):
        """Whether a ball entering node `v` along edge `e` passes to `n` along `f`.

        Boolean function defined on page 6 of [1]_.

        Parameters
        ----------
        e : bool
            Directed edge by which the ball got to node `v`; `True` iff directed into `v`.
        v : node
            Node where the ball is.
        f : bool
            Directed edge connecting nodes `v` and `n`; `True` iff directed into `n`.
        n : node
            Checking whether the ball passes to this node.

        Returns
        -------
        b : bool
            Whether the ball passes or not.

        References
        ----------
        .. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding
           minimal d-separators in linear time and applications." In
           Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020.
        """
        is_element_of_A = n in a
        # almost_definite_status = True  # always true for DAGs; not so for RCGs
        collider_if_in_Z = v not in z or (e and not f)
        return is_element_of_A and collider_if_in_Z  # and almost_definite_status

    # Seed the traversal with (direction, node) states: True means the "ball"
    # arrived along an edge directed into the node, False along an outgoing one.
    queue = deque()
    for node in x:
        if G.pred[node]:
            queue.append((True, node))
        if G.succ[node]:
            queue.append((False, node))
    # PERF FIX: track visited states in a set for O(1) membership tests; the
    # previous deque made each `(f, n) not in processed` check O(n), turning
    # the linear-time algorithm quadratic on dense graphs.
    processed = set(queue)

    while queue:
        e, v = queue.popleft()
        preds = ((False, n) for n in G.pred[v])
        succs = ((True, n) for n in G.succ[v])
        for f, n in chain(preds, succs):
            if (f, n) not in processed and _pass(e, v, f, n):
                queue.append((f, n))
                processed.add((f, n))

    return {w for (_, w) in processed}
+
+
+# Deprecated functions:
def d_separated(G, x, y, z):
    """Return whether node sets ``x`` and ``y`` are d-separated by ``z``.

    .. deprecated:: 3.3

        This function is deprecated and will be removed in NetworkX v3.5.
        Please use `is_d_separator(G, x, y, z)`.

    """
    import warnings

    # BUG FIX: the two string literals previously concatenated without a
    # separating space, rendering "...v3.5.Please use...".
    warnings.warn(
        "d_separated is deprecated and will be removed in NetworkX v3.5. "
        "Please use `is_d_separator(G, x, y, z)`.",
        category=DeprecationWarning,
        stacklevel=2,
    )
    return nx.is_d_separator(G, x, y, z)
+
+
def minimal_d_separator(G, u, v):
    """Returns a minimal d-separating set between `u` and `v` if possible

    .. deprecated:: 3.3

        minimal_d_separator is deprecated and will be removed in NetworkX v3.5.
        Please use `find_minimal_d_separator(G, x, y)`.

    """
    import warnings

    # BUG FIX: the warning previously pointed users at `is_d_separator`,
    # which is not the replacement for this function, and the concatenated
    # literals were missing a separating space.
    warnings.warn(
        (
            "minimal_d_separator is deprecated and will be removed in NetworkX v3.5. "
            "Please use `find_minimal_d_separator(G, x, y)`."
        ),
        category=DeprecationWarning,
        stacklevel=2,
    )
    return nx.find_minimal_d_separator(G, u, v)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/dag.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/dag.py
new file mode 100644
index 00000000..c757afb9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/dag.py
@@ -0,0 +1,1418 @@
+"""Algorithms for directed acyclic graphs (DAGs).
+
+Note that most of these functions are only guaranteed to work for DAGs.
+In general, these functions do not check for acyclic-ness, so it is up
+to the user to check for that.
+"""
+
+import heapq
+from collections import deque
+from functools import partial
+from itertools import chain, combinations, product, starmap
+from math import gcd
+
+import networkx as nx
+from networkx.utils import arbitrary_element, not_implemented_for, pairwise
+
+__all__ = [
+    "descendants",
+    "ancestors",
+    "topological_sort",
+    "lexicographical_topological_sort",
+    "all_topological_sorts",
+    "topological_generations",
+    "is_directed_acyclic_graph",
+    "is_aperiodic",
+    "transitive_closure",
+    "transitive_closure_dag",
+    "transitive_reduction",
+    "antichains",
+    "dag_longest_path",
+    "dag_longest_path_length",
+    "dag_to_branching",
+    "compute_v_structures",
+]
+
+chaini = chain.from_iterable
+
+
@nx._dispatchable
def descendants(G, source):
    """Returns all nodes reachable from `source` in `G`.

    Parameters
    ----------
    G : NetworkX Graph
    source : node in `G`

    Returns
    -------
    set()
        The descendants of `source` in `G`

    Raises
    ------
    NetworkXError
        If node `source` is not in `G`.

    Examples
    --------
    >>> DG = nx.path_graph(5, create_using=nx.DiGraph)
    >>> sorted(nx.descendants(DG, 2))
    [3, 4]

    The `source` node is not a descendant of itself, but can be included manually:

    >>> sorted(nx.descendants(DG, 2) | {2})
    [2, 3, 4]

    See also
    --------
    ancestors
    """
    # Every target of a BFS tree edge rooted at `source` is reachable.
    # `source` itself never appears as a BFS target, so it is excluded.
    reachable = set()
    for _, node in nx.bfs_edges(G, source):
        reachable.add(node)
    return reachable
+
+
@nx._dispatchable
def ancestors(G, source):
    """Returns all nodes having a path to `source` in `G`.

    Parameters
    ----------
    G : NetworkX Graph
    source : node in `G`

    Returns
    -------
    set()
        The ancestors of `source` in `G`

    Raises
    ------
    NetworkXError
        If node `source` is not in `G`.

    Examples
    --------
    >>> DG = nx.path_graph(5, create_using=nx.DiGraph)
    >>> sorted(nx.ancestors(DG, 2))
    [0, 1]

    The `source` node is not an ancestor of itself, but can be included manually:

    >>> sorted(nx.ancestors(DG, 2) | {2})
    [0, 1, 2]

    See also
    --------
    descendants
    """
    # BFS over reversed edge orientation: every node discovered this way
    # has a directed path to `source`. `source` itself is never a target.
    found = set()
    for _, node in nx.bfs_edges(G, source, reverse=True):
        found.add(node)
    return found
+
+
@nx._dispatchable
def has_cycle(G):
    """Decides whether the directed graph has a cycle."""
    # Exhaust the topological-sort generator; it raises NetworkXUnfeasible
    # if and only if the graph contains a directed cycle.
    try:
        for _ in topological_sort(G):
            pass
    except nx.NetworkXUnfeasible:
        return True
    return False
+
+
@nx._dispatchable
def is_directed_acyclic_graph(G):
    """Returns True if the graph `G` is a directed acyclic graph (DAG) or
    False if not.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    bool
        True if `G` is a DAG, False otherwise

    Examples
    --------
    Undirected graph::

        >>> G = nx.Graph([(1, 2), (2, 3)])
        >>> nx.is_directed_acyclic_graph(G)
        False

    Directed graph with cycle::

        >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
        >>> nx.is_directed_acyclic_graph(G)
        False

    Directed acyclic graph::

        >>> G = nx.DiGraph([(1, 2), (2, 3)])
        >>> nx.is_directed_acyclic_graph(G)
        True

    See also
    --------
    topological_sort
    """
    # Undirected graphs are never DAGs; otherwise the question reduces
    # to the absence of a directed cycle.
    if not G.is_directed():
        return False
    return not has_cycle(G)
+
+
@nx._dispatchable
def topological_generations(G):
    """Stratifies a DAG into generations.

    A topological generation is node collection in which ancestors of a node in each
    generation are guaranteed to be in a previous generation, and any descendants of
    a node are guaranteed to be in a following generation. Nodes are guaranteed to
    be in the earliest possible generation that they can belong to.

    Parameters
    ----------
    G : NetworkX digraph
        A directed acyclic graph (DAG)

    Yields
    ------
    sets of nodes
        Yields sets of nodes representing each generation.

    Raises
    ------
    NetworkXError
        Generations are defined for directed graphs only. If the graph
        `G` is undirected, a :exc:`NetworkXError` is raised.

    NetworkXUnfeasible
        If `G` is not a directed acyclic graph (DAG) no topological generations
        exist and a :exc:`NetworkXUnfeasible` exception is raised.  This can also
        be raised if `G` is changed while the returned iterator is being processed

    RuntimeError
        If `G` is changed while the returned iterator is being processed.

    Examples
    --------
    >>> DG = nx.DiGraph([(2, 1), (3, 1)])
    >>> [sorted(generation) for generation in nx.topological_generations(DG)]
    [[2, 3], [1]]

    Notes
    -----
    The generation in which a node resides can also be determined by taking the
    max-path-distance from the node to the farthest leaf node. That value can
    be obtained with this function using `enumerate(topological_generations(G))`.

    See also
    --------
    topological_sort
    """
    if not G.is_directed():
        raise nx.NetworkXError("Topological sort not defined on undirected graphs.")

    multigraph = G.is_multigraph()
    # `indegree_map` tracks only nodes that still have unconsumed incoming
    # edges; nodes with no incoming edges start out ready in `zero_indegree`.
    indegree_map = {v: d for v, d in G.in_degree() if d > 0}
    zero_indegree = [v for v, d in G.in_degree() if d == 0]

    # Kahn's algorithm, batched: each pass releases the whole frontier of
    # currently in-degree-zero nodes as one generation.
    while zero_indegree:
        this_generation = zero_indegree
        zero_indegree = []
        for node in this_generation:
            if node not in G:
                raise RuntimeError("Graph changed during iteration")
            for child in G.neighbors(node):
                try:
                    # In a multigraph, each parallel edge from `node` to
                    # `child` contributes one unit of remaining in-degree.
                    indegree_map[child] -= len(G[node][child]) if multigraph else 1
                except KeyError as err:
                    raise RuntimeError("Graph changed during iteration") from err
                if indegree_map[child] == 0:
                    zero_indegree.append(child)
                    del indegree_map[child]
        yield this_generation

    # Any node left with positive in-degree was never released, which means
    # a cycle (or a concurrent modification) kept it blocked.
    if indegree_map:
        raise nx.NetworkXUnfeasible(
            "Graph contains a cycle or graph changed during iteration"
        )
+
+
@nx._dispatchable
def topological_sort(G):
    """Returns a generator of nodes in topologically sorted order.

    A topological sort is a nonunique permutation of the nodes of a
    directed graph such that an edge from u to v implies that u
    appears before v in the topological sort order. This ordering is
    valid only if the graph has no directed cycles.

    Parameters
    ----------
    G : NetworkX digraph
        A directed acyclic graph (DAG)

    Yields
    ------
    nodes
        Yields the nodes in topological sorted order.

    Raises
    ------
    NetworkXError
        Topological sort is defined for directed graphs only. If the graph `G`
        is undirected, a :exc:`NetworkXError` is raised.

    NetworkXUnfeasible
        If `G` is not a directed acyclic graph (DAG) no topological sort exists
        and a :exc:`NetworkXUnfeasible` exception is raised.  This can also be
        raised if `G` is changed while the returned iterator is being processed

    RuntimeError
        If `G` is changed while the returned iterator is being processed.

    Examples
    --------
    To get the reverse order of the topological sort:

    >>> DG = nx.DiGraph([(1, 2), (2, 3)])
    >>> list(reversed(list(nx.topological_sort(DG))))
    [3, 2, 1]

    If your DiGraph naturally has the edges representing tasks/inputs
    and nodes representing people/processes that initiate tasks, then
    topological_sort is not quite what you need. You will have to change
    the tasks to nodes with dependence reflected by edges. The result is
    a kind of topological sort of the edges. This can be done
    with :func:`networkx.line_graph` as follows:

    >>> list(nx.topological_sort(nx.line_graph(DG)))
    [(1, 2), (2, 3)]

    Notes
    -----
    This algorithm is based on a description and proof in
    "Introduction to Algorithms: A Creative Approach" [1]_ .

    See also
    --------
    is_directed_acyclic_graph, lexicographical_topological_sort

    References
    ----------
    .. [1] Manber, U. (1989).
       *Introduction to Algorithms - A Creative Approach.* Addison-Wesley.
    """
    # Flattening the topological generations yields one valid topological
    # ordering: nodes within a generation are mutually unordered.
    yield from chaini(nx.topological_generations(G))
+
+
@nx._dispatchable
def lexicographical_topological_sort(G, key=None):
    """Generate the nodes in the unique lexicographical topological sort order.

    Generates a unique ordering of nodes by first sorting topologically (for which there are often
    multiple valid orderings) and then additionally by sorting lexicographically.

    A topological sort arranges the nodes of a directed graph so that the
    upstream node of each directed edge precedes the downstream node.
    It is always possible to find a solution for directed graphs that have no cycles.
    There may be more than one valid solution.

    Lexicographical sorting is just sorting alphabetically. It is used here to break ties in the
    topological sort and to determine a single, unique ordering.  This can be useful in comparing
    sort results.

    The lexicographical order can be customized by providing a function to the `key=` parameter.
    The definition of the key function is the same as used in python's built-in `sort()`.
    The function takes a single argument and returns a key to use for sorting purposes.

    Lexicographical sorting can fail if the node names are un-sortable. See the example below.
    The solution is to provide a function to the `key=` argument that returns sortable keys.


    Parameters
    ----------
    G : NetworkX digraph
        A directed acyclic graph (DAG)

    key : function, optional
        A function of one argument that converts a node name to a comparison key.
        It defines and resolves ambiguities in the sort order.  Defaults to the identity function.

    Yields
    ------
    nodes
        Yields the nodes of G in lexicographical topological sort order.

    Raises
    ------
    NetworkXError
        Topological sort is defined for directed graphs only. If the graph `G`
        is undirected, a :exc:`NetworkXError` is raised.

    NetworkXUnfeasible
        If `G` is not a directed acyclic graph (DAG) no topological sort exists
        and a :exc:`NetworkXUnfeasible` exception is raised.  This can also be
        raised if `G` is changed while the returned iterator is being processed

    RuntimeError
        If `G` is changed while the returned iterator is being processed.

    TypeError
        Results from un-sortable node names.
        Consider using `key=` parameter to resolve ambiguities in the sort order.

    Examples
    --------
    >>> DG = nx.DiGraph([(2, 1), (2, 5), (1, 3), (1, 4), (5, 4)])
    >>> list(nx.lexicographical_topological_sort(DG))
    [2, 1, 3, 5, 4]
    >>> list(nx.lexicographical_topological_sort(DG, key=lambda x: -x))
    [2, 5, 1, 4, 3]

    The sort will fail for any graph with integer and string nodes. Comparison of integer to strings
    is not defined in python.  Is 3 greater or less than 'red'?

    >>> DG = nx.DiGraph([(1, "red"), (3, "red"), (1, "green"), (2, "blue")])
    >>> list(nx.lexicographical_topological_sort(DG))
    Traceback (most recent call last):
    ...
    TypeError: '<' not supported between instances of 'str' and 'int'
    ...

    Incomparable nodes can be resolved using a `key` function. This example function
    allows comparison of integers and strings by returning a tuple where the first
    element is True for `str`, False otherwise. The second element is the node name.
    This groups the strings and integers separately so they can be compared only among themselves.

    >>> key = lambda node: (isinstance(node, str), node)
    >>> list(nx.lexicographical_topological_sort(DG, key=key))
    [1, 2, 3, 'blue', 'green', 'red']

    Notes
    -----
    This algorithm is based on a description and proof in
    "Introduction to Algorithms: A Creative Approach" [1]_ .

    See also
    --------
    topological_sort

    References
    ----------
    .. [1] Manber, U. (1989).
       *Introduction to Algorithms - A Creative Approach.* Addison-Wesley.
    """
    if not G.is_directed():
        msg = "Topological sort not defined on undirected graphs."
        raise nx.NetworkXError(msg)

    if key is None:

        def key(node):
            return node

    # Tie-break equal keys deterministically by insertion order, and carry the
    # node itself so heap entries never compare the (possibly unsortable) nodes.
    nodeid_map = {n: i for i, n in enumerate(G)}

    def create_tuple(node):
        return key(node), nodeid_map[node], node

    indegree_map = {v: d for v, d in G.in_degree() if d > 0}
    # These nodes have zero indegree and are ready to be returned.
    zero_indegree = [create_tuple(v) for v, d in G.in_degree() if d == 0]
    heapq.heapify(zero_indegree)

    while zero_indegree:
        _, _, node = heapq.heappop(zero_indegree)

        if node not in G:
            raise RuntimeError("Graph changed during iteration")
        for _, child in G.edges(node):
            try:
                indegree_map[child] -= 1
            except KeyError as err:
                raise RuntimeError("Graph changed during iteration") from err
            if indegree_map[child] == 0:
                try:
                    heapq.heappush(zero_indegree, create_tuple(child))
                except TypeError as err:
                    # Chain explicitly (`from err`) so the original comparison
                    # failure is preserved as __cause__ rather than being
                    # reported only as an implicit context.
                    raise TypeError(
                        f"{err}\nConsider using `key=` parameter to resolve ambiguities in the sort order."
                    ) from err
                del indegree_map[child]

        yield node

    if indegree_map:
        msg = "Graph contains a cycle or graph changed during iteration"
        raise nx.NetworkXUnfeasible(msg)
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def all_topological_sorts(G):
    """Returns a generator of _all_ topological sorts of the directed graph G.

    A topological sort is a nonunique permutation of the nodes such that an
    edge from u to v implies that u appears before v in the topological sort
    order.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed graph

    Yields
    ------
    topological_sort_order : list
        a list of nodes in `G`, representing one of the topological sort orders

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed
    NetworkXUnfeasible
        If `G` is not acyclic

    Examples
    --------
    To enumerate all topological sorts of directed graph:

    >>> DG = nx.DiGraph([(1, 2), (2, 3), (2, 4)])
    >>> list(nx.all_topological_sorts(DG))
    [[1, 2, 4, 3], [1, 2, 3, 4]]

    Notes
    -----
    Implements an iterative version of the algorithm given in [1].

    References
    ----------
    .. [1] Knuth, Donald E., Szwarcfiter, Jayme L. (1974).
       "A Structured Program to Generate All Topological Sorting Arrangements"
       Information Processing Letters, Volume 2, Issue 6, 1974, Pages 153-157,
       ISSN 0020-0190,
       https://doi.org/10.1016/0020-0190(74)90001-5.
       Elsevier (North-Holland), Amsterdam
    """
    if not G.is_directed():
        raise nx.NetworkXError("Topological sort not defined on undirected graphs.")

    # the names of count and D are chosen to match the global variables in [1]
    # count[v] is the number of not-yet-erased edges terminating at vertex v
    # (its remaining in-degree)
    count = dict(G.in_degree())
    # vertices with indegree 0
    D = deque([v for v, d in G.in_degree() if d == 0])
    # stack of first value chosen at a position k in the topological sort
    bases = []
    current_sort = []

    # do-while construct
    while True:
        assert all(count[v] == 0 for v in D)

        if len(current_sort) == len(G):
            yield list(current_sort)

            # clean-up stack: backtrack by undoing choices until a position
            # is found where an alternative candidate remains to be tried
            while len(current_sort) > 0:
                assert len(bases) == len(current_sort)
                q = current_sort.pop()

                # "restores" all edges (q, x)
                # NOTE: it is important to iterate over edges instead
                # of successors, so count is updated correctly in multigraphs
                for _, j in G.out_edges(q):
                    count[j] += 1
                    assert count[j] >= 0
                # remove entries from D
                while len(D) > 0 and count[D[-1]] > 0:
                    D.pop()

                # corresponds to a circular shift of the values in D
                # if the first value chosen (the base) is in the first
                # position of D again, we are done and need to consider the
                # previous condition
                D.appendleft(q)
                if D[-1] == bases[-1]:
                    # all possible values have been chosen at current position
                    # remove corresponding marker
                    bases.pop()
                else:
                    # there are still elements that have not been fixed
                    # at the current position in the topological sort
                    # stop removing elements, escape inner loop
                    break

        else:
            if len(D) == 0:
                raise nx.NetworkXUnfeasible("Graph contains a cycle.")

            # choose next node
            q = D.pop()
            # "erase" all edges (q, x)
            # NOTE: it is important to iterate over edges instead
            # of successors, so count is updated correctly in multigraphs
            for _, j in G.out_edges(q):
                count[j] -= 1
                assert count[j] >= 0
                if count[j] == 0:
                    D.append(j)
            current_sort.append(q)

            # base for current position might _not_ be fixed yet
            if len(bases) < len(current_sort):
                bases.append(q)

        # an empty bases stack means every choice at position 0 has been
        # exhausted, i.e. all topological sorts have been generated
        if len(bases) == 0:
            break
+
+
@nx._dispatchable
def is_aperiodic(G):
    """Returns True if `G` is aperiodic.

    A directed graph is aperiodic if there is no integer k > 1 that
    divides the length of every cycle in the graph.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed graph

    Returns
    -------
    bool
        True if the graph is aperiodic False otherwise

    Raises
    ------
    NetworkXError
        If `G` is not directed

    Examples
    --------
    A graph consisting of one cycle, the length of which is 2. Therefore ``k = 2``
    divides the length of every cycle in the graph and thus the graph
    is *not aperiodic*::

        >>> DG = nx.DiGraph([(1, 2), (2, 1)])
        >>> nx.is_aperiodic(DG)
        False

    A graph consisting of two cycles: one of length 2 and the other of length 3.
    The cycle lengths are coprime, so there is no single value of k where ``k > 1``
    that divides each cycle length and therefore the graph is *aperiodic*::

        >>> DG = nx.DiGraph([(1, 2), (2, 3), (3, 1), (1, 4), (4, 1)])
        >>> nx.is_aperiodic(DG)
        True

    A graph consisting of two cycles: one of length 2 and the other of length 4.
    The lengths of the cycles share a common factor ``k = 2``, and therefore
    the graph is *not aperiodic*::

        >>> DG = nx.DiGraph([(1, 2), (2, 1), (3, 4), (4, 5), (5, 6), (6, 3)])
        >>> nx.is_aperiodic(DG)
        False

    An acyclic graph, therefore the graph is *not aperiodic*::

        >>> DG = nx.DiGraph([(1, 2), (2, 3)])
        >>> nx.is_aperiodic(DG)
        False

    Notes
    -----
    This uses the method outlined in [1]_, which runs in $O(m)$ time
    given $m$ edges in `G`. Note that a graph is not aperiodic if it is
    acyclic as every integer trivial divides length 0 cycles.

    References
    ----------
    .. [1] Jarvis, J. P.; Shier, D. R. (1996),
       "Graph-theoretic analysis of finite Markov chains,"
       in Shier, D. R.; Wallenius, K. T., Applied Mathematical Modeling:
       A Multidisciplinary Approach, CRC Press.
    """
    if not G.is_directed():
        raise nx.NetworkXError("is_aperiodic not defined for undirected graphs")
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    s = arbitrary_element(G)
    levels = {s: 0}  # BFS depth of each node discovered from s
    this_level = [s]
    g = 0  # running gcd of the level discrepancies found on non-tree edges
    lev = 1
    # BFS from s; each edge closing back into the explored region contributes
    # a discrepancy levels[u] - levels[v] + 1 whose gcd determines the period
    # (per the Jarvis-Shier method referenced above).
    while this_level:
        next_level = []
        for u in this_level:
            for v in G[u]:
                if v in levels:  # Non-Tree Edge
                    g = gcd(g, levels[u] - levels[v] + 1)
                else:  # Tree Edge
                    next_level.append(v)
                    levels[v] = lev
        this_level = next_level
        lev += 1
    if len(levels) == len(G):  # All nodes in tree
        return g == 1
    else:
        # Nodes unreachable from s may contain cycles of their own; the whole
        # graph is aperiodic only if that remainder is aperiodic as well.
        return g == 1 and nx.is_aperiodic(G.subgraph(set(G) - set(levels)))
+
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def transitive_closure(G, reflexive=False):
    """Returns transitive closure of a graph

    The transitive closure of G = (V,E) is a graph G+ = (V,E+) such that
    for all v, w in V there is an edge (v, w) in E+ if and only if there
    is a path from v to w in G.

    Handling of paths from v to v has some flexibility within this definition.
    A reflexive transitive closure creates a self-loop for the path
    from v to v of length 0. The usual transitive closure creates a
    self-loop only if a cycle exists (a path from v to v with length > 0).
    We also allow an option for no self-loops.

    Parameters
    ----------
    G : NetworkX Graph
        A directed/undirected graph/multigraph.
    reflexive : Bool or None, optional (default: False)
        Determines when cycles create self-loops in the Transitive Closure.
        If True, trivial cycles (length 0) create self-loops. The result
        is a reflexive transitive closure of G.
        If False (the default) non-trivial cycles create self-loops.
        If None, self-loops are not created.

    Returns
    -------
    NetworkX graph
        The transitive closure of `G`

    Raises
    ------
    NetworkXError
        If `reflexive` not in `{None, True, False}`

    Examples
    --------
    The treatment of trivial (i.e. length 0) cycles is controlled by the
    `reflexive` parameter.

    Trivial (i.e. length 0) cycles do not create self-loops when
    ``reflexive=False`` (the default)::

        >>> DG = nx.DiGraph([(1, 2), (2, 3)])
        >>> TC = nx.transitive_closure(DG, reflexive=False)
        >>> TC.edges()
        OutEdgeView([(1, 2), (1, 3), (2, 3)])

    However, nontrivial (i.e. length greater than 0) cycles create self-loops
    when ``reflexive=False`` (the default)::

        >>> DG = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
        >>> TC = nx.transitive_closure(DG, reflexive=False)
        >>> TC.edges()
        OutEdgeView([(1, 2), (1, 3), (1, 1), (2, 3), (2, 1), (2, 2), (3, 1), (3, 2), (3, 3)])

    Trivial cycles (length 0) create self-loops when ``reflexive=True``::

        >>> DG = nx.DiGraph([(1, 2), (2, 3)])
        >>> TC = nx.transitive_closure(DG, reflexive=True)
        >>> TC.edges()
        OutEdgeView([(1, 2), (1, 1), (1, 3), (2, 3), (2, 2), (3, 3)])

    And the third option is not to create self-loops at all when ``reflexive=None``::

        >>> DG = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
        >>> TC = nx.transitive_closure(DG, reflexive=None)
        >>> TC.edges()
        OutEdgeView([(1, 2), (1, 3), (2, 3), (2, 1), (3, 1), (3, 2)])

    References
    ----------
    .. [1] https://www.ics.uci.edu/~eppstein/PADS/PartialOrder.py
    """
    TC = G.copy()

    if reflexive not in {None, True, False}:
        raise nx.NetworkXError("Incorrect value for the parameter `reflexive`")

    for node in G:
        # Pick the set of closure targets for `node` according to the
        # requested self-loop policy.
        if reflexive is False:
            # edge_bfs reaches every node at the end of a non-trivial walk
            # from `node`, including `node` itself when it lies on a cycle.
            targets = (edge[1] for edge in nx.edge_bfs(G, node))
        elif reflexive is True:
            targets = iter(nx.descendants(G, node) | {node})
        else:  # reflexive is None: never add self-loops
            targets = iter(nx.descendants(G, node))
        # Membership in TC[node] is re-checked lazily as edges are added,
        # so duplicates (e.g. from parallel edges) are filtered out.
        TC.add_edges_from((node, t) for t in targets if t not in TC[node])

    return TC
+
+
@not_implemented_for("undirected")
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def transitive_closure_dag(G, topo_order=None):
    """Returns the transitive closure of a directed acyclic graph.

    This function is faster than the function `transitive_closure`, but fails
    if the graph has a cycle.

    The transitive closure of G = (V,E) is a graph G+ = (V,E+) such that
    for all v, w in V there is an edge (v, w) in E+ if and only if there
    is a non-null path from v to w in G.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG)

    topo_order: list or tuple, optional
        A topological order for G (if None, the function will compute one)

    Returns
    -------
    NetworkX DiGraph
        The transitive closure of `G`

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed
    NetworkXUnfeasible
        If `G` has a cycle

    Examples
    --------
    >>> DG = nx.DiGraph([(1, 2), (2, 3)])
    >>> TC = nx.transitive_closure_dag(DG)
    >>> TC.edges()
    OutEdgeView([(1, 2), (1, 3), (2, 3)])

    Notes
    -----
    This algorithm is probably simple enough to be well-known but I didn't find
    a mention in the literature.
    """
    if topo_order is None:
        topo_order = list(topological_sort(G))

    TC = G.copy()

    # Walk the order backwards: when `node` is processed, each successor is
    # already connected to all of its own descendants in TC, so adding edges
    # to the nodes exactly two hops away completes the closure for `node`.
    for node in reversed(topo_order):
        two_hop = nx.descendants_at_distance(TC, node, 2)
        TC.add_edges_from(product([node], two_hop))

    return TC
+
+
@not_implemented_for("undirected")
@nx._dispatchable(returns_graph=True)
def transitive_reduction(G):
    """Returns transitive reduction of a directed graph

    The transitive reduction of G = (V,E) is a graph G- = (V,E-) such that
    for all v,w in V there is an edge (v,w) in E- if and only if (v,w) is
    in E and there is no path from v to w in G with length greater than 1.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG)

    Returns
    -------
    NetworkX DiGraph
        The transitive reduction of `G`

    Raises
    ------
    NetworkXError
        If `G` is not a directed acyclic graph (DAG) transitive reduction is
        not uniquely defined and a :exc:`NetworkXError` exception is raised.

    Examples
    --------
    To perform transitive reduction on a DiGraph:

    >>> DG = nx.DiGraph([(1, 2), (2, 3), (1, 3)])
    >>> TR = nx.transitive_reduction(DG)
    >>> list(TR.edges)
    [(1, 2), (2, 3)]

    To avoid unnecessary data copies, this implementation does not return a
    DiGraph with node/edge data.
    To perform transitive reduction on a DiGraph and transfer node/edge data:

    >>> DG = nx.DiGraph()
    >>> DG.add_edges_from([(1, 2), (2, 3), (1, 3)], color="red")
    >>> TR = nx.transitive_reduction(DG)
    >>> TR.add_nodes_from(DG.nodes(data=True))
    >>> TR.add_edges_from((u, v, DG.edges[u, v]) for u, v in TR.edges)
    >>> list(TR.edges(data=True))
    [(1, 2, {'color': 'red'}), (2, 3, {'color': 'red'})]

    References
    ----------
    https://en.wikipedia.org/wiki/Transitive_reduction

    """
    if not is_directed_acyclic_graph(G):
        msg = "Directed Acyclic Graph required for transitive_reduction"
        raise nx.NetworkXError(msg)
    TR = nx.DiGraph()
    TR.add_nodes_from(G.nodes())
    # Memoized DFS-reachability sets, dropped as soon as no longer needed
    # (see check_count below) to bound memory usage.
    descendants = {}
    # count before removing set stored in descendants
    check_count = dict(G.in_degree)
    for u in G:
        # u_nbrs starts as all direct successors of u; successors that are
        # reachable through some other successor get pruned out below.
        u_nbrs = set(G[u])
        for v in G[u]:
            # v may already have been pruned by an earlier neighbor's
            # descendant set, in which case its DFS can be skipped.
            if v in u_nbrs:
                if v not in descendants:
                    descendants[v] = {y for x, y in nx.dfs_edges(G, v)}
                # Any node reachable through v needs no direct edge from u.
                u_nbrs -= descendants[v]
            check_count[v] -= 1
            # Drop v's memoized set once its last predecessor is processed.
            if check_count[v] == 0:
                del descendants[v]
        TR.add_edges_from((u, v) for v in u_nbrs)
    return TR
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def antichains(G, topo_order=None):
    """Generates antichains from a directed acyclic graph (DAG).

    An antichain is a subset of a partially ordered set such that any
    two elements in the subset are incomparable.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG)

    topo_order: list or tuple, optional
        A topological order for G (if None, the function will compute one)

    Yields
    ------
    antichain : list
        a list of nodes in `G` representing an antichain

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed

    NetworkXUnfeasible
        If `G` contains a cycle

    Examples
    --------
    >>> DG = nx.DiGraph([(1, 2), (1, 3)])
    >>> list(nx.antichains(DG))
    [[], [3], [2], [2, 3], [1]]

    Notes
    -----
    This function was originally developed by Peter Jipsen and Franco Saliola
    for the SAGE project. It's included in NetworkX with permission from the
    authors. Original SAGE code at:

    https://github.com/sagemath/sage/blob/master/src/sage/combinat/posets/hasse_diagram.py

    References
    ----------
    .. [1] Free Lattices, by R. Freese, J. Jezek and J. B. Nation,
       AMS, Vol 42, 1995, p. 226.
    """
    if topo_order is None:
        topo_order = list(nx.topological_sort(G))

    # The transitive closure turns "x is comparable to y" into a direct
    # edge-membership test: y in TC[x] or x in TC[y].
    TC = nx.transitive_closure_dag(G, topo_order)
    antichains_stacks = [([], list(reversed(topo_order)))]

    while antichains_stacks:
        (antichain, stack) = antichains_stacks.pop()
        # Invariant:
        #  - the elements of antichain are independent
        #  - the elements of stack are independent from those of antichain
        yield antichain
        while stack:
            x = stack.pop()
            new_antichain = antichain + [x]
            # Keep only stack elements incomparable with x, so the invariant
            # above still holds for the extended antichain.
            new_stack = [t for t in stack if not ((t in TC[x]) or (x in TC[t]))]
            antichains_stacks.append((new_antichain, new_stack))
+
+
@not_implemented_for("undirected")
@nx._dispatchable(edge_attrs={"weight": "default_weight"})
def dag_longest_path(G, weight="weight", default_weight=1, topo_order=None):
    """Returns the longest path in a directed acyclic graph (DAG).

    If `G` has edges with `weight` attribute the edge data are used as
    weight values.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG)

    weight : str, optional
        Edge data key to use for weight

    default_weight : int, optional
        The weight of edges that do not have a weight attribute

    topo_order: list or tuple, optional
        A topological order for `G` (if None, the function will compute one)

    Returns
    -------
    list
        Longest path

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed

    Examples
    --------
    >>> DG = nx.DiGraph(
    ...     [(0, 1, {"cost": 1}), (1, 2, {"cost": 1}), (0, 2, {"cost": 42})]
    ... )
    >>> list(nx.all_simple_paths(DG, 0, 2))
    [[0, 1, 2], [0, 2]]
    >>> nx.dag_longest_path(DG)
    [0, 1, 2]
    >>> nx.dag_longest_path(DG, weight="cost")
    [0, 2]

    In the case where multiple valid topological orderings exist, `topo_order`
    can be used to specify a specific ordering:

    >>> DG = nx.DiGraph([(0, 1), (0, 2)])
    >>> sorted(nx.all_topological_sorts(DG))  # Valid topological orderings
    [[0, 1, 2], [0, 2, 1]]
    >>> nx.dag_longest_path(DG, topo_order=[0, 1, 2])
    [0, 1]
    >>> nx.dag_longest_path(DG, topo_order=[0, 2, 1])
    [0, 2]

    See also
    --------
    dag_longest_path_length

    """
    if not G:
        return []

    if topo_order is None:
        topo_order = nx.topological_sort(G)

    dist = {}  # stores {v : (length, u)}
    # Dynamic programming in topological order: by the time v is reached,
    # dist[u] is final for every predecessor u of v.
    for v in topo_order:
        us = [
            (
                dist[u][0]
                + (
                    # In a multigraph, choose the heaviest of the parallel
                    # edges between u and v.
                    max(data.values(), key=lambda x: x.get(weight, default_weight))
                    if G.is_multigraph()
                    else data
                ).get(weight, default_weight),
                u,
            )
            for u, data in G.pred[v].items()
        ]

        # Use the best predecessor if there is one and its distance is
        # non-negative, otherwise terminate.
        # dist[v] == (0, v) acts as a "path starts here" sentinel.
        maxu = max(us, key=lambda x: x[0]) if us else (0, v)
        dist[v] = maxu if maxu[0] >= 0 else (0, v)

    u = None
    # The path ends at the node with the greatest accumulated distance and is
    # recovered by following predecessor links until a node that points at
    # itself (the sentinel above) is reached.
    v = max(dist, key=lambda x: dist[x][0])
    path = []
    while u != v:
        path.append(v)
        u = v
        v = dist[v][1]

    path.reverse()
    return path
+
+
@not_implemented_for("undirected")
@nx._dispatchable(edge_attrs={"weight": "default_weight"})
def dag_longest_path_length(G, weight="weight", default_weight=1):
    """Returns the longest path length in a DAG

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG)

    weight : string, optional
        Edge data key to use for weight

    default_weight : int, optional
        The weight of edges that do not have a weight attribute

    Returns
    -------
    int
        Longest path length

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed

    Examples
    --------
    >>> DG = nx.DiGraph(
    ...     [(0, 1, {"cost": 1}), (1, 2, {"cost": 1}), (0, 2, {"cost": 42})]
    ... )
    >>> list(nx.all_simple_paths(DG, 0, 2))
    [[0, 1, 2], [0, 2]]
    >>> nx.dag_longest_path_length(DG)
    2
    >>> nx.dag_longest_path_length(DG, weight="cost")
    42

    See also
    --------
    dag_longest_path
    """
    # Recover the heaviest path, then sum its edge weights.
    longest = nx.dag_longest_path(G, weight, default_weight)
    is_multi = G.is_multigraph()
    total = 0
    for u, v in pairwise(longest):
        edge_data = G[u][v]
        if is_multi:
            # For multigraphs, the path implicitly uses the heaviest of the
            # parallel edges between u and v; pick that key explicitly.
            best = max(edge_data, key=lambda k: edge_data[k].get(weight, default_weight))
            total += edge_data[best].get(weight, default_weight)
        else:
            total += edge_data.get(weight, default_weight)
    return total
+
+
@nx._dispatchable
def root_to_leaf_paths(G):
    """Yields root-to-leaf paths in a directed acyclic graph.

    `G` must be a directed acyclic graph. If not, the behavior of this
    function is undefined. A "root" in this graph is a node of in-degree
    zero and a "leaf" a node of out-degree zero.

    When invoked, this function iterates over each path from any root to
    any leaf. A path is a list of nodes.

    """
    # Roots have no incoming edges; leaves have no outgoing edges.
    roots = [node for node, deg in G.in_degree() if deg == 0]
    leaves = [node for node, deg in G.out_degree() if deg == 0]
    # Flatten the simple paths for every (root, leaf) pair into one iterator.
    return chaini(
        nx.all_simple_paths(G, root, leaf) for root in roots for leaf in leaves
    )
+
+
@not_implemented_for("multigraph")
@not_implemented_for("undirected")
@nx._dispatchable(returns_graph=True)
def dag_to_branching(G):
    """Returns a branching representing all (overlapping) paths from
    root nodes to leaf nodes in the given directed acyclic graph.

    As described in :mod:`networkx.algorithms.tree.recognition`, a
    *branching* is a directed forest in which each node has at most one
    parent. In other words, a branching is a disjoint union of
    *arborescences*. For this function, each node of in-degree zero in
    `G` becomes a root of one of the arborescences, and there will be
    one leaf node for each distinct path from that root to a leaf node
    in `G`.

    Each node `v` in `G` with *k* parents becomes *k* distinct nodes in
    the returned branching, one for each parent, and the sub-DAG rooted
    at `v` is duplicated for each copy. The algorithm then recurses on
    the children of each copy of `v`.

    Parameters
    ----------
    G : NetworkX graph
        A directed acyclic graph.

    Returns
    -------
    DiGraph
        The branching in which there is a bijection between root-to-leaf
        paths in `G` (in which multiple paths may share the same leaf)
        and root-to-leaf paths in the branching (in which there is a
        unique path from a root to a leaf).

        Each node has an attribute 'source' whose value is the original
        node to which this node corresponds. No other graph, node, or
        edge attributes are copied into this new graph.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed, or if `G` is a multigraph.

    HasACycle
        If `G` is not acyclic.

    Examples
    --------
    To examine which nodes in the returned branching were produced by
    which original node in the directed acyclic graph, we can collect
    the mapping from source node to new nodes into a dictionary. For
    example, consider the directed diamond graph::

        >>> from collections import defaultdict
        >>> from operator import itemgetter
        >>>
        >>> G = nx.DiGraph(nx.utils.pairwise("abd"))
        >>> G.add_edges_from(nx.utils.pairwise("acd"))
        >>> B = nx.dag_to_branching(G)
        >>>
        >>> sources = defaultdict(set)
        >>> for v, source in B.nodes(data="source"):
        ...     sources[source].add(v)
        >>> len(sources["a"])
        1
        >>> len(sources["d"])
        2

    To copy node attributes from the original graph to the new graph,
    you can use a dictionary like the one constructed in the above
    example::

        >>> for source, nodes in sources.items():
        ...     for v in nodes:
        ...         B.nodes[v].update(G.nodes[source])

    Notes
    -----
    This function is not idempotent in the sense that the node labels in
    the returned branching may be uniquely generated each time the
    function is invoked. In fact, the node labels may not be integers;
    in order to relabel the nodes to be more readable, you can use the
    :func:`networkx.convert_node_labels_to_integers` function.

    The current implementation of this function uses
    :func:`networkx.prefix_tree`, so it is subject to the limitations of
    that function.

    """
    # The construction below only terminates on acyclic input, so reject
    # cyclic graphs up front.
    if has_cycle(G):
        raise nx.HasACycle("dag_to_branching is only defined for acyclic graphs")
    # Every root-to-leaf path becomes one branch of the prefix tree;
    # shared prefixes are merged, which is exactly the branching we want.
    branching = nx.prefix_tree(root_to_leaf_paths(G))
    # prefix_tree adds a synthetic root node (0) and a synthetic NIL
    # sink (-1); neither belongs in the branching, so strip both.
    for synthetic in (0, -1):
        branching.remove_node(synthetic)
    return branching
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def compute_v_structures(G):
    """Yields 3-node tuples that represent the v-structures in `G`.

    .. deprecated:: 3.4

       `compute_v_structures` actually yields colliders. It will be removed in
       version 3.6. Use `nx.dag.v_structures` or `nx.dag.colliders` instead.

    Colliders are triples in the directed acyclic graph (DAG) where two parent nodes
    point to the same child node. V-structures are colliders where the two parent
    nodes are not adjacent. In a causal graph setting, the parents do not directly
    depend on each other, but conditioning on the child node provides an association.

    Parameters
    ----------
    G : graph
        A networkx `~networkx.DiGraph`.

    Yields
    ------
    A 3-tuple representation of a v-structure
        Each v-structure is a 3-tuple with the parent, collider, and other parent.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is an undirected graph.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (0, 4), (3, 1), (2, 4), (0, 5), (4, 5), (1, 5)])
    >>> nx.is_directed_acyclic_graph(G)
    True
    >>> list(nx.compute_v_structures(G))
    [(0, 4, 2), (0, 5, 4), (0, 5, 1), (4, 5, 1)]

    See Also
    --------
    v_structures
    colliders

    Notes
    -----
    This function was written to be used on DAGs, however it works on cyclic graphs
    too. Since colliders are referred to in the cyclic causal graph literature
    [2]_ we allow cyclic graphs in this function. It is suggested that you test if
    your input graph is acyclic as in the example if you want that property.

    References
    ----------
    .. [1]  `Pearl's PRIMER <https://bayes.cs.ucla.edu/PRIMER/primer-ch2.pdf>`_
            Ch-2 page 50: v-structures def.
    .. [2] A Hyttinen, P.O. Hoyer, F. Eberhardt, M J ̈arvisalo, (2013)
           "Discovering cyclic causal models with latent variables:
           a general SAT-based procedure", UAI'13: Proceedings of the Twenty-Ninth
           Conference on Uncertainty in Artificial Intelligence, pg 301–310,
           `doi:10.5555/3023638.3023669 <https://dl.acm.org/doi/10.5555/3023638.3023669>`_
    """
    import warnings

    # Deprecated alias: this function has always yielded *colliders*, not
    # v-structures; warn callers and delegate to the correctly named helper.
    deprecation_msg = (
        "\n\n`compute_v_structures` actually yields colliders. It will be\n"
        "removed in version 3.6. Use `nx.dag.v_structures` or `nx.dag.colliders`\n"
        "instead.\n"
    )
    warnings.warn(deprecation_msg, category=DeprecationWarning, stacklevel=5)
    return colliders(G)
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def v_structures(G):
    """Yields 3-node tuples that represent the v-structures in `G`.

    Colliders are triples in the directed acyclic graph (DAG) where two parent nodes
    point to the same child node. V-structures are colliders where the two parent
    nodes are not adjacent. In a causal graph setting, the parents do not directly
    depend on each other, but conditioning on the child node provides an association.

    Parameters
    ----------
    G : graph
        A networkx `~networkx.DiGraph`.

    Yields
    ------
    A 3-tuple representation of a v-structure
        Each v-structure is a 3-tuple with the parent, collider, and other parent.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is an undirected graph.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (0, 4), (3, 1), (2, 4), (0, 5), (4, 5), (1, 5)])
    >>> nx.is_directed_acyclic_graph(G)
    True
    >>> list(nx.dag.v_structures(G))
    [(0, 4, 2), (0, 5, 1), (4, 5, 1)]

    See Also
    --------
    colliders

    Notes
    -----
    This function was written to be used on DAGs, however it works on cyclic graphs
    too. Since colliders are referred to in the cyclic causal graph literature
    [2]_ we allow cyclic graphs in this function. It is suggested that you test if
    your input graph is acyclic as in the example if you want that property.

    References
    ----------
    .. [1]  `Pearl's PRIMER <https://bayes.cs.ucla.edu/PRIMER/primer-ch2.pdf>`_
            Ch-2 page 50: v-structures def.
    .. [2] A Hyttinen, P.O. Hoyer, F. Eberhardt, M J ̈arvisalo, (2013)
           "Discovering cyclic causal models with latent variables:
           a general SAT-based procedure", UAI'13: Proceedings of the Twenty-Ninth
           Conference on Uncertainty in Artificial Intelligence, pg 301–310,
           `doi:10.5555/3023638.3023669 <https://dl.acm.org/doi/10.5555/3023638.3023669>`_
    """
    # A v-structure is a collider whose two parents are non-adjacent in
    # either direction.
    for parent_a, child, parent_b in colliders(G):
        parents_adjacent = G.has_edge(parent_a, parent_b) or G.has_edge(
            parent_b, parent_a
        )
        if not parents_adjacent:
            yield (parent_a, child, parent_b)
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def colliders(G):
    """Yields 3-node tuples that represent the colliders in `G`.

    In a Directed Acyclic Graph (DAG), if you have three nodes A, B, and C, and
    there are edges from A to C and from B to C, then C is a collider [1]_ . In
    a causal graph setting, this means that both events A and B are "causing" C,
    and conditioning on C provide an association between A and B even if
    no direct causal relationship exists between A and B.

    Parameters
    ----------
    G : graph
        A networkx `~networkx.DiGraph`.

    Yields
    ------
    A 3-tuple representation of a collider
        Each collider is a 3-tuple with the parent, collider, and other parent.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is an undirected graph.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (0, 4), (3, 1), (2, 4), (0, 5), (4, 5), (1, 5)])
    >>> nx.is_directed_acyclic_graph(G)
    True
    >>> list(nx.dag.colliders(G))
    [(0, 4, 2), (0, 5, 4), (0, 5, 1), (4, 5, 1)]

    See Also
    --------
    v_structures

    Notes
    -----
    This function was written to be used on DAGs, however it works on cyclic graphs
    too. Since colliders are referred to in the cyclic causal graph literature
    [2]_ we allow cyclic graphs in this function. It is suggested that you test if
    your input graph is acyclic as in the example if you want that property.

    References
    ----------
    .. [1] `Wikipedia: Collider in causal graphs <https://en.wikipedia.org/wiki/Collider_(statistics)>`_
    .. [2] A Hyttinen, P.O. Hoyer, F. Eberhardt, M J ̈arvisalo, (2013)
           "Discovering cyclic causal models with latent variables:
           a general SAT-based procedure", UAI'13: Proceedings of the Twenty-Ninth
           Conference on Uncertainty in Artificial Intelligence, pg 301–310,
           `doi:10.5555/3023638.3023669 <https://dl.acm.org/doi/10.5555/3023638.3023669>`_
    """
    # Every unordered pair of parents of a node forms one collider at that node.
    for child in G:
        yield from (
            (parent_a, child, parent_b)
            for parent_a, parent_b in combinations(G.predecessors(child), 2)
        )
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/distance_measures.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/distance_measures.py
new file mode 100644
index 00000000..8e15bf8d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/distance_measures.py
@@ -0,0 +1,1022 @@
+"""Graph diameter, radius, eccentricity and other properties."""
+
+import math
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = [
+    "eccentricity",
+    "diameter",
+    "harmonic_diameter",
+    "radius",
+    "periphery",
+    "center",
+    "barycenter",
+    "resistance_distance",
+    "kemeny_constant",
+    "effective_graph_resistance",
+]
+
+
def _extrema_bounding(G, compute="diameter", weight=None):
    """Compute requested extreme distance metric of undirected graph G

    Computation is based on smart lower and upper bounds, and in practice
    linear in the number of nodes, rather than quadratic (except for some
    border cases such as complete graphs or circle shaped graphs).

    Parameters
    ----------
    G : NetworkX graph
       An undirected graph

    compute : string denoting the requesting metric
       "diameter" for the maximal eccentricity value,
       "radius" for the minimal eccentricity value,
       "periphery" for the set of nodes with eccentricity equal to the diameter,
       "center" for the set of nodes with eccentricity equal to the radius,
       "eccentricities" for the maximum distance from each node to all other nodes in G

    weight : string, function, or None
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

        If this is None, every edge has weight/distance/cost 1.

        Weights stored as floating point values can lead to small round-off
        errors in distances. Use integer weights to avoid this.

        Weights should be positive, since they are distances.

    Returns
    -------
    value : value of the requested metric
       int for "diameter" and "radius" or
       list of nodes for "center" and "periphery" or
       dictionary of eccentricity values keyed by node for "eccentricities"

    Raises
    ------
    NetworkXError
        If the graph consists of multiple components
    ValueError
        If `compute` is not one of "diameter", "radius", "periphery", "center", or "eccentricities".

    Notes
    -----
    This algorithm was proposed in [1]_ and discussed further in [2]_ and [3]_.

    References
    ----------
    .. [1] F. W. Takes, W. A. Kosters,
       "Determining the diameter of small world networks."
       Proceedings of the 20th ACM international conference on Information and knowledge management, 2011
       https://dl.acm.org/doi/abs/10.1145/2063576.2063748
    .. [2] F. W. Takes, W. A. Kosters,
       "Computing the Eccentricity Distribution of Large Graphs."
       Algorithms, 2013
       https://www.mdpi.com/1999-4893/6/1/100
    .. [3] M. Borassi, P. Crescenzi, M. Habib, W. A. Kosters, A. Marino, F. W. Takes,
       "Fast diameter and radius BFS-based computation in (weakly connected) real-world graphs: With an application to the six degrees of separation games. "
       Theoretical Computer Science, 2015
       https://www.sciencedirect.com/science/article/pii/S0304397515001644
    """
    # init variables
    degrees = dict(G.degree())  # start with the highest degree node
    minlowernode = max(degrees, key=degrees.get)
    N = len(degrees)  # number of nodes
    # alternate between smallest lower and largest upper bound
    high = False
    # status variables: per-node eccentricity bounds and the set of nodes
    # whose exact eccentricity is still undetermined
    ecc_lower = dict.fromkeys(G, 0)
    ecc_upper = dict.fromkeys(G, N)
    candidates = set(G)

    # (re)set bound extremes
    minlower = N
    maxlower = 0
    minupper = N
    maxupper = 0

    # repeat the following until there are no more candidates
    while candidates:
        if high:
            current = maxuppernode  # select node with largest upper bound
        else:
            current = minlowernode  # select node with smallest lower bound
        high = not high

        # get distances from/to current node and derive eccentricity
        dist = nx.shortest_path_length(G, source=current, weight=weight)

        if len(dist) != N:
            msg = "Cannot compute metric because graph is not connected."
            raise nx.NetworkXError(msg)
        current_ecc = max(dist.values())

        # (re)set bound extremes
        maxuppernode = None
        minlowernode = None

        # update node bounds
        for i in candidates:
            # update eccentricity bounds: ecc(i) is at least the distance to
            # `current` (and at least ecc(current) - d by the triangle
            # inequality), and at most ecc(current) + d
            d = dist[i]
            ecc_lower[i] = max(ecc_lower[i], max(d, (current_ecc - d)))
            ecc_upper[i] = min(ecc_upper[i], current_ecc + d)

            # update min/max values of lower and upper bounds
            minlower = min(ecc_lower[i], minlower)
            maxlower = max(ecc_lower[i], maxlower)
            minupper = min(ecc_upper[i], minupper)
            maxupper = max(ecc_upper[i], maxupper)

        # update candidate set: drop nodes whose bounds prove they can no
        # longer affect the requested metric
        if compute == "diameter":
            ruled_out = {
                i
                for i in candidates
                if ecc_upper[i] <= maxlower and 2 * ecc_lower[i] >= maxupper
            }
        elif compute == "radius":
            ruled_out = {
                i
                for i in candidates
                if ecc_lower[i] >= minupper and ecc_upper[i] + 1 <= 2 * minlower
            }
        elif compute == "periphery":
            ruled_out = {
                i
                for i in candidates
                if ecc_upper[i] < maxlower
                and (maxlower == maxupper or ecc_lower[i] > maxupper)
            }
        elif compute == "center":
            ruled_out = {
                i
                for i in candidates
                if ecc_lower[i] > minupper
                and (minlower == minupper or ecc_upper[i] + 1 < 2 * minlower)
            }
        elif compute == "eccentricities":
            # every node's exact eccentricity is needed, so nothing can be
            # ruled out early except converged nodes (handled below)
            ruled_out = set()
        else:
            msg = "compute must be one of 'diameter', 'radius', 'periphery', 'center', 'eccentricities'"
            raise ValueError(msg)

        # nodes whose bounds have met are fully determined
        ruled_out.update(i for i in candidates if ecc_lower[i] == ecc_upper[i])
        candidates -= ruled_out

        # updating maxuppernode and minlowernode for selection in next round;
        # ties are broken in favor of higher-degree nodes
        for i in candidates:
            if (
                minlowernode is None
                or (
                    ecc_lower[i] == ecc_lower[minlowernode]
                    and degrees[i] > degrees[minlowernode]
                )
                or (ecc_lower[i] < ecc_lower[minlowernode])
            ):
                minlowernode = i

            if (
                maxuppernode is None
                or (
                    ecc_upper[i] == ecc_upper[maxuppernode]
                    and degrees[i] > degrees[maxuppernode]
                )
                or (ecc_upper[i] > ecc_upper[maxuppernode])
            ):
                maxuppernode = i

    # return the correct value of the requested metric
    if compute == "diameter":
        return maxlower
    if compute == "radius":
        return minupper
    if compute == "periphery":
        p = [v for v in G if ecc_lower[v] == maxlower]
        return p
    if compute == "center":
        c = [v for v in G if ecc_upper[v] == minupper]
        return c
    if compute == "eccentricities":
        # at this point lower bounds equal the exact eccentricities
        return ecc_lower
    return None
+
+
@nx._dispatchable(edge_attrs="weight")
def eccentricity(G, v=None, sp=None, weight=None):
    """Returns the eccentricity of nodes in G.

    The eccentricity of a node v is the maximum distance from v to
    all other nodes in G.

    Parameters
    ----------
    G : NetworkX graph
       A graph

    v : node, optional
       Return value of specified node

    sp : dict of dicts, optional
       All pairs shortest path lengths as a dictionary of dictionaries

    weight : string, function, or None (default=None)
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

        If this is None, every edge has weight/distance/cost 1.

        Weights stored as floating point values can lead to small round-off
        errors in distances. Use integer weights to avoid this.

        Weights should be positive, since they are distances.

    Returns
    -------
    ecc : dictionary
       A dictionary of eccentricity values keyed by node.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> dict(nx.eccentricity(G))
    {1: 2, 2: 3, 3: 2, 4: 2, 5: 3}

    >>> dict(
    ...     nx.eccentricity(G, v=[1, 5])
    ... )  # This returns the eccentricity of node 1 & 5
    {1: 2, 5: 3}

    """
    order = G.order()
    e = {}
    for n in G.nbunch_iter(v):
        if sp is None:
            length = nx.shortest_path_length(G, source=n, weight=weight)
            L = len(length)
        else:
            try:
                length = sp[n]
                L = len(length)
            except TypeError as err:
                raise nx.NetworkXError('Format of "sp" is invalid.') from err
        # A node reaching fewer than `order` nodes has unreachable targets,
        # i.e. some distances are infinite and eccentricity is undefined.
        if L != order:
            if G.is_directed():
                msg = (
                    "Found infinite path length because the digraph is not"
                    " strongly connected"
                )
            else:
                msg = "Found infinite path length because the graph is not connected"
            raise nx.NetworkXError(msg)

        e[n] = max(length.values())

    # If `v` was a single node, return its eccentricity rather than a dict.
    if v in G:
        return e[v]  # return single value
    return e
+
+
@nx._dispatchable(edge_attrs="weight")
def diameter(G, e=None, usebounds=False, weight=None):
    """Returns the diameter of the graph G.

    The diameter is the maximum eccentricity.

    Parameters
    ----------
    G : NetworkX graph
       A graph

    e : eccentricity dictionary, optional
      A precomputed dictionary of eccentricities.

    weight : string, function, or None
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

        If this is None, every edge has weight/distance/cost 1.

        Weights stored as floating point values can lead to small round-off
        errors in distances. Use integer weights to avoid this.

        Weights should be positive, since they are distances.

    Returns
    -------
    d : integer
       Diameter of graph

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> nx.diameter(G)
    3

    See Also
    --------
    eccentricity
    """
    # The bounding algorithm only applies to undirected graphs and cannot
    # reuse a precomputed eccentricity dict.
    if usebounds is True and e is None and not G.is_directed():
        return _extrema_bounding(G, compute="diameter", weight=weight)
    ecc = eccentricity(G, weight=weight) if e is None else e
    return max(ecc.values())
+
+
@nx._dispatchable
def harmonic_diameter(G, sp=None):
    """Returns the harmonic diameter of the graph G.

    The harmonic diameter of a graph is the harmonic mean of the distances
    between all pairs of distinct vertices. Graphs that are not strongly
    connected have infinite diameter and mean distance, making such
    measures not useful. Restricting the diameter or mean distance to
    finite distances yields paradoxical values (e.g., a perfect match
    would have diameter one). The harmonic mean handles gracefully
    infinite distances (e.g., a perfect match has harmonic diameter equal
    to the number of vertices minus one), making it possible to assign a
    meaningful value to all graphs.

    Note that in [1]_ the harmonic diameter is called "connectivity length":
    however, "harmonic diameter" is a more standard name from the
    theory of metric spaces. The name "harmonic mean distance" is perhaps
    a more descriptive name, but is not used in the literature, so we use the
    name "harmonic diameter" here.

    Parameters
    ----------
    G : NetworkX graph
       A graph

    sp : dict of dicts, optional
       All-pairs shortest path lengths as a dictionary of dictionaries

    Returns
    -------
    hd : float
       Harmonic diameter of graph

    References
    ----------
    .. [1] Massimo Marchiori and Vito Latora, "Harmony in the small-world".
           *Physica A: Statistical Mechanics and Its Applications*
           285(3-4), pages 539-546, 2000.
           <https://doi.org/10.1016/S0378-4371(00)00311-3>
    """
    order = G.order()

    # Accumulate the sum of reciprocal distances over all ordered pairs of
    # distinct, mutually reachable nodes; unreachable pairs contribute 0
    # (the reciprocal of an infinite distance).
    sum_invd = 0
    for n in G:
        if sp is None:
            length = nx.single_source_shortest_path_length(G, n)
        else:
            try:
                length = sp[n]
            except TypeError as err:
                raise nx.NetworkXError('Format of "sp" is invalid.') from err

        for d in length.values():
            # Note that this will skip the zero distance from n to itself,
            # as it should be, but also zero-weight paths in weighted graphs.
            if d != 0:
                sum_invd += 1 / d

    if sum_invd != 0:
        return order * (order - 1) / sum_invd
    # No finite nonzero distances at all: infinite harmonic diameter for
    # multi-node graphs, undefined (NaN) for a single node or empty graph.
    if order > 1:
        return math.inf
    return math.nan
+
+
@nx._dispatchable(edge_attrs="weight")
def periphery(G, e=None, usebounds=False, weight=None):
    """Returns the periphery of the graph G.

    The periphery is the set of nodes with eccentricity equal to the diameter.

    Parameters
    ----------
    G : NetworkX graph
       A graph

    e : eccentricity dictionary, optional
      A precomputed dictionary of eccentricities.

    weight : string, function, or None
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

        If this is None, every edge has weight/distance/cost 1.

        Weights stored as floating point values can lead to small round-off
        errors in distances. Use integer weights to avoid this.

        Weights should be positive, since they are distances.

    Returns
    -------
    p : list
       List of nodes in periphery

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> nx.periphery(G)
    [2, 5]

    See Also
    --------
    barycenter
    center
    """
    # The bounding algorithm only applies to undirected graphs and cannot
    # reuse a precomputed eccentricity dict.
    if usebounds is True and e is None and not G.is_directed():
        return _extrema_bounding(G, compute="periphery", weight=weight)
    ecc = eccentricity(G, weight=weight) if e is None else e
    diam = max(ecc.values())
    return [node for node, value in ecc.items() if value == diam]
+
+
@nx._dispatchable(edge_attrs="weight")
def radius(G, e=None, usebounds=False, weight=None):
    """Return the radius of the graph `G`.

    The radius is the minimum eccentricity over all nodes.

    Parameters
    ----------
    G : NetworkX graph

    e : dict, optional
        Precomputed eccentricities keyed by node; computed if not given.

    usebounds : bool, optional
        If True (and `e` is None, `G` undirected), use the faster
        bound-based extrema algorithm.

    weight : string, function, or None
        Edge weights used as distances. A string names an edge attribute
        (missing attribute means weight 1); a function of
        ``(u, v, edge_dict)`` must return a number; None means every edge
        has weight 1. Weights should be positive.

    Returns
    -------
    integer
        Radius of the graph.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> nx.radius(G)
    2

    """
    if usebounds is True and e is None and not G.is_directed():
        return _extrema_bounding(G, compute="radius", weight=weight)
    ecc = eccentricity(G, weight=weight) if e is None else e
    return min(ecc.values())
+
+
@nx._dispatchable(edge_attrs="weight")
def center(G, e=None, usebounds=False, weight=None):
    """Return the center of the graph `G`.

    The center is the set of nodes whose eccentricity equals the radius
    (the minimum eccentricity).

    Parameters
    ----------
    G : NetworkX graph

    e : dict, optional
        Precomputed eccentricities keyed by node; computed if not given.

    usebounds : bool, optional
        If True (and `e` is None, `G` undirected), use the faster
        bound-based extrema algorithm.

    weight : string, function, or None
        Edge weights used as distances. A string names an edge attribute
        (missing attribute means weight 1); a function of
        ``(u, v, edge_dict)`` must return a number; None means every edge
        has weight 1. Weights should be positive.

    Returns
    -------
    list
        Nodes in the center.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> list(nx.center(G))
    [1, 3, 4]

    See Also
    --------
    barycenter
    periphery
    """
    if usebounds is True and e is None and not G.is_directed():
        return _extrema_bounding(G, compute="center", weight=weight)
    ecc = eccentricity(G, weight=weight) if e is None else e
    # Named min_ecc (not "radius") to avoid shadowing the sibling function.
    min_ecc = min(ecc.values())
    return [node for node, node_ecc in ecc.items() if node_ecc == min_ecc]
+
+
@nx._dispatchable(edge_attrs="weight", mutates_input={"attr": 2})
def barycenter(G, weight=None, attr=None, sp=None):
    r"""Calculate barycenter of a connected graph, optionally with edge weights.

    The :dfn:`barycenter` of a
    :func:`connected <networkx.algorithms.components.is_connected>` graph
    :math:`G` is the subgraph induced by the set of its nodes :math:`v`
    minimizing the objective function

    .. math::

        \sum_{u \in V(G)} d_G(u, v),

    where :math:`d_G` is the (possibly weighted) :func:`path length
    <networkx.algorithms.shortest_paths.generic.shortest_path_length>`.
    The barycenter is also called the :dfn:`median`. See [West01]_, p. 78.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        The connected graph :math:`G`.
    weight : :class:`str`, optional
        Passed through to
        :func:`~networkx.algorithms.shortest_paths.generic.shortest_path_length`.
    attr : :class:`str`, optional
        If given, write the value of the objective function to each node's
        `attr` attribute. Otherwise do not store the value.
    sp : dict of dicts, optional
        All pairs shortest path lengths as a dictionary of dictionaries.
        Mutually exclusive with `weight`.

    Returns
    -------
    list
        Nodes of `G` that induce the barycenter of `G`.

    Raises
    ------
    NetworkXNoPath
        If `G` is disconnected. `G` may appear disconnected to
        :func:`barycenter` if `sp` is given but is missing shortest path
        lengths for any pairs.
    ValueError
        If `sp` and `weight` are both given.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> nx.barycenter(G)
    [1, 3, 4]

    See Also
    --------
    center
    periphery
    """
    if sp is None:
        dist_iter = nx.shortest_path_length(G, weight=weight)
    else:
        dist_iter = sp.items()
        if weight is not None:
            raise ValueError("Cannot use both sp, weight arguments together")
    n = len(G)
    best_total = float("inf")
    best_nodes = []
    for node, dists in dist_iter:
        # A node missing any distance means the graph (or `sp`) is
        # effectively disconnected.
        if len(dists) < n:
            raise nx.NetworkXNoPath(
                f"Input graph {G} is disconnected, so every induced subgraph "
                "has infinite barycentricity."
            )
        total = sum(dists.values())
        if attr is not None:
            G.nodes[node][attr] = total
        if total < best_total:
            best_total = total
            best_nodes = [node]
        elif total == best_total:
            best_nodes.append(node)
    if attr is not None:
        nx._clear_cache(G)
    return best_nodes
+
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=True):
    """Returns the resistance distance between pairs of nodes in graph G.

    The resistance distance between two nodes of a graph is akin to treating
    the graph as a grid of resistors with a resistance equal to the provided
    weight [1]_, [2]_.

    If weight is not provided, then a weight of 1 is used for all edges.

    If two nodes are the same, the resistance distance is zero.

    Parameters
    ----------
    G : NetworkX graph
       A graph

    nodeA : node or None, optional (default=None)
      A node within graph G.
      If None, compute resistance distance using all nodes as source nodes.

    nodeB : node or None, optional (default=None)
      A node within graph G.
      If None, compute resistance distance using all nodes as target nodes.

    weight : string or None, optional (default=None)
       The edge data key used to compute the resistance distance.
       If None, then each edge has weight 1.

    invert_weight : boolean (default=True)
        Proper calculation of resistance distance requires building the
        Laplacian matrix with the reciprocal of the weight. Not required
        if the weight is already inverted. Weight cannot be zero.

    Returns
    -------
    rd : dict or float
       If `nodeA` and `nodeB` are given, resistance distance between `nodeA`
       and `nodeB`. If `nodeA` or `nodeB` is unspecified (the default), a
       dictionary of nodes with resistance distances as the value.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a directed graph.

    NetworkXError
        If `G` is not connected, or contains no nodes,
        or `nodeA` is not in `G` or `nodeB` is not in `G`.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> round(nx.resistance_distance(G, 1, 3), 10)
    0.625

    Notes
    -----
    The implementation is based on Theorem A in [2]_. Self-loops are ignored.
    Multi-edges are contracted in one edge with weight equal to the harmonic sum of the weights.

    References
    ----------
    .. [1] Wikipedia
       "Resistance distance."
       https://en.wikipedia.org/wiki/Resistance_distance
    .. [2] D. J. Klein and M. Randic.
        Resistance distance.
        J. of Math. Chem. 12:81-95, 1993.
    """
    import numpy as np

    if len(G) == 0:
        raise nx.NetworkXError("Graph G must contain at least one node.")
    if not nx.is_connected(G):
        # "connected", not "strongly connected": this function rejects
        # directed graphs, so strong connectivity is not the right notion.
        raise nx.NetworkXError("Graph G must be connected.")
    if nodeA is not None and nodeA not in G:
        raise nx.NetworkXError("Node A is not in graph G.")
    if nodeB is not None and nodeB not in G:
        raise nx.NetworkXError("Node B is not in graph G.")

    # Work on a copy since weight inversion mutates edge data.
    G = G.copy()
    # Map each node to its row/column in the Laplacian once. The previous
    # implementation called list.index() inside the loops, making the
    # all-pairs case O(n^3) in lookups alone.
    index = {node: i for i, node in enumerate(G)}

    # Invert weights (resistances combine through reciprocals).
    if invert_weight and weight is not None:
        if G.is_multigraph():
            for _, _, _, d in G.edges(keys=True, data=True):
                d[weight] = 1 / d[weight]
        else:
            for _, _, d in G.edges(data=True):
                d[weight] = 1 / d[weight]

    # Compute resistance distance using the pseudo-inverse of the Laplacian.
    # Self-loops are ignored (they do not contribute to the Laplacian).
    L = nx.laplacian_matrix(G, weight=weight).todense()
    Linv = np.linalg.pinv(L, hermitian=True)

    def _rd(i, j):
        # Theorem A in Klein & Randic (1993); .item() yields Python floats.
        return Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i)

    # Return the relevant distance(s) depending on which endpoints were given.
    if nodeA is not None and nodeB is not None:
        return _rd(index[nodeA], index[nodeB])
    if nodeA is not None:
        i = index[nodeA]
        return {n: _rd(i, index[n]) for n in G}
    if nodeB is not None:
        j = index[nodeB]
        return {n: _rd(index[n], j) for n in G}
    return {n: {n2: _rd(index[n], index[n2]) for n2 in G} for n in G}
+
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def effective_graph_resistance(G, weight=None, invert_weight=True):
    """Return the effective graph resistance (Kirchhoff index) of `G`.

    The effective graph resistance is the sum of the resistance distance
    over every pair of nodes in `G` [1]_. A weight of 1 is used for every
    edge when `weight` is None. For a disconnected graph the value is
    infinite.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default=None)
       Edge data key holding the resistance of each edge; None means
       every edge has weight 1.

    invert_weight : boolean (default=True)
        The Laplacian must be built from reciprocal weights; set False if
        the stored weights are already reciprocals. Weight cannot be zero.

    Returns
    -------
    float
        The effective graph resistance of `G`.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a directed graph.

    NetworkXError
        If `G` does not contain any nodes.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> round(nx.effective_graph_resistance(G), 10)
    10.25

    Notes
    -----
    Based on Theorem 2.2 in [2]_. Self-loops are ignored. Multi-edges are
    contracted into one edge whose weight is the harmonic sum of the weights.

    References
    ----------
    .. [1] Wolfram
       "Kirchhoff Index."
       https://mathworld.wolfram.com/KirchhoffIndex.html
    .. [2] W. Ellens, F. M. Spieksma, P. Van Mieghem, A. Jamakovic, R. E. Kooij.
        Effective graph resistance.
        Lin. Alg. Appl. 435:2491-2506, 2011.
    """
    import numpy as np

    if len(G) == 0:
        raise nx.NetworkXError("Graph G must contain at least one node.")

    # A disconnected graph has infinite effective graph resistance.
    if not nx.is_connected(G):
        return float("inf")

    # Invert weights on a copy (resistances combine through reciprocals).
    G = G.copy()
    if invert_weight and weight is not None:
        if G.is_multigraph():
            for _, _, _, data in G.edges(keys=True, data=True):
                data[weight] = 1 / data[weight]
        else:
            for _, _, data in G.edges(data=True):
                data[weight] = 1 / data[weight]

    # Spectrum of the Laplacian; the smallest eigenvalue (zero, since G is
    # connected) is dropped. Self-loops are ignored.
    spectrum = np.sort(nx.laplacian_spectrum(G, weight=weight))
    return float(np.sum(1 / spectrum[1:]) * G.number_of_nodes())
+
+
@nx.utils.not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def kemeny_constant(G, *, weight=None):
    """Return the Kemeny constant of the given graph.

    Regarding `G` as a Markov chain, the *Kemeny constant* is the expected
    number of steps to reach a destination state drawn from the chain's
    stationary distribution, starting from any fixed state; it is
    independent of the starting state [1]_. Low values indicate a closely
    connected graph, high values a spread-out one.

    A weight of 1 is used for every edge when `weight` is None. Since `G`
    represents a Markov chain, the weights must be positive.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default=None)
       The edge data key used to compute the Kemeny constant.
       If None, then each edge has weight 1.

    Returns
    -------
    float
        The Kemeny constant of the graph `G`.

    Raises
    ------
    NetworkXNotImplemented
        If the graph `G` is directed.

    NetworkXError
        If the graph `G` is not connected, or contains no nodes,
        or has edges with negative weights.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> round(nx.kemeny_constant(G), 10)
    3.2

    Notes
    -----
    Based on equation (3.3) in [2]_. Self-loops are allowed (the state may
    remain the same). Multi-edges are contracted into one edge whose weight
    is the sum of the weights.

    References
    ----------
    .. [1] Wikipedia
       "Kemeny's constant."
       https://en.wikipedia.org/wiki/Kemeny%27s_constant
    .. [2] Lovász L.
        Random walks on graphs: A survey.
        Paul Erdös is Eighty, vol. 2, Bolyai Society,
        Mathematical Studies, Keszthely, Hungary (1993), pp. 1-46
    """
    import numpy as np
    import scipy as sp

    if len(G) == 0:
        raise nx.NetworkXError("Graph G must contain at least one node.")
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph G must be connected.")
    if nx.is_negatively_weighted(G, weight=weight):
        raise nx.NetworkXError("The weights of graph G must be nonnegative.")

    # Build the symmetrically normalized adjacency H = D^-1/2 A D^-1/2.
    A = nx.adjacency_matrix(G, weight=weight)
    n, m = A.shape
    degrees = A.sum(axis=1)
    with np.errstate(divide="ignore"):
        inv_sqrt_deg = 1.0 / np.sqrt(degrees)
    # Isolated nodes produce inf; zero them out.
    inv_sqrt_deg[np.isinf(inv_sqrt_deg)] = 0
    D_half = sp.sparse.csr_array(sp.sparse.spdiags(inv_sqrt_deg, 0, m, n, format="csr"))
    H = D_half @ (A @ D_half)

    # Eigenvalues of H, sorted ascending; the largest (1) is excluded below.
    spectrum = np.sort(sp.linalg.eigvalsh(H.todense()))

    # Kemeny constant from the spectrum, Lovász eq. (3.3).
    return float(np.sum(1 / (1 - spectrum[:-1])))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/distance_regular.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/distance_regular.py
new file mode 100644
index 00000000..27b4d021
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/distance_regular.py
@@ -0,0 +1,238 @@
+"""
+=======================
+Distance-regular graphs
+=======================
+"""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+from .distance_measures import diameter
+
+__all__ = [
+    "is_distance_regular",
+    "is_strongly_regular",
+    "intersection_array",
+    "global_parameters",
+]
+
+
@nx._dispatchable
def is_distance_regular(G):
    """Return True if the graph is distance regular, False otherwise.

    A connected graph G is distance-regular if for any nodes x,y
    and any integers i,j=0,1,...,d (where d is the graph
    diameter), the number of vertices at distance i from x and
    distance j from y depends only on i,j and the graph distance
    between x and y, independently of the choice of x and y.

    Parameters
    ----------
    G: Networkx graph (undirected)

    Returns
    -------
    bool
      True if the graph is Distance Regular, False otherwise

    Examples
    --------
    >>> G = nx.hypercube_graph(6)
    >>> nx.is_distance_regular(G)
    True

    See Also
    --------
    intersection_array, global_parameters

    Notes
    -----
    For undirected and simple graphs only

    References
    ----------
    .. [1] Brouwer, A. E.; Cohen, A. M.; and Neumaier, A.
        Distance-Regular Graphs. New York: Springer-Verlag, 1989.
    .. [2] Weisstein, Eric W. "Distance-Regular Graph."
        http://mathworld.wolfram.com/Distance-RegularGraph.html

    """
    # A graph is distance-regular exactly when an intersection array exists;
    # intersection_array raises NetworkXError otherwise.
    try:
        intersection_array(G)
    except nx.NetworkXError:
        return False
    return True
+
+
def global_parameters(b, c):
    """Return global parameters for a given intersection array.

    Given a distance-regular graph G with integers b_i, c_i,i = 0,....,d
    such that for any 2 vertices x,y in G at a distance i=d(x,y), there
    are exactly c_i neighbors of y at a distance of i-1 from x and b_i
    neighbors of y at a distance of i+1 from x.

    Thus, a distance regular graph has the global parameters,
    [[c_0,a_0,b_0],[c_1,a_1,b_1],......,[c_d,a_d,b_d]] for the
    intersection array  [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d]
    where a_i+b_i+c_i=k , k= degree of every vertex.

    Parameters
    ----------
    b : list

    c : list

    Returns
    -------
    iterable
       An iterable over three tuples.

    Examples
    --------
    >>> G = nx.dodecahedral_graph()
    >>> b, c = nx.intersection_array(G)
    >>> list(nx.global_parameters(b, c))
    [(0, 0, 3), (1, 0, 2), (1, 1, 1), (1, 1, 1), (2, 0, 1), (3, 0, 0)]

    References
    ----------
    .. [1] Weisstein, Eric W. "Global Parameters."
       From MathWorld--A Wolfram Web Resource.
       http://mathworld.wolfram.com/GlobalParameters.html

    See Also
    --------
    intersection_array
    """
    # Pad b with a trailing 0 and c with a leading 0 so index i pairs
    # b_i with c_i; a_i = k - b_i - c_i where k = b[0] is the degree.
    pairs = zip(b + [0], [0] + c)
    return ((c_i, b[0] - b_i - c_i, b_i) for b_i, c_i in pairs)
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def intersection_array(G):
    """Return the intersection array of a distance-regular graph.

    Given a distance-regular graph G with integers b_i, c_i,i = 0,....,d
    such that for any 2 vertices x,y in G at a distance i=d(x,y), there
    are exactly c_i neighbors of y at a distance of i-1 from x and b_i
    neighbors of y at a distance of i+1 from x.

    A distance regular graph's intersection array is given by,
    [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d]

    Parameters
    ----------
    G: Networkx graph (undirected)

    Returns
    -------
    b,c: tuple of lists

    Examples
    --------
    >>> G = nx.icosahedral_graph()
    >>> nx.intersection_array(G)
    ([5, 2, 1], [1, 2, 5])

    References
    ----------
    .. [1] Weisstein, Eric W. "Intersection Array."
       From MathWorld--A Wolfram Web Resource.
       http://mathworld.wolfram.com/IntersectionArray.html

    See Also
    --------
    global_parameters
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    # A distance-regular graph must be regular: every degree equals k.
    degree_iter = iter(G.degree())
    _, k = next(degree_iter)
    for _, deg in degree_iter:
        if deg != k:
            raise nx.NetworkXError("Graph is not distance regular.")
        k = deg
    path_length = dict(nx.all_pairs_shortest_path_length(G))
    # Named diam (not "diameter") to avoid shadowing the module import.
    diam = max(max(dists.values()) for dists in path_length.values())
    bint = {}  # 'b' intersection array
    cint = {}  # 'c' intersection array
    for u in G:
        for v in G:
            try:
                i = path_length[u][v]
            except KeyError as err:  # graph must be connected
                raise nx.NetworkXError("Graph is not distance regular.") from err
            # Count neighbors of v one step closer to / farther from u.
            c = sum(1 for n in G[v] if path_length[n][u] == i - 1)
            b = sum(1 for n in G[v] if path_length[n][u] == i + 1)
            # b_i and c_i must be independent of the choice of u and v.
            if cint.get(i, c) != c or bint.get(i, b) != b:
                raise nx.NetworkXError("Graph is not distance regular")
            bint[i] = b
            cint[i] = c
    return (
        [bint.get(j, 0) for j in range(diam)],
        [cint.get(j + 1, 0) for j in range(diam)],
    )
+
+
+# TODO There is a definition for directed strongly regular graphs.
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_strongly_regular(G):
    """Return True if and only if the given graph is strongly regular.

    An undirected graph is *strongly regular* if

    * it is regular,
    * each pair of adjacent vertices has the same number of neighbors in
      common,
    * each pair of nonadjacent vertices has the same number of neighbors
      in common.

    Each strongly regular graph is a distance-regular graph.
    Conversely, if a distance-regular graph has diameter two, then it is
    a strongly regular graph. For more information on distance-regular
    graphs, see :func:`is_distance_regular`.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    Returns
    -------
    bool
        Whether `G` is strongly regular.

    Examples
    --------

    The cycle graph on five vertices is strongly regular. It is
    two-regular, each pair of adjacent vertices has no shared neighbors,
    and each pair of nonadjacent vertices has one shared neighbor::

        >>> G = nx.cycle_graph(5)
        >>> nx.is_strongly_regular(G)
        True

    """
    # Rather than checking the definition directly (regularity plus equal
    # common-neighbor counts over all adjacent and over all nonadjacent
    # pairs), use the fact that a distance-regular graph of diameter two
    # is strongly regular.
    if not is_distance_regular(G):
        return False
    return diameter(G) == 2
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/dominance.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/dominance.py
new file mode 100644
index 00000000..30cb8115
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/dominance.py
@@ -0,0 +1,135 @@
+"""
+Dominance algorithms.
+"""
+
+from functools import reduce
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["immediate_dominators", "dominance_frontiers"]
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def immediate_dominators(G, start):
    """Return the immediate dominators of all nodes of a directed graph.

    Parameters
    ----------
    G : a DiGraph or MultiDiGraph
        The graph where dominance is to be computed.

    start : node
        The start node of dominance computation.

    Returns
    -------
    idom : dict keyed by nodes
        A dict containing the immediate dominators of each node reachable from
        `start`.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is undirected.

    NetworkXError
        If `start` is not in `G`.

    Notes
    -----
    Except for `start`, the immediate dominators are the parents of their
    corresponding nodes in the dominator tree.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 5), (3, 4), (4, 5)])
    >>> sorted(nx.immediate_dominators(G, 1).items())
    [(1, 1), (2, 1), (3, 1), (4, 3), (5, 1)]

    References
    ----------
    .. [1] Cooper, Keith D., Harvey, Timothy J. and Kennedy, Ken.
           "A simple, fast dominance algorithm." (2006).
           https://hdl.handle.net/1911/96345
    """
    if start not in G:
        raise nx.NetworkXError("start is not in G")

    idom = {start: start}

    # Nodes in reverse postorder (dropping `start`, whose idom is fixed).
    postorder = list(nx.dfs_postorder_nodes(G, start))
    index_of = {node: i for i, node in enumerate(postorder)}
    worklist = postorder[:-1]
    worklist.reverse()

    def intersect(u, v):
        # Walk both candidates up the partial dominator tree until they
        # meet; lower postorder index means deeper in the tree.
        while u != v:
            while index_of[u] < index_of[v]:
                u = idom[u]
            while index_of[u] > index_of[v]:
                v = idom[v]
        return u

    # Iterate to a fixed point (Cooper-Harvey-Kennedy algorithm).
    changed = True
    while changed:
        changed = False
        for node in worklist:
            candidate = reduce(intersect, (p for p in G.pred[node] if p in idom))
            if node not in idom or idom[node] != candidate:
                idom[node] = candidate
                changed = True

    return idom
+
+
@nx._dispatchable
def dominance_frontiers(G, start):
    """Return the dominance frontiers of all nodes of a directed graph.

    Parameters
    ----------
    G : a DiGraph or MultiDiGraph
        The graph where dominance is to be computed.

    start : node
        The start node of dominance computation.

    Returns
    -------
    df : dict keyed by nodes
        A dict containing the dominance frontiers of each node reachable from
        `start` as sets.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is undirected.

    NetworkXError
        If `start` is not in `G`.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 5), (3, 4), (4, 5)])
    >>> sorted((u, sorted(df)) for u, df in nx.dominance_frontiers(G, 1).items())
    [(1, []), (2, [5]), (3, [5]), (4, [5]), (5, [])]

    References
    ----------
    .. [1] Cooper, Keith D., Harvey, Timothy J. and Kennedy, Ken.
           "A simple, fast dominance algorithm." (2006).
           https://hdl.handle.net/1911/96345
    """
    idom = nx.immediate_dominators(G, start)

    frontiers = {node: set() for node in idom}
    # Only join points (>= 2 predecessors) contribute to any frontier.
    for node in idom:
        preds = G.pred[node]
        if len(preds) < 2:
            continue
        for runner in preds:
            if runner not in idom:
                continue
            # Walk up the dominator tree until reaching node's idom.
            while runner != idom[node]:
                frontiers[runner].add(node)
                runner = idom[runner]
    return frontiers
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/dominating.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/dominating.py
new file mode 100644
index 00000000..ff956f74
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/dominating.py
@@ -0,0 +1,95 @@
+"""Functions for computing dominating sets in a graph."""
+
+from itertools import chain
+
+import networkx as nx
+from networkx.utils import arbitrary_element
+
+__all__ = ["dominating_set", "is_dominating_set"]
+
+
@nx._dispatchable
def dominating_set(G, start_with=None):
    r"""Find a dominating set for the graph G.

    A *dominating set* for a graph with node set *V* is a subset *D* of
    *V* such that every node not in *D* is adjacent to at least one
    member of *D* [1]_.

    Parameters
    ----------
    G : NetworkX graph

    start_with : node (default=None)
        Node to use as a starting point for the algorithm.

    Returns
    -------
    D : set
        A dominating set for G.

    Notes
    -----
    This function is an implementation of algorithm 7 in [2]_ which
    finds some dominating set, not necessarily the smallest one.

    See also
    --------
    is_dominating_set

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Dominating_set

    .. [2] Abdol-Hossein Esfahanian. Connectivity Algorithms.
        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf

    """
    nodes = set(G)
    if start_with is None:
        start_with = arbitrary_element(nodes)
    if start_with not in G:
        raise nx.NetworkXError(f"node {start_with} is not in G")
    # Greedily grow the dominating set from the starting node.
    dominating = {start_with}
    dominated = set(G[start_with])
    remaining = nodes - dominated - dominating
    while remaining:
        # Pick any not-yet-dominated node; it and its neighbors not in the
        # dominating set become covered.
        node = remaining.pop()
        fresh_nbrs = set(G[node]) - dominating
        dominating.add(node)
        dominated |= fresh_nbrs
        remaining -= fresh_nbrs
    return dominating
+
+
@nx._dispatchable
def is_dominating_set(G, nbunch):
    """Check whether `nbunch` is a dominating set for `G`.

    A *dominating set* for a graph with node set *V* is a subset *D* of
    *V* such that every node not in *D* is adjacent to at least one
    member of *D* [1]_.

    Parameters
    ----------
    G : NetworkX graph

    nbunch : iterable
        An iterable of nodes in the graph `G`.

    See also
    --------
    dominating_set

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Dominating_set

    """
    # Restrict the candidate set to nodes actually in G.
    candidates = {node for node in nbunch if node in G}
    covered = set()
    for node in candidates:
        covered.update(G[node])
    # Dominating iff no node lies outside the candidates and their neighbors.
    return not (set(G) - candidates - covered)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/efficiency_measures.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/efficiency_measures.py
new file mode 100644
index 00000000..b8e9d7a9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/efficiency_measures.py
@@ -0,0 +1,167 @@
+"""Provides functions for computing the efficiency of nodes and graphs."""
+
+import networkx as nx
+from networkx.exception import NetworkXNoPath
+
+from ..utils import not_implemented_for
+
+__all__ = ["efficiency", "local_efficiency", "global_efficiency"]
+
+
@not_implemented_for("directed")
@nx._dispatchable
def efficiency(G, u, v):
    """Returns the efficiency of a pair of nodes in a graph.

    The *efficiency* of the pair `u`, `v` is the reciprocal of the
    shortest path distance between them [1]_, and 0 when no path exists.
    Edge weights are ignored when computing distances.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        An undirected graph.
    u, v : node
        Nodes in the graph ``G``.

    Returns
    -------
    float
        Reciprocal of the shortest path distance between `u` and `v`,
        or 0 if the nodes are disconnected.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.efficiency(G, 2, 3)  # this gives efficiency for node 2 and 3
    0.5

    See also
    --------
    local_efficiency
    global_efficiency

    References
    ----------
    .. [1] Latora, Vito, and Massimo Marchiori.
           "Efficient behavior of small-world networks."
           *Physical Review Letters* 87.19 (2001): 198701.
           <https://doi.org/10.1103/PhysRevLett.87.198701>

    """
    try:
        distance = nx.shortest_path_length(G, u, v)
    except NetworkXNoPath:
        # Disconnected pair: efficiency is defined to be zero.
        return 0
    return 1 / distance
+
+
@not_implemented_for("directed")
@nx._dispatchable
def global_efficiency(G):
    """Returns the average global efficiency of the graph.

    The *efficiency* of a pair of nodes is the reciprocal of their
    shortest path distance (0 for disconnected pairs).  The *average
    global efficiency* of a graph is the mean of this quantity over all
    ordered pairs of distinct nodes [1]_.  Edge weights are ignored.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        An undirected graph.

    Returns
    -------
    float
        The average global efficiency of the graph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> round(nx.global_efficiency(G), 12)
    0.916666666667

    See also
    --------
    local_efficiency

    References
    ----------
    .. [1] Latora, Vito, and Massimo Marchiori.
           "Efficient behavior of small-world networks."
           *Physical Review Letters* 87.19 (2001): 198701.
           <https://doi.org/10.1103/PhysRevLett.87.198701>

    """
    n = len(G)
    n_pairs = n * (n - 1)
    # Fewer than two nodes: no pairs to average over.
    if n_pairs == 0:
        return 0
    # Disconnected pairs are absent from the BFS results and therefore
    # contribute 0; distance 0 (a node to itself) is skipped.
    total = sum(
        1 / d
        for _, targets in nx.all_pairs_shortest_path_length(G)
        for d in targets.values()
        if d > 0
    )
    return total / n_pairs
+
+
@not_implemented_for("directed")
@nx._dispatchable
def local_efficiency(G):
    """Returns the average local efficiency of the graph.

    The *local efficiency* of a node is the global efficiency of the
    subgraph induced by its neighbors; the *average local efficiency*
    is the mean of this value over all nodes [1]_.  Edge weights are
    ignored when computing shortest path distances.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        An undirected graph.

    Returns
    -------
    float
        The average local efficiency of the graph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.local_efficiency(G)
    0.9166666666666667

    See also
    --------
    global_efficiency

    References
    ----------
    .. [1] Latora, Vito, and Massimo Marchiori.
           "Efficient behavior of small-world networks."
           *Physical Review Letters* 87.19 (2001): 198701.
           <https://doi.org/10.1103/PhysRevLett.87.198701>

    """
    total = 0
    for node in G:
        # Efficiency of the subgraph induced by this node's neighbors.
        total += global_efficiency(G.subgraph(G[node]))
    return total / len(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/euler.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/euler.py
new file mode 100644
index 00000000..2c308e38
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/euler.py
@@ -0,0 +1,470 @@
+"""
+Eulerian circuits and graphs.
+"""
+
+from itertools import combinations
+
+import networkx as nx
+
+from ..utils import arbitrary_element, not_implemented_for
+
+__all__ = [
+    "is_eulerian",
+    "eulerian_circuit",
+    "eulerize",
+    "is_semieulerian",
+    "has_eulerian_path",
+    "eulerian_path",
+]
+
+
@nx._dispatchable
def is_eulerian(G):
    """Returns True if and only if `G` is Eulerian.

    A graph is *Eulerian* when it admits an *Eulerian circuit*: a closed
    walk that traverses each edge exactly once.  A directed graph
    qualifies when it is strongly connected and every node has equal
    in-degree and out-degree; an undirected graph qualifies when it is
    connected and every node has even degree.

    Note that a graph with isolated vertices is never Eulerian here,
    because it fails the connectivity requirement.  Remove isolates
    first if you want them ignored:

    >>> G = nx.Graph([(0, 1), (1, 2), (0, 2)])
    >>> G.add_node(3)
    >>> nx.is_eulerian(G)
    False
    >>> G.remove_nodes_from(list(nx.isolates(G)))
    >>> nx.is_eulerian(G)
    True

    Parameters
    ----------
    G : NetworkX graph
       A graph, either directed or undirected.

    Examples
    --------
    >>> nx.is_eulerian(nx.DiGraph({0: [3], 1: [2], 2: [3], 3: [0, 1]}))
    True
    >>> nx.is_eulerian(nx.complete_graph(5))
    True
    >>> nx.is_eulerian(nx.petersen_graph())
    False
    """
    if G.is_directed():
        # Check the cheap degree condition before strong connectivity.
        balanced = all(G.in_degree(n) == G.out_degree(n) for n in G)
        return balanced and nx.is_strongly_connected(G)
    # Undirected: all degrees even, and the graph connected.
    all_even = all(deg % 2 == 0 for _, deg in G.degree())
    return all_even and nx.is_connected(G)
+
+
@nx._dispatchable
def is_semieulerian(G):
    """Return True iff `G` is semi-Eulerian.

    A graph is semi-Eulerian when it has an Eulerian path but no
    Eulerian circuit.

    See Also
    --------
    has_eulerian_path
    is_eulerian
    """
    if is_eulerian(G):
        return False
    return has_eulerian_path(G)
+
+
def _find_path_start(G):
    """Return a node at which an Eulerian path of `G` can start.

    Returns None when `G` has no Eulerian path.
    """
    if not has_eulerian_path(G):
        return None

    if is_eulerian(G):
        # The path is a circuit, so any node will do.
        return arbitrary_element(G)

    if G.is_directed():
        # Exactly two nodes are unbalanced; the start is the one with
        # surplus out-degree.
        unbalanced = [v for v in G if G.in_degree(v) != G.out_degree(v)]
        first, second = unbalanced
        if G.out_degree(first) > G.in_degree(first):
            return first
        return second

    # Undirected: either of the two odd-degree nodes may start the path.
    return next(v for v in G if G.degree(v) % 2 != 0)
+
+
def _simplegraph_eulerian_circuit(G, source):
    """Yield the edges of an Eulerian circuit of the simple graph `G`.

    Hierholzer-style stack algorithm.  NOTE: `G` is consumed — every
    traversed edge is removed — so callers must pass a copy (for
    directed graphs, callers pass ``G.reverse()`` so that the edges are
    yielded in forward order).
    """
    if G.is_directed():
        degree = G.out_degree
        edges = G.out_edges
    else:
        degree = G.degree
        edges = G.edges
    vertex_stack = [source]
    last_vertex = None
    while vertex_stack:
        current_vertex = vertex_stack[-1]
        if degree(current_vertex) == 0:
            # Dead end: this vertex is finished; emit the circuit edge
            # discovered while backtracking.
            if last_vertex is not None:
                yield (last_vertex, current_vertex)
            last_vertex = current_vertex
            vertex_stack.pop()
        else:
            # Walk along any remaining incident edge and consume it.
            _, next_vertex = arbitrary_element(edges(current_vertex))
            vertex_stack.append(next_vertex)
            G.remove_edge(current_vertex, next_vertex)
+
+
def _multigraph_eulerian_circuit(G, source):
    """Yield the keyed edges of an Eulerian circuit of the multigraph `G`.

    Same Hierholzer-style stack algorithm as
    ``_simplegraph_eulerian_circuit`` but tracks edge keys so parallel
    edges are distinguished.  NOTE: `G` is consumed — every traversed
    edge is removed — so callers must pass a copy.
    """
    if G.is_directed():
        degree = G.out_degree
        edges = G.out_edges
    else:
        degree = G.degree
        edges = G.edges
    # Stack entries are (vertex, key of the edge used to reach it).
    vertex_stack = [(source, None)]
    last_vertex = None
    last_key = None
    while vertex_stack:
        current_vertex, current_key = vertex_stack[-1]
        if degree(current_vertex) == 0:
            # Dead end: emit the circuit edge discovered while backtracking.
            if last_vertex is not None:
                yield (last_vertex, current_vertex, last_key)
            last_vertex, last_key = current_vertex, current_key
            vertex_stack.pop()
        else:
            # Walk along any remaining incident edge and consume it.
            triple = arbitrary_element(edges(current_vertex, keys=True))
            _, next_vertex, next_key = triple
            vertex_stack.append((next_vertex, next_key))
            G.remove_edge(current_vertex, next_vertex, next_key)
+
+
@nx._dispatchable
def eulerian_circuit(G, source=None, keys=False):
    """Returns an iterator over the edges of an Eulerian circuit in `G`.

    An *Eulerian circuit* is a closed walk that includes each edge of a
    graph exactly once.  The implementation is a linear-time algorithm
    adapted from [1]_; see [2]_ for background on Euler tours.

    Parameters
    ----------
    G : NetworkX graph
       A graph, either directed or undirected.

    source : node, optional
       Starting node for circuit.

    keys : bool
       If False, edges generated by this function will be of the form
       ``(u, v)``. Otherwise, edges will be of the form ``(u, v, k)``.
       This option is ignored unless `G` is a multigraph.

    Returns
    -------
    edges : iterator
       An iterator over edges in the Eulerian circuit.

    Raises
    ------
    NetworkXError
       If the graph is not Eulerian.

    See Also
    --------
    is_eulerian

    References
    ----------
    .. [1] J. Edmonds, E. L. Johnson.
       Matching, Euler tours and the Chinese postman.
       Mathematical programming, Volume 5, Issue 1 (1973), 111-114.
    .. [2] https://en.wikipedia.org/wiki/Eulerian_path

    Examples
    --------
    To get an Eulerian circuit in an undirected graph::

        >>> G = nx.complete_graph(3)
        >>> list(nx.eulerian_circuit(G))
        [(0, 2), (2, 1), (1, 0)]
        >>> list(nx.eulerian_circuit(G, source=1))
        [(1, 2), (2, 0), (0, 1)]

    To get the sequence of vertices in an Eulerian circuit::

        >>> [u for u, v in nx.eulerian_circuit(G)]
        [0, 2, 1]

    """
    if not is_eulerian(G):
        raise nx.NetworkXError("G is not Eulerian.")
    # Work on a disposable copy; the helpers consume edges.  Directed
    # graphs are reversed so the edges come out in forward order.
    G = G.reverse() if G.is_directed() else G.copy()
    if source is None:
        source = arbitrary_element(G)
    if not G.is_multigraph():
        yield from _simplegraph_eulerian_circuit(G, source)
        return
    for u, v, k in _multigraph_eulerian_circuit(G, source):
        yield (u, v, k) if keys else (u, v)
+
+
@nx._dispatchable
def has_eulerian_path(G, source=None):
    """Return True iff `G` has an Eulerian path.

    An Eulerian path uses each edge of the graph exactly once.  If
    `source` is given, checks for an Eulerian path that starts at
    `source`.

    A directed graph has an Eulerian path iff at most one node has
    ``out_degree - in_degree == 1``, at most one node has
    ``in_degree - out_degree == 1``, every other node is balanced, and
    the underlying undirected graph is connected.  An undirected graph
    has an Eulerian path iff exactly zero or two nodes have odd degree
    and the graph is connected.

    With a `source`, the path must either be a circuit, or start at the
    unique surplus-out-degree (directed) / odd-degree (undirected) node.

    Graphs with isolated vertices never have an Eulerian path here,
    since they are not (strongly) connected; remove isolates first if
    you want them ignored:

    >>> G = nx.Graph([(0, 1), (1, 2), (0, 2)])
    >>> G.add_node(3)
    >>> nx.has_eulerian_path(G)
    False

    >>> G.remove_nodes_from(list(nx.isolates(G)))
    >>> nx.has_eulerian_path(G)
    True

    Parameters
    ----------
    G : NetworkX Graph
        The graph to find an euler path in.

    source : node, optional
        Starting node for path.

    Returns
    -------
    Bool : True if G has an Eulerian path.

    See Also
    --------
    is_eulerian
    eulerian_path
    """
    if nx.is_eulerian(G):
        return True

    if not G.is_directed():
        # Not Eulerian, so a valid start must have odd degree.
        if source is not None and G.degree[source] % 2 != 1:
            return False
        # Exactly two odd-degree nodes (zero would mean Eulerian).
        odd_count = sum(1 for _, deg in G.degree() if deg % 2 == 1)
        return odd_count == 2 and nx.is_connected(G)

    in_deg = G.in_degree
    out_deg = G.out_degree
    # Not Eulerian, so the start must have one surplus outgoing edge.
    if source is not None and out_deg[source] - in_deg[source] != 1:
        return False

    surplus_in = 0
    surplus_out = 0
    for node in G:
        difference = out_deg[node] - in_deg[node]
        if difference == 1:
            surplus_out += 1
        elif difference == -1:
            surplus_in += 1
        elif difference != 0:
            return False

    return surplus_in <= 1 and surplus_out <= 1 and nx.is_weakly_connected(G)
+
+
@nx._dispatchable
def eulerian_path(G, source=None, keys=False):
    """Return an iterator over the edges of an Eulerian path in `G`.

    Parameters
    ----------
    G : NetworkX Graph
        The graph in which to look for an eulerian path.
    source : node or None (default: None)
        The node at which to start the search. None means search over all
        starting nodes.
    keys : Bool (default: False)
        Indicates whether to yield edge 3-tuples (u, v, edge_key).
        The default yields edge 2-tuples

    Yields
    ------
    Edge tuples along the eulerian path.

    Warning: If `source` provided is not the start node of an Euler path
    will raise error even if an Euler Path exists.
    """
    if not has_eulerian_path(G, source):
        raise nx.NetworkXError("Graph has no Eulerian paths.")

    if G.is_directed():
        # Reverse so the consuming helpers emit edges in forward order.
        G = G.reverse()
        if source is None or not nx.is_eulerian(G):
            source = _find_path_start(G)
        if not G.is_multigraph():
            yield from _simplegraph_eulerian_circuit(G, source)
            return
        for u, v, k in _multigraph_eulerian_circuit(G, source):
            yield (u, v, k) if keys else (u, v)
        return

    # Undirected: run the circuit helper on a copy, then play the edges
    # back in reverse with endpoints swapped to obtain the path.
    G = G.copy()
    if source is None:
        source = _find_path_start(G)
    if G.is_multigraph():
        walk = list(_multigraph_eulerian_circuit(G, source))
        if keys:
            for u, v, k in reversed(walk):
                yield v, u, k
        else:
            for u, v, _ in reversed(walk):
                yield v, u
    else:
        walk = list(_simplegraph_eulerian_circuit(G, source))
        for u, v in reversed(walk):
            yield v, u
+
+
@not_implemented_for("directed")
@nx._dispatchable(returns_graph=True)
def eulerize(G):
    """Transforms a graph into an Eulerian graph.

    If `G` is Eulerian the result is `G` as a MultiGraph; otherwise the
    result is a smallest (in number of edges) multigraph whose
    underlying simple graph is `G`, obtained by duplicating edges along
    shortest paths between odd-degree nodes [1]_ [2]_ [3]_.

    Parameters
    ----------
    G : NetworkX graph
       An undirected graph

    Returns
    -------
    G : NetworkX multigraph

    Raises
    ------
    NetworkXError
       If the graph is not connected.

    See Also
    --------
    is_eulerian
    eulerian_circuit

    References
    ----------
    .. [1] J. Edmonds, E. L. Johnson.
       Matching, Euler tours and the Chinese postman.
       Mathematical programming, Volume 5, Issue 1 (1973), 111-114.
    .. [2] https://en.wikipedia.org/wiki/Eulerian_path
    .. [3] http://web.math.princeton.edu/math_alive/5/Notes1.pdf

    Examples
    --------
        >>> G = nx.complete_graph(10)
        >>> H = nx.eulerize(G)
        >>> nx.is_eulerian(H)
        True

    """
    if G.order() == 0:
        raise nx.NetworkXPointlessConcept("Cannot Eulerize null graph")
    if not nx.is_connected(G):
        raise nx.NetworkXError("G is not connected")
    odd_nodes = [node for node, deg in G.degree() if deg % 2 == 1]
    G = nx.MultiGraph(G)
    if not odd_nodes:
        # Already Eulerian; nothing to duplicate.
        return G

    # Build a complete graph on the odd-degree nodes, weighted so that a
    # *maximum* weight matching corresponds to a *minimum* total path
    # length: len(G) + 1 is an upper bound on any path length, and each
    # edge gets weight "bound - len(shortest path)".  The path itself is
    # stored on the edge for easy lookup later.
    bound = len(G) + 1
    Gp = nx.Graph()
    for u, v in combinations(odd_nodes, 2):
        path = nx.shortest_path(G, source=u, target=v)
        Gp.add_edge(u, v, weight=bound - len(path), path=path)

    # Pair up the odd nodes with minimum total path length.
    best_matching = nx.Graph(list(nx.max_weight_matching(Gp)))

    # Duplicate every edge along each matched shortest path.
    for u, v in best_matching.edges():
        G.add_edges_from(nx.utils.pairwise(Gp[u][v]["path"]))
    return G
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__init__.py
new file mode 100644
index 00000000..c5d19abe
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__init__.py
@@ -0,0 +1,11 @@
+from .maxflow import *
+from .mincost import *
+from .boykovkolmogorov import *
+from .dinitz_alg import *
+from .edmondskarp import *
+from .gomory_hu import *
+from .preflowpush import *
+from .shortestaugmentingpath import *
+from .capacityscaling import *
+from .networksimplex import *
+from .utils import build_flow_dict, build_residual_network
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/boykovkolmogorov.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/boykovkolmogorov.py
new file mode 100644
index 00000000..30899c6c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/boykovkolmogorov.py
@@ -0,0 +1,370 @@
+"""
+Boykov-Kolmogorov algorithm for maximum flow problems.
+"""
+
+from collections import deque
+from operator import itemgetter
+
+import networkx as nx
+from networkx.algorithms.flow.utils import build_residual_network
+
+__all__ = ["boykov_kolmogorov"]
+
+
@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True)
def boykov_kolmogorov(
    G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None
):
    r"""Find a maximum single-commodity flow using Boykov-Kolmogorov algorithm.

    This function returns the residual network resulting after computing
    the maximum flow.  The algorithm has worst-case complexity
    $O(n^2 m |C|)$ for $n$ nodes, $m$ edges and minimum cut cost $|C|$
    [1]_; this implementation uses the marking heuristic of [2]_, which
    improves the running time on many practical problems.

    Parameters
    ----------
    G : NetworkX graph
        Edges are expected to carry a `capacity` attribute; edges
        without it are treated as having infinite capacity.

    s : node
        Source node for the flow.

    t : node
        Sink node for the flow.

    capacity : string
        Name of the edge attribute holding capacities.
        Default value: 'capacity'.

    residual : NetworkX graph
        Residual network on which the algorithm is to be executed. If None, a
        new residual network is created. Default value: None.

    value_only : bool
        If True compute only the value of the maximum flow. Ignored by
        this algorithm because it is not applicable.

    cutoff : integer, float
        If specified, the algorithm terminates once the flow value
        reaches or exceeds the cutoff; in that case a minimum cut may
        not be immediately available. Default value: None.

    Returns
    -------
    R : NetworkX DiGraph
        Residual network after computing the maximum flow.

    Raises
    ------
    NetworkXError
        If the input graph is a MultiGraph or MultiDiGraph, which this
        algorithm does not support.

    NetworkXUnbounded
        If the graph has an infinite-capacity path from `s` to `t`, so
        the maximum flow is unbounded above.

    See also
    --------
    :meth:`maximum_flow`
    :meth:`minimum_cut`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    Notes
    -----
    The residual network ``R`` has the same nodes as ``G`` and contains
    the edge pair ``(u, v)``/``(v, u)`` for every non-self-loop edge of
    ``G``.  ``R[u][v]['capacity']`` is the capacity of ``(u, v)`` in
    ``G`` (or zero if the edge only exists reversed); infinite
    capacities are replaced by the high finite value ``R.graph['inf']``.
    ``R[u][v]['flow']`` holds the flow function and satisfies
    ``R[u][v]['flow'] == -R[v][u]['flow']``.  The flow value is stored
    in ``R.graph['flow_value']``; when `cutoff` is not given, the set of
    nodes reachable from `s` along non-saturated edges induces a minimum
    ``s``-``t`` cut.

    The source and target search trees built by the algorithm are kept
    in ``R.graph['trees']`` and directly define a minimum-cut partition.

    Examples
    --------
    >>> from networkx.algorithms.flow import boykov_kolmogorov

    The functions that implement flow algorithms and output a residual
    network, such as this one, are not imported to the base NetworkX
    namespace, so you have to explicitly import them from the flow package.

    >>> G = nx.DiGraph()
    >>> G.add_edge("x", "a", capacity=3.0)
    >>> G.add_edge("x", "b", capacity=1.0)
    >>> G.add_edge("a", "c", capacity=3.0)
    >>> G.add_edge("b", "c", capacity=5.0)
    >>> G.add_edge("b", "d", capacity=4.0)
    >>> G.add_edge("d", "e", capacity=2.0)
    >>> G.add_edge("c", "y", capacity=2.0)
    >>> G.add_edge("e", "y", capacity=3.0)
    >>> R = boykov_kolmogorov(G, "x", "y")
    >>> flow_value = nx.maximum_flow_value(G, "x", "y")
    >>> flow_value
    3.0
    >>> flow_value == R.graph["flow_value"]
    True

    A minimum-cut partition can be read off the stored search trees:

    >>> source_tree, target_tree = R.graph["trees"]
    >>> partition = (set(source_tree), set(G) - set(source_tree))

    Or equivalently:

    >>> partition = (set(G) - set(target_tree), set(target_tree))

    References
    ----------
    .. [1] Boykov, Y., & Kolmogorov, V. (2004). An experimental comparison
           of min-cut/max-flow algorithms for energy minimization in vision.
           Pattern Analysis and Machine Intelligence, IEEE Transactions on,
           26(9), 1124-1137.
           https://doi.org/10.1109/TPAMI.2004.60

    .. [2] Vladimir Kolmogorov. Graph-based Algorithms for Multi-camera
           Reconstruction Problem. PhD thesis, Cornell University, CS Department,
           2003. pp. 109-114.
           https://web.archive.org/web/20170809091249/https://pub.ist.ac.at/~vnk/papers/thesis.pdf

    """
    result = boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff)
    result.graph["algorithm"] = "boykov_kolmogorov"
    nx._clear_cache(result)
    return result
+
+
def boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff):
    """Core Boykov-Kolmogorov max-flow computation.

    Alternates three stages until no augmenting path remains (or the
    flow reaches `cutoff`): *growth* expands a source tree and a target
    tree until they touch, *augmentation* pushes flow along the
    discovered path, and *adoption* repairs the trees broken by
    saturated edges.  Returns the residual network with the flow in the
    ``'flow'`` edge attributes and the two search trees in
    ``R.graph['trees']``.
    """
    if s not in G:
        raise nx.NetworkXError(f"node {str(s)} not in graph")
    if t not in G:
        raise nx.NetworkXError(f"node {str(t)} not in graph")
    if s == t:
        raise nx.NetworkXError("source and sink are the same node")

    if residual is None:
        R = build_residual_network(G, capacity)
    else:
        R = residual

    # Initialize/reset the residual network.
    # This is way too slow
    # nx.set_edge_attributes(R, 0, 'flow')
    for u in R:
        for e in R[u].values():
            e["flow"] = 0

    # Use an arbitrary high value as infinite. It is computed
    # when building the residual network.
    INF = R.graph["inf"]

    if cutoff is None:
        cutoff = INF

    R_succ = R.succ
    R_pred = R.pred

    def grow():
        """Bidirectional breadth-first search for the growth stage.

        Returns a connecting edge, that is, an edge that connects
        a node from the source search tree with a node from the
        target search tree.
        The first node in the connecting edge is always from the
        source tree and the last node from the target tree.
        """
        while active:
            u = active[0]
            # The source tree grows along outgoing residual edges; the
            # target tree grows along incoming ones.
            if u in source_tree:
                this_tree = source_tree
                other_tree = target_tree
                neighbors = R_succ
            else:
                this_tree = target_tree
                other_tree = source_tree
                neighbors = R_pred
            for v, attr in neighbors[u].items():
                if attr["capacity"] - attr["flow"] > 0:
                    if v not in this_tree:
                        if v in other_tree:
                            # The trees touched: an augmenting path exists.
                            return (u, v) if this_tree is source_tree else (v, u)
                        this_tree[v] = u
                        dist[v] = dist[u] + 1
                        timestamp[v] = timestamp[u]
                        active.append(v)
                    elif v in this_tree and _is_closer(u, v):
                        # Marking heuristic: re-parent v onto a shorter
                        # path to its tree's root.
                        this_tree[v] = u
                        dist[v] = dist[u] + 1
                        timestamp[v] = timestamp[u]
            _ = active.popleft()
        return None, None

    def augment(u, v):
        """Augmentation stage.

        Reconstruct path and determine its residual capacity.
        We start from a connecting edge, which links a node
        from the source tree to a node from the target tree.
        The connecting edge is the output of the grow function
        and the input of this function.
        """
        attr = R_succ[u][v]
        flow = min(INF, attr["capacity"] - attr["flow"])
        path = [u]
        # Trace a path from u to s in source_tree.
        w = u
        while w != s:
            n = w
            w = source_tree[n]
            attr = R_pred[n][w]
            flow = min(flow, attr["capacity"] - attr["flow"])
            path.append(w)
        path.reverse()
        # Trace a path from v to t in target_tree.
        path.append(v)
        w = v
        while w != t:
            n = w
            w = target_tree[n]
            attr = R_succ[n][w]
            flow = min(flow, attr["capacity"] - attr["flow"])
            path.append(w)
        # Augment flow along the path and check for saturated edges.
        it = iter(path)
        u = next(it)
        these_orphans = []
        for v in it:
            R_succ[u][v]["flow"] += flow
            R_succ[v][u]["flow"] -= flow
            if R_succ[u][v]["flow"] == R_succ[u][v]["capacity"]:
                # A saturated tree edge disconnects its child, which
                # becomes an orphan to be re-attached in the adopt stage.
                if v in source_tree:
                    source_tree[v] = None
                    these_orphans.append(v)
                if u in target_tree:
                    target_tree[u] = None
                    these_orphans.append(u)
            u = v
        orphans.extend(sorted(these_orphans, key=dist.get))
        return flow

    def adopt():
        """Adoption stage.

        Reconstruct search trees by adopting or discarding orphans.
        During augmentation stage some edges got saturated and thus
        the source and target search trees broke down to forests, with
        orphans as roots of some of its trees. We have to reconstruct
        the search trees rooted to source and target before we can grow
        them again.
        """
        while orphans:
            u = orphans.popleft()
            if u in source_tree:
                tree = source_tree
                neighbors = R_pred
            else:
                tree = target_tree
                neighbors = R_succ
            # Try to adopt the orphan under the closest valid parent.
            nbrs = ((n, attr, dist[n]) for n, attr in neighbors[u].items() if n in tree)
            for v, attr, d in sorted(nbrs, key=itemgetter(2)):
                if attr["capacity"] - attr["flow"] > 0:
                    if _has_valid_root(v, tree):
                        tree[u] = v
                        dist[u] = dist[v] + 1
                        timestamp[u] = time
                        break
            else:
                # No parent found: drop u from its tree, re-activate its
                # potential parents and orphan its children.
                nbrs = (
                    (n, attr, dist[n]) for n, attr in neighbors[u].items() if n in tree
                )
                for v, attr, d in sorted(nbrs, key=itemgetter(2)):
                    if attr["capacity"] - attr["flow"] > 0:
                        if v not in active:
                            active.append(v)
                    if tree[v] == u:
                        tree[v] = None
                        orphans.appendleft(v)
                if u in active:
                    active.remove(u)
                del tree[u]

    def _has_valid_root(n, tree):
        # Walk towards the root; succeed on reaching s/t or any node
        # already validated in the current iteration (timestamp == time).
        path = []
        v = n
        while v is not None:
            path.append(v)
            if v in (s, t):
                base_dist = 0
                break
            elif timestamp[v] == time:
                base_dist = dist[v]
                break
            v = tree[v]
        else:
            return False
        # Refresh distances and timestamps along the validated path.
        length = len(path)
        for i, u in enumerate(path, 1):
            dist[u] = base_dist + length - i
            timestamp[u] = time
        return True

    def _is_closer(u, v):
        # v's data is not newer than u's and re-parenting shortens its path.
        return timestamp[v] <= timestamp[u] and dist[v] > dist[u] + 1

    source_tree = {s: None}
    target_tree = {t: None}
    active = deque([s, t])
    orphans = deque()
    flow_value = 0
    # data structures for the marking heuristic
    time = 1
    timestamp = {s: time, t: time}
    dist = {s: 0, t: 0}
    while flow_value < cutoff:
        # Growth stage
        u, v = grow()
        if u is None:
            # No connecting edge: the flow is maximum.
            break
        time += 1
        # Augmentation stage
        flow_value += augment(u, v)
        # Adoption stage
        adopt()

    if flow_value * 2 > INF:
        raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.")

    # Add source and target tree in a graph attribute.
    # A partition that defines a minimum cut can be directly
    # computed from the search trees as explained in the docstrings.
    R.graph["trees"] = (source_tree, target_tree)
    # Add the standard flow_value graph attribute.
    R.graph["flow_value"] = flow_value
    return R
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/capacityscaling.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/capacityscaling.py
new file mode 100644
index 00000000..bf68565c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/capacityscaling.py
@@ -0,0 +1,407 @@
+"""
+Capacity scaling minimum cost flow algorithm.
+"""
+
+__all__ = ["capacity_scaling"]
+
+from itertools import chain
+from math import log
+
+import networkx as nx
+
+from ...utils import BinaryHeap, arbitrary_element, not_implemented_for
+
+
+def _detect_unboundedness(R):
+    """Detect infinite-capacity negative cycles."""
+    G = nx.DiGraph()
+    G.add_nodes_from(R)
+
+    # Value simulating infinity.
+    inf = R.graph["inf"]
+    # True infinity.
+    f_inf = float("inf")
+    for u in R:
+        for v, e in R[u].items():
+            # Compute the minimum weight of infinite-capacity (u, v) edges.
+            w = f_inf
+            for k, e in e.items():
+                if e["capacity"] == inf:
+                    w = min(w, e["weight"])
+            if w != f_inf:
+                G.add_edge(u, v, weight=w)
+
+    if nx.negative_edge_cycle(G):
+        raise nx.NetworkXUnbounded(
+            "Negative cost cycle of infinite capacity found. "
+            "Min cost flow may be unbounded below."
+        )
+
+
+@not_implemented_for("undirected")
+def _build_residual_network(G, demand, capacity, weight):
+    """Build a residual network and initialize a zero flow."""
+    if sum(G.nodes[u].get(demand, 0) for u in G) != 0:
+        raise nx.NetworkXUnfeasible("Sum of the demands should be 0.")
+
+    R = nx.MultiDiGraph()
+    R.add_nodes_from(
+        (u, {"excess": -G.nodes[u].get(demand, 0), "potential": 0}) for u in G
+    )
+
+    inf = float("inf")
+    # Detect selfloops with infinite capacities and negative weights.
+    for u, v, e in nx.selfloop_edges(G, data=True):
+        if e.get(weight, 0) < 0 and e.get(capacity, inf) == inf:
+            raise nx.NetworkXUnbounded(
+                "Negative cost cycle of infinite capacity found. "
+                "Min cost flow may be unbounded below."
+            )
+
+    # Extract edges with positive capacities. Self loops excluded.
+    if G.is_multigraph():
+        edge_list = [
+            (u, v, k, e)
+            for u, v, k, e in G.edges(data=True, keys=True)
+            if u != v and e.get(capacity, inf) > 0
+        ]
+    else:
+        edge_list = [
+            (u, v, 0, e)
+            for u, v, e in G.edges(data=True)
+            if u != v and e.get(capacity, inf) > 0
+        ]
+    # Simulate infinity with the larger of the sum of absolute node imbalances
+    # and the sum of finite edge capacities, or any positive value if both sums
+    # are zero. This allows the infinite-capacity edges to be distinguished for
+    # unboundedness detection and to directly participate in residual capacity
+    # calculation.
+    inf = (
+        max(
+            sum(abs(R.nodes[u]["excess"]) for u in R),
+            2
+            * sum(
+                e[capacity]
+                for u, v, k, e in edge_list
+                if capacity in e and e[capacity] != inf
+            ),
+        )
+        or 1
+    )
+    for u, v, k, e in edge_list:
+        r = min(e.get(capacity, inf), inf)
+        w = e.get(weight, 0)
+        # Add both (u, v) and (v, u) into the residual network marked with the
+        # original key. (key[1] == True) indicates the (u, v) is in the
+        # original network.
+        R.add_edge(u, v, key=(k, True), capacity=r, weight=w, flow=0)
+        R.add_edge(v, u, key=(k, False), capacity=0, weight=-w, flow=0)
+
+    # Record the value simulating infinity.
+    R.graph["inf"] = inf
+
+    _detect_unboundedness(R)
+
+    return R
+
+
+def _build_flow_dict(G, R, capacity, weight):
+    """Build a flow dictionary from a residual network."""
+    inf = float("inf")
+    flow_dict = {}
+    if G.is_multigraph():
+        for u in G:
+            flow_dict[u] = {}
+            for v, es in G[u].items():
+                flow_dict[u][v] = {
+                    # Always saturate negative selfloops.
+                    k: (
+                        0
+                        if (
+                            u != v or e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0
+                        )
+                        else e[capacity]
+                    )
+                    for k, e in es.items()
+                }
+            for v, es in R[u].items():
+                if v in flow_dict[u]:
+                    flow_dict[u][v].update(
+                        (k[0], e["flow"]) for k, e in es.items() if e["flow"] > 0
+                    )
+    else:
+        for u in G:
+            flow_dict[u] = {
+                # Always saturate negative selfloops.
+                v: (
+                    0
+                    if (u != v or e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0)
+                    else e[capacity]
+                )
+                for v, e in G[u].items()
+            }
+            flow_dict[u].update(
+                (v, e["flow"])
+                for v, es in R[u].items()
+                for e in es.values()
+                if e["flow"] > 0
+            )
+    return flow_dict
+
+
+@nx._dispatchable(
+    node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}
+)
+def capacity_scaling(
+    G, demand="demand", capacity="capacity", weight="weight", heap=BinaryHeap
+):
+    r"""Find a minimum cost flow satisfying all demands in digraph G.
+
+    This is a capacity scaling successive shortest augmenting path algorithm.
+
+    G is a digraph with edge costs and capacities and in which nodes
+    have demand, i.e., they want to send or receive some amount of
+    flow. A negative demand means that the node wants to send flow, a
+    positive demand means that the node wants to receive flow. A flow on
+    the digraph G satisfies all demand if the net flow into each node
+    is equal to the demand of that node.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        DiGraph or MultiDiGraph on which a minimum cost flow satisfying all
+        demands is to be found.
+
+    demand : string
+        Nodes of the graph G are expected to have an attribute demand
+        that indicates how much flow a node wants to send (negative
+        demand) or receive (positive demand). Note that the sum of the
+        demands should be 0, otherwise the problem is not feasible. If
+        this attribute is not present, a node is considered to have 0
+        demand. Default value: 'demand'.
+
+    capacity : string
+        Edges of the graph G are expected to have an attribute capacity
+        that indicates how much flow the edge can support. If this
+        attribute is not present, the edge is considered to have
+        infinite capacity. Default value: 'capacity'.
+
+    weight : string
+        Edges of the graph G are expected to have an attribute weight
+        that indicates the cost incurred by sending one unit of flow on
+        that edge. If not present, the weight is considered to be 0.
+        Default value: 'weight'.
+
+    heap : class
+        Type of heap to be used in the algorithm. It should be a subclass of
+        :class:`MinHeap` or implement a compatible interface.
+
+        If a stock heap implementation is to be used, :class:`BinaryHeap` is
+        recommended over :class:`PairingHeap` for Python implementations without
+        optimized attribute accesses (e.g., CPython) despite a slower
+        asymptotic running time. For Python implementations with optimized
+        attribute accesses (e.g., PyPy), :class:`PairingHeap` provides better
+        performance. Default value: :class:`BinaryHeap`.
+
+    Returns
+    -------
+    flowCost : integer
+        Cost of a minimum cost flow satisfying all demands.
+
+    flowDict : dictionary
+        If G is a digraph, a dict-of-dicts keyed by nodes such that
+        flowDict[u][v] is the flow on edge (u, v).
+        If G is a MultiDiGraph, a dict-of-dicts-of-dicts keyed by nodes
+        so that flowDict[u][v][key] is the flow on edge (u, v, key).
+
+    Raises
+    ------
+    NetworkXError
+        This exception is raised if the input graph is not directed,
+        or not connected.
+
+    NetworkXUnfeasible
+        This exception is raised in the following situations:
+
+            * The sum of the demands is not zero. Then, there is no
+              flow satisfying all demands.
+            * There is no flow satisfying all demand.
+
+    NetworkXUnbounded
+        This exception is raised if the digraph G has a cycle of
+        negative cost and infinite capacity. Then, the cost of a flow
+        satisfying all demands is unbounded below.
+
+    Notes
+    -----
+    This algorithm does not work if edge weights are floating-point numbers.
+
+    See also
+    --------
+    :meth:`network_simplex`
+
+    Examples
+    --------
+    A simple example of a min cost flow problem.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_node("a", demand=-5)
+    >>> G.add_node("d", demand=5)
+    >>> G.add_edge("a", "b", weight=3, capacity=4)
+    >>> G.add_edge("a", "c", weight=6, capacity=10)
+    >>> G.add_edge("b", "d", weight=1, capacity=9)
+    >>> G.add_edge("c", "d", weight=2, capacity=5)
+    >>> flowCost, flowDict = nx.capacity_scaling(G)
+    >>> flowCost
+    24
+    >>> flowDict
+    {'a': {'b': 4, 'c': 1}, 'd': {}, 'b': {'d': 4}, 'c': {'d': 1}}
+
+    It is possible to change the name of the attributes used for the
+    algorithm.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_node("p", spam=-4)
+    >>> G.add_node("q", spam=2)
+    >>> G.add_node("a", spam=-2)
+    >>> G.add_node("d", spam=-1)
+    >>> G.add_node("t", spam=2)
+    >>> G.add_node("w", spam=3)
+    >>> G.add_edge("p", "q", cost=7, vacancies=5)
+    >>> G.add_edge("p", "a", cost=1, vacancies=4)
+    >>> G.add_edge("q", "d", cost=2, vacancies=3)
+    >>> G.add_edge("t", "q", cost=1, vacancies=2)
+    >>> G.add_edge("a", "t", cost=2, vacancies=4)
+    >>> G.add_edge("d", "w", cost=3, vacancies=4)
+    >>> G.add_edge("t", "w", cost=4, vacancies=1)
+    >>> flowCost, flowDict = nx.capacity_scaling(
+    ...     G, demand="spam", capacity="vacancies", weight="cost"
+    ... )
+    >>> flowCost
+    37
+    >>> flowDict
+    {'p': {'q': 2, 'a': 2}, 'q': {'d': 1}, 'a': {'t': 4}, 'd': {'w': 2}, 't': {'q': 1, 'w': 1}, 'w': {}}
+    """
+    R = _build_residual_network(G, demand, capacity, weight)
+
+    inf = float("inf")
+    # Account for the cost of negative selfloops.
+    flow_cost = sum(
+        0
+        if e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0
+        else e[capacity] * e[weight]
+        for u, v, e in nx.selfloop_edges(G, data=True)
+    )
+
+    # Determine the maximum edge capacity.
+    wmax = max(chain([-inf], (e["capacity"] for u, v, e in R.edges(data=True))))
+    if wmax == -inf:
+        # Residual network has no edges.
+        return flow_cost, _build_flow_dict(G, R, capacity, weight)
+
+    R_nodes = R.nodes
+    R_succ = R.succ
+
+    delta = 2 ** int(log(wmax, 2))
+    while delta >= 1:
+        # Saturate Δ-residual edges with negative reduced costs to achieve
+        # Δ-optimality.
+        for u in R:
+            p_u = R_nodes[u]["potential"]
+            for v, es in R_succ[u].items():
+                for k, e in es.items():
+                    flow = e["capacity"] - e["flow"]
+                    if e["weight"] - p_u + R_nodes[v]["potential"] < 0:
+                        flow = e["capacity"] - e["flow"]
+                        if flow >= delta:
+                            e["flow"] += flow
+                            R_succ[v][u][(k[0], not k[1])]["flow"] -= flow
+                            R_nodes[u]["excess"] -= flow
+                            R_nodes[v]["excess"] += flow
+        # Determine the Δ-active nodes.
+        S = set()
+        T = set()
+        S_add = S.add
+        S_remove = S.remove
+        T_add = T.add
+        T_remove = T.remove
+        for u in R:
+            excess = R_nodes[u]["excess"]
+            if excess >= delta:
+                S_add(u)
+            elif excess <= -delta:
+                T_add(u)
+        # Repeatedly augment flow from S to T along shortest paths until
+        # Δ-feasibility is achieved.
+        while S and T:
+            s = arbitrary_element(S)
+            t = None
+            # Search for a shortest path in terms of reduced costs from s to
+            # any t in T in the Δ-residual network.
+            d = {}
+            pred = {s: None}
+            h = heap()
+            h_insert = h.insert
+            h_get = h.get
+            h_insert(s, 0)
+            while h:
+                u, d_u = h.pop()
+                d[u] = d_u
+                if u in T:
+                    # Path found.
+                    t = u
+                    break
+                p_u = R_nodes[u]["potential"]
+                for v, es in R_succ[u].items():
+                    if v in d:
+                        continue
+                    wmin = inf
+                    # Find the minimum-weighted (u, v) Δ-residual edge.
+                    for k, e in es.items():
+                        if e["capacity"] - e["flow"] >= delta:
+                            w = e["weight"]
+                            if w < wmin:
+                                wmin = w
+                                kmin = k
+                                emin = e
+                    if wmin == inf:
+                        continue
+                    # Update the distance label of v.
+                    d_v = d_u + wmin - p_u + R_nodes[v]["potential"]
+                    if h_insert(v, d_v):
+                        pred[v] = (u, kmin, emin)
+            if t is not None:
+                # Augment Δ units of flow from s to t.
+                while u != s:
+                    v = u
+                    u, k, e = pred[v]
+                    e["flow"] += delta
+                    R_succ[v][u][(k[0], not k[1])]["flow"] -= delta
+                # Account node excess and deficit.
+                R_nodes[s]["excess"] -= delta
+                R_nodes[t]["excess"] += delta
+                if R_nodes[s]["excess"] < delta:
+                    S_remove(s)
+                if R_nodes[t]["excess"] > -delta:
+                    T_remove(t)
+                # Update node potentials.
+                d_t = d[t]
+                for u, d_u in d.items():
+                    R_nodes[u]["potential"] -= d_u - d_t
+            else:
+                # Path not found.
+                S_remove(s)
+        delta //= 2
+
+    if any(R.nodes[u]["excess"] != 0 for u in R):
+        raise nx.NetworkXUnfeasible("No flow satisfying all demands.")
+
+    # Calculate the flow cost.
+    for u in R:
+        for v, es in R_succ[u].items():
+            for e in es.values():
+                flow = e["flow"]
+                if flow > 0:
+                    flow_cost += flow * e["weight"]
+
+    return flow_cost, _build_flow_dict(G, R, capacity, weight)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/dinitz_alg.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/dinitz_alg.py
new file mode 100644
index 00000000..f369642a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/dinitz_alg.py
@@ -0,0 +1,238 @@
+"""
+Dinitz' algorithm for maximum flow problems.
+"""
+
+from collections import deque
+
+import networkx as nx
+from networkx.algorithms.flow.utils import build_residual_network
+from networkx.utils import pairwise
+
+__all__ = ["dinitz"]
+
+
+@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True)
+def dinitz(G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None):
+    """Find a maximum single-commodity flow using Dinitz' algorithm.
+
+    This function returns the residual network resulting after computing
+    the maximum flow. See below for details about the conventions
+    NetworkX uses for defining residual networks.
+
+    This algorithm has a running time of $O(n^2 m)$ for $n$ nodes and $m$
+    edges [1]_.
+
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Edges of the graph are expected to have an attribute called
+        'capacity'. If this attribute is not present, the edge is
+        considered to have infinite capacity.
+
+    s : node
+        Source node for the flow.
+
+    t : node
+        Sink node for the flow.
+
+    capacity : string
+        Edges of the graph G are expected to have an attribute capacity
+        that indicates how much flow the edge can support. If this
+        attribute is not present, the edge is considered to have
+        infinite capacity. Default value: 'capacity'.
+
+    residual : NetworkX graph
+        Residual network on which the algorithm is to be executed. If None, a
+        new residual network is created. Default value: None.
+
+    value_only : bool
+        If True compute only the value of the maximum flow. This parameter
+        will be ignored by this algorithm because it is not applicable.
+
+    cutoff : integer, float
+        If specified, the algorithm will terminate when the flow value reaches
+        or exceeds the cutoff. In this case, it may be unable to immediately
+        determine a minimum cut. Default value: None.
+
+    Returns
+    -------
+    R : NetworkX DiGraph
+        Residual network after computing the maximum flow.
+
+    Raises
+    ------
+    NetworkXError
+        The algorithm does not support MultiGraph and MultiDiGraph. If
+        the input graph is an instance of one of these two classes, a
+        NetworkXError is raised.
+
+    NetworkXUnbounded
+        If the graph has a path of infinite capacity, the value of a
+        feasible flow on the graph is unbounded above and the function
+        raises a NetworkXUnbounded.
+
+    See also
+    --------
+    :meth:`maximum_flow`
+    :meth:`minimum_cut`
+    :meth:`preflow_push`
+    :meth:`shortest_augmenting_path`
+
+    Notes
+    -----
+    The residual network :samp:`R` from an input graph :samp:`G` has the
+    same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair
+    of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a
+    self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists
+    in :samp:`G`.
+
+    For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']`
+    is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists
+    in :samp:`G` or zero otherwise. If the capacity is infinite,
+    :samp:`R[u][v]['capacity']` will have a high arbitrary finite value
+    that does not affect the solution of the problem. This value is stored in
+    :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`,
+    :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and
+    satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`.
+
+    The flow value, defined as the total flow into :samp:`t`, the sink, is
+    stored in :samp:`R.graph['flow_value']`. If :samp:`cutoff` is not
+    specified, reachability to :samp:`t` using only edges :samp:`(u, v)` such
+    that :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum
+    :samp:`s`-:samp:`t` cut.
+
+    Examples
+    --------
+    >>> from networkx.algorithms.flow import dinitz
+
+    The functions that implement flow algorithms and output a residual
+    network, such as this one, are not imported to the base NetworkX
+    namespace, so you have to explicitly import them from the flow package.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_edge("x", "a", capacity=3.0)
+    >>> G.add_edge("x", "b", capacity=1.0)
+    >>> G.add_edge("a", "c", capacity=3.0)
+    >>> G.add_edge("b", "c", capacity=5.0)
+    >>> G.add_edge("b", "d", capacity=4.0)
+    >>> G.add_edge("d", "e", capacity=2.0)
+    >>> G.add_edge("c", "y", capacity=2.0)
+    >>> G.add_edge("e", "y", capacity=3.0)
+    >>> R = dinitz(G, "x", "y")
+    >>> flow_value = nx.maximum_flow_value(G, "x", "y")
+    >>> flow_value
+    3.0
+    >>> flow_value == R.graph["flow_value"]
+    True
+
+    References
+    ----------
+    .. [1] Dinitz' Algorithm: The Original Version and Even's Version.
+           2006. Yefim Dinitz. In Theoretical Computer Science. Lecture
+           Notes in Computer Science. Volume 3895. pp 218-240.
+           https://doi.org/10.1007/11685654_10
+
+    """
+    R = dinitz_impl(G, s, t, capacity, residual, cutoff)
+    R.graph["algorithm"] = "dinitz"
+    nx._clear_cache(R)
+    return R
+
+
+def dinitz_impl(G, s, t, capacity, residual, cutoff):
+    if s not in G:
+        raise nx.NetworkXError(f"node {str(s)} not in graph")
+    if t not in G:
+        raise nx.NetworkXError(f"node {str(t)} not in graph")
+    if s == t:
+        raise nx.NetworkXError("source and sink are the same node")
+
+    if residual is None:
+        R = build_residual_network(G, capacity)
+    else:
+        R = residual
+
+    # Initialize/reset the residual network.
+    for u in R:
+        for e in R[u].values():
+            e["flow"] = 0
+
+    # Use an arbitrary high value as infinite. It is computed
+    # when building the residual network.
+    INF = R.graph["inf"]
+
+    if cutoff is None:
+        cutoff = INF
+
+    R_succ = R.succ
+    R_pred = R.pred
+
+    def breath_first_search():
+        parents = {}
+        vertex_dist = {s: 0}
+        queue = deque([(s, 0)])
+        # Record all the potential edges of shortest augmenting paths
+        while queue:
+            if t in parents:
+                break
+            u, dist = queue.popleft()
+            for v, attr in R_succ[u].items():
+                if attr["capacity"] - attr["flow"] > 0:
+                    if v in parents:
+                        if vertex_dist[v] == dist + 1:
+                            parents[v].append(u)
+                    else:
+                        parents[v] = deque([u])
+                        vertex_dist[v] = dist + 1
+                        queue.append((v, dist + 1))
+        return parents
+
+    def depth_first_search(parents):
+        # DFS to find all the shortest augmenting paths
+        """Build a path using DFS starting from the sink"""
+        total_flow = 0
+        u = t
+        # path also functions as a stack
+        path = [u]
+        # The loop ends with no augmenting path left in the layered graph
+        while True:
+            if len(parents[u]) > 0:
+                v = parents[u][0]
+                path.append(v)
+            else:
+                path.pop()
+                if len(path) == 0:
+                    break
+                v = path[-1]
+                parents[v].popleft()
+            # Augment the flow along the path found
+            if v == s:
+                flow = INF
+                for u, v in pairwise(path):
+                    flow = min(flow, R_pred[u][v]["capacity"] - R_pred[u][v]["flow"])
+                for u, v in pairwise(reversed(path)):
+                    R_pred[v][u]["flow"] += flow
+                    R_pred[u][v]["flow"] -= flow
+                    # Find the proper node to continue the search
+                    if R_pred[v][u]["capacity"] - R_pred[v][u]["flow"] == 0:
+                        parents[v].popleft()
+                        while path[-1] != v:
+                            path.pop()
+                total_flow += flow
+                v = path[-1]
+            u = v
+        return total_flow
+
+    flow_value = 0
+    while flow_value < cutoff:
+        parents = breath_first_search()
+        if t not in parents:
+            break
+        this_flow = depth_first_search(parents)
+        if this_flow * 2 > INF:
+            raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.")
+        flow_value += this_flow
+
+    R.graph["flow_value"] = flow_value
+    return R
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/edmondskarp.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/edmondskarp.py
new file mode 100644
index 00000000..50063268
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/edmondskarp.py
@@ -0,0 +1,241 @@
+"""
+Edmonds-Karp algorithm for maximum flow problems.
+"""
+
+import networkx as nx
+from networkx.algorithms.flow.utils import build_residual_network
+
+__all__ = ["edmonds_karp"]
+
+
+def edmonds_karp_core(R, s, t, cutoff):
+    """Implementation of the Edmonds-Karp algorithm."""
+    R_nodes = R.nodes
+    R_pred = R.pred
+    R_succ = R.succ
+
+    inf = R.graph["inf"]
+
+    def augment(path):
+        """Augment flow along a path from s to t."""
+        # Determine the path residual capacity.
+        flow = inf
+        it = iter(path)
+        u = next(it)
+        for v in it:
+            attr = R_succ[u][v]
+            flow = min(flow, attr["capacity"] - attr["flow"])
+            u = v
+        if flow * 2 > inf:
+            raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.")
+        # Augment flow along the path.
+        it = iter(path)
+        u = next(it)
+        for v in it:
+            R_succ[u][v]["flow"] += flow
+            R_succ[v][u]["flow"] -= flow
+            u = v
+        return flow
+
+    def bidirectional_bfs():
+        """Bidirectional breadth-first search for an augmenting path."""
+        pred = {s: None}
+        q_s = [s]
+        succ = {t: None}
+        q_t = [t]
+        while True:
+            q = []
+            if len(q_s) <= len(q_t):
+                for u in q_s:
+                    for v, attr in R_succ[u].items():
+                        if v not in pred and attr["flow"] < attr["capacity"]:
+                            pred[v] = u
+                            if v in succ:
+                                return v, pred, succ
+                            q.append(v)
+                if not q:
+                    return None, None, None
+                q_s = q
+            else:
+                for u in q_t:
+                    for v, attr in R_pred[u].items():
+                        if v not in succ and attr["flow"] < attr["capacity"]:
+                            succ[v] = u
+                            if v in pred:
+                                return v, pred, succ
+                            q.append(v)
+                if not q:
+                    return None, None, None
+                q_t = q
+
+    # Look for shortest augmenting paths using breadth-first search.
+    flow_value = 0
+    while flow_value < cutoff:
+        v, pred, succ = bidirectional_bfs()
+        if pred is None:
+            break
+        path = [v]
+        # Trace a path from s to v.
+        u = v
+        while u != s:
+            u = pred[u]
+            path.append(u)
+        path.reverse()
+        # Trace a path from v to t.
+        u = v
+        while u != t:
+            u = succ[u]
+            path.append(u)
+        flow_value += augment(path)
+
+    return flow_value
+
+
+def edmonds_karp_impl(G, s, t, capacity, residual, cutoff):
+    """Implementation of the Edmonds-Karp algorithm."""
+    if s not in G:
+        raise nx.NetworkXError(f"node {str(s)} not in graph")
+    if t not in G:
+        raise nx.NetworkXError(f"node {str(t)} not in graph")
+    if s == t:
+        raise nx.NetworkXError("source and sink are the same node")
+
+    if residual is None:
+        R = build_residual_network(G, capacity)
+    else:
+        R = residual
+
+    # Initialize/reset the residual network.
+    for u in R:
+        for e in R[u].values():
+            e["flow"] = 0
+
+    if cutoff is None:
+        cutoff = float("inf")
+    R.graph["flow_value"] = edmonds_karp_core(R, s, t, cutoff)
+
+    return R
+
+
+@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True)
+def edmonds_karp(
+    G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None
+):
+    """Find a maximum single-commodity flow using the Edmonds-Karp algorithm.
+
+    This function returns the residual network resulting after computing
+    the maximum flow. See below for details about the conventions
+    NetworkX uses for defining residual networks.
+
+    This algorithm has a running time of $O(n m^2)$ for $n$ nodes and $m$
+    edges.
+
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Edges of the graph are expected to have an attribute called
+        'capacity'. If this attribute is not present, the edge is
+        considered to have infinite capacity.
+
+    s : node
+        Source node for the flow.
+
+    t : node
+        Sink node for the flow.
+
+    capacity : string
+        Edges of the graph G are expected to have an attribute capacity
+        that indicates how much flow the edge can support. If this
+        attribute is not present, the edge is considered to have
+        infinite capacity. Default value: 'capacity'.
+
+    residual : NetworkX graph
+        Residual network on which the algorithm is to be executed. If None, a
+        new residual network is created. Default value: None.
+
+    value_only : bool
+        If True compute only the value of the maximum flow. This parameter
+        will be ignored by this algorithm because it is not applicable.
+
+    cutoff : integer, float
+        If specified, the algorithm will terminate when the flow value reaches
+        or exceeds the cutoff. In this case, it may be unable to immediately
+        determine a minimum cut. Default value: None.
+
+    Returns
+    -------
+    R : NetworkX DiGraph
+        Residual network after computing the maximum flow.
+
+    Raises
+    ------
+    NetworkXError
+        The algorithm does not support MultiGraph and MultiDiGraph. If
+        the input graph is an instance of one of these two classes, a
+        NetworkXError is raised.
+
+    NetworkXUnbounded
+        If the graph has a path of infinite capacity, the value of a
+        feasible flow on the graph is unbounded above and the function
+        raises a NetworkXUnbounded.
+
+    See also
+    --------
+    :meth:`maximum_flow`
+    :meth:`minimum_cut`
+    :meth:`preflow_push`
+    :meth:`shortest_augmenting_path`
+
+    Notes
+    -----
+    The residual network :samp:`R` from an input graph :samp:`G` has the
+    same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair
+    of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a
+    self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists
+    in :samp:`G`.
+
+    For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']`
+    is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists
+    in :samp:`G` or zero otherwise. If the capacity is infinite,
+    :samp:`R[u][v]['capacity']` will have a high arbitrary finite value
+    that does not affect the solution of the problem. This value is stored in
+    :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`,
+    :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and
+    satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`.
+
+    The flow value, defined as the total flow into :samp:`t`, the sink, is
+    stored in :samp:`R.graph['flow_value']`. If :samp:`cutoff` is not
+    specified, reachability to :samp:`t` using only edges :samp:`(u, v)` such
+    that :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum
+    :samp:`s`-:samp:`t` cut.
+
+    Examples
+    --------
+    >>> from networkx.algorithms.flow import edmonds_karp
+
+    The functions that implement flow algorithms and output a residual
+    network, such as this one, are not imported to the base NetworkX
+    namespace, so you have to explicitly import them from the flow package.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_edge("x", "a", capacity=3.0)
+    >>> G.add_edge("x", "b", capacity=1.0)
+    >>> G.add_edge("a", "c", capacity=3.0)
+    >>> G.add_edge("b", "c", capacity=5.0)
+    >>> G.add_edge("b", "d", capacity=4.0)
+    >>> G.add_edge("d", "e", capacity=2.0)
+    >>> G.add_edge("c", "y", capacity=2.0)
+    >>> G.add_edge("e", "y", capacity=3.0)
+    >>> R = edmonds_karp(G, "x", "y")
+    >>> flow_value = nx.maximum_flow_value(G, "x", "y")
+    >>> flow_value
+    3.0
+    >>> flow_value == R.graph["flow_value"]
+    True
+
+    """
+    R = edmonds_karp_impl(G, s, t, capacity, residual, cutoff)
+    R.graph["algorithm"] = "edmonds_karp"
+    nx._clear_cache(R)
+    return R
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/gomory_hu.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/gomory_hu.py
new file mode 100644
index 00000000..69913da9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/gomory_hu.py
@@ -0,0 +1,178 @@
+"""
+Gomory-Hu tree of undirected Graphs.
+"""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+from .edmondskarp import edmonds_karp
+from .utils import build_residual_network
+
+default_flow_func = edmonds_karp
+
+__all__ = ["gomory_hu_tree"]
+
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True)
def gomory_hu_tree(G, capacity="capacity", flow_func=None):
    r"""Returns the Gomory-Hu tree of an undirected graph G.

    A Gomory-Hu tree of an undirected graph with capacities is a
    weighted tree that represents the minimum s-t cuts for all s-t
    pairs in the graph.

    It only requires `n-1` minimum cut computations instead of the
    obvious `n(n-1)/2`. The tree represents all s-t cuts as the
    minimum cut value among any pair of nodes is the minimum edge
    weight in the shortest path between the two nodes in the
    Gomory-Hu tree.

    The Gomory-Hu tree also has the property that removing the
    edge with the minimum weight in the shortest path between
    any two nodes leaves two connected components that form
    a partition of the nodes in G that defines the minimum s-t
    cut.

    See Examples section below for details.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    capacity : string
        Edges of the graph G are expected to have an attribute capacity
        that indicates how much flow the edge can support. If this
        attribute is not present, the edge is considered to have
        infinite capacity. Default value: 'capacity'.

    flow_func : function
        Function to perform the underlying flow computations. Default value
        :func:`edmonds_karp`. This function performs better in sparse graphs
        with right tailed degree distributions.
        :func:`shortest_augmenting_path` will perform better in denser
        graphs.

    Returns
    -------
    Tree : NetworkX graph
        A NetworkX graph representing the Gomory-Hu tree of the input graph.

    Raises
    ------
    NetworkXNotImplemented
        Raised if the input graph is directed.

    NetworkXError
        Raised if the input graph is an empty Graph.

    Examples
    --------
    >>> G = nx.karate_club_graph()
    >>> nx.set_edge_attributes(G, 1, "capacity")
    >>> T = nx.gomory_hu_tree(G)
    >>> # The value of the minimum cut between any pair
    ... # of nodes in G is the minimum edge weight in the
    ... # shortest path between the two nodes in the
    ... # Gomory-Hu tree.
    ... def minimum_edge_weight_in_shortest_path(T, u, v):
    ...     path = nx.shortest_path(T, u, v, weight="weight")
    ...     return min((T[u][v]["weight"], (u, v)) for (u, v) in zip(path, path[1:]))
    >>> u, v = 0, 33
    >>> cut_value, edge = minimum_edge_weight_in_shortest_path(T, u, v)
    >>> cut_value
    10
    >>> nx.minimum_cut_value(G, u, v)
    10
    >>> # The Gomory-Hu tree also has the property that removing the
    ... # edge with the minimum weight in the shortest path between
    ... # any two nodes leaves two connected components that form
    ... # a partition of the nodes in G that defines the minimum s-t
    ... # cut.
    ... cut_value, edge = minimum_edge_weight_in_shortest_path(T, u, v)
    >>> T.remove_edge(*edge)
    >>> U, V = list(nx.connected_components(T))
    >>> # Thus U and V form a partition that defines a minimum cut
    ... # between u and v in G. You can compute the edge cut set,
    ... # that is, the set of edges that if removed from G will
    ... # disconnect u from v in G, with this information:
    ... cutset = set()
    >>> for x, nbrs in ((n, G[n]) for n in U):
    ...     cutset.update((x, y) for y in nbrs if y in V)
    >>> # Because we have set the capacities of all edges to 1
    ... # the cutset contains ten edges
    ... len(cutset)
    10
    >>> # You can use any maximum flow algorithm for the underlying
    ... # flow computations using the argument flow_func
    ... from networkx.algorithms import flow
    >>> T = nx.gomory_hu_tree(G, flow_func=flow.boykov_kolmogorov)
    >>> cut_value, edge = minimum_edge_weight_in_shortest_path(T, u, v)
    >>> cut_value
    10
    >>> nx.minimum_cut_value(G, u, v, flow_func=flow.boykov_kolmogorov)
    10

    Notes
    -----
    This implementation is based on Gusfield's approach [1]_ to compute
    Gomory-Hu trees, which does not require node contractions and has
    the same computational complexity as the original method.

    See also
    --------
    :func:`minimum_cut`
    :func:`maximum_flow`

    References
    ----------
    .. [1] Gusfield D: Very simple methods for all pairs network flow analysis.
           SIAM J Comput 19(1):143-155, 1990.

    """
    if flow_func is None:
        flow_func = default_flow_func

    if len(G) == 0:  # empty graph
        msg = "Empty Graph does not have a Gomory-Hu tree representation"
        raise nx.NetworkXError(msg)

    # Start the tree as a star graph with an arbitrary node at the center.
    # ``tree`` maps each non-root node to its current parent in the tree
    # under construction; ``labels`` maps each (child, parent) pair to the
    # minimum cut value between those two nodes.
    tree = {}
    labels = {}
    iter_nodes = iter(G)
    root = next(iter_nodes)
    for n in iter_nodes:
        tree[n] = root

    # Reuse residual network across the n-1 minimum cut computations.
    R = build_residual_network(G, capacity)

    # For all the leaves in the star graph tree (that is n-1 nodes).
    for source in tree:
        # Find neighbor in the tree
        target = tree[source]
        # compute minimum cut
        cut_value, partition = nx.minimum_cut(
            G, source, target, capacity=capacity, flow_func=flow_func, residual=R
        )
        labels[(source, target)] = cut_value
        # Update the tree
        # Source will always be in partition[0] and target in partition[1]
        for node in partition[0]:
            if node != source and node in tree and tree[node] == target:
                # Re-hang siblings of ``source`` that fell on its side of
                # the cut; per Gusfield, their cut value to ``source``
                # equals their previously recorded value to ``target``.
                tree[node] = source
                labels[node, source] = labels.get((node, target), cut_value)
        # If ``target``'s own parent ended up on ``source``'s side of the
        # cut, swap roles: ``source`` takes ``target``'s place under that
        # parent, keeping the tree consistent without node contractions.
        if target != root and tree[target] in partition[0]:
            labels[source, tree[target]] = labels[target, tree[target]]
            labels[target, source] = cut_value
            tree[source] = tree[target]
            tree[target] = source

    # Build the tree
    T = nx.Graph()
    T.add_nodes_from(G)
    T.add_weighted_edges_from(((u, v, labels[u, v]) for u, v in tree.items()))
    return T
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/maxflow.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/maxflow.py
new file mode 100644
index 00000000..7993d87b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/maxflow.py
@@ -0,0 +1,607 @@
+"""
+Maximum flow (and minimum cut) algorithms on capacitated graphs.
+"""
+
+import networkx as nx
+
+from .boykovkolmogorov import boykov_kolmogorov
+from .dinitz_alg import dinitz
+from .edmondskarp import edmonds_karp
+from .preflowpush import preflow_push
+from .shortestaugmentingpath import shortest_augmenting_path
+from .utils import build_flow_dict
+
+# Define the default flow function for computing maximum flow.
+default_flow_func = preflow_push
+
+__all__ = ["maximum_flow", "maximum_flow_value", "minimum_cut", "minimum_cut_value"]
+
+
@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")})
def maximum_flow(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs):
    """Find a maximum single-commodity flow.

    Parameters
    ----------
    flowG : NetworkX graph
        Edges of the graph are expected to have an attribute called
        'capacity'. If this attribute is not present, the edge is
        considered to have infinite capacity.

    _s : node
        Source node for the flow.

    _t : node
        Sink node for the flow.

    capacity : string
        Edges of the graph G are expected to have an attribute capacity
        that indicates how much flow the edge can support. If this
        attribute is not present, the edge is considered to have
        infinite capacity. Default value: 'capacity'.

    flow_func : function
        A function for computing the maximum flow among a pair of nodes
        in a capacitated graph. The function has to accept at least three
        parameters: a Graph or Digraph, a source node, and a target node.
        And return a residual network that follows NetworkX conventions
        (see Notes). If flow_func is None, the default maximum
        flow function (:meth:`preflow_push`) is used. See below for
        alternative algorithms. The choice of the default function may change
        from version to version and should not be relied on. Default value:
        None.

    kwargs : Any other keyword parameter is passed to the function that
        computes the maximum flow.

    Returns
    -------
    flow_value : integer, float
        Value of the maximum flow, i.e., net outflow from the source.

    flow_dict : dict
        A dictionary containing the value of the flow that went through
        each edge.

    Raises
    ------
    NetworkXError
        The algorithm does not support MultiGraph and MultiDiGraph. If
        the input graph is an instance of one of these two classes, a
        NetworkXError is raised.

    NetworkXUnbounded
        If the graph has a path of infinite capacity, the value of a
        feasible flow on the graph is unbounded above and the function
        raises a NetworkXUnbounded.

    See also
    --------
    :meth:`maximum_flow_value`
    :meth:`minimum_cut`
    :meth:`minimum_cut_value`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    Notes
    -----
    The function used in the flow_func parameter has to return a residual
    network that follows NetworkX conventions:

    The residual network :samp:`R` from an input graph :samp:`G` has the
    same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair
    of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a
    self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists
    in :samp:`G`.

    For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']`
    is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists
    in :samp:`G` or zero otherwise. If the capacity is infinite,
    :samp:`R[u][v]['capacity']` will have a high arbitrary finite value
    that does not affect the solution of the problem. This value is stored in
    :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`,
    :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and
    satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`.

    The flow value, defined as the total flow into :samp:`t`, the sink, is
    stored in :samp:`R.graph['flow_value']`. Reachability to :samp:`t` using
    only edges :samp:`(u, v)` such that
    :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum
    :samp:`s`-:samp:`t` cut.

    Specific algorithms may store extra data in :samp:`R`.

    The function should support an optional boolean parameter value_only. When
    True, it can optionally terminate the algorithm as soon as the maximum flow
    value and the minimum cut can be determined.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> G.add_edge("x", "a", capacity=3.0)
    >>> G.add_edge("x", "b", capacity=1.0)
    >>> G.add_edge("a", "c", capacity=3.0)
    >>> G.add_edge("b", "c", capacity=5.0)
    >>> G.add_edge("b", "d", capacity=4.0)
    >>> G.add_edge("d", "e", capacity=2.0)
    >>> G.add_edge("c", "y", capacity=2.0)
    >>> G.add_edge("e", "y", capacity=3.0)

    maximum_flow returns both the value of the maximum flow and a
    dictionary with all flows.

    >>> flow_value, flow_dict = nx.maximum_flow(G, "x", "y")
    >>> flow_value
    3.0
    >>> print(flow_dict["x"]["b"])
    1.0

    You can also use alternative algorithms for computing the
    maximum flow by using the flow_func parameter.

    >>> from networkx.algorithms.flow import shortest_augmenting_path
    >>> flow_value == nx.maximum_flow(G, "x", "y", flow_func=shortest_augmenting_path)[
    ...     0
    ... ]
    True

    """
    if flow_func is None:
        # Extra kwargs are only meaningful to a specific algorithm; refuse
        # them rather than silently forwarding to an unknown default.
        if kwargs:
            raise nx.NetworkXError(
                "You have to explicitly set a flow_func if"
                " you need to pass parameters via kwargs."
            )
        flow_func = default_flow_func

    if not callable(flow_func):
        raise nx.NetworkXError("flow_func has to be callable.")

    # value_only=False: we need the full flow function, not just its value.
    R = flow_func(flowG, _s, _t, capacity=capacity, value_only=False, **kwargs)
    # Convert the residual network into a nested flow dict keyed by edge.
    flow_dict = build_flow_dict(flowG, R)

    return (R.graph["flow_value"], flow_dict)
+
+
@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")})
def maximum_flow_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs):
    """Find the value of maximum single-commodity flow.

    Parameters
    ----------
    flowG : NetworkX graph
        Edges of the graph are expected to have an attribute called
        'capacity'. If this attribute is not present, the edge is
        considered to have infinite capacity.

    _s : node
        Source node for the flow.

    _t : node
        Sink node for the flow.

    capacity : string
        Edges of the graph G are expected to have an attribute capacity
        that indicates how much flow the edge can support. If this
        attribute is not present, the edge is considered to have
        infinite capacity. Default value: 'capacity'.

    flow_func : function
        A function for computing the maximum flow among a pair of nodes
        in a capacitated graph. The function has to accept at least three
        parameters: a Graph or Digraph, a source node, and a target node.
        And return a residual network that follows NetworkX conventions
        (see Notes). If flow_func is None, the default maximum
        flow function (:meth:`preflow_push`) is used. See below for
        alternative algorithms. The choice of the default function may change
        from version to version and should not be relied on. Default value:
        None.

    kwargs : Any other keyword parameter is passed to the function that
        computes the maximum flow.

    Returns
    -------
    flow_value : integer, float
        Value of the maximum flow, i.e., net outflow from the source.

    Raises
    ------
    NetworkXError
        The algorithm does not support MultiGraph and MultiDiGraph. If
        the input graph is an instance of one of these two classes, a
        NetworkXError is raised.

    NetworkXUnbounded
        If the graph has a path of infinite capacity, the value of a
        feasible flow on the graph is unbounded above and the function
        raises a NetworkXUnbounded.

    See also
    --------
    :meth:`maximum_flow`
    :meth:`minimum_cut`
    :meth:`minimum_cut_value`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    Notes
    -----
    The function used in the flow_func parameter has to return a residual
    network that follows NetworkX conventions:

    The residual network :samp:`R` from an input graph :samp:`G` has the
    same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair
    of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a
    self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists
    in :samp:`G`.

    For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']`
    is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists
    in :samp:`G` or zero otherwise. If the capacity is infinite,
    :samp:`R[u][v]['capacity']` will have a high arbitrary finite value
    that does not affect the solution of the problem. This value is stored in
    :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`,
    :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and
    satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`.

    The flow value, defined as the total flow into :samp:`t`, the sink, is
    stored in :samp:`R.graph['flow_value']`. Reachability to :samp:`t` using
    only edges :samp:`(u, v)` such that
    :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum
    :samp:`s`-:samp:`t` cut.

    Specific algorithms may store extra data in :samp:`R`.

    The function should support an optional boolean parameter value_only. When
    True, it can optionally terminate the algorithm as soon as the maximum flow
    value and the minimum cut can be determined.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> G.add_edge("x", "a", capacity=3.0)
    >>> G.add_edge("x", "b", capacity=1.0)
    >>> G.add_edge("a", "c", capacity=3.0)
    >>> G.add_edge("b", "c", capacity=5.0)
    >>> G.add_edge("b", "d", capacity=4.0)
    >>> G.add_edge("d", "e", capacity=2.0)
    >>> G.add_edge("c", "y", capacity=2.0)
    >>> G.add_edge("e", "y", capacity=3.0)

    maximum_flow_value computes only the value of the
    maximum flow:

    >>> flow_value = nx.maximum_flow_value(G, "x", "y")
    >>> flow_value
    3.0

    You can also use alternative algorithms for computing the
    maximum flow by using the flow_func parameter.

    >>> from networkx.algorithms.flow import shortest_augmenting_path
    >>> flow_value == nx.maximum_flow_value(
    ...     G, "x", "y", flow_func=shortest_augmenting_path
    ... )
    True

    """
    if flow_func is None:
        # Extra kwargs are only meaningful to a specific algorithm; refuse
        # them rather than silently forwarding to an unknown default.
        if kwargs:
            raise nx.NetworkXError(
                "You have to explicitly set a flow_func if"
                " you need to pass parameters via kwargs."
            )
        flow_func = default_flow_func

    if not callable(flow_func):
        raise nx.NetworkXError("flow_func has to be callable.")

    # value_only=True lets the algorithm stop once the flow value is known.
    R = flow_func(flowG, _s, _t, capacity=capacity, value_only=True, **kwargs)

    return R.graph["flow_value"]
+
+
@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")})
def minimum_cut(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs):
    """Compute the value and the node partition of a minimum (s, t)-cut.

    Use the max-flow min-cut theorem, i.e., the capacity of a minimum
    capacity cut is equal to the flow value of a maximum flow.

    Parameters
    ----------
    flowG : NetworkX graph
        Edges of the graph are expected to have an attribute called
        'capacity'. If this attribute is not present, the edge is
        considered to have infinite capacity.

    _s : node
        Source node for the flow.

    _t : node
        Sink node for the flow.

    capacity : string
        Edges of the graph G are expected to have an attribute capacity
        that indicates how much flow the edge can support. If this
        attribute is not present, the edge is considered to have
        infinite capacity. Default value: 'capacity'.

    flow_func : function
        A function for computing the maximum flow among a pair of nodes
        in a capacitated graph. The function has to accept at least three
        parameters: a Graph or Digraph, a source node, and a target node.
        And return a residual network that follows NetworkX conventions
        (see Notes). If flow_func is None, the default maximum
        flow function (:meth:`preflow_push`) is used. See below for
        alternative algorithms. The choice of the default function may change
        from version to version and should not be relied on. Default value:
        None.

    kwargs : Any other keyword parameter is passed to the function that
        computes the maximum flow.

    Returns
    -------
    cut_value : integer, float
        Value of the minimum cut.

    partition : pair of node sets
        A partitioning of the nodes that defines a minimum cut.

    Raises
    ------
    NetworkXUnbounded
        If the graph has a path of infinite capacity, all cuts have
        infinite capacity and the function raises a NetworkXUnbounded.

    See also
    --------
    :meth:`maximum_flow`
    :meth:`maximum_flow_value`
    :meth:`minimum_cut_value`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    Notes
    -----
    The function used in the flow_func parameter has to return a residual
    network that follows NetworkX conventions:

    The residual network :samp:`R` from an input graph :samp:`G` has the
    same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair
    of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a
    self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists
    in :samp:`G`.

    For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']`
    is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists
    in :samp:`G` or zero otherwise. If the capacity is infinite,
    :samp:`R[u][v]['capacity']` will have a high arbitrary finite value
    that does not affect the solution of the problem. This value is stored in
    :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`,
    :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and
    satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`.

    The flow value, defined as the total flow into :samp:`t`, the sink, is
    stored in :samp:`R.graph['flow_value']`. Reachability to :samp:`t` using
    only edges :samp:`(u, v)` such that
    :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum
    :samp:`s`-:samp:`t` cut.

    Specific algorithms may store extra data in :samp:`R`.

    The function should support an optional boolean parameter value_only. When
    True, it can optionally terminate the algorithm as soon as the maximum flow
    value and the minimum cut can be determined.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> G.add_edge("x", "a", capacity=3.0)
    >>> G.add_edge("x", "b", capacity=1.0)
    >>> G.add_edge("a", "c", capacity=3.0)
    >>> G.add_edge("b", "c", capacity=5.0)
    >>> G.add_edge("b", "d", capacity=4.0)
    >>> G.add_edge("d", "e", capacity=2.0)
    >>> G.add_edge("c", "y", capacity=2.0)
    >>> G.add_edge("e", "y", capacity=3.0)

    minimum_cut computes both the value of the
    minimum cut and the node partition:

    >>> cut_value, partition = nx.minimum_cut(G, "x", "y")
    >>> reachable, non_reachable = partition

    'partition' here is a tuple with the two sets of nodes that define
    the minimum cut. You can compute the cut set of edges that induce
    the minimum cut as follows:

    >>> cutset = set()
    >>> for u, nbrs in ((n, G[n]) for n in reachable):
    ...     cutset.update((u, v) for v in nbrs if v in non_reachable)
    >>> print(sorted(cutset))
    [('c', 'y'), ('x', 'b')]
    >>> cut_value == sum(G.edges[u, v]["capacity"] for (u, v) in cutset)
    True

    You can also use alternative algorithms for computing the
    minimum cut by using the flow_func parameter.

    >>> from networkx.algorithms.flow import shortest_augmenting_path
    >>> cut_value == nx.minimum_cut(G, "x", "y", flow_func=shortest_augmenting_path)[0]
    True

    """
    if flow_func is None:
        # Extra kwargs are only meaningful to a specific algorithm; refuse
        # them rather than silently forwarding to an unknown default.
        if kwargs:
            raise nx.NetworkXError(
                "You have to explicitly set a flow_func if"
                " you need to pass parameters via kwargs."
            )
        flow_func = default_flow_func

    if not callable(flow_func):
        raise nx.NetworkXError("flow_func has to be callable.")

    # A cutoff would stop preflow_push before the residual network encodes a
    # valid minimum cut, so reject the combination up front.
    if kwargs.get("cutoff") is not None and flow_func is preflow_push:
        raise nx.NetworkXError("cutoff should not be specified.")

    R = flow_func(flowG, _s, _t, capacity=capacity, value_only=True, **kwargs)
    # Remove saturated edges from the residual network
    cutset = [(u, v, d) for u, v, d in R.edges(data=True) if d["flow"] == d["capacity"]]
    R.remove_edges_from(cutset)

    # Then, reachable and non reachable nodes from source in the
    # residual network form the node partition that defines
    # the minimum cut.
    non_reachable = set(dict(nx.shortest_path_length(R, target=_t)))
    partition = (set(flowG) - non_reachable, non_reachable)
    # Finally add again cutset edges to the residual network to make
    # sure that it is reusable.
    R.add_edges_from(cutset)
    return (R.graph["flow_value"], partition)
+
+
@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")})
def minimum_cut_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs):
    """Compute the value of a minimum (s, t)-cut.

    Use the max-flow min-cut theorem, i.e., the capacity of a minimum
    capacity cut is equal to the flow value of a maximum flow.

    Parameters
    ----------
    flowG : NetworkX graph
        Edges of the graph are expected to have an attribute called
        'capacity'. If this attribute is not present, the edge is
        considered to have infinite capacity.

    _s : node
        Source node for the flow.

    _t : node
        Sink node for the flow.

    capacity : string
        Edges of the graph G are expected to have an attribute capacity
        that indicates how much flow the edge can support. If this
        attribute is not present, the edge is considered to have
        infinite capacity. Default value: 'capacity'.

    flow_func : function
        A function for computing the maximum flow among a pair of nodes
        in a capacitated graph. The function has to accept at least three
        parameters: a Graph or Digraph, a source node, and a target node.
        And return a residual network that follows NetworkX conventions
        (see Notes). If flow_func is None, the default maximum
        flow function (:meth:`preflow_push`) is used. See below for
        alternative algorithms. The choice of the default function may change
        from version to version and should not be relied on. Default value:
        None.

    kwargs : Any other keyword parameter is passed to the function that
        computes the maximum flow.

    Returns
    -------
    cut_value : integer, float
        Value of the minimum cut.

    Raises
    ------
    NetworkXUnbounded
        If the graph has a path of infinite capacity, all cuts have
        infinite capacity and the function raises a NetworkXUnbounded.

    See also
    --------
    :meth:`maximum_flow`
    :meth:`maximum_flow_value`
    :meth:`minimum_cut`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    Notes
    -----
    The function used in the flow_func parameter has to return a residual
    network that follows NetworkX conventions:

    The residual network :samp:`R` from an input graph :samp:`G` has the
    same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair
    of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a
    self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists
    in :samp:`G`.

    For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']`
    is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists
    in :samp:`G` or zero otherwise. If the capacity is infinite,
    :samp:`R[u][v]['capacity']` will have a high arbitrary finite value
    that does not affect the solution of the problem. This value is stored in
    :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`,
    :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and
    satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`.

    The flow value, defined as the total flow into :samp:`t`, the sink, is
    stored in :samp:`R.graph['flow_value']`. Reachability to :samp:`t` using
    only edges :samp:`(u, v)` such that
    :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum
    :samp:`s`-:samp:`t` cut.

    Specific algorithms may store extra data in :samp:`R`.

    The function should support an optional boolean parameter value_only. When
    True, it can optionally terminate the algorithm as soon as the maximum flow
    value and the minimum cut can be determined.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> G.add_edge("x", "a", capacity=3.0)
    >>> G.add_edge("x", "b", capacity=1.0)
    >>> G.add_edge("a", "c", capacity=3.0)
    >>> G.add_edge("b", "c", capacity=5.0)
    >>> G.add_edge("b", "d", capacity=4.0)
    >>> G.add_edge("d", "e", capacity=2.0)
    >>> G.add_edge("c", "y", capacity=2.0)
    >>> G.add_edge("e", "y", capacity=3.0)

    minimum_cut_value computes only the value of the
    minimum cut:

    >>> cut_value = nx.minimum_cut_value(G, "x", "y")
    >>> cut_value
    3.0

    You can also use alternative algorithms for computing the
    minimum cut by using the flow_func parameter.

    >>> from networkx.algorithms.flow import shortest_augmenting_path
    >>> cut_value == nx.minimum_cut_value(
    ...     G, "x", "y", flow_func=shortest_augmenting_path
    ... )
    True

    """
    if flow_func is None:
        # Extra kwargs are only meaningful to a specific algorithm; refuse
        # them rather than silently forwarding to an unknown default.
        if kwargs:
            raise nx.NetworkXError(
                "You have to explicitly set a flow_func if"
                " you need to pass parameters via kwargs."
            )
        flow_func = default_flow_func

    if not callable(flow_func):
        raise nx.NetworkXError("flow_func has to be callable.")

    # A cutoff would stop preflow_push before the true maximum flow value is
    # known, so reject the combination up front.
    if kwargs.get("cutoff") is not None and flow_func is preflow_push:
        raise nx.NetworkXError("cutoff should not be specified.")

    # value_only=True lets the algorithm stop once the cut value is known.
    R = flow_func(flowG, _s, _t, capacity=capacity, value_only=True, **kwargs)

    return R.graph["flow_value"]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/mincost.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/mincost.py
new file mode 100644
index 00000000..2f9390d7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/mincost.py
@@ -0,0 +1,356 @@
+"""
+Minimum cost flow algorithms on directed connected graphs.
+"""
+
+__all__ = ["min_cost_flow_cost", "min_cost_flow", "cost_of_flow", "max_flow_min_cost"]
+
+import networkx as nx
+
+
+@nx._dispatchable(
+    node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}
+)
+def min_cost_flow_cost(G, demand="demand", capacity="capacity", weight="weight"):
+    r"""Find the cost of a minimum cost flow satisfying all demands in digraph G.
+
+    G is a digraph with edge costs and capacities and in which nodes
+    have demand, i.e., they want to send or receive some amount of
+    flow. A negative demand means that the node wants to send flow, a
+    positive demand means that the node wants to receive flow. A flow on
+    the digraph G satisfies all demand if the net flow into each node
+    is equal to the demand of that node.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        DiGraph on which a minimum cost flow satisfying all demands is
+        to be found.
+
+    demand : string
+        Nodes of the graph G are expected to have an attribute demand
+        that indicates how much flow a node wants to send (negative
+        demand) or receive (positive demand). Note that the sum of the
+    demands should be 0 otherwise the problem is not feasible. If
+        this attribute is not present, a node is considered to have 0
+        demand. Default value: 'demand'.
+
+    capacity : string
+        Edges of the graph G are expected to have an attribute capacity
+        that indicates how much flow the edge can support. If this
+        attribute is not present, the edge is considered to have
+        infinite capacity. Default value: 'capacity'.
+
+    weight : string
+        Edges of the graph G are expected to have an attribute weight
+        that indicates the cost incurred by sending one unit of flow on
+        that edge. If not present, the weight is considered to be 0.
+        Default value: 'weight'.
+
+    Returns
+    -------
+    flowCost : integer, float
+        Cost of a minimum cost flow satisfying all demands.
+
+    Raises
+    ------
+    NetworkXError
+        This exception is raised if the input graph is not directed or
+        not connected.
+
+    NetworkXUnfeasible
+        This exception is raised in the following situations:
+
+            * The sum of the demands is not zero. Then, there is no
+              flow satisfying all demands.
+            * There is no flow satisfying all demand.
+
+    NetworkXUnbounded
+        This exception is raised if the digraph G has a cycle of
+        negative cost and infinite capacity. Then, the cost of a flow
+        satisfying all demands is unbounded below.
+
+    See also
+    --------
+    cost_of_flow, max_flow_min_cost, min_cost_flow, network_simplex
+
+    Notes
+    -----
+    This algorithm is not guaranteed to work if edge weights or demands
+    are floating point numbers (overflows and roundoff errors can
+    cause problems). As a workaround you can use integer numbers by
+    multiplying the relevant edge attributes by a convenient
+    constant factor (eg 100).
+
+    Examples
+    --------
+    A simple example of a min cost flow problem.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_node("a", demand=-5)
+    >>> G.add_node("d", demand=5)
+    >>> G.add_edge("a", "b", weight=3, capacity=4)
+    >>> G.add_edge("a", "c", weight=6, capacity=10)
+    >>> G.add_edge("b", "d", weight=1, capacity=9)
+    >>> G.add_edge("c", "d", weight=2, capacity=5)
+    >>> flowCost = nx.min_cost_flow_cost(G)
+    >>> flowCost
+    24
+    """
+    return nx.network_simplex(G, demand=demand, capacity=capacity, weight=weight)[0]
+
+
+@nx._dispatchable(
+    node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}
+)
+def min_cost_flow(G, demand="demand", capacity="capacity", weight="weight"):
+    r"""Returns a minimum cost flow satisfying all demands in digraph G.
+
+    G is a digraph with edge costs and capacities and in which nodes
+    have demand, i.e., they want to send or receive some amount of
+    flow. A negative demand means that the node wants to send flow, a
+    positive demand means that the node wants to receive flow. A flow on
+    the digraph G satisfies all demand if the net flow into each node
+    is equal to the demand of that node.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        DiGraph on which a minimum cost flow satisfying all demands is
+        to be found.
+
+    demand : string
+        Nodes of the graph G are expected to have an attribute demand
+        that indicates how much flow a node wants to send (negative
+        demand) or receive (positive demand). Note that the sum of the
+    demands should be 0 otherwise the problem is not feasible. If
+        this attribute is not present, a node is considered to have 0
+        demand. Default value: 'demand'.
+
+    capacity : string
+        Edges of the graph G are expected to have an attribute capacity
+        that indicates how much flow the edge can support. If this
+        attribute is not present, the edge is considered to have
+        infinite capacity. Default value: 'capacity'.
+
+    weight : string
+        Edges of the graph G are expected to have an attribute weight
+        that indicates the cost incurred by sending one unit of flow on
+        that edge. If not present, the weight is considered to be 0.
+        Default value: 'weight'.
+
+    Returns
+    -------
+    flowDict : dictionary
+        Dictionary of dictionaries keyed by nodes such that
+        flowDict[u][v] is the flow edge (u, v).
+
+    Raises
+    ------
+    NetworkXError
+        This exception is raised if the input graph is not directed or
+        not connected.
+
+    NetworkXUnfeasible
+        This exception is raised in the following situations:
+
+            * The sum of the demands is not zero. Then, there is no
+              flow satisfying all demands.
+            * There is no flow satisfying all demand.
+
+    NetworkXUnbounded
+        This exception is raised if the digraph G has a cycle of
+        negative cost and infinite capacity. Then, the cost of a flow
+        satisfying all demands is unbounded below.
+
+    See also
+    --------
+    cost_of_flow, max_flow_min_cost, min_cost_flow_cost, network_simplex
+
+    Notes
+    -----
+    This algorithm is not guaranteed to work if edge weights or demands
+    are floating point numbers (overflows and roundoff errors can
+    cause problems). As a workaround you can use integer numbers by
+    multiplying the relevant edge attributes by a convenient
+    constant factor (eg 100).
+
+    Examples
+    --------
+    A simple example of a min cost flow problem.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_node("a", demand=-5)
+    >>> G.add_node("d", demand=5)
+    >>> G.add_edge("a", "b", weight=3, capacity=4)
+    >>> G.add_edge("a", "c", weight=6, capacity=10)
+    >>> G.add_edge("b", "d", weight=1, capacity=9)
+    >>> G.add_edge("c", "d", weight=2, capacity=5)
+    >>> flowDict = nx.min_cost_flow(G)
+    >>> flowDict
+    {'a': {'b': 4, 'c': 1}, 'd': {}, 'b': {'d': 4}, 'c': {'d': 1}}
+    """
+    return nx.network_simplex(G, demand=demand, capacity=capacity, weight=weight)[1]
+
+
+@nx._dispatchable(edge_attrs={"weight": 0})
+def cost_of_flow(G, flowDict, weight="weight"):
+    """Compute the cost of the flow given by flowDict on graph G.
+
+    Note that this function does not check for the validity of the
+    flow flowDict. This function will fail if the graph G and the
+    flow don't have the same edge set.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        DiGraph on which a minimum cost flow satisfying all demands is
+        to be found.
+
+    weight : string
+        Edges of the graph G are expected to have an attribute weight
+        that indicates the cost incurred by sending one unit of flow on
+        that edge. If not present, the weight is considered to be 0.
+        Default value: 'weight'.
+
+    flowDict : dictionary
+        Dictionary of dictionaries keyed by nodes such that
+        flowDict[u][v] is the flow edge (u, v).
+
+    Returns
+    -------
+    cost : Integer, float
+        The total cost of the flow. This is given by the sum over all
+        edges of the product of the edge's flow and the edge's weight.
+
+    See also
+    --------
+    max_flow_min_cost, min_cost_flow, min_cost_flow_cost, network_simplex
+
+    Notes
+    -----
+    This algorithm is not guaranteed to work if edge weights or demands
+    are floating point numbers (overflows and roundoff errors can
+    cause problems). As a workaround you can use integer numbers by
+    multiplying the relevant edge attributes by a convenient
+    constant factor (eg 100).
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> G.add_node("a", demand=-5)
+    >>> G.add_node("d", demand=5)
+    >>> G.add_edge("a", "b", weight=3, capacity=4)
+    >>> G.add_edge("a", "c", weight=6, capacity=10)
+    >>> G.add_edge("b", "d", weight=1, capacity=9)
+    >>> G.add_edge("c", "d", weight=2, capacity=5)
+    >>> flowDict = nx.min_cost_flow(G)
+    >>> flowDict
+    {'a': {'b': 4, 'c': 1}, 'd': {}, 'b': {'d': 4}, 'c': {'d': 1}}
+    >>> nx.cost_of_flow(G, flowDict)
+    24
+    """
+    return sum((flowDict[u][v] * d.get(weight, 0) for u, v, d in G.edges(data=True)))
+
+
+@nx._dispatchable(edge_attrs={"capacity": float("inf"), "weight": 0})
+def max_flow_min_cost(G, s, t, capacity="capacity", weight="weight"):
+    """Returns a maximum (s, t)-flow of minimum cost.
+
+    G is a digraph with edge costs and capacities. There is a source
+    node s and a sink node t. This function finds a maximum flow from
+    s to t whose total cost is minimized.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        DiGraph on which a minimum cost flow satisfying all demands is
+        to be found.
+
+    s: node label
+        Source of the flow.
+
+    t: node label
+        Destination of the flow.
+
+    capacity: string
+        Edges of the graph G are expected to have an attribute capacity
+        that indicates how much flow the edge can support. If this
+        attribute is not present, the edge is considered to have
+        infinite capacity. Default value: 'capacity'.
+
+    weight: string
+        Edges of the graph G are expected to have an attribute weight
+        that indicates the cost incurred by sending one unit of flow on
+        that edge. If not present, the weight is considered to be 0.
+        Default value: 'weight'.
+
+    Returns
+    -------
+    flowDict: dictionary
+        Dictionary of dictionaries keyed by nodes such that
+        flowDict[u][v] is the flow edge (u, v).
+
+    Raises
+    ------
+    NetworkXError
+        This exception is raised if the input graph is not directed or
+        not connected.
+
+    NetworkXUnbounded
+        This exception is raised if there is an infinite capacity path
+        from s to t in G. In this case there is no maximum flow. This
+        exception is also raised if the digraph G has a cycle of
+        negative cost and infinite capacity. Then, the cost of a flow
+        is unbounded below.
+
+    See also
+    --------
+    cost_of_flow, min_cost_flow, min_cost_flow_cost, network_simplex
+
+    Notes
+    -----
+    This algorithm is not guaranteed to work if edge weights or demands
+    are floating point numbers (overflows and roundoff errors can
+    cause problems). As a workaround you can use integer numbers by
+    multiplying the relevant edge attributes by a convenient
+    constant factor (eg 100).
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> G.add_edges_from(
+    ...     [
+    ...         (1, 2, {"capacity": 12, "weight": 4}),
+    ...         (1, 3, {"capacity": 20, "weight": 6}),
+    ...         (2, 3, {"capacity": 6, "weight": -3}),
+    ...         (2, 6, {"capacity": 14, "weight": 1}),
+    ...         (3, 4, {"weight": 9}),
+    ...         (3, 5, {"capacity": 10, "weight": 5}),
+    ...         (4, 2, {"capacity": 19, "weight": 13}),
+    ...         (4, 5, {"capacity": 4, "weight": 0}),
+    ...         (5, 7, {"capacity": 28, "weight": 2}),
+    ...         (6, 5, {"capacity": 11, "weight": 1}),
+    ...         (6, 7, {"weight": 8}),
+    ...         (7, 4, {"capacity": 6, "weight": 6}),
+    ...     ]
+    ... )
+    >>> mincostFlow = nx.max_flow_min_cost(G, 1, 7)
+    >>> mincost = nx.cost_of_flow(G, mincostFlow)
+    >>> mincost
+    373
+    >>> from networkx.algorithms.flow import maximum_flow
+    >>> maxFlow = maximum_flow(G, 1, 7)[1]
+    >>> nx.cost_of_flow(G, maxFlow) >= mincost
+    True
+    >>> mincostFlowValue = sum((mincostFlow[u][7] for u in G.predecessors(7))) - sum(
+    ...     (mincostFlow[7][v] for v in G.successors(7))
+    ... )
+    >>> mincostFlowValue == nx.maximum_flow_value(G, 1, 7)
+    True
+
+    """
+    maxFlow = nx.maximum_flow_value(G, s, t, capacity=capacity)
+    H = nx.DiGraph(G)
+    H.add_node(s, demand=-maxFlow)
+    H.add_node(t, demand=maxFlow)
+    return min_cost_flow(H, capacity=capacity, weight=weight)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/networksimplex.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/networksimplex.py
new file mode 100644
index 00000000..a9822d96
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/networksimplex.py
@@ -0,0 +1,666 @@
+"""
+Minimum cost flow algorithms on directed connected graphs.
+"""
+
+__all__ = ["network_simplex"]
+
+from itertools import chain, islice, repeat
+from math import ceil, sqrt
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+
+class _DataEssentialsAndFunctions:
+    def __init__(
+        self, G, multigraph, demand="demand", capacity="capacity", weight="weight"
+    ):
+        # Number all nodes and edges and hereafter reference them using ONLY their numbers
+        self.node_list = list(G)  # nodes
+        self.node_indices = {u: i for i, u in enumerate(self.node_list)}  # node indices
+        self.node_demands = [
+            G.nodes[u].get(demand, 0) for u in self.node_list
+        ]  # node demands
+
+        self.edge_sources = []  # edge sources
+        self.edge_targets = []  # edge targets
+        if multigraph:
+            self.edge_keys = []  # edge keys
+        self.edge_indices = {}  # edge indices
+        self.edge_capacities = []  # edge capacities
+        self.edge_weights = []  # edge weights
+
+        if not multigraph:
+            edges = G.edges(data=True)
+        else:
+            edges = G.edges(data=True, keys=True)
+
+        inf = float("inf")
+        edges = (e for e in edges if e[0] != e[1] and e[-1].get(capacity, inf) != 0)
+        for i, e in enumerate(edges):
+            self.edge_sources.append(self.node_indices[e[0]])
+            self.edge_targets.append(self.node_indices[e[1]])
+            if multigraph:
+                self.edge_keys.append(e[2])
+            self.edge_indices[e[:-1]] = i
+            self.edge_capacities.append(e[-1].get(capacity, inf))
+            self.edge_weights.append(e[-1].get(weight, 0))
+
+        # spanning tree specific data to be initialized
+
+        self.edge_count = None  # number of edges
+        self.edge_flow = None  # edge flows
+        self.node_potentials = None  # node potentials
+        self.parent = None  # parent nodes
+        self.parent_edge = None  # edges to parents
+        self.subtree_size = None  # subtree sizes
+        self.next_node_dft = None  # next nodes in depth-first thread
+        self.prev_node_dft = None  # previous nodes in depth-first thread
+        self.last_descendent_dft = None  # last descendants in depth-first thread
+        self._spanning_tree_initialized = (
+            False  # False until initialize_spanning_tree() is called
+        )
+
+    def initialize_spanning_tree(self, n, faux_inf):
+        self.edge_count = len(self.edge_indices)  # number of edges
+        self.edge_flow = list(
+            chain(repeat(0, self.edge_count), (abs(d) for d in self.node_demands))
+        )  # edge flows
+        self.node_potentials = [
+            faux_inf if d <= 0 else -faux_inf for d in self.node_demands
+        ]  # node potentials
+        self.parent = list(chain(repeat(-1, n), [None]))  # parent nodes
+        self.parent_edge = list(
+            range(self.edge_count, self.edge_count + n)
+        )  # edges to parents
+        self.subtree_size = list(chain(repeat(1, n), [n + 1]))  # subtree sizes
+        self.next_node_dft = list(
+            chain(range(1, n), [-1, 0])
+        )  # next nodes in depth-first thread
+        self.prev_node_dft = list(range(-1, n))  # previous nodes in depth-first thread
+        self.last_descendent_dft = list(
+            chain(range(n), [n - 1])
+        )  # last descendants in depth-first thread
+        self._spanning_tree_initialized = True  # True only if all the assignments pass
+
+    def find_apex(self, p, q):
+        """
+        Find the lowest common ancestor of nodes p and q in the spanning tree.
+        """
+        size_p = self.subtree_size[p]
+        size_q = self.subtree_size[q]
+        while True:
+            while size_p < size_q:
+                p = self.parent[p]
+                size_p = self.subtree_size[p]
+            while size_p > size_q:
+                q = self.parent[q]
+                size_q = self.subtree_size[q]
+            if size_p == size_q:
+                if p != q:
+                    p = self.parent[p]
+                    size_p = self.subtree_size[p]
+                    q = self.parent[q]
+                    size_q = self.subtree_size[q]
+                else:
+                    return p
+
+    def trace_path(self, p, w):
+        """
+        Returns the nodes and edges on the path from node p to its ancestor w.
+        """
+        Wn = [p]
+        We = []
+        while p != w:
+            We.append(self.parent_edge[p])
+            p = self.parent[p]
+            Wn.append(p)
+        return Wn, We
+
+    def find_cycle(self, i, p, q):
+        """
+        Returns the nodes and edges on the cycle containing edge i == (p, q)
+        when the latter is added to the spanning tree.
+
+        The cycle is oriented in the direction from p to q.
+        """
+        w = self.find_apex(p, q)
+        Wn, We = self.trace_path(p, w)
+        Wn.reverse()
+        We.reverse()
+        if We != [i]:
+            We.append(i)
+        WnR, WeR = self.trace_path(q, w)
+        del WnR[-1]
+        Wn += WnR
+        We += WeR
+        return Wn, We
+
+    def augment_flow(self, Wn, We, f):
+        """
+        Augment f units of flow along a cycle represented by Wn and We.
+        """
+        for i, p in zip(We, Wn):
+            if self.edge_sources[i] == p:
+                self.edge_flow[i] += f
+            else:
+                self.edge_flow[i] -= f
+
+    def trace_subtree(self, p):
+        """
+        Yield the nodes in the subtree rooted at a node p.
+        """
+        yield p
+        l = self.last_descendent_dft[p]
+        while p != l:
+            p = self.next_node_dft[p]
+            yield p
+
+    def remove_edge(self, s, t):
+        """
+        Remove an edge (s, t) where parent[t] == s from the spanning tree.
+        """
+        size_t = self.subtree_size[t]
+        prev_t = self.prev_node_dft[t]
+        last_t = self.last_descendent_dft[t]
+        next_last_t = self.next_node_dft[last_t]
+        # Remove (s, t).
+        self.parent[t] = None
+        self.parent_edge[t] = None
+        # Remove the subtree rooted at t from the depth-first thread.
+        self.next_node_dft[prev_t] = next_last_t
+        self.prev_node_dft[next_last_t] = prev_t
+        self.next_node_dft[last_t] = t
+        self.prev_node_dft[t] = last_t
+        # Update the subtree sizes and last descendants of the (old) ancestors
+        # of t.
+        while s is not None:
+            self.subtree_size[s] -= size_t
+            if self.last_descendent_dft[s] == last_t:
+                self.last_descendent_dft[s] = prev_t
+            s = self.parent[s]
+
+    def make_root(self, q):
+        """
+        Make a node q the root of its containing subtree.
+        """
+        ancestors = []
+        while q is not None:
+            ancestors.append(q)
+            q = self.parent[q]
+        ancestors.reverse()
+        for p, q in zip(ancestors, islice(ancestors, 1, None)):
+            size_p = self.subtree_size[p]
+            last_p = self.last_descendent_dft[p]
+            prev_q = self.prev_node_dft[q]
+            last_q = self.last_descendent_dft[q]
+            next_last_q = self.next_node_dft[last_q]
+            # Make p a child of q.
+            self.parent[p] = q
+            self.parent[q] = None
+            self.parent_edge[p] = self.parent_edge[q]
+            self.parent_edge[q] = None
+            self.subtree_size[p] = size_p - self.subtree_size[q]
+            self.subtree_size[q] = size_p
+            # Remove the subtree rooted at q from the depth-first thread.
+            self.next_node_dft[prev_q] = next_last_q
+            self.prev_node_dft[next_last_q] = prev_q
+            self.next_node_dft[last_q] = q
+            self.prev_node_dft[q] = last_q
+            if last_p == last_q:
+                self.last_descendent_dft[p] = prev_q
+                last_p = prev_q
+            # Add the remaining parts of the subtree rooted at p as a subtree
+            # of q in the depth-first thread.
+            self.prev_node_dft[p] = last_q
+            self.next_node_dft[last_q] = p
+            self.next_node_dft[last_p] = q
+            self.prev_node_dft[q] = last_p
+            self.last_descendent_dft[q] = last_p
+
+    def add_edge(self, i, p, q):
+        """
+        Add an edge (p, q) to the spanning tree where q is the root of a subtree.
+        """
+        last_p = self.last_descendent_dft[p]
+        next_last_p = self.next_node_dft[last_p]
+        size_q = self.subtree_size[q]
+        last_q = self.last_descendent_dft[q]
+        # Make q a child of p.
+        self.parent[q] = p
+        self.parent_edge[q] = i
+        # Insert the subtree rooted at q into the depth-first thread.
+        self.next_node_dft[last_p] = q
+        self.prev_node_dft[q] = last_p
+        self.prev_node_dft[next_last_p] = last_q
+        self.next_node_dft[last_q] = next_last_p
+        # Update the subtree sizes and last descendants of the (new) ancestors
+        # of q.
+        while p is not None:
+            self.subtree_size[p] += size_q
+            if self.last_descendent_dft[p] == last_p:
+                self.last_descendent_dft[p] = last_q
+            p = self.parent[p]
+
+    def update_potentials(self, i, p, q):
+        """
+        Update the potentials of the nodes in the subtree rooted at a node
+        q connected to its parent p by an edge i.
+        """
+        if q == self.edge_targets[i]:
+            d = self.node_potentials[p] - self.edge_weights[i] - self.node_potentials[q]
+        else:
+            d = self.node_potentials[p] + self.edge_weights[i] - self.node_potentials[q]
+        for q in self.trace_subtree(q):
+            self.node_potentials[q] += d
+
+    def reduced_cost(self, i):
+        """Returns the reduced cost of an edge i."""
+        c = (
+            self.edge_weights[i]
+            - self.node_potentials[self.edge_sources[i]]
+            + self.node_potentials[self.edge_targets[i]]
+        )
+        return c if self.edge_flow[i] == 0 else -c
+
+    def find_entering_edges(self):
+        """Yield entering edges until none can be found."""
+        if self.edge_count == 0:
+            return
+
+        # Entering edges are found by combining Dantzig's rule and Bland's
+        # rule. The edges are cyclically grouped into blocks of size B. Within
+        # each block, Dantzig's rule is applied to find an entering edge. The
+        # blocks to search is determined following Bland's rule.
+        B = int(ceil(sqrt(self.edge_count)))  # pivot block size
+        M = (self.edge_count + B - 1) // B  # number of blocks needed to cover all edges
+        m = 0  # number of consecutive blocks without eligible
+        # entering edges
+        f = 0  # first edge in block
+        while m < M:
+            # Determine the next block of edges.
+            l = f + B
+            if l <= self.edge_count:
+                edges = range(f, l)
+            else:
+                l -= self.edge_count
+                edges = chain(range(f, self.edge_count), range(l))
+            f = l
+            # Find the first edge with the lowest reduced cost.
+            i = min(edges, key=self.reduced_cost)
+            c = self.reduced_cost(i)
+            if c >= 0:
+                # No entering edge found in the current block.
+                m += 1
+            else:
+                # Entering edge found.
+                if self.edge_flow[i] == 0:
+                    p = self.edge_sources[i]
+                    q = self.edge_targets[i]
+                else:
+                    p = self.edge_targets[i]
+                    q = self.edge_sources[i]
+                yield i, p, q
+                m = 0
+        # All edges have nonnegative reduced costs. The current flow is
+        # optimal.
+
+    def residual_capacity(self, i, p):
+        """Returns the residual capacity of an edge i in the direction away
+        from its endpoint p.
+        """
+        return (
+            self.edge_capacities[i] - self.edge_flow[i]
+            if self.edge_sources[i] == p
+            else self.edge_flow[i]
+        )
+
+    def find_leaving_edge(self, Wn, We):
+        """Returns the leaving edge in a cycle represented by Wn and We."""
+        j, s = min(
+            zip(reversed(We), reversed(Wn)),
+            key=lambda i_p: self.residual_capacity(*i_p),
+        )
+        t = self.edge_targets[j] if self.edge_sources[j] == s else self.edge_sources[j]
+        return j, s, t
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable(
+    node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}
+)
+def network_simplex(G, demand="demand", capacity="capacity", weight="weight"):
+    r"""Find a minimum cost flow satisfying all demands in digraph G.
+
+    This is a primal network simplex algorithm that uses the leaving
+    arc rule to prevent cycling.
+
+    G is a digraph with edge costs and capacities and in which nodes
+    have demand, i.e., they want to send or receive some amount of
+    flow. A negative demand means that the node wants to send flow, a
+    positive demand means that the node wants to receive flow. A flow on
+    the digraph G satisfies all demand if the net flow into each node
+    is equal to the demand of that node.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        DiGraph on which a minimum cost flow satisfying all demands is
+        to be found.
+
+    demand : string
+        Nodes of the graph G are expected to have an attribute demand
+        that indicates how much flow a node wants to send (negative
+        demand) or receive (positive demand). Note that the sum of the
+    demands should be 0 otherwise the problem is not feasible. If
+        this attribute is not present, a node is considered to have 0
+        demand. Default value: 'demand'.
+
+    capacity : string
+        Edges of the graph G are expected to have an attribute capacity
+        that indicates how much flow the edge can support. If this
+        attribute is not present, the edge is considered to have
+        infinite capacity. Default value: 'capacity'.
+
+    weight : string
+        Edges of the graph G are expected to have an attribute weight
+        that indicates the cost incurred by sending one unit of flow on
+        that edge. If not present, the weight is considered to be 0.
+        Default value: 'weight'.
+
+    Returns
+    -------
+    flowCost : integer, float
+        Cost of a minimum cost flow satisfying all demands.
+
+    flowDict : dictionary
+        Dictionary of dictionaries keyed by nodes such that
+        flowDict[u][v] is the flow edge (u, v).
+
+    Raises
+    ------
+    NetworkXError
+        This exception is raised if the input graph is not directed or
+        not connected.
+
+    NetworkXUnfeasible
+        This exception is raised in the following situations:
+
+            * The sum of the demands is not zero. Then, there is no
+              flow satisfying all demands.
+            * There is no flow satisfying all demand.
+
+    NetworkXUnbounded
+        This exception is raised if the digraph G has a cycle of
+        negative cost and infinite capacity. Then, the cost of a flow
+        satisfying all demands is unbounded below.
+
+    Notes
+    -----
+    This algorithm is not guaranteed to work if edge weights or demands
+    are floating point numbers (overflows and roundoff errors can
+    cause problems). As a workaround you can use integer numbers by
+    multiplying the relevant edge attributes by a convenient
+    constant factor (eg 100).
+
+    See also
+    --------
+    cost_of_flow, max_flow_min_cost, min_cost_flow, min_cost_flow_cost
+
+    Examples
+    --------
+    A simple example of a min cost flow problem.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_node("a", demand=-5)
+    >>> G.add_node("d", demand=5)
+    >>> G.add_edge("a", "b", weight=3, capacity=4)
+    >>> G.add_edge("a", "c", weight=6, capacity=10)
+    >>> G.add_edge("b", "d", weight=1, capacity=9)
+    >>> G.add_edge("c", "d", weight=2, capacity=5)
+    >>> flowCost, flowDict = nx.network_simplex(G)
+    >>> flowCost
+    24
+    >>> flowDict
+    {'a': {'b': 4, 'c': 1}, 'd': {}, 'b': {'d': 4}, 'c': {'d': 1}}
+
+    The mincost flow algorithm can also be used to solve shortest path
+    problems. To find the shortest path between two nodes u and v,
+    give all edges an infinite capacity, give node u a demand of -1 and
+    node v a demand of 1. Then run the network simplex. The value of a
+    min cost flow will be the distance between u and v and edges
+    carrying positive flow will indicate the path.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_weighted_edges_from(
+    ...     [
+    ...         ("s", "u", 10),
+    ...         ("s", "x", 5),
+    ...         ("u", "v", 1),
+    ...         ("u", "x", 2),
+    ...         ("v", "y", 1),
+    ...         ("x", "u", 3),
+    ...         ("x", "v", 5),
+    ...         ("x", "y", 2),
+    ...         ("y", "s", 7),
+    ...         ("y", "v", 6),
+    ...     ]
+    ... )
+    >>> G.add_node("s", demand=-1)
+    >>> G.add_node("v", demand=1)
+    >>> flowCost, flowDict = nx.network_simplex(G)
+    >>> flowCost == nx.shortest_path_length(G, "s", "v", weight="weight")
+    True
+    >>> sorted([(u, v) for u in flowDict for v in flowDict[u] if flowDict[u][v] > 0])
+    [('s', 'x'), ('u', 'v'), ('x', 'u')]
+    >>> nx.shortest_path(G, "s", "v", weight="weight")
+    ['s', 'x', 'u', 'v']
+
+    It is possible to change the name of the attributes used for the
+    algorithm.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_node("p", spam=-4)
+    >>> G.add_node("q", spam=2)
+    >>> G.add_node("a", spam=-2)
+    >>> G.add_node("d", spam=-1)
+    >>> G.add_node("t", spam=2)
+    >>> G.add_node("w", spam=3)
+    >>> G.add_edge("p", "q", cost=7, vacancies=5)
+    >>> G.add_edge("p", "a", cost=1, vacancies=4)
+    >>> G.add_edge("q", "d", cost=2, vacancies=3)
+    >>> G.add_edge("t", "q", cost=1, vacancies=2)
+    >>> G.add_edge("a", "t", cost=2, vacancies=4)
+    >>> G.add_edge("d", "w", cost=3, vacancies=4)
+    >>> G.add_edge("t", "w", cost=4, vacancies=1)
+    >>> flowCost, flowDict = nx.network_simplex(
+    ...     G, demand="spam", capacity="vacancies", weight="cost"
+    ... )
+    >>> flowCost
+    37
+    >>> flowDict
+    {'p': {'q': 2, 'a': 2}, 'q': {'d': 1}, 'a': {'t': 4}, 'd': {'w': 2}, 't': {'q': 1, 'w': 1}, 'w': {}}
+
+    References
+    ----------
+    .. [1] Z. Kiraly, P. Kovacs.
+           Efficient implementation of minimum-cost flow algorithms.
+           Acta Universitatis Sapientiae, Informatica 4(1):67--118. 2012.
+    .. [2] R. Barr, F. Glover, D. Klingman.
+           Enhancement of spanning tree labeling procedures for network
+           optimization.
+           INFOR 17(1):16--34. 1979.
+    """
+    ###########################################################################
+    # Problem essentials extraction and sanity check
+    ###########################################################################
+
+    if len(G) == 0:
+        raise nx.NetworkXError("graph has no nodes")
+
+    multigraph = G.is_multigraph()
+
+    # extracting data essential to problem
+    DEAF = _DataEssentialsAndFunctions(
+        G, multigraph, demand=demand, capacity=capacity, weight=weight
+    )
+
+    ###########################################################################
+    # Quick Error Detection
+    ###########################################################################
+
+    inf = float("inf")
+    for u, d in zip(DEAF.node_list, DEAF.node_demands):
+        if abs(d) == inf:
+            raise nx.NetworkXError(f"node {u!r} has infinite demand")
+    for e, w in zip(DEAF.edge_indices, DEAF.edge_weights):
+        if abs(w) == inf:
+            raise nx.NetworkXError(f"edge {e!r} has infinite weight")
+    if not multigraph:
+        edges = nx.selfloop_edges(G, data=True)
+    else:
+        edges = nx.selfloop_edges(G, data=True, keys=True)
+    for e in edges:
+        if abs(e[-1].get(weight, 0)) == inf:
+            raise nx.NetworkXError(f"edge {e[:-1]!r} has infinite weight")
+
+    ###########################################################################
+    # Quick Infeasibility Detection
+    ###########################################################################
+
+    if sum(DEAF.node_demands) != 0:
+        raise nx.NetworkXUnfeasible("total node demand is not zero")
+    for e, c in zip(DEAF.edge_indices, DEAF.edge_capacities):
+        if c < 0:
+            raise nx.NetworkXUnfeasible(f"edge {e!r} has negative capacity")
+    if not multigraph:
+        edges = nx.selfloop_edges(G, data=True)
+    else:
+        edges = nx.selfloop_edges(G, data=True, keys=True)
+    for e in edges:
+        if e[-1].get(capacity, inf) < 0:
+            raise nx.NetworkXUnfeasible(f"edge {e[:-1]!r} has negative capacity")
+
+    ###########################################################################
+    # Initialization
+    ###########################################################################
+
+    # Add a dummy node -1 and connect all existing nodes to it with infinite-
+    # capacity dummy edges. Node -1 will serve as the root of the
+    # spanning tree of the network simplex method. The new edges will be used to
+    # trivially satisfy the node demands and create an initial strongly
+    # feasible spanning tree.
+    for i, d in enumerate(DEAF.node_demands):
+        # Must be greater-than here. Zero-demand nodes must have
+        # edges pointing towards the root to ensure strong feasibility.
+        if d > 0:
+            DEAF.edge_sources.append(-1)
+            DEAF.edge_targets.append(i)
+        else:
+            DEAF.edge_sources.append(i)
+            DEAF.edge_targets.append(-1)
+    faux_inf = (
+        3
+        * max(
+            chain(
+                [
+                    sum(c for c in DEAF.edge_capacities if c < inf),
+                    sum(abs(w) for w in DEAF.edge_weights),
+                ],
+                (abs(d) for d in DEAF.node_demands),
+            )
+        )
+        or 1
+    )
+
+    n = len(DEAF.node_list)  # number of nodes
+    DEAF.edge_weights.extend(repeat(faux_inf, n))
+    DEAF.edge_capacities.extend(repeat(faux_inf, n))
+
+    # Construct the initial spanning tree.
+    DEAF.initialize_spanning_tree(n, faux_inf)
+
+    ###########################################################################
+    # Pivot loop
+    ###########################################################################
+
+    for i, p, q in DEAF.find_entering_edges():
+        Wn, We = DEAF.find_cycle(i, p, q)
+        j, s, t = DEAF.find_leaving_edge(Wn, We)
+        DEAF.augment_flow(Wn, We, DEAF.residual_capacity(j, s))
+        # Do nothing more if the entering edge is the same as the leaving edge.
+        if i != j:
+            if DEAF.parent[t] != s:
+                # Ensure that s is the parent of t.
+                s, t = t, s
+            if We.index(i) > We.index(j):
+                # Ensure that q is in the subtree rooted at t.
+                p, q = q, p
+            DEAF.remove_edge(s, t)
+            DEAF.make_root(q)
+            DEAF.add_edge(i, p, q)
+            DEAF.update_potentials(i, p, q)
+
+    ###########################################################################
+    # Infeasibility and unboundedness detection
+    ###########################################################################
+
+    if any(DEAF.edge_flow[i] != 0 for i in range(-n, 0)):
+        raise nx.NetworkXUnfeasible("no flow satisfies all node demands")
+
+    if any(DEAF.edge_flow[i] * 2 >= faux_inf for i in range(DEAF.edge_count)) or any(
+        e[-1].get(capacity, inf) == inf and e[-1].get(weight, 0) < 0
+        for e in nx.selfloop_edges(G, data=True)
+    ):
+        raise nx.NetworkXUnbounded("negative cycle with infinite capacity found")
+
+    ###########################################################################
+    # Flow cost calculation and flow dict construction
+    ###########################################################################
+
+    del DEAF.edge_flow[DEAF.edge_count :]
+    flow_cost = sum(w * x for w, x in zip(DEAF.edge_weights, DEAF.edge_flow))
+    flow_dict = {n: {} for n in DEAF.node_list}
+
+    def add_entry(e):
+        """Add a flow dict entry."""
+        d = flow_dict[e[0]]
+        for k in e[1:-2]:
+            try:
+                d = d[k]
+            except KeyError:
+                t = {}
+                d[k] = t
+                d = t
+        d[e[-2]] = e[-1]
+
+    DEAF.edge_sources = (
+        DEAF.node_list[s] for s in DEAF.edge_sources
+    )  # Use original nodes.
+    DEAF.edge_targets = (
+        DEAF.node_list[t] for t in DEAF.edge_targets
+    )  # Use original nodes.
+    if not multigraph:
+        for e in zip(DEAF.edge_sources, DEAF.edge_targets, DEAF.edge_flow):
+            add_entry(e)
+        edges = G.edges(data=True)
+    else:
+        for e in zip(
+            DEAF.edge_sources, DEAF.edge_targets, DEAF.edge_keys, DEAF.edge_flow
+        ):
+            add_entry(e)
+        edges = G.edges(data=True, keys=True)
+    for e in edges:
+        if e[0] != e[1]:
+            if e[-1].get(capacity, inf) == 0:
+                add_entry(e[:-1] + (0,))
+        else:
+            w = e[-1].get(weight, 0)
+            if w >= 0:
+                add_entry(e[:-1] + (0,))
+            else:
+                c = e[-1][capacity]
+                flow_cost += w * c
+                add_entry(e[:-1] + (c,))
+
+    return flow_cost, flow_dict
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/preflowpush.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/preflowpush.py
new file mode 100644
index 00000000..42cadc2e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/preflowpush.py
@@ -0,0 +1,425 @@
+"""
+Highest-label preflow-push algorithm for maximum flow problems.
+"""
+
+from collections import deque
+from itertools import islice
+
+import networkx as nx
+
+from ...utils import arbitrary_element
+from .utils import (
+    CurrentEdge,
+    GlobalRelabelThreshold,
+    Level,
+    build_residual_network,
+    detect_unboundedness,
+)
+
+__all__ = ["preflow_push"]
+
+
def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, value_only):
    """Implementation of the highest-label preflow-push algorithm.

    Parameters
    ----------
    G : NetworkX graph
        Input flow network.
    s, t : nodes
        Source and sink; must be distinct nodes of ``G``.
    capacity : string
        Name of the edge attribute holding capacities.
    residual : NetworkX DiGraph or None
        Residual network to reuse; one is built from ``G`` when ``None``.
    global_relabel_freq : integer, float or None
        Relative frequency of the global relabeling heuristic; ``None``
        (or 0) disables it.
    value_only : bool
        If True, return after phase 1 with a maximum preflow, which is
        enough to determine the maximum flow value.

    Returns
    -------
    R : NetworkX DiGraph
        Residual network with ``R.graph["flow_value"]`` set and per-node
        ``"excess"`` attributes populated.

    Raises
    ------
    NetworkXError
        If ``s`` or ``t`` is not in ``G``, if ``s == t``, or if
        ``global_relabel_freq`` is negative.
    """
    if s not in G:
        raise nx.NetworkXError(f"node {str(s)} not in graph")
    if t not in G:
        raise nx.NetworkXError(f"node {str(t)} not in graph")
    if s == t:
        raise nx.NetworkXError("source and sink are the same node")

    # A global_relabel_freq of 0 disables the heuristic entirely.
    if global_relabel_freq is None:
        global_relabel_freq = 0
    if global_relabel_freq < 0:
        raise nx.NetworkXError("global_relabel_freq must be nonnegative.")

    if residual is None:
        R = build_residual_network(G, capacity)
    else:
        R = residual

    detect_unboundedness(R, s, t)

    # Local aliases to the residual network's node and adjacency views.
    R_nodes = R.nodes
    R_pred = R.pred
    R_succ = R.succ

    # Initialize/reset the residual network.
    for u in R:
        R_nodes[u]["excess"] = 0
        for e in R_succ[u].values():
            e["flow"] = 0

    def reverse_bfs(src):
        """Perform a reverse breadth-first search from src in the residual
        network, returning a dict of exact distance labels ("heights") for
        every node that can reach src along non-saturated edges.
        """
        heights = {src: 0}
        q = deque([(src, 0)])
        while q:
            u, height = q.popleft()
            height += 1
            for v, attr in R_pred[u].items():
                if v not in heights and attr["flow"] < attr["capacity"]:
                    heights[v] = height
                    q.append((v, height))
        return heights

    # Initialize heights of the nodes.
    heights = reverse_bfs(t)

    if s not in heights:
        # t is not reachable from s in the residual network. The maximum flow
        # must be zero.
        R.graph["flow_value"] = 0
        return R

    n = len(R)
    # max_height represents the height of the highest level below level n with
    # at least one active node.
    max_height = max(heights[u] for u in heights if u != s)
    heights[s] = n

    grt = GlobalRelabelThreshold(n, R.size(), global_relabel_freq)

    # Initialize heights and 'current edge' data structures of the nodes.
    # Nodes that cannot reach t start above level n so they are never
    # selected during phase 1.
    for u in R:
        R_nodes[u]["height"] = heights[u] if u in heights else n + 1
        R_nodes[u]["curr_edge"] = CurrentEdge(R_succ[u])

    def push(u, v, flow):
        """Push flow units of flow from u to v."""
        R_succ[u][v]["flow"] += flow
        R_succ[v][u]["flow"] -= flow
        R_nodes[u]["excess"] -= flow
        R_nodes[v]["excess"] += flow

    # The maximum flow must be nonzero now. Initialize the preflow by
    # saturating all edges emanating from s.
    for u, attr in R_succ[s].items():
        flow = attr["capacity"]
        if flow > 0:
            push(s, u, flow)

    # Partition nodes into levels.
    levels = [Level() for i in range(2 * n)]
    for u in R:
        if u != s and u != t:
            level = levels[R_nodes[u]["height"]]
            if R_nodes[u]["excess"] > 0:
                level.active.add(u)
            else:
                level.inactive.add(u)

    def activate(v):
        """Move a node from the inactive set to the active set of its level."""
        if v != s and v != t:
            level = levels[R_nodes[v]["height"]]
            if v in level.inactive:
                level.inactive.remove(v)
                level.active.add(v)

    def relabel(u):
        """Relabel a node to create an admissible edge."""
        grt.add_work(len(R_succ[u]))
        return (
            min(
                R_nodes[v]["height"]
                for v, attr in R_succ[u].items()
                if attr["flow"] < attr["capacity"]
            )
            + 1
        )

    def discharge(u, is_phase1):
        """Discharge a node until it becomes inactive or, during phase 1 (see
        below), its height reaches at least n. The node is known to have the
        largest height among active nodes.
        """
        height = R_nodes[u]["height"]
        curr_edge = R_nodes[u]["curr_edge"]
        # next_height represents the next height to examine after discharging
        # the current node. During phase 1, it is capped to below n.
        next_height = height
        levels[height].active.remove(u)
        while True:
            v, attr = curr_edge.get()
            if height == R_nodes[v]["height"] + 1 and attr["flow"] < attr["capacity"]:
                # Admissible edge: push as much excess as the edge allows.
                flow = min(R_nodes[u]["excess"], attr["capacity"] - attr["flow"])
                push(u, v, flow)
                activate(v)
                if R_nodes[u]["excess"] == 0:
                    # The node has become inactive.
                    levels[height].inactive.add(u)
                    break
            try:
                curr_edge.move_to_next()
            except StopIteration:
                # We have run off the end of the adjacency list, and there can
                # be no more admissible edges. Relabel the node to create one.
                height = relabel(u)
                if is_phase1 and height >= n - 1:
                    # Although the node is still active, with a height at least
                    # n - 1, it is now known to be on the s side of the minimum
                    # s-t cut. Stop processing it until phase 2.
                    levels[height].active.add(u)
                    break
                # The first relabel operation after global relabeling may not
                # increase the height of the node since the 'current edge' data
                # structure is not rewound. Use height instead of (height - 1)
                # in case other active nodes at the same level are missed.
                next_height = height
        R_nodes[u]["height"] = height
        return next_height

    def gap_heuristic(height):
        """Apply the gap heuristic."""
        # Move all nodes at levels height + 1 through max_height to
        # level n + 1, where they are ignored for the rest of phase 1.
        for level in islice(levels, height + 1, max_height + 1):
            for u in level.active:
                R_nodes[u]["height"] = n + 1
            for u in level.inactive:
                R_nodes[u]["height"] = n + 1
            levels[n + 1].active.update(level.active)
            level.active.clear()
            levels[n + 1].inactive.update(level.inactive)
            level.inactive.clear()

    def global_relabel(from_sink):
        """Apply the global relabeling heuristic: recompute exact heights by
        a reverse BFS from t (phase 1) or from s (phase 2) and rebuild the
        level sets accordingly.
        """
        src = t if from_sink else s
        heights = reverse_bfs(src)
        if not from_sink:
            # s must be reachable from t. Remove t explicitly.
            del heights[t]
        max_height = max(heights.values())
        if from_sink:
            # Also mark nodes from which t is unreachable for relabeling. This
            # serves the same purpose as the gap heuristic.
            for u in R:
                if u not in heights and R_nodes[u]["height"] < n:
                    heights[u] = n + 1
        else:
            # Shift the computed heights because the height of s is n.
            for u in heights:
                heights[u] += n
            max_height += n
        del heights[src]
        for u, new_height in heights.items():
            old_height = R_nodes[u]["height"]
            if new_height != old_height:
                if u in levels[old_height].active:
                    levels[old_height].active.remove(u)
                    levels[new_height].active.add(u)
                else:
                    levels[old_height].inactive.remove(u)
                    levels[new_height].inactive.add(u)
                R_nodes[u]["height"] = new_height
        return max_height

    # Phase 1: Find the maximum preflow by pushing as much flow as possible to
    # t.

    height = max_height
    while height > 0:
        # Discharge active nodes in the current level.
        while True:
            level = levels[height]
            if not level.active:
                # All active nodes in the current level have been discharged.
                # Move to the next lower level.
                height -= 1
                break
            # Record the old height and level for the gap heuristic.
            old_height = height
            old_level = level
            u = arbitrary_element(level.active)
            height = discharge(u, True)
            if grt.is_reached():
                # Global relabeling heuristic: Recompute the exact heights of
                # all nodes.
                height = global_relabel(True)
                max_height = height
                grt.clear_work()
            elif not old_level.active and not old_level.inactive:
                # Gap heuristic: If the level at old_height is empty (a 'gap'),
                # a minimum cut has been identified. All nodes with heights
                # above old_height can have their heights set to n + 1 and not
                # be further processed before a maximum preflow is found.
                gap_heuristic(old_height)
                height = old_height - 1
                max_height = height
            else:
                # Update the height of the highest level with at least one
                # active node.
                max_height = max(max_height, height)

    # A maximum preflow has been found. The excess at t is the maximum flow
    # value.
    if value_only:
        R.graph["flow_value"] = R_nodes[t]["excess"]
        return R

    # Phase 2: Convert the maximum preflow into a maximum flow by returning the
    # excess to s.

    # Relabel all nodes so that they have accurate heights.
    height = global_relabel(False)
    grt.clear_work()

    # Continue to discharge the active nodes.
    while height > n:
        # Discharge active nodes in the current level.
        while True:
            level = levels[height]
            if not level.active:
                # All active nodes in the current level have been discharged.
                # Move to the next lower level.
                height -= 1
                break
            u = arbitrary_element(level.active)
            height = discharge(u, False)
            if grt.is_reached():
                # Global relabeling heuristic.
                height = global_relabel(False)
                grt.clear_work()

    R.graph["flow_value"] = R_nodes[t]["excess"]
    return R
+
+
@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True)
def preflow_push(
    G, s, t, capacity="capacity", residual=None, global_relabel_freq=1, value_only=False
):
    r"""Find a maximum single-commodity flow using the highest-label
    preflow-push algorithm.

    This function returns the residual network resulting after computing
    the maximum flow. See below for details about the conventions
    NetworkX uses for defining residual networks.

    This algorithm has a running time of $O(n^2 \sqrt{m})$ for $n$ nodes and
    $m$ edges.


    Parameters
    ----------
    G : NetworkX graph
        Edges of the graph are expected to have an attribute called
        'capacity'. If this attribute is not present, the edge is
        considered to have infinite capacity.

    s : node
        Source node for the flow.

    t : node
        Sink node for the flow.

    capacity : string
        Edges of the graph G are expected to have an attribute capacity
        that indicates how much flow the edge can support. If this
        attribute is not present, the edge is considered to have
        infinite capacity. Default value: 'capacity'.

    residual : NetworkX graph
        Residual network on which the algorithm is to be executed. If None, a
        new residual network is created. Default value: None.

    global_relabel_freq : integer, float
        Relative frequency of applying the global relabeling heuristic to speed
        up the algorithm. If it is None, the heuristic is disabled. Default
        value: 1.

    value_only : bool
        If False, compute a maximum flow; otherwise, compute a maximum preflow
        which is enough for computing the maximum flow value. Default value:
        False.

    Returns
    -------
    R : NetworkX DiGraph
        Residual network after computing the maximum flow.

    Raises
    ------
    NetworkXError
        The algorithm does not support MultiGraph and MultiDiGraph. If
        the input graph is an instance of one of these two classes, a
        NetworkXError is raised.

    NetworkXUnbounded
        If the graph has a path of infinite capacity, the value of a
        feasible flow on the graph is unbounded above and the function
        raises a NetworkXUnbounded.

    See also
    --------
    :meth:`maximum_flow`
    :meth:`minimum_cut`
    :meth:`edmonds_karp`
    :meth:`shortest_augmenting_path`

    Notes
    -----
    The residual network :samp:`R` from an input graph :samp:`G` has the
    same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair
    of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a
    self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists
    in :samp:`G`. For each node :samp:`u` in :samp:`R`,
    :samp:`R.nodes[u]['excess']` represents the difference between flow into
    :samp:`u` and flow out of :samp:`u`.

    For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']`
    is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists
    in :samp:`G` or zero otherwise. If the capacity is infinite,
    :samp:`R[u][v]['capacity']` will have a high arbitrary finite value
    that does not affect the solution of the problem. This value is stored in
    :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`,
    :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and
    satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`.

    The flow value, defined as the total flow into :samp:`t`, the sink, is
    stored in :samp:`R.graph['flow_value']`. Reachability to :samp:`t` using
    only edges :samp:`(u, v)` such that
    :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum
    :samp:`s`-:samp:`t` cut.

    Examples
    --------
    >>> from networkx.algorithms.flow import preflow_push

    The functions that implement flow algorithms and output a residual
    network, such as this one, are not imported to the base NetworkX
    namespace, so you have to explicitly import them from the flow package.

    >>> G = nx.DiGraph()
    >>> G.add_edge("x", "a", capacity=3.0)
    >>> G.add_edge("x", "b", capacity=1.0)
    >>> G.add_edge("a", "c", capacity=3.0)
    >>> G.add_edge("b", "c", capacity=5.0)
    >>> G.add_edge("b", "d", capacity=4.0)
    >>> G.add_edge("d", "e", capacity=2.0)
    >>> G.add_edge("c", "y", capacity=2.0)
    >>> G.add_edge("e", "y", capacity=3.0)
    >>> R = preflow_push(G, "x", "y")
    >>> flow_value = nx.maximum_flow_value(G, "x", "y")
    >>> flow_value == R.graph["flow_value"]
    True
    >>> # preflow_push also stores the maximum flow value
    >>> # in the excess attribute of the sink node t
    >>> flow_value == R.nodes["y"]["excess"]
    True
    >>> # For some problems, you might only want to compute a
    >>> # maximum preflow.
    >>> R = preflow_push(G, "x", "y", value_only=True)
    >>> flow_value == R.graph["flow_value"]
    True
    >>> flow_value == R.nodes["y"]["excess"]
    True

    """
    # Delegate all the work to the implementation function.
    result = preflow_push_impl(
        G, s, t, capacity, residual, global_relabel_freq, value_only
    )
    # Record which algorithm produced this residual network.
    result.graph["algorithm"] = "preflow_push"
    nx._clear_cache(result)
    return result
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/shortestaugmentingpath.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/shortestaugmentingpath.py
new file mode 100644
index 00000000..9f1193f1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/shortestaugmentingpath.py
@@ -0,0 +1,300 @@
+"""
+Shortest augmenting path algorithm for maximum flow problems.
+"""
+
+from collections import deque
+
+import networkx as nx
+
+from .edmondskarp import edmonds_karp_core
+from .utils import CurrentEdge, build_residual_network
+
+__all__ = ["shortest_augmenting_path"]
+
+
def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, cutoff):
    """Implementation of the shortest augmenting path algorithm.

    Parameters
    ----------
    G : NetworkX graph
        Input flow network.
    s, t : nodes
        Source and sink; must be distinct nodes of ``G``.
    capacity : string
        Name of the edge attribute holding capacities.
    residual : NetworkX DiGraph or None
        Residual network to reuse; one is built from ``G`` when ``None``.
    two_phase : bool
        If True, switch to breadth-first augmentation (Edmonds-Karp) once
        the source's distance label reaches ``d = min(m**0.5, 2 * n**(2/3))``.
    cutoff : integer, float or None
        Stop early once the flow value reaches or exceeds this bound;
        ``None`` means no bound.

    Returns
    -------
    R : NetworkX DiGraph
        Residual network with ``R.graph["flow_value"]`` set.

    Raises
    ------
    NetworkXError
        If ``s`` or ``t`` is not in ``G`` or if ``s == t``.
    NetworkXUnbounded
        If an augmenting path of infinite capacity is found.
    """
    if s not in G:
        raise nx.NetworkXError(f"node {str(s)} not in graph")
    if t not in G:
        raise nx.NetworkXError(f"node {str(t)} not in graph")
    if s == t:
        raise nx.NetworkXError("source and sink are the same node")

    if residual is None:
        R = build_residual_network(G, capacity)
    else:
        R = residual

    # Local aliases to the residual network's node and adjacency views.
    R_nodes = R.nodes
    R_pred = R.pred
    R_succ = R.succ

    # Initialize/reset the residual network.
    for u in R:
        for e in R_succ[u].values():
            e["flow"] = 0

    # Initialize heights of the nodes via a reverse BFS from t: the height
    # of a node is its distance to t along non-saturated edges.
    heights = {t: 0}
    q = deque([(t, 0)])
    while q:
        u, height = q.popleft()
        height += 1
        for v, attr in R_pred[u].items():
            if v not in heights and attr["flow"] < attr["capacity"]:
                heights[v] = height
                q.append((v, height))

    if s not in heights:
        # t is not reachable from s in the residual network. The maximum flow
        # must be zero.
        R.graph["flow_value"] = 0
        return R

    n = len(G)
    # R contains both (u, v) and (v, u) for each arc, so halve its size to
    # approximate the number of edges m.
    m = R.size() / 2

    # Initialize heights and 'current edge' data structures of the nodes.
    for u in R:
        R_nodes[u]["height"] = heights[u] if u in heights else n
        R_nodes[u]["curr_edge"] = CurrentEdge(R_succ[u])

    # Initialize counts of nodes in each level (used by the gap heuristic).
    counts = [0] * (2 * n - 1)
    for u in R:
        counts[R_nodes[u]["height"]] += 1

    inf = R.graph["inf"]

    def augment(path):
        """Augment flow along a path from s to t."""
        # Determine the path residual capacity.
        flow = inf
        it = iter(path)
        u = next(it)
        for v in it:
            attr = R_succ[u][v]
            flow = min(flow, attr["capacity"] - attr["flow"])
            u = v
        if flow * 2 > inf:
            raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.")
        # Augment flow along the path.
        it = iter(path)
        u = next(it)
        for v in it:
            R_succ[u][v]["flow"] += flow
            R_succ[v][u]["flow"] -= flow
            u = v
        return flow

    def relabel(u):
        """Relabel a node to create an admissible edge."""
        height = n - 1
        for v, attr in R_succ[u].items():
            if attr["flow"] < attr["capacity"]:
                height = min(height, R_nodes[v]["height"])
        return height + 1

    if cutoff is None:
        cutoff = float("inf")

    # Phase 1: Look for shortest augmenting paths using depth-first search.

    flow_value = 0
    path = [s]
    u = s
    # With two_phase, phase 1 stops once s's height reaches d; otherwise it
    # runs until no augmenting path remains (height n).
    d = n if not two_phase else int(min(m**0.5, 2 * n ** (2.0 / 3)))
    done = R_nodes[s]["height"] >= d
    while not done:
        height = R_nodes[u]["height"]
        curr_edge = R_nodes[u]["curr_edge"]
        # Depth-first search for the next node on the path to t.
        while True:
            v, attr = curr_edge.get()
            if height == R_nodes[v]["height"] + 1 and attr["flow"] < attr["capacity"]:
                # Advance to the next node following an admissible edge.
                path.append(v)
                u = v
                break
            try:
                curr_edge.move_to_next()
            except StopIteration:
                counts[height] -= 1
                if counts[height] == 0:
                    # Gap heuristic: If relabeling causes a level to become
                    # empty, a minimum cut has been identified. The algorithm
                    # can now be terminated.
                    R.graph["flow_value"] = flow_value
                    return R
                height = relabel(u)
                if u == s and height >= d:
                    if not two_phase:
                        # t is disconnected from s in the residual network. No
                        # more augmenting paths exist.
                        R.graph["flow_value"] = flow_value
                        return R
                    else:
                        # t is at least d steps away from s. End of phase 1.
                        done = True
                        break
                counts[height] += 1
                R_nodes[u]["height"] = height
                if u != s:
                    # After relabeling, the last edge on the path is no longer
                    # admissible. Retreat one step to look for an alternative.
                    path.pop()
                    u = path[-1]
                    break
        if u == t:
            # t is reached. Augment flow along the path and reset it for a new
            # depth-first search.
            flow_value += augment(path)
            if flow_value >= cutoff:
                R.graph["flow_value"] = flow_value
                return R
            path = [s]
            u = s

    # Phase 2: Look for shortest augmenting paths using breadth-first search.
    flow_value += edmonds_karp_core(R, s, t, cutoff - flow_value)

    R.graph["flow_value"] = flow_value
    return R
+
+
+@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True)
+def shortest_augmenting_path(
+    G,
+    s,
+    t,
+    capacity="capacity",
+    residual=None,
+    value_only=False,
+    two_phase=False,
+    cutoff=None,
+):
+    r"""Find a maximum single-commodity flow using the shortest augmenting path
+    algorithm.
+
+    This function returns the residual network resulting after computing
+    the maximum flow. See below for details about the conventions
+    NetworkX uses for defining residual networks.
+
+    This algorithm has a running time of $O(n^2 m)$ for $n$ nodes and $m$
+    edges.
+
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Edges of the graph are expected to have an attribute called
+        'capacity'. If this attribute is not present, the edge is
+        considered to have infinite capacity.
+
+    s : node
+        Source node for the flow.
+
+    t : node
+        Sink node for the flow.
+
+    capacity : string
+        Edges of the graph G are expected to have an attribute capacity
+        that indicates how much flow the edge can support. If this
+        attribute is not present, the edge is considered to have
+        infinite capacity. Default value: 'capacity'.
+
+    residual : NetworkX graph
+        Residual network on which the algorithm is to be executed. If None, a
+        new residual network is created. Default value: None.
+
+    value_only : bool
+        If True compute only the value of the maximum flow. This parameter
+        will be ignored by this algorithm because it is not applicable.
+
+    two_phase : bool
+        If True, a two-phase variant is used. The two-phase variant improves
+        the running time on unit-capacity networks from $O(nm)$ to
+        $O(\min(n^{2/3}, m^{1/2}) m)$. Default value: False.
+
+    cutoff : integer, float
+        If specified, the algorithm will terminate when the flow value reaches
+        or exceeds the cutoff. In this case, it may be unable to immediately
+        determine a minimum cut. Default value: None.
+
+    Returns
+    -------
+    R : NetworkX DiGraph
+        Residual network after computing the maximum flow.
+
+    Raises
+    ------
+    NetworkXError
+        The algorithm does not support MultiGraph and MultiDiGraph. If
+        the input graph is an instance of one of these two classes, a
+        NetworkXError is raised.
+
+    NetworkXUnbounded
+        If the graph has a path of infinite capacity, the value of a
+        feasible flow on the graph is unbounded above and the function
+        raises a NetworkXUnbounded.
+
+    See also
+    --------
+    :meth:`maximum_flow`
+    :meth:`minimum_cut`
+    :meth:`edmonds_karp`
+    :meth:`preflow_push`
+
+    Notes
+    -----
+    The residual network :samp:`R` from an input graph :samp:`G` has the
+    same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair
+    of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a
+    self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists
+    in :samp:`G`.
+
+    For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']`
+    is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists
+    in :samp:`G` or zero otherwise. If the capacity is infinite,
+    :samp:`R[u][v]['capacity']` will have a high arbitrary finite value
+    that does not affect the solution of the problem. This value is stored in
+    :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`,
+    :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and
+    satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`.
+
+    The flow value, defined as the total flow into :samp:`t`, the sink, is
+    stored in :samp:`R.graph['flow_value']`. If :samp:`cutoff` is not
+    specified, reachability to :samp:`t` using only edges :samp:`(u, v)` such
+    that :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum
+    :samp:`s`-:samp:`t` cut.
+
+    Examples
+    --------
+    >>> from networkx.algorithms.flow import shortest_augmenting_path
+
+    The functions that implement flow algorithms and output a residual
+    network, such as this one, are not imported to the base NetworkX
+    namespace, so you have to explicitly import them from the flow package.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_edge("x", "a", capacity=3.0)
+    >>> G.add_edge("x", "b", capacity=1.0)
+    >>> G.add_edge("a", "c", capacity=3.0)
+    >>> G.add_edge("b", "c", capacity=5.0)
+    >>> G.add_edge("b", "d", capacity=4.0)
+    >>> G.add_edge("d", "e", capacity=2.0)
+    >>> G.add_edge("c", "y", capacity=2.0)
+    >>> G.add_edge("e", "y", capacity=3.0)
+    >>> R = shortest_augmenting_path(G, "x", "y")
+    >>> flow_value = nx.maximum_flow_value(G, "x", "y")
+    >>> flow_value
+    3.0
+    >>> flow_value == R.graph["flow_value"]
+    True
+
+    """
+    R = shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, cutoff)
+    R.graph["algorithm"] = "shortest_augmenting_path"
+    nx._clear_cache(R)
+    return R
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gl1.gpickle.bz2 b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gl1.gpickle.bz2
new file mode 100644
index 00000000..e6ed5744
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gl1.gpickle.bz2
Binary files differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gw1.gpickle.bz2 b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gw1.gpickle.bz2
new file mode 100644
index 00000000..abd0e8a2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gw1.gpickle.bz2
Binary files differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/netgen-2.gpickle.bz2 b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/netgen-2.gpickle.bz2
new file mode 100644
index 00000000..cd3ea801
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/netgen-2.gpickle.bz2
Binary files differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_gomory_hu.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_gomory_hu.py
new file mode 100644
index 00000000..1649ec82
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_gomory_hu.py
@@ -0,0 +1,128 @@
+from itertools import combinations
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms.flow import (
+    boykov_kolmogorov,
+    dinitz,
+    edmonds_karp,
+    preflow_push,
+    shortest_augmenting_path,
+)
+
+flow_funcs = [
+    boykov_kolmogorov,
+    dinitz,
+    edmonds_karp,
+    preflow_push,
+    shortest_augmenting_path,
+]
+
+
+class TestGomoryHuTree:
+    def minimum_edge_weight(self, T, u, v):
+        path = nx.shortest_path(T, u, v, weight="weight")
+        return min((T[u][v]["weight"], (u, v)) for (u, v) in zip(path, path[1:]))
+
+    def compute_cutset(self, G, T_orig, edge):
+        T = T_orig.copy()
+        T.remove_edge(*edge)
+        U, V = list(nx.connected_components(T))
+        cutset = set()
+        for x, nbrs in ((n, G[n]) for n in U):
+            cutset.update((x, y) for y in nbrs if y in V)
+        return cutset
+
+    def test_default_flow_function_karate_club_graph(self):
+        G = nx.karate_club_graph()
+        nx.set_edge_attributes(G, 1, "capacity")
+        T = nx.gomory_hu_tree(G)
+        assert nx.is_tree(T)
+        for u, v in combinations(G, 2):
+            cut_value, edge = self.minimum_edge_weight(T, u, v)
+            assert nx.minimum_cut_value(G, u, v) == cut_value
+
+    def test_karate_club_graph(self):
+        G = nx.karate_club_graph()
+        nx.set_edge_attributes(G, 1, "capacity")
+        for flow_func in flow_funcs:
+            T = nx.gomory_hu_tree(G, flow_func=flow_func)
+            assert nx.is_tree(T)
+            for u, v in combinations(G, 2):
+                cut_value, edge = self.minimum_edge_weight(T, u, v)
+                assert nx.minimum_cut_value(G, u, v) == cut_value
+
+    def test_davis_southern_women_graph(self):
+        G = nx.davis_southern_women_graph()
+        nx.set_edge_attributes(G, 1, "capacity")
+        for flow_func in flow_funcs:
+            T = nx.gomory_hu_tree(G, flow_func=flow_func)
+            assert nx.is_tree(T)
+            for u, v in combinations(G, 2):
+                cut_value, edge = self.minimum_edge_weight(T, u, v)
+                assert nx.minimum_cut_value(G, u, v) == cut_value
+
+    def test_florentine_families_graph(self):
+        G = nx.florentine_families_graph()
+        nx.set_edge_attributes(G, 1, "capacity")
+        for flow_func in flow_funcs:
+            T = nx.gomory_hu_tree(G, flow_func=flow_func)
+            assert nx.is_tree(T)
+            for u, v in combinations(G, 2):
+                cut_value, edge = self.minimum_edge_weight(T, u, v)
+                assert nx.minimum_cut_value(G, u, v) == cut_value
+
+    @pytest.mark.slow
+    def test_les_miserables_graph_cutset(self):
+        G = nx.les_miserables_graph()
+        nx.set_edge_attributes(G, 1, "capacity")
+        for flow_func in flow_funcs:
+            T = nx.gomory_hu_tree(G, flow_func=flow_func)
+            assert nx.is_tree(T)
+            for u, v in combinations(G, 2):
+                cut_value, edge = self.minimum_edge_weight(T, u, v)
+                assert nx.minimum_cut_value(G, u, v) == cut_value
+
+    def test_karate_club_graph_cutset(self):
+        G = nx.karate_club_graph()
+        nx.set_edge_attributes(G, 1, "capacity")
+        T = nx.gomory_hu_tree(G)
+        assert nx.is_tree(T)
+        u, v = 0, 33
+        cut_value, edge = self.minimum_edge_weight(T, u, v)
+        cutset = self.compute_cutset(G, T, edge)
+        assert cut_value == len(cutset)
+
+    def test_wikipedia_example(self):
+        # Example from https://en.wikipedia.org/wiki/Gomory%E2%80%93Hu_tree
+        G = nx.Graph()
+        G.add_weighted_edges_from(
+            (
+                (0, 1, 1),
+                (0, 2, 7),
+                (1, 2, 1),
+                (1, 3, 3),
+                (1, 4, 2),
+                (2, 4, 4),
+                (3, 4, 1),
+                (3, 5, 6),
+                (4, 5, 2),
+            )
+        )
+        for flow_func in flow_funcs:
+            T = nx.gomory_hu_tree(G, capacity="weight", flow_func=flow_func)
+            assert nx.is_tree(T)
+            for u, v in combinations(G, 2):
+                cut_value, edge = self.minimum_edge_weight(T, u, v)
+                assert nx.minimum_cut_value(G, u, v, capacity="weight") == cut_value
+
+    def test_directed_raises(self):
+        with pytest.raises(nx.NetworkXNotImplemented):
+            G = nx.DiGraph()
+            T = nx.gomory_hu_tree(G)
+
+    def test_empty_raises(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.empty_graph()
+            T = nx.gomory_hu_tree(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow.py
new file mode 100644
index 00000000..d7305a7b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow.py
@@ -0,0 +1,573 @@
+"""Maximum flow algorithms test suite."""
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms.flow import (
+    boykov_kolmogorov,
+    build_flow_dict,
+    build_residual_network,
+    dinitz,
+    edmonds_karp,
+    preflow_push,
+    shortest_augmenting_path,
+)
+
+flow_funcs = {
+    boykov_kolmogorov,
+    dinitz,
+    edmonds_karp,
+    preflow_push,
+    shortest_augmenting_path,
+}
+
+max_min_funcs = {nx.maximum_flow, nx.minimum_cut}
+flow_value_funcs = {nx.maximum_flow_value, nx.minimum_cut_value}
+interface_funcs = max_min_funcs | flow_value_funcs
+all_funcs = flow_funcs | interface_funcs
+
+
+def compute_cutset(G, partition):
+    reachable, non_reachable = partition
+    cutset = set()
+    for u, nbrs in ((n, G[n]) for n in reachable):
+        cutset.update((u, v) for v in nbrs if v in non_reachable)
+    return cutset
+
+
+def validate_flows(G, s, t, flowDict, solnValue, capacity, flow_func):
+    errmsg = f"Assertion failed in function: {flow_func.__name__}"
+    assert set(G) == set(flowDict), errmsg
+    for u in G:
+        assert set(G[u]) == set(flowDict[u]), errmsg
+    excess = {u: 0 for u in flowDict}
+    for u in flowDict:
+        for v, flow in flowDict[u].items():
+            if capacity in G[u][v]:
+                assert flow <= G[u][v][capacity]
+            assert flow >= 0, errmsg
+            excess[u] -= flow
+            excess[v] += flow
+    for u, exc in excess.items():
+        if u == s:
+            assert exc == -solnValue, errmsg
+        elif u == t:
+            assert exc == solnValue, errmsg
+        else:
+            assert exc == 0, errmsg
+
+
+def validate_cuts(G, s, t, solnValue, partition, capacity, flow_func):
+    errmsg = f"Assertion failed in function: {flow_func.__name__}"
+    assert all(n in G for n in partition[0]), errmsg
+    assert all(n in G for n in partition[1]), errmsg
+    cutset = compute_cutset(G, partition)
+    assert all(G.has_edge(u, v) for (u, v) in cutset), errmsg
+    assert solnValue == sum(G[u][v][capacity] for (u, v) in cutset), errmsg
+    H = G.copy()
+    H.remove_edges_from(cutset)
+    if not G.is_directed():
+        assert not nx.is_connected(H), errmsg
+    else:
+        assert not nx.is_strongly_connected(H), errmsg
+
+
+def compare_flows_and_cuts(G, s, t, solnValue, capacity="capacity"):
+    for flow_func in flow_funcs:
+        errmsg = f"Assertion failed in function: {flow_func.__name__}"
+        R = flow_func(G, s, t, capacity)
+        # Test both legacy and new implementations.
+        flow_value = R.graph["flow_value"]
+        flow_dict = build_flow_dict(G, R)
+        assert flow_value == solnValue, errmsg
+        validate_flows(G, s, t, flow_dict, solnValue, capacity, flow_func)
+        # Minimum cut
+        cut_value, partition = nx.minimum_cut(
+            G, s, t, capacity=capacity, flow_func=flow_func
+        )
+        validate_cuts(G, s, t, solnValue, partition, capacity, flow_func)
+
+
+class TestMaxflowMinCutCommon:
+    def test_graph1(self):
+        # Trivial undirected graph
+        G = nx.Graph()
+        G.add_edge(1, 2, capacity=1.0)
+
+        # solution flows
+        # {1: {2: 1.0}, 2: {1: 1.0}}
+
+        compare_flows_and_cuts(G, 1, 2, 1.0)
+
+    def test_graph2(self):
+        # A more complex undirected graph
+        # adapted from https://web.archive.org/web/20220815055650/https://www.topcoder.com/thrive/articles/Maximum%20Flow:%20Part%20One
+        G = nx.Graph()
+        G.add_edge("x", "a", capacity=3.0)
+        G.add_edge("x", "b", capacity=1.0)
+        G.add_edge("a", "c", capacity=3.0)
+        G.add_edge("b", "c", capacity=5.0)
+        G.add_edge("b", "d", capacity=4.0)
+        G.add_edge("d", "e", capacity=2.0)
+        G.add_edge("c", "y", capacity=2.0)
+        G.add_edge("e", "y", capacity=3.0)
+
+        # H
+        # {
+        #     "x": {"a": 3, "b": 1},
+        #     "a": {"c": 3, "x": 3},
+        #     "b": {"c": 1, "d": 2, "x": 1},
+        #     "c": {"a": 3, "b": 1, "y": 2},
+        #     "d": {"b": 2, "e": 2},
+        #     "e": {"d": 2, "y": 2},
+        #     "y": {"c": 2, "e": 2},
+        # }
+
+        compare_flows_and_cuts(G, "x", "y", 4.0)
+
+    def test_digraph1(self):
+        # The classic directed graph example
+        G = nx.DiGraph()
+        G.add_edge("a", "b", capacity=1000.0)
+        G.add_edge("a", "c", capacity=1000.0)
+        G.add_edge("b", "c", capacity=1.0)
+        G.add_edge("b", "d", capacity=1000.0)
+        G.add_edge("c", "d", capacity=1000.0)
+
+        # H
+        # {
+        #     "a": {"b": 1000.0, "c": 1000.0},
+        #     "b": {"c": 0, "d": 1000.0},
+        #     "c": {"d": 1000.0},
+        #     "d": {},
+        # }
+
+        compare_flows_and_cuts(G, "a", "d", 2000.0)
+
+    def test_digraph2(self):
+        # An example in which some edges end up with zero flow.
+        G = nx.DiGraph()
+        G.add_edge("s", "b", capacity=2)
+        G.add_edge("s", "c", capacity=1)
+        G.add_edge("c", "d", capacity=1)
+        G.add_edge("d", "a", capacity=1)
+        G.add_edge("b", "a", capacity=2)
+        G.add_edge("a", "t", capacity=2)
+
+        # H
+        # {
+        #     "s": {"b": 2, "c": 0},
+        #     "c": {"d": 0},
+        #     "d": {"a": 0},
+        #     "b": {"a": 2},
+        #     "a": {"t": 2},
+        #     "t": {},
+        # }
+
+        compare_flows_and_cuts(G, "s", "t", 2)
+
+    def test_digraph3(self):
+        # A directed graph example from Cormen et al.
+        G = nx.DiGraph()
+        G.add_edge("s", "v1", capacity=16.0)
+        G.add_edge("s", "v2", capacity=13.0)
+        G.add_edge("v1", "v2", capacity=10.0)
+        G.add_edge("v2", "v1", capacity=4.0)
+        G.add_edge("v1", "v3", capacity=12.0)
+        G.add_edge("v3", "v2", capacity=9.0)
+        G.add_edge("v2", "v4", capacity=14.0)
+        G.add_edge("v4", "v3", capacity=7.0)
+        G.add_edge("v3", "t", capacity=20.0)
+        G.add_edge("v4", "t", capacity=4.0)
+
+        # H
+        # {
+        #     "s": {"v1": 12.0, "v2": 11.0},
+        #     "v2": {"v1": 0, "v4": 11.0},
+        #     "v1": {"v2": 0, "v3": 12.0},
+        #     "v3": {"v2": 0, "t": 19.0},
+        #     "v4": {"v3": 7.0, "t": 4.0},
+        #     "t": {},
+        # }
+
+        compare_flows_and_cuts(G, "s", "t", 23.0)
+
+    def test_digraph4(self):
+        # A more complex directed graph
+        # from https://web.archive.org/web/20220815055650/https://www.topcoder.com/thrive/articles/Maximum%20Flow:%20Part%20One
+        G = nx.DiGraph()
+        G.add_edge("x", "a", capacity=3.0)
+        G.add_edge("x", "b", capacity=1.0)
+        G.add_edge("a", "c", capacity=3.0)
+        G.add_edge("b", "c", capacity=5.0)
+        G.add_edge("b", "d", capacity=4.0)
+        G.add_edge("d", "e", capacity=2.0)
+        G.add_edge("c", "y", capacity=2.0)
+        G.add_edge("e", "y", capacity=3.0)
+
+        # H
+        # {
+        #     "x": {"a": 2.0, "b": 1.0},
+        #     "a": {"c": 2.0},
+        #     "b": {"c": 0, "d": 1.0},
+        #     "c": {"y": 2.0},
+        #     "d": {"e": 1.0},
+        #     "e": {"y": 1.0},
+        #     "y": {},
+        # }
+
+        compare_flows_and_cuts(G, "x", "y", 3.0)
+
+    def test_wikipedia_dinitz_example(self):
+        # Nice example from https://en.wikipedia.org/wiki/Dinic's_algorithm
+        G = nx.DiGraph()
+        G.add_edge("s", 1, capacity=10)
+        G.add_edge("s", 2, capacity=10)
+        G.add_edge(1, 3, capacity=4)
+        G.add_edge(1, 4, capacity=8)
+        G.add_edge(1, 2, capacity=2)
+        G.add_edge(2, 4, capacity=9)
+        G.add_edge(3, "t", capacity=10)
+        G.add_edge(4, 3, capacity=6)
+        G.add_edge(4, "t", capacity=10)
+
+        # solution flows
+        # {
+        #     1: {2: 0, 3: 4, 4: 6},
+        #     2: {4: 9},
+        #     3: {"t": 9},
+        #     4: {3: 5, "t": 10},
+        #     "s": {1: 10, 2: 9},
+        #     "t": {},
+        # }
+
+        compare_flows_and_cuts(G, "s", "t", 19)
+
+    def test_optional_capacity(self):
+        # Test optional capacity parameter.
+        G = nx.DiGraph()
+        G.add_edge("x", "a", spam=3.0)
+        G.add_edge("x", "b", spam=1.0)
+        G.add_edge("a", "c", spam=3.0)
+        G.add_edge("b", "c", spam=5.0)
+        G.add_edge("b", "d", spam=4.0)
+        G.add_edge("d", "e", spam=2.0)
+        G.add_edge("c", "y", spam=2.0)
+        G.add_edge("e", "y", spam=3.0)
+
+        # solution flows
+        # {
+        #     "x": {"a": 2.0, "b": 1.0},
+        #     "a": {"c": 2.0},
+        #     "b": {"c": 0, "d": 1.0},
+        #     "c": {"y": 2.0},
+        #     "d": {"e": 1.0},
+        #     "e": {"y": 1.0},
+        #     "y": {},
+        # }
+        solnValue = 3.0
+        s = "x"
+        t = "y"
+
+        compare_flows_and_cuts(G, s, t, solnValue, capacity="spam")
+
+    def test_digraph_infcap_edges(self):
+        # DiGraph with infinite capacity edges
+        G = nx.DiGraph()
+        G.add_edge("s", "a")
+        G.add_edge("s", "b", capacity=30)
+        G.add_edge("a", "c", capacity=25)
+        G.add_edge("b", "c", capacity=12)
+        G.add_edge("a", "t", capacity=60)
+        G.add_edge("c", "t")
+
+        # H
+        # {
+        #     "s": {"a": 85, "b": 12},
+        #     "a": {"c": 25, "t": 60},
+        #     "b": {"c": 12},
+        #     "c": {"t": 37},
+        #     "t": {},
+        # }
+
+        compare_flows_and_cuts(G, "s", "t", 97)
+
+        # DiGraph with infinite capacity digon
+        G = nx.DiGraph()
+        G.add_edge("s", "a", capacity=85)
+        G.add_edge("s", "b", capacity=30)
+        G.add_edge("a", "c")
+        G.add_edge("c", "a")
+        G.add_edge("b", "c", capacity=12)
+        G.add_edge("a", "t", capacity=60)
+        G.add_edge("c", "t", capacity=37)
+
+        # H
+        # {
+        #     "s": {"a": 85, "b": 12},
+        #     "a": {"c": 25, "t": 60},
+        #     "c": {"a": 0, "t": 37},
+        #     "b": {"c": 12},
+        #     "t": {},
+        # }
+
+        compare_flows_and_cuts(G, "s", "t", 97)
+
+    def test_digraph_infcap_path(self):
+        # Graph with infinite capacity (s, t)-path
+        G = nx.DiGraph()
+        G.add_edge("s", "a")
+        G.add_edge("s", "b", capacity=30)
+        G.add_edge("a", "c")
+        G.add_edge("b", "c", capacity=12)
+        G.add_edge("a", "t", capacity=60)
+        G.add_edge("c", "t")
+
+        for flow_func in all_funcs:
+            pytest.raises(nx.NetworkXUnbounded, flow_func, G, "s", "t")
+
+    def test_graph_infcap_edges(self):
+        # Undirected graph with infinite capacity edges
+        G = nx.Graph()
+        G.add_edge("s", "a")
+        G.add_edge("s", "b", capacity=30)
+        G.add_edge("a", "c", capacity=25)
+        G.add_edge("b", "c", capacity=12)
+        G.add_edge("a", "t", capacity=60)
+        G.add_edge("c", "t")
+
+        # H
+        # {
+        #     "s": {"a": 85, "b": 12},
+        #     "a": {"c": 25, "s": 85, "t": 60},
+        #     "b": {"c": 12, "s": 12},
+        #     "c": {"a": 25, "b": 12, "t": 37},
+        #     "t": {"a": 60, "c": 37},
+        # }
+
+        compare_flows_and_cuts(G, "s", "t", 97)
+
+    def test_digraph5(self):
+        # From ticket #429 by mfrasca.
+        G = nx.DiGraph()
+        G.add_edge("s", "a", capacity=2)
+        G.add_edge("s", "b", capacity=2)
+        G.add_edge("a", "b", capacity=5)
+        G.add_edge("a", "t", capacity=1)
+        G.add_edge("b", "a", capacity=1)
+        G.add_edge("b", "t", capacity=3)
+        # flow solution
+        # {
+        #     "a": {"b": 1, "t": 1},
+        #     "b": {"a": 0, "t": 3},
+        #     "s": {"a": 2, "b": 2},
+        #     "t": {},
+        # }
+        compare_flows_and_cuts(G, "s", "t", 4)
+
+    def test_disconnected(self):
+        G = nx.Graph()
+        G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity")
+        G.remove_node(1)
+        assert nx.maximum_flow_value(G, 0, 3) == 0
+        # flow solution
+        # {0: {}, 2: {3: 0}, 3: {2: 0}}
+        compare_flows_and_cuts(G, 0, 3, 0)
+
+    def test_source_target_not_in_graph(self):
+        G = nx.Graph()
+        G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity")
+        G.remove_node(0)
+        for flow_func in all_funcs:
+            pytest.raises(nx.NetworkXError, flow_func, G, 0, 3)
+        G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity")
+        G.remove_node(3)
+        for flow_func in all_funcs:
+            pytest.raises(nx.NetworkXError, flow_func, G, 0, 3)
+
+    def test_source_target_coincide(self):
+        G = nx.Graph()
+        G.add_node(0)
+        for flow_func in all_funcs:
+            pytest.raises(nx.NetworkXError, flow_func, G, 0, 0)
+
+    def test_multigraphs_raise(self):
+        G = nx.MultiGraph()
+        M = nx.MultiDiGraph()
+        G.add_edges_from([(0, 1), (1, 0)], capacity=True)
+        for flow_func in all_funcs:
+            pytest.raises(nx.NetworkXError, flow_func, G, 0, 0)
+
+
+class TestMaxFlowMinCutInterface:
+    def setup_method(self):
+        G = nx.DiGraph()
+        G.add_edge("x", "a", capacity=3.0)
+        G.add_edge("x", "b", capacity=1.0)
+        G.add_edge("a", "c", capacity=3.0)
+        G.add_edge("b", "c", capacity=5.0)
+        G.add_edge("b", "d", capacity=4.0)
+        G.add_edge("d", "e", capacity=2.0)
+        G.add_edge("c", "y", capacity=2.0)
+        G.add_edge("e", "y", capacity=3.0)
+        self.G = G
+        H = nx.DiGraph()
+        H.add_edge(0, 1, capacity=1.0)
+        H.add_edge(1, 2, capacity=1.0)
+        self.H = H
+
+    def test_flow_func_not_callable(self):
+        elements = ["this_should_be_callable", 10, {1, 2, 3}]
+        G = nx.Graph()
+        G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity")
+        for flow_func in interface_funcs:
+            for element in elements:
+                pytest.raises(nx.NetworkXError, flow_func, G, 0, 1, flow_func=element)
+                pytest.raises(nx.NetworkXError, flow_func, G, 0, 1, flow_func=element)
+
+    def test_flow_func_parameters(self):
+        G = self.G
+        fv = 3.0
+        for interface_func in interface_funcs:
+            for flow_func in flow_funcs:
+                errmsg = (
+                    f"Assertion failed in function: {flow_func.__name__} "
+                    f"in interface {interface_func.__name__}"
+                )
+                result = interface_func(G, "x", "y", flow_func=flow_func)
+                if interface_func in max_min_funcs:
+                    result = result[0]
+                assert fv == result, errmsg
+
+    def test_minimum_cut_no_cutoff(self):
+        G = self.G
+        pytest.raises(
+            nx.NetworkXError,
+            nx.minimum_cut,
+            G,
+            "x",
+            "y",
+            flow_func=preflow_push,
+            cutoff=1.0,
+        )
+        pytest.raises(
+            nx.NetworkXError,
+            nx.minimum_cut_value,
+            G,
+            "x",
+            "y",
+            flow_func=preflow_push,
+            cutoff=1.0,
+        )
+
+    def test_kwargs(self):
+        G = self.H
+        fv = 1.0
+        to_test = (
+            (shortest_augmenting_path, {"two_phase": True}),
+            (preflow_push, {"global_relabel_freq": 5}),
+        )
+        for interface_func in interface_funcs:
+            for flow_func, kwargs in to_test:
+                errmsg = (
+                    f"Assertion failed in function: {flow_func.__name__} "
+                    f"in interface {interface_func.__name__}"
+                )
+                result = interface_func(G, 0, 2, flow_func=flow_func, **kwargs)
+                if interface_func in max_min_funcs:
+                    result = result[0]
+                assert fv == result, errmsg
+
+    def test_kwargs_default_flow_func(self):
+        G = self.H
+        for interface_func in interface_funcs:
+            pytest.raises(
+                nx.NetworkXError, interface_func, G, 0, 1, global_relabel_freq=2
+            )
+
+    def test_reusing_residual(self):
+        G = self.G
+        fv = 3.0
+        s, t = "x", "y"
+        R = build_residual_network(G, "capacity")
+        for interface_func in interface_funcs:
+            for flow_func in flow_funcs:
+                errmsg = (
+                    f"Assertion failed in function: {flow_func.__name__} "
+                    f"in interface {interface_func.__name__}"
+                )
+                for i in range(3):
+                    result = interface_func(
+                        G, "x", "y", flow_func=flow_func, residual=R
+                    )
+                    if interface_func in max_min_funcs:
+                        result = result[0]
+                    assert fv == result, errmsg
+
+
+# Tests specific to one algorithm
+def test_preflow_push_global_relabel_freq():
+    G = nx.DiGraph()
+    G.add_edge(1, 2, capacity=1)
+    R = preflow_push(G, 1, 2, global_relabel_freq=None)
+    assert R.graph["flow_value"] == 1
+    pytest.raises(nx.NetworkXError, preflow_push, G, 1, 2, global_relabel_freq=-1)
+
+
+def test_preflow_push_makes_enough_space():
+    # From ticket #1542
+    G = nx.DiGraph()
+    nx.add_path(G, [0, 1, 3], capacity=1)
+    nx.add_path(G, [1, 2, 3], capacity=1)
+    R = preflow_push(G, 0, 3, value_only=False)
+    assert R.graph["flow_value"] == 1
+
+
+def test_shortest_augmenting_path_two_phase():
+    k = 5
+    p = 1000
+    G = nx.DiGraph()
+    for i in range(k):
+        G.add_edge("s", (i, 0), capacity=1)
+        nx.add_path(G, ((i, j) for j in range(p)), capacity=1)
+        G.add_edge((i, p - 1), "t", capacity=1)
+    R = shortest_augmenting_path(G, "s", "t", two_phase=True)
+    assert R.graph["flow_value"] == k
+    R = shortest_augmenting_path(G, "s", "t", two_phase=False)
+    assert R.graph["flow_value"] == k
+
+
+class TestCutoff:
+    def test_cutoff(self):
+        k = 5
+        p = 1000
+        G = nx.DiGraph()
+        for i in range(k):
+            G.add_edge("s", (i, 0), capacity=2)
+            nx.add_path(G, ((i, j) for j in range(p)), capacity=2)
+            G.add_edge((i, p - 1), "t", capacity=2)
+        R = shortest_augmenting_path(G, "s", "t", two_phase=True, cutoff=k)
+        assert k <= R.graph["flow_value"] <= (2 * k)
+        R = shortest_augmenting_path(G, "s", "t", two_phase=False, cutoff=k)
+        assert k <= R.graph["flow_value"] <= (2 * k)
+        R = edmonds_karp(G, "s", "t", cutoff=k)
+        assert k <= R.graph["flow_value"] <= (2 * k)
+        R = dinitz(G, "s", "t", cutoff=k)
+        assert k <= R.graph["flow_value"] <= (2 * k)
+        R = boykov_kolmogorov(G, "s", "t", cutoff=k)
+        assert k <= R.graph["flow_value"] <= (2 * k)
+
+    def test_complete_graph_cutoff(self):
+        G = nx.complete_graph(5)
+        nx.set_edge_attributes(G, {(u, v): 1 for u, v in G.edges()}, "capacity")
+        for flow_func in [
+            shortest_augmenting_path,
+            edmonds_karp,
+            dinitz,
+            boykov_kolmogorov,
+        ]:
+            for cutoff in [3, 2, 1]:
+                result = nx.maximum_flow_value(
+                    G, 0, 4, flow_func=flow_func, cutoff=cutoff
+                )
+                assert cutoff == result, f"cutoff error in {flow_func.__name__}"
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow_large_graph.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow_large_graph.py
new file mode 100644
index 00000000..b395cbc8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow_large_graph.py
@@ -0,0 +1,156 @@
+"""Maximum flow algorithms test suite on large graphs."""
+
+import bz2
+import importlib.resources
+import os
+import pickle
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms.flow import (
+    boykov_kolmogorov,
+    build_flow_dict,
+    build_residual_network,
+    dinitz,
+    edmonds_karp,
+    preflow_push,
+    shortest_augmenting_path,
+)
+
+flow_funcs = [
+    boykov_kolmogorov,
+    dinitz,
+    edmonds_karp,
+    preflow_push,
+    shortest_augmenting_path,
+]
+
+
+def gen_pyramid(N):
+    # This graph admits a flow of value 1 for which every arc is at
+    # capacity (except the arcs incident to the sink which have
+    # infinite capacity).
+    G = nx.DiGraph()
+
+    for i in range(N - 1):
+        cap = 1.0 / (i + 2)
+        for j in range(i + 1):
+            G.add_edge((i, j), (i + 1, j), capacity=cap)
+            cap = 1.0 / (i + 1) - cap
+            G.add_edge((i, j), (i + 1, j + 1), capacity=cap)
+            cap = 1.0 / (i + 2) - cap
+
+    for j in range(N):
+        G.add_edge((N - 1, j), "t")
+
+    return G
+
+
+def read_graph(name):
+    fname = (
+        importlib.resources.files("networkx.algorithms.flow.tests")
+        / f"{name}.gpickle.bz2"
+    )
+
+    with bz2.BZ2File(fname, "rb") as f:
+        G = pickle.load(f)
+    return G
+
+
+def validate_flows(G, s, t, soln_value, R, flow_func):
+    flow_value = R.graph["flow_value"]
+    flow_dict = build_flow_dict(G, R)
+    errmsg = f"Assertion failed in function: {flow_func.__name__}"
+    assert soln_value == flow_value, errmsg
+    assert set(G) == set(flow_dict), errmsg
+    for u in G:
+        assert set(G[u]) == set(flow_dict[u]), errmsg
+    excess = {u: 0 for u in flow_dict}
+    for u in flow_dict:
+        for v, flow in flow_dict[u].items():
+            assert flow <= G[u][v].get("capacity", float("inf")), errmsg
+            assert flow >= 0, errmsg
+            excess[u] -= flow
+            excess[v] += flow
+    for u, exc in excess.items():
+        if u == s:
+            assert exc == -soln_value, errmsg
+        elif u == t:
+            assert exc == soln_value, errmsg
+        else:
+            assert exc == 0, errmsg
+
+
+class TestMaxflowLargeGraph:
+    def test_complete_graph(self):
+        N = 50
+        G = nx.complete_graph(N)
+        nx.set_edge_attributes(G, 5, "capacity")
+        R = build_residual_network(G, "capacity")
+        kwargs = {"residual": R}
+
+        for flow_func in flow_funcs:
+            kwargs["flow_func"] = flow_func
+            errmsg = f"Assertion failed in function: {flow_func.__name__}"
+            flow_value = nx.maximum_flow_value(G, 1, 2, **kwargs)
+            assert flow_value == 5 * (N - 1), errmsg
+
+    def test_pyramid(self):
+        N = 10
+        # N = 100 # this gives a graph with 5051 nodes
+        G = gen_pyramid(N)
+        R = build_residual_network(G, "capacity")
+        kwargs = {"residual": R}
+
+        for flow_func in flow_funcs:
+            kwargs["flow_func"] = flow_func
+            errmsg = f"Assertion failed in function: {flow_func.__name__}"
+            flow_value = nx.maximum_flow_value(G, (0, 0), "t", **kwargs)
+            assert flow_value == pytest.approx(1.0, abs=1e-7)
+
+    def test_gl1(self):
+        G = read_graph("gl1")
+        s = 1
+        t = len(G)
+        R = build_residual_network(G, "capacity")
+        kwargs = {"residual": R}
+
+        # do one flow_func to save time
+        flow_func = flow_funcs[0]
+        validate_flows(G, s, t, 156545, flow_func(G, s, t, **kwargs), flow_func)
+
+    #        for flow_func in flow_funcs:
+    #            validate_flows(G, s, t, 156545, flow_func(G, s, t, **kwargs),
+    #                           flow_func)
+
+    @pytest.mark.slow
+    def test_gw1(self):
+        G = read_graph("gw1")
+        s = 1
+        t = len(G)
+        R = build_residual_network(G, "capacity")
+        kwargs = {"residual": R}
+
+        for flow_func in flow_funcs:
+            validate_flows(G, s, t, 1202018, flow_func(G, s, t, **kwargs), flow_func)
+
+    def test_wlm3(self):
+        G = read_graph("wlm3")
+        s = 1
+        t = len(G)
+        R = build_residual_network(G, "capacity")
+        kwargs = {"residual": R}
+
+        # do one flow_func to save time
+        flow_func = flow_funcs[0]
+        validate_flows(G, s, t, 11875108, flow_func(G, s, t, **kwargs), flow_func)
+
+    #        for flow_func in flow_funcs:
+    #            validate_flows(G, s, t, 11875108, flow_func(G, s, t, **kwargs),
+    #                           flow_func)
+
+    def test_preflow_push_global_relabel(self):
+        G = read_graph("gw1")
+        R = preflow_push(G, 1, len(G), global_relabel_freq=50)
+        assert R.graph["flow_value"] == 1202018
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_mincost.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_mincost.py
new file mode 100644
index 00000000..5b1794b1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_mincost.py
@@ -0,0 +1,476 @@
+import bz2
+import importlib.resources
+import os
+import pickle
+
+import pytest
+
+import networkx as nx
+
+
+class TestMinCostFlow:
+    def test_simple_digraph(self):
+        G = nx.DiGraph()
+        G.add_node("a", demand=-5)
+        G.add_node("d", demand=5)
+        G.add_edge("a", "b", weight=3, capacity=4)
+        G.add_edge("a", "c", weight=6, capacity=10)
+        G.add_edge("b", "d", weight=1, capacity=9)
+        G.add_edge("c", "d", weight=2, capacity=5)
+        flowCost, H = nx.network_simplex(G)
+        soln = {"a": {"b": 4, "c": 1}, "b": {"d": 4}, "c": {"d": 1}, "d": {}}
+        assert flowCost == 24
+        assert nx.min_cost_flow_cost(G) == 24
+        assert H == soln
+        assert nx.min_cost_flow(G) == soln
+        assert nx.cost_of_flow(G, H) == 24
+
+        flowCost, H = nx.capacity_scaling(G)
+        assert flowCost == 24
+        assert nx.cost_of_flow(G, H) == 24
+        assert H == soln
+
+    def test_negcycle_infcap(self):
+        G = nx.DiGraph()
+        G.add_node("s", demand=-5)
+        G.add_node("t", demand=5)
+        G.add_edge("s", "a", weight=1, capacity=3)
+        G.add_edge("a", "b", weight=3)
+        G.add_edge("c", "a", weight=-6)
+        G.add_edge("b", "d", weight=1)
+        G.add_edge("d", "c", weight=-2)
+        G.add_edge("d", "t", weight=1, capacity=3)
+        pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
+        pytest.raises(nx.NetworkXUnbounded, nx.capacity_scaling, G)
+
+    def test_sum_demands_not_zero(self):
+        G = nx.DiGraph()
+        G.add_node("s", demand=-5)
+        G.add_node("t", demand=4)
+        G.add_edge("s", "a", weight=1, capacity=3)
+        G.add_edge("a", "b", weight=3)
+        G.add_edge("a", "c", weight=-6)
+        G.add_edge("b", "d", weight=1)
+        G.add_edge("c", "d", weight=-2)
+        G.add_edge("d", "t", weight=1, capacity=3)
+        pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
+        pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
+
+    def test_no_flow_satisfying_demands(self):
+        G = nx.DiGraph()
+        G.add_node("s", demand=-5)
+        G.add_node("t", demand=5)
+        G.add_edge("s", "a", weight=1, capacity=3)
+        G.add_edge("a", "b", weight=3)
+        G.add_edge("a", "c", weight=-6)
+        G.add_edge("b", "d", weight=1)
+        G.add_edge("c", "d", weight=-2)
+        G.add_edge("d", "t", weight=1, capacity=3)
+        pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
+        pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
+
+    def test_transshipment(self):
+        G = nx.DiGraph()
+        G.add_node("a", demand=1)
+        G.add_node("b", demand=-2)
+        G.add_node("c", demand=-2)
+        G.add_node("d", demand=3)
+        G.add_node("e", demand=-4)
+        G.add_node("f", demand=-4)
+        G.add_node("g", demand=3)
+        G.add_node("h", demand=2)
+        G.add_node("r", demand=3)
+        G.add_edge("a", "c", weight=3)
+        G.add_edge("r", "a", weight=2)
+        G.add_edge("b", "a", weight=9)
+        G.add_edge("r", "c", weight=0)
+        G.add_edge("b", "r", weight=-6)
+        G.add_edge("c", "d", weight=5)
+        G.add_edge("e", "r", weight=4)
+        G.add_edge("e", "f", weight=3)
+        G.add_edge("h", "b", weight=4)
+        G.add_edge("f", "d", weight=7)
+        G.add_edge("f", "h", weight=12)
+        G.add_edge("g", "d", weight=12)
+        G.add_edge("f", "g", weight=-1)
+        G.add_edge("h", "g", weight=-10)
+        flowCost, H = nx.network_simplex(G)
+        soln = {
+            "a": {"c": 0},
+            "b": {"a": 0, "r": 2},
+            "c": {"d": 3},
+            "d": {},
+            "e": {"r": 3, "f": 1},
+            "f": {"d": 0, "g": 3, "h": 2},
+            "g": {"d": 0},
+            "h": {"b": 0, "g": 0},
+            "r": {"a": 1, "c": 1},
+        }
+        assert flowCost == 41
+        assert nx.min_cost_flow_cost(G) == 41
+        assert H == soln
+        assert nx.min_cost_flow(G) == soln
+        assert nx.cost_of_flow(G, H) == 41
+
+        flowCost, H = nx.capacity_scaling(G)
+        assert flowCost == 41
+        assert nx.cost_of_flow(G, H) == 41
+        assert H == soln
+
+    def test_max_flow_min_cost(self):
+        G = nx.DiGraph()
+        G.add_edge("s", "a", bandwidth=6)
+        G.add_edge("s", "c", bandwidth=10, cost=10)
+        G.add_edge("a", "b", cost=6)
+        G.add_edge("b", "d", bandwidth=8, cost=7)
+        G.add_edge("c", "d", cost=10)
+        G.add_edge("d", "t", bandwidth=5, cost=5)
+        soln = {
+            "s": {"a": 5, "c": 0},
+            "a": {"b": 5},
+            "b": {"d": 5},
+            "c": {"d": 0},
+            "d": {"t": 5},
+            "t": {},
+        }
+        flow = nx.max_flow_min_cost(G, "s", "t", capacity="bandwidth", weight="cost")
+        assert flow == soln
+        assert nx.cost_of_flow(G, flow, weight="cost") == 90
+
+        G.add_edge("t", "s", cost=-100)
+        flowCost, flow = nx.capacity_scaling(G, capacity="bandwidth", weight="cost")
+        G.remove_edge("t", "s")
+        assert flowCost == -410
+        assert flow["t"]["s"] == 5
+        del flow["t"]["s"]
+        assert flow == soln
+        assert nx.cost_of_flow(G, flow, weight="cost") == 90
+
+    def test_digraph1(self):
+        # From Bradley, S. P., Hax, A. C. and Magnanti, T. L. Applied
+        # Mathematical Programming. Addison-Wesley, 1977.
+        G = nx.DiGraph()
+        G.add_node(1, demand=-20)
+        G.add_node(4, demand=5)
+        G.add_node(5, demand=15)
+        G.add_edges_from(
+            [
+                (1, 2, {"capacity": 15, "weight": 4}),
+                (1, 3, {"capacity": 8, "weight": 4}),
+                (2, 3, {"weight": 2}),
+                (2, 4, {"capacity": 4, "weight": 2}),
+                (2, 5, {"capacity": 10, "weight": 6}),
+                (3, 4, {"capacity": 15, "weight": 1}),
+                (3, 5, {"capacity": 5, "weight": 3}),
+                (4, 5, {"weight": 2}),
+                (5, 3, {"capacity": 4, "weight": 1}),
+            ]
+        )
+        flowCost, H = nx.network_simplex(G)
+        soln = {
+            1: {2: 12, 3: 8},
+            2: {3: 8, 4: 4, 5: 0},
+            3: {4: 11, 5: 5},
+            4: {5: 10},
+            5: {3: 0},
+        }
+        assert flowCost == 150
+        assert nx.min_cost_flow_cost(G) == 150
+        assert H == soln
+        assert nx.min_cost_flow(G) == soln
+        assert nx.cost_of_flow(G, H) == 150
+
+        flowCost, H = nx.capacity_scaling(G)
+        assert flowCost == 150
+        assert H == soln
+        assert nx.cost_of_flow(G, H) == 150
+
+    def test_digraph2(self):
+        # Example from ticket #430 from mfrasca. Original source:
+        # http://www.cs.princeton.edu/courses/archive/spr03/cs226/lectures/mincost.4up.pdf, slide 11.
+        G = nx.DiGraph()
+        G.add_edge("s", 1, capacity=12)
+        G.add_edge("s", 2, capacity=6)
+        G.add_edge("s", 3, capacity=14)
+        G.add_edge(1, 2, capacity=11, weight=4)
+        G.add_edge(2, 3, capacity=9, weight=6)
+        G.add_edge(1, 4, capacity=5, weight=5)
+        G.add_edge(1, 5, capacity=2, weight=12)
+        G.add_edge(2, 5, capacity=4, weight=4)
+        G.add_edge(2, 6, capacity=2, weight=6)
+        G.add_edge(3, 6, capacity=31, weight=3)
+        G.add_edge(4, 5, capacity=18, weight=4)
+        G.add_edge(5, 6, capacity=9, weight=5)
+        G.add_edge(4, "t", capacity=3)
+        G.add_edge(5, "t", capacity=7)
+        G.add_edge(6, "t", capacity=22)
+        flow = nx.max_flow_min_cost(G, "s", "t")
+        soln = {
+            1: {2: 6, 4: 5, 5: 1},
+            2: {3: 6, 5: 4, 6: 2},
+            3: {6: 20},
+            4: {5: 2, "t": 3},
+            5: {6: 0, "t": 7},
+            6: {"t": 22},
+            "s": {1: 12, 2: 6, 3: 14},
+            "t": {},
+        }
+        assert flow == soln
+
+        G.add_edge("t", "s", weight=-100)
+        flowCost, flow = nx.capacity_scaling(G)
+        G.remove_edge("t", "s")
+        assert flow["t"]["s"] == 32
+        assert flowCost == -3007
+        del flow["t"]["s"]
+        assert flow == soln
+        assert nx.cost_of_flow(G, flow) == 193
+
+    def test_digraph3(self):
+        """Combinatorial Optimization: Algorithms and Complexity,
+        Papadimitriou Steiglitz at page 140 has an example, 7.1, but that
+        admits multiple solutions, so I alter it a bit. From ticket #430
+        by mfrasca."""
+
+        G = nx.DiGraph()
+        G.add_edge("s", "a")
+        G["s"]["a"].update({0: 2, 1: 4})
+        G.add_edge("s", "b")
+        G["s"]["b"].update({0: 2, 1: 1})
+        G.add_edge("a", "b")
+        G["a"]["b"].update({0: 5, 1: 2})
+        G.add_edge("a", "t")
+        G["a"]["t"].update({0: 1, 1: 5})
+        G.add_edge("b", "a")
+        G["b"]["a"].update({0: 1, 1: 3})
+        G.add_edge("b", "t")
+        G["b"]["t"].update({0: 3, 1: 2})
+
+        "PS.ex.7.1: testing main function"
+        sol = nx.max_flow_min_cost(G, "s", "t", capacity=0, weight=1)
+        flow = sum(v for v in sol["s"].values())
+        assert 4 == flow
+        assert 23 == nx.cost_of_flow(G, sol, weight=1)
+        assert sol["s"] == {"a": 2, "b": 2}
+        assert sol["a"] == {"b": 1, "t": 1}
+        assert sol["b"] == {"a": 0, "t": 3}
+        assert sol["t"] == {}
+
+        G.add_edge("t", "s")
+        G["t"]["s"].update({1: -100})
+        flowCost, sol = nx.capacity_scaling(G, capacity=0, weight=1)
+        G.remove_edge("t", "s")
+        flow = sum(v for v in sol["s"].values())
+        assert 4 == flow
+        assert sol["t"]["s"] == 4
+        assert flowCost == -377
+        del sol["t"]["s"]
+        assert sol["s"] == {"a": 2, "b": 2}
+        assert sol["a"] == {"b": 1, "t": 1}
+        assert sol["b"] == {"a": 0, "t": 3}
+        assert sol["t"] == {}
+        assert nx.cost_of_flow(G, sol, weight=1) == 23
+
+    def test_zero_capacity_edges(self):
+        """Address issue raised in ticket #617 by arv."""
+        G = nx.DiGraph()
+        G.add_edges_from(
+            [
+                (1, 2, {"capacity": 1, "weight": 1}),
+                (1, 5, {"capacity": 1, "weight": 1}),
+                (2, 3, {"capacity": 0, "weight": 1}),
+                (2, 5, {"capacity": 1, "weight": 1}),
+                (5, 3, {"capacity": 2, "weight": 1}),
+                (5, 4, {"capacity": 0, "weight": 1}),
+                (3, 4, {"capacity": 2, "weight": 1}),
+            ]
+        )
+        G.nodes[1]["demand"] = -1
+        G.nodes[2]["demand"] = -1
+        G.nodes[4]["demand"] = 2
+
+        flowCost, H = nx.network_simplex(G)
+        soln = {1: {2: 0, 5: 1}, 2: {3: 0, 5: 1}, 3: {4: 2}, 4: {}, 5: {3: 2, 4: 0}}
+        assert flowCost == 6
+        assert nx.min_cost_flow_cost(G) == 6
+        assert H == soln
+        assert nx.min_cost_flow(G) == soln
+        assert nx.cost_of_flow(G, H) == 6
+
+        flowCost, H = nx.capacity_scaling(G)
+        assert flowCost == 6
+        assert H == soln
+        assert nx.cost_of_flow(G, H) == 6
+
+    def test_digon(self):
+        """Check if digons are handled properly. Taken from ticket
+        #618 by arv."""
+        nodes = [(1, {}), (2, {"demand": -4}), (3, {"demand": 4})]
+        edges = [
+            (1, 2, {"capacity": 3, "weight": 600000}),
+            (2, 1, {"capacity": 2, "weight": 0}),
+            (2, 3, {"capacity": 5, "weight": 714285}),
+            (3, 2, {"capacity": 2, "weight": 0}),
+        ]
+        G = nx.DiGraph(edges)
+        G.add_nodes_from(nodes)
+        flowCost, H = nx.network_simplex(G)
+        soln = {1: {2: 0}, 2: {1: 0, 3: 4}, 3: {2: 0}}
+        assert flowCost == 2857140
+        assert nx.min_cost_flow_cost(G) == 2857140
+        assert H == soln
+        assert nx.min_cost_flow(G) == soln
+        assert nx.cost_of_flow(G, H) == 2857140
+
+        flowCost, H = nx.capacity_scaling(G)
+        assert flowCost == 2857140
+        assert H == soln
+        assert nx.cost_of_flow(G, H) == 2857140
+
+    def test_deadend(self):
+        """Check if one-node cycles are handled properly. Taken from ticket
+        #2906 from @sshraven."""
+        G = nx.DiGraph()
+
+        G.add_nodes_from(range(5), demand=0)
+        G.nodes[4]["demand"] = -13
+        G.nodes[3]["demand"] = 13
+
+        G.add_edges_from([(0, 2), (0, 3), (2, 1)], capacity=20, weight=0.1)
+        pytest.raises(nx.NetworkXUnfeasible, nx.min_cost_flow, G)
+
+    def test_infinite_capacity_neg_digon(self):
+        """An infinite capacity negative cost digon results in an unbounded
+        instance."""
+        nodes = [(1, {}), (2, {"demand": -4}), (3, {"demand": 4})]
+        edges = [
+            (1, 2, {"weight": -600}),
+            (2, 1, {"weight": 0}),
+            (2, 3, {"capacity": 5, "weight": 714285}),
+            (3, 2, {"capacity": 2, "weight": 0}),
+        ]
+        G = nx.DiGraph(edges)
+        G.add_nodes_from(nodes)
+        pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G)
+        pytest.raises(nx.NetworkXUnbounded, nx.capacity_scaling, G)
+
+    def test_finite_capacity_neg_digon(self):
+        """The digon should receive the maximum amount of flow it can handle.
+        Taken from ticket #749 by @chuongdo."""
+        G = nx.DiGraph()
+        G.add_edge("a", "b", capacity=1, weight=-1)
+        G.add_edge("b", "a", capacity=1, weight=-1)
+        min_cost = -2
+        assert nx.min_cost_flow_cost(G) == min_cost
+
+        flowCost, H = nx.capacity_scaling(G)
+        assert flowCost == -2
+        assert H == {"a": {"b": 1}, "b": {"a": 1}}
+        assert nx.cost_of_flow(G, H) == -2
+
+    def test_multidigraph(self):
+        """Multidigraphs are acceptable."""
+        G = nx.MultiDiGraph()
+        G.add_weighted_edges_from([(1, 2, 1), (2, 3, 2)], weight="capacity")
+        flowCost, H = nx.network_simplex(G)
+        assert flowCost == 0
+        assert H == {1: {2: {0: 0}}, 2: {3: {0: 0}}, 3: {}}
+
+        flowCost, H = nx.capacity_scaling(G)
+        assert flowCost == 0
+        assert H == {1: {2: {0: 0}}, 2: {3: {0: 0}}, 3: {}}
+
+    def test_negative_selfloops(self):
+        """Negative selfloops should cause an exception if uncapacitated and
+        always be saturated otherwise.
+        """
+        G = nx.DiGraph()
+        G.add_edge(1, 1, weight=-1)
+        pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G)
+        pytest.raises(nx.NetworkXUnbounded, nx.capacity_scaling, G)
+        G[1][1]["capacity"] = 2
+        flowCost, H = nx.network_simplex(G)
+        assert flowCost == -2
+        assert H == {1: {1: 2}}
+        flowCost, H = nx.capacity_scaling(G)
+        assert flowCost == -2
+        assert H == {1: {1: 2}}
+
+        G = nx.MultiDiGraph()
+        G.add_edge(1, 1, "x", weight=-1)
+        G.add_edge(1, 1, "y", weight=1)
+        pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G)
+        pytest.raises(nx.NetworkXUnbounded, nx.capacity_scaling, G)
+        G[1][1]["x"]["capacity"] = 2
+        flowCost, H = nx.network_simplex(G)
+        assert flowCost == -2
+        assert H == {1: {1: {"x": 2, "y": 0}}}
+        flowCost, H = nx.capacity_scaling(G)
+        assert flowCost == -2
+        assert H == {1: {1: {"x": 2, "y": 0}}}
+
+    def test_bone_shaped(self):
+        # From #1283
+        G = nx.DiGraph()
+        G.add_node(0, demand=-4)
+        G.add_node(1, demand=2)
+        G.add_node(2, demand=2)
+        G.add_node(3, demand=4)
+        G.add_node(4, demand=-2)
+        G.add_node(5, demand=-2)
+        G.add_edge(0, 1, capacity=4)
+        G.add_edge(0, 2, capacity=4)
+        G.add_edge(4, 3, capacity=4)
+        G.add_edge(5, 3, capacity=4)
+        G.add_edge(0, 3, capacity=0)
+        flowCost, H = nx.network_simplex(G)
+        assert flowCost == 0
+        assert H == {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}}
+        flowCost, H = nx.capacity_scaling(G)
+        assert flowCost == 0
+        assert H == {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}}
+
+    def test_exceptions(self):
+        G = nx.Graph()
+        pytest.raises(nx.NetworkXNotImplemented, nx.network_simplex, G)
+        pytest.raises(nx.NetworkXNotImplemented, nx.capacity_scaling, G)
+        G = nx.MultiGraph()
+        pytest.raises(nx.NetworkXNotImplemented, nx.network_simplex, G)
+        pytest.raises(nx.NetworkXNotImplemented, nx.capacity_scaling, G)
+        G = nx.DiGraph()
+        pytest.raises(nx.NetworkXError, nx.network_simplex, G)
+        # pytest.raises(nx.NetworkXError, nx.capacity_scaling, G)
+        G.add_node(0, demand=float("inf"))
+        pytest.raises(nx.NetworkXError, nx.network_simplex, G)
+        pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
+        G.nodes[0]["demand"] = 0
+        G.add_node(1, demand=0)
+        G.add_edge(0, 1, weight=-float("inf"))
+        pytest.raises(nx.NetworkXError, nx.network_simplex, G)
+        pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
+        G[0][1]["weight"] = 0
+        G.add_edge(0, 0, weight=float("inf"))
+        pytest.raises(nx.NetworkXError, nx.network_simplex, G)
+        # pytest.raises(nx.NetworkXError, nx.capacity_scaling, G)
+        G[0][0]["weight"] = 0
+        G[0][1]["capacity"] = -1
+        pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
+        # pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
+        G[0][1]["capacity"] = 0
+        G[0][0]["capacity"] = -1
+        pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
+        # pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
+
+    def test_large(self):
+        fname = (
+            importlib.resources.files("networkx.algorithms.flow.tests")
+            / "netgen-2.gpickle.bz2"
+        )
+        with bz2.BZ2File(fname, "rb") as f:
+            G = pickle.load(f)
+        flowCost, flowDict = nx.network_simplex(G)
+        assert 6749969302 == flowCost
+        assert 6749969302 == nx.cost_of_flow(G, flowDict)
+        flowCost, flowDict = nx.capacity_scaling(G)
+        assert 6749969302 == flowCost
+        assert 6749969302 == nx.cost_of_flow(G, flowDict)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_networksimplex.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_networksimplex.py
new file mode 100644
index 00000000..5b3b5f6d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_networksimplex.py
@@ -0,0 +1,387 @@
+import bz2
+import importlib.resources
+import os
+import pickle
+
+import pytest
+
+import networkx as nx
+
+
+@pytest.fixture
+def simple_flow_graph():
+    G = nx.DiGraph()
+    G.add_node("a", demand=0)
+    G.add_node("b", demand=-5)
+    G.add_node("c", demand=50000000)
+    G.add_node("d", demand=-49999995)
+    G.add_edge("a", "b", weight=3, capacity=4)
+    G.add_edge("a", "c", weight=6, capacity=10)
+    G.add_edge("b", "d", weight=1, capacity=9)
+    G.add_edge("c", "d", weight=2, capacity=5)
+    return G
+
+
+@pytest.fixture
+def simple_no_flow_graph():
+    G = nx.DiGraph()
+    G.add_node("s", demand=-5)
+    G.add_node("t", demand=5)
+    G.add_edge("s", "a", weight=1, capacity=3)
+    G.add_edge("a", "b", weight=3)
+    G.add_edge("a", "c", weight=-6)
+    G.add_edge("b", "d", weight=1)
+    G.add_edge("c", "d", weight=-2)
+    G.add_edge("d", "t", weight=1, capacity=3)
+    return G
+
+
+def get_flowcost_from_flowdict(G, flowDict):
+    """Returns flow cost calculated from flow dictionary"""
+    flowCost = 0
+    for u in flowDict:
+        for v in flowDict[u]:
+            flowCost += flowDict[u][v] * G[u][v]["weight"]
+    return flowCost
+
+
+def test_infinite_demand_raise(simple_flow_graph):
+    G = simple_flow_graph
+    inf = float("inf")
+    nx.set_node_attributes(G, {"a": {"demand": inf}})
+    pytest.raises(nx.NetworkXError, nx.network_simplex, G)
+
+
+def test_neg_infinite_demand_raise(simple_flow_graph):
+    G = simple_flow_graph
+    inf = float("inf")
+    nx.set_node_attributes(G, {"a": {"demand": -inf}})
+    pytest.raises(nx.NetworkXError, nx.network_simplex, G)
+
+
+def test_infinite_weight_raise(simple_flow_graph):
+    G = simple_flow_graph
+    inf = float("inf")
+    nx.set_edge_attributes(
+        G, {("a", "b"): {"weight": inf}, ("b", "d"): {"weight": inf}}
+    )
+    pytest.raises(nx.NetworkXError, nx.network_simplex, G)
+
+
+def test_nonzero_net_demand_raise(simple_flow_graph):
+    G = simple_flow_graph
+    nx.set_node_attributes(G, {"b": {"demand": -4}})
+    pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
+
+
+def test_negative_capacity_raise(simple_flow_graph):
+    G = simple_flow_graph
+    nx.set_edge_attributes(G, {("a", "b"): {"weight": 1}, ("b", "d"): {"capacity": -9}})
+    pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
+
+
+def test_no_flow_satisfying_demands(simple_no_flow_graph):
+    G = simple_no_flow_graph
+    pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
+
+
+def test_sum_demands_not_zero(simple_no_flow_graph):
+    G = simple_no_flow_graph
+    nx.set_node_attributes(G, {"t": {"demand": 4}})
+    pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
+
+
+def test_google_or_tools_example():
+    """
+    https://developers.google.com/optimization/flow/mincostflow
+    """
+    G = nx.DiGraph()
+    start_nodes = [0, 0, 1, 1, 1, 2, 2, 3, 4]
+    end_nodes = [1, 2, 2, 3, 4, 3, 4, 4, 2]
+    capacities = [15, 8, 20, 4, 10, 15, 4, 20, 5]
+    unit_costs = [4, 4, 2, 2, 6, 1, 3, 2, 3]
+    supplies = [20, 0, 0, -5, -15]
+    answer = 150
+
+    for i in range(len(supplies)):
+        G.add_node(i, demand=(-1) * supplies[i])  # supplies are negative of demand
+
+    for i in range(len(start_nodes)):
+        G.add_edge(
+            start_nodes[i], end_nodes[i], weight=unit_costs[i], capacity=capacities[i]
+        )
+
+    flowCost, flowDict = nx.network_simplex(G)
+    assert flowCost == answer
+    assert flowCost == get_flowcost_from_flowdict(G, flowDict)
+
+
+def test_google_or_tools_example2():
+    """
+    https://developers.google.com/optimization/flow/mincostflow
+    """
+    G = nx.DiGraph()
+    start_nodes = [0, 0, 1, 1, 1, 2, 2, 3, 4, 3]
+    end_nodes = [1, 2, 2, 3, 4, 3, 4, 4, 2, 5]
+    capacities = [15, 8, 20, 4, 10, 15, 4, 20, 5, 10]
+    unit_costs = [4, 4, 2, 2, 6, 1, 3, 2, 3, 4]
+    supplies = [23, 0, 0, -5, -15, -3]
+    answer = 183
+
+    for i in range(len(supplies)):
+        G.add_node(i, demand=(-1) * supplies[i])  # supplies are negative of demand
+
+    for i in range(len(start_nodes)):
+        G.add_edge(
+            start_nodes[i], end_nodes[i], weight=unit_costs[i], capacity=capacities[i]
+        )
+
+    flowCost, flowDict = nx.network_simplex(G)
+    assert flowCost == answer
+    assert flowCost == get_flowcost_from_flowdict(G, flowDict)
+
+
+def test_large():
+    fname = (
+        importlib.resources.files("networkx.algorithms.flow.tests")
+        / "netgen-2.gpickle.bz2"
+    )
+
+    with bz2.BZ2File(fname, "rb") as f:
+        G = pickle.load(f)
+    flowCost, flowDict = nx.network_simplex(G)
+    assert 6749969302 == flowCost
+    assert 6749969302 == nx.cost_of_flow(G, flowDict)
+
+
+def test_simple_digraph():
+    G = nx.DiGraph()
+    G.add_node("a", demand=-5)
+    G.add_node("d", demand=5)
+    G.add_edge("a", "b", weight=3, capacity=4)
+    G.add_edge("a", "c", weight=6, capacity=10)
+    G.add_edge("b", "d", weight=1, capacity=9)
+    G.add_edge("c", "d", weight=2, capacity=5)
+    flowCost, H = nx.network_simplex(G)
+    soln = {"a": {"b": 4, "c": 1}, "b": {"d": 4}, "c": {"d": 1}, "d": {}}
+    assert flowCost == 24
+    assert nx.min_cost_flow_cost(G) == 24
+    assert H == soln
+
+
+def test_negcycle_infcap():
+    G = nx.DiGraph()
+    G.add_node("s", demand=-5)
+    G.add_node("t", demand=5)
+    G.add_edge("s", "a", weight=1, capacity=3)
+    G.add_edge("a", "b", weight=3)
+    G.add_edge("c", "a", weight=-6)
+    G.add_edge("b", "d", weight=1)
+    G.add_edge("d", "c", weight=-2)
+    G.add_edge("d", "t", weight=1, capacity=3)
+    pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
+
+
+def test_transshipment():
+    G = nx.DiGraph()
+    G.add_node("a", demand=1)
+    G.add_node("b", demand=-2)
+    G.add_node("c", demand=-2)
+    G.add_node("d", demand=3)
+    G.add_node("e", demand=-4)
+    G.add_node("f", demand=-4)
+    G.add_node("g", demand=3)
+    G.add_node("h", demand=2)
+    G.add_node("r", demand=3)
+    G.add_edge("a", "c", weight=3)
+    G.add_edge("r", "a", weight=2)
+    G.add_edge("b", "a", weight=9)
+    G.add_edge("r", "c", weight=0)
+    G.add_edge("b", "r", weight=-6)
+    G.add_edge("c", "d", weight=5)
+    G.add_edge("e", "r", weight=4)
+    G.add_edge("e", "f", weight=3)
+    G.add_edge("h", "b", weight=4)
+    G.add_edge("f", "d", weight=7)
+    G.add_edge("f", "h", weight=12)
+    G.add_edge("g", "d", weight=12)
+    G.add_edge("f", "g", weight=-1)
+    G.add_edge("h", "g", weight=-10)
+    flowCost, H = nx.network_simplex(G)
+    soln = {
+        "a": {"c": 0},
+        "b": {"a": 0, "r": 2},
+        "c": {"d": 3},
+        "d": {},
+        "e": {"r": 3, "f": 1},
+        "f": {"d": 0, "g": 3, "h": 2},
+        "g": {"d": 0},
+        "h": {"b": 0, "g": 0},
+        "r": {"a": 1, "c": 1},
+    }
+    assert flowCost == 41
+    assert H == soln
+
+
+def test_digraph1():
+    # From Bradley, S. P., Hax, A. C. and Magnanti, T. L. Applied
+    # Mathematical Programming. Addison-Wesley, 1977.
+    G = nx.DiGraph()
+    G.add_node(1, demand=-20)
+    G.add_node(4, demand=5)
+    G.add_node(5, demand=15)
+    G.add_edges_from(
+        [
+            (1, 2, {"capacity": 15, "weight": 4}),
+            (1, 3, {"capacity": 8, "weight": 4}),
+            (2, 3, {"weight": 2}),
+            (2, 4, {"capacity": 4, "weight": 2}),
+            (2, 5, {"capacity": 10, "weight": 6}),
+            (3, 4, {"capacity": 15, "weight": 1}),
+            (3, 5, {"capacity": 5, "weight": 3}),
+            (4, 5, {"weight": 2}),
+            (5, 3, {"capacity": 4, "weight": 1}),
+        ]
+    )
+    flowCost, H = nx.network_simplex(G)
+    soln = {
+        1: {2: 12, 3: 8},
+        2: {3: 8, 4: 4, 5: 0},
+        3: {4: 11, 5: 5},
+        4: {5: 10},
+        5: {3: 0},
+    }
+    assert flowCost == 150
+    assert nx.min_cost_flow_cost(G) == 150
+    assert H == soln
+
+
+def test_zero_capacity_edges():
+    """Address issue raised in ticket #617 by arv."""
+    G = nx.DiGraph()
+    G.add_edges_from(
+        [
+            (1, 2, {"capacity": 1, "weight": 1}),
+            (1, 5, {"capacity": 1, "weight": 1}),
+            (2, 3, {"capacity": 0, "weight": 1}),
+            (2, 5, {"capacity": 1, "weight": 1}),
+            (5, 3, {"capacity": 2, "weight": 1}),
+            (5, 4, {"capacity": 0, "weight": 1}),
+            (3, 4, {"capacity": 2, "weight": 1}),
+        ]
+    )
+    G.nodes[1]["demand"] = -1
+    G.nodes[2]["demand"] = -1
+    G.nodes[4]["demand"] = 2
+
+    flowCost, H = nx.network_simplex(G)
+    soln = {1: {2: 0, 5: 1}, 2: {3: 0, 5: 1}, 3: {4: 2}, 4: {}, 5: {3: 2, 4: 0}}
+    assert flowCost == 6
+    assert nx.min_cost_flow_cost(G) == 6
+    assert H == soln
+
+
+def test_digon():
+    """Check if digons are handled properly. Taken from ticket
+    #618 by arv."""
+    nodes = [(1, {}), (2, {"demand": -4}), (3, {"demand": 4})]
+    edges = [
+        (1, 2, {"capacity": 3, "weight": 600000}),
+        (2, 1, {"capacity": 2, "weight": 0}),
+        (2, 3, {"capacity": 5, "weight": 714285}),
+        (3, 2, {"capacity": 2, "weight": 0}),
+    ]
+    G = nx.DiGraph(edges)
+    G.add_nodes_from(nodes)
+    flowCost, H = nx.network_simplex(G)
+    soln = {1: {2: 0}, 2: {1: 0, 3: 4}, 3: {2: 0}}
+    assert flowCost == 2857140
+
+
+def test_deadend():
+    """Check if one-node cycles are handled properly. Taken from ticket
+    #2906 from @sshraven."""
+    G = nx.DiGraph()
+
+    G.add_nodes_from(range(5), demand=0)
+    G.nodes[4]["demand"] = -13
+    G.nodes[3]["demand"] = 13
+
+    G.add_edges_from([(0, 2), (0, 3), (2, 1)], capacity=20, weight=0.1)
+    pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
+
+
+def test_infinite_capacity_neg_digon():
+    """An infinite capacity negative cost digon results in an unbounded
+    instance."""
+    nodes = [(1, {}), (2, {"demand": -4}), (3, {"demand": 4})]
+    edges = [
+        (1, 2, {"weight": -600}),
+        (2, 1, {"weight": 0}),
+        (2, 3, {"capacity": 5, "weight": 714285}),
+        (3, 2, {"capacity": 2, "weight": 0}),
+    ]
+    G = nx.DiGraph(edges)
+    G.add_nodes_from(nodes)
+    pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G)
+
+
+def test_multidigraph():
+    """Multidigraphs are acceptable."""
+    G = nx.MultiDiGraph()
+    G.add_weighted_edges_from([(1, 2, 1), (2, 3, 2)], weight="capacity")
+    flowCost, H = nx.network_simplex(G)
+    assert flowCost == 0
+    assert H == {1: {2: {0: 0}}, 2: {3: {0: 0}}, 3: {}}
+
+
+def test_negative_selfloops():
+    """Negative selfloops should cause an exception if uncapacitated and
+    always be saturated otherwise.
+    """
+    G = nx.DiGraph()
+    G.add_edge(1, 1, weight=-1)
+    pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G)
+
+    G[1][1]["capacity"] = 2
+    flowCost, H = nx.network_simplex(G)
+    assert flowCost == -2
+    assert H == {1: {1: 2}}
+
+    G = nx.MultiDiGraph()
+    G.add_edge(1, 1, "x", weight=-1)
+    G.add_edge(1, 1, "y", weight=1)
+    pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G)
+
+    G[1][1]["x"]["capacity"] = 2
+    flowCost, H = nx.network_simplex(G)
+    assert flowCost == -2
+    assert H == {1: {1: {"x": 2, "y": 0}}}
+
+
+def test_bone_shaped():
+    # From #1283
+    G = nx.DiGraph()
+    G.add_node(0, demand=-4)
+    G.add_node(1, demand=2)
+    G.add_node(2, demand=2)
+    G.add_node(3, demand=4)
+    G.add_node(4, demand=-2)
+    G.add_node(5, demand=-2)
+    G.add_edge(0, 1, capacity=4)
+    G.add_edge(0, 2, capacity=4)
+    G.add_edge(4, 3, capacity=4)
+    G.add_edge(5, 3, capacity=4)
+    G.add_edge(0, 3, capacity=0)
+    flowCost, H = nx.network_simplex(G)
+    assert flowCost == 0
+    assert H == {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}}
+
+
+def test_graphs_type_exceptions():
+    G = nx.Graph()
+    pytest.raises(nx.NetworkXNotImplemented, nx.network_simplex, G)
+    G = nx.MultiGraph()
+    pytest.raises(nx.NetworkXNotImplemented, nx.network_simplex, G)
+    G = nx.DiGraph()
+    pytest.raises(nx.NetworkXError, nx.network_simplex, G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/wlm3.gpickle.bz2 b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/wlm3.gpickle.bz2
new file mode 100644
index 00000000..8ce935a8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/wlm3.gpickle.bz2
Binary files differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/utils.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/utils.py
new file mode 100644
index 00000000..03f1d10f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/utils.py
@@ -0,0 +1,189 @@
+"""
+Utility classes and functions for network flow algorithms.
+"""
+
+from collections import deque
+
+import networkx as nx
+
+__all__ = [
+    "CurrentEdge",
+    "Level",
+    "GlobalRelabelThreshold",
+    "build_residual_network",
+    "detect_unboundedness",
+    "build_flow_dict",
+]
+
+
+class CurrentEdge:
+    """Mechanism for iterating over out-edges incident to a node in a circular
+    manner. StopIteration exception is raised when wraparound occurs.
+    """
+
+    __slots__ = ("_edges", "_it", "_curr")
+
+    def __init__(self, edges):
+        self._edges = edges
+        if self._edges:
+            self._rewind()
+
+    def get(self):
+        return self._curr
+
+    def move_to_next(self):
+        try:
+            self._curr = next(self._it)
+        except StopIteration:
+            self._rewind()
+            raise
+
+    def _rewind(self):
+        self._it = iter(self._edges.items())
+        self._curr = next(self._it)
+
+
+class Level:
+    """Active and inactive nodes in a level."""
+
+    __slots__ = ("active", "inactive")
+
+    def __init__(self):
+        self.active = set()
+        self.inactive = set()
+
+
+class GlobalRelabelThreshold:
+    """Measurement of work before the global relabeling heuristic should be
+    applied.
+    """
+
+    def __init__(self, n, m, freq):
+        self._threshold = (n + m) / freq if freq else float("inf")
+        self._work = 0
+
+    def add_work(self, work):
+        self._work += work
+
+    def is_reached(self):
+        return self._work >= self._threshold
+
+    def clear_work(self):
+        self._work = 0
+
+
+@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True)
+def build_residual_network(G, capacity):
+    """Build a residual network and initialize a zero flow.
+
+    The residual network :samp:`R` from an input graph :samp:`G` has the
+    same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair
+    of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a
+    self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists
+    in :samp:`G`.
+
+    For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']`
+    is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists
+    in :samp:`G` or zero otherwise. If the capacity is infinite,
+    :samp:`R[u][v]['capacity']` will have a high arbitrary finite value
+    that does not affect the solution of the problem. This value is stored in
+    :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`,
+    :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and
+    satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`.
+
+    The flow value, defined as the total flow into :samp:`t`, the sink, is
+    stored in :samp:`R.graph['flow_value']`. If :samp:`cutoff` is not
+    specified, reachability to :samp:`t` using only edges :samp:`(u, v)` such
+    that :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum
+    :samp:`s`-:samp:`t` cut.
+
+    """
+    if G.is_multigraph():
+        raise nx.NetworkXError("MultiGraph and MultiDiGraph not supported (yet).")
+
+    R = nx.DiGraph()
+    R.__networkx_cache__ = None  # Disable caching
+    R.add_nodes_from(G)
+
+    inf = float("inf")
+    # Extract edges with positive capacities. Self loops excluded.
+    edge_list = [
+        (u, v, attr)
+        for u, v, attr in G.edges(data=True)
+        if u != v and attr.get(capacity, inf) > 0
+    ]
+    # Simulate infinity with three times the sum of the finite edge capacities
+    # or any positive value if the sum is zero. This allows the
+    # infinite-capacity edges to be distinguished for unboundedness detection
+    # and directly participate in residual capacity calculation. If the maximum
+    # flow is finite, these edges cannot appear in the minimum cut and thus
+    # guarantee correctness. Since the residual capacity of an
+    # infinite-capacity edge is always at least 2/3 of inf, while that of an
+    # finite-capacity edge is at most 1/3 of inf, if an operation moves more
+    # than 1/3 of inf units of flow to t, there must be an infinite-capacity
+    # s-t path in G.
+    inf = (
+        3
+        * sum(
+            attr[capacity]
+            for u, v, attr in edge_list
+            if capacity in attr and attr[capacity] != inf
+        )
+        or 1
+    )
+    if G.is_directed():
+        for u, v, attr in edge_list:
+            r = min(attr.get(capacity, inf), inf)
+            if not R.has_edge(u, v):
+                # Both (u, v) and (v, u) must be present in the residual
+                # network.
+                R.add_edge(u, v, capacity=r)
+                R.add_edge(v, u, capacity=0)
+            else:
+                # The edge (u, v) was added when (v, u) was visited.
+                R[u][v]["capacity"] = r
+    else:
+        for u, v, attr in edge_list:
+            # Add a pair of edges with equal residual capacities.
+            r = min(attr.get(capacity, inf), inf)
+            R.add_edge(u, v, capacity=r)
+            R.add_edge(v, u, capacity=r)
+
+    # Record the value simulating infinity.
+    R.graph["inf"] = inf
+
+    return R
+
+
+@nx._dispatchable(
+    graphs="R",
+    preserve_edge_attrs={"R": {"capacity": float("inf")}},
+    preserve_graph_attrs=True,
+)
+def detect_unboundedness(R, s, t):
+    """Detect an infinite-capacity s-t path in R."""
+    q = deque([s])
+    seen = {s}
+    inf = R.graph["inf"]
+    while q:
+        u = q.popleft()
+        for v, attr in R[u].items():
+            if attr["capacity"] == inf and v not in seen:
+                if v == t:
+                    raise nx.NetworkXUnbounded(
+                        "Infinite capacity path, flow unbounded above."
+                    )
+                seen.add(v)
+                q.append(v)
+
+
+@nx._dispatchable(graphs={"G": 0, "R": 1}, preserve_edge_attrs={"R": {"flow": None}})
+def build_flow_dict(G, R):
+    """Build a flow dictionary from a residual network."""
+    flow_dict = {}
+    for u in G:
+        flow_dict[u] = {v: 0 for v in G[u]}
+        flow_dict[u].update(
+            (v, attr["flow"]) for v, attr in R[u].items() if attr["flow"] > 0
+        )
+    return flow_dict
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/graph_hashing.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/graph_hashing.py
new file mode 100644
index 00000000..7ded847f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/graph_hashing.py
@@ -0,0 +1,328 @@
+"""
+Functions for hashing graphs to strings.
+Isomorphic graphs should be assigned identical hashes.
+For now, only Weisfeiler-Lehman hashing is implemented.
+"""
+
+from collections import Counter, defaultdict
+from hashlib import blake2b
+
+import networkx as nx
+
+__all__ = ["weisfeiler_lehman_graph_hash", "weisfeiler_lehman_subgraph_hashes"]
+
+
+def _hash_label(label, digest_size):
+    return blake2b(label.encode("ascii"), digest_size=digest_size).hexdigest()
+
+
+def _init_node_labels(G, edge_attr, node_attr):
+    if node_attr:
+        return {u: str(dd[node_attr]) for u, dd in G.nodes(data=True)}
+    elif edge_attr:
+        return {u: "" for u in G}
+    else:
+        return {u: str(deg) for u, deg in G.degree()}
+
+
+def _neighborhood_aggregate(G, node, node_labels, edge_attr=None):
+    """
+    Compute new labels for given node by aggregating
+    the labels of each node's neighbors.
+    """
+    label_list = []
+    for nbr in G.neighbors(node):
+        prefix = "" if edge_attr is None else str(G[node][nbr][edge_attr])
+        label_list.append(prefix + node_labels[nbr])
+    return node_labels[node] + "".join(sorted(label_list))
+
+
+@nx.utils.not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr")
+def weisfeiler_lehman_graph_hash(
+    G, edge_attr=None, node_attr=None, iterations=3, digest_size=16
+):
+    """Return Weisfeiler Lehman (WL) graph hash.
+
+    The function iteratively aggregates and hashes neighborhoods of each node.
+    After each node's neighbors are hashed to obtain updated node labels,
+    a hashed histogram of resulting labels is returned as the final hash.
+
+    Hashes are identical for isomorphic graphs and there are strong guarantees
+    that non-isomorphic graphs will get different hashes. See [1]_ for details.
+
+    If no node or edge attributes are provided, the degree of each node
+    is used as its initial label.
+    Otherwise, node and/or edge labels are used to compute the hash.
+
+    Parameters
+    ----------
+    G : graph
+        The graph to be hashed.
+        Can have node and/or edge attributes. Can also have no attributes.
+    edge_attr : string, optional (default=None)
+        The key in edge attribute dictionary to be used for hashing.
+        If None, edge labels are ignored.
+    node_attr: string, optional (default=None)
+        The key in node attribute dictionary to be used for hashing.
+        If None, and no edge_attr given, use the degrees of the nodes as labels.
+    iterations: int, optional (default=3)
+        Number of neighbor aggregations to perform.
+        Should be larger for larger graphs.
+    digest_size: int, optional (default=16)
+        Size (in bytes) of blake2b hash digest to use for hashing node labels.
+
+    Returns
+    -------
+    h : string
+        Hexadecimal string corresponding to hash of the input graph.
+
+    Examples
+    --------
+    Two graphs with edge attributes that are isomorphic, except for
+    differences in the edge labels.
+
+    >>> G1 = nx.Graph()
+    >>> G1.add_edges_from(
+    ...     [
+    ...         (1, 2, {"label": "A"}),
+    ...         (2, 3, {"label": "A"}),
+    ...         (3, 1, {"label": "A"}),
+    ...         (1, 4, {"label": "B"}),
+    ...     ]
+    ... )
+    >>> G2 = nx.Graph()
+    >>> G2.add_edges_from(
+    ...     [
+    ...         (5, 6, {"label": "B"}),
+    ...         (6, 7, {"label": "A"}),
+    ...         (7, 5, {"label": "A"}),
+    ...         (7, 8, {"label": "A"}),
+    ...     ]
+    ... )
+
+    Omitting the `edge_attr` option, results in identical hashes.
+
+    >>> nx.weisfeiler_lehman_graph_hash(G1)
+    '7bc4dde9a09d0b94c5097b219891d81a'
+    >>> nx.weisfeiler_lehman_graph_hash(G2)
+    '7bc4dde9a09d0b94c5097b219891d81a'
+
+    With edge labels, the graphs are no longer assigned
+    the same hash digest.
+
+    >>> nx.weisfeiler_lehman_graph_hash(G1, edge_attr="label")
+    'c653d85538bcf041d88c011f4f905f10'
+    >>> nx.weisfeiler_lehman_graph_hash(G2, edge_attr="label")
+    '3dcd84af1ca855d0eff3c978d88e7ec7'
+
+    Notes
+    -----
+    To return the WL hashes of each subgraph of a graph, use
+    `weisfeiler_lehman_subgraph_hashes`
+
+    Similarity between hashes does not imply similarity between graphs.
+
+    References
+    ----------
+    .. [1] Shervashidze, Nino, Pascal Schweitzer, Erik Jan Van Leeuwen,
+       Kurt Mehlhorn, and Karsten M. Borgwardt. Weisfeiler Lehman
+       Graph Kernels. Journal of Machine Learning Research. 2011.
+       http://www.jmlr.org/papers/volume12/shervashidze11a/shervashidze11a.pdf
+
+    See also
+    --------
+    weisfeiler_lehman_subgraph_hashes
+    """
+
+    def weisfeiler_lehman_step(G, labels, edge_attr=None):
+        """
+        Apply neighborhood aggregation to each node
+        in the graph.
+        Computes a dictionary with labels for each node.
+        """
+        new_labels = {}
+        for node in G.nodes():
+            label = _neighborhood_aggregate(G, node, labels, edge_attr=edge_attr)
+            new_labels[node] = _hash_label(label, digest_size)
+        return new_labels
+
+    # set initial node labels
+    node_labels = _init_node_labels(G, edge_attr, node_attr)
+
+    subgraph_hash_counts = []
+    for _ in range(iterations):
+        node_labels = weisfeiler_lehman_step(G, node_labels, edge_attr=edge_attr)
+        counter = Counter(node_labels.values())
+        # sort the counter, extend total counts
+        subgraph_hash_counts.extend(sorted(counter.items(), key=lambda x: x[0]))
+
+    # hash the final counter
+    return _hash_label(str(tuple(subgraph_hash_counts)), digest_size)
+
+
+@nx.utils.not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr")
+def weisfeiler_lehman_subgraph_hashes(
+    G,
+    edge_attr=None,
+    node_attr=None,
+    iterations=3,
+    digest_size=16,
+    include_initial_labels=False,
+):
+    """
+    Return a dictionary of subgraph hashes by node.
+
+    Dictionary keys are nodes in `G`, and values are a list of hashes.
+    Each hash corresponds to a subgraph rooted at a given node u in `G`.
+    Lists of subgraph hashes are sorted in increasing order of depth from
+    their root node, with the hash at index i corresponding to a subgraph
+    of nodes at most i edges distance from u. Thus, each list will contain
+    `iterations` elements - a hash for a subgraph at each depth. If
+    `include_initial_labels` is set to `True`, each list will additionally
+    contain a hash of the initial node label (or equivalently a
+    subgraph of depth 0) prepended, totalling ``iterations + 1`` elements.
+
+    The function iteratively aggregates and hashes neighborhoods of each node.
+    This is achieved for each step by replacing for each node its label from
+    the previous iteration with its hashed 1-hop neighborhood aggregate.
+    The new node label is then appended to a list of node labels for each
+    node.
+
+    To aggregate neighborhoods for a node $u$ at each step, all labels of
+    nodes adjacent to $u$ are concatenated. If the `edge_attr` parameter is set,
+    labels for each neighboring node are prefixed with the value of this attribute
+    along the connecting edge from this neighbor to node $u$. The resulting string
+    is then hashed to compress this information into a fixed digest size.
+
+    Thus, at the $i$-th iteration, nodes within $i$ hops influence any given
+    hashed node label. We can therefore say that at depth $i$ for node $u$
+    we have a hash for a subgraph induced by the $i$-hop neighborhood of $u$.
+
+    The output can be used to create general Weisfeiler-Lehman graph kernels,
+    or generate features for graphs or nodes - for example to generate 'words' in
+    a graph as seen in the 'graph2vec' algorithm.
+    See [1]_ & [2]_ respectively for details.
+
+    Hashes are identical for isomorphic subgraphs and there exist strong
+    guarantees that non-isomorphic graphs will get different hashes.
+    See [1]_ for details.
+
+    If no node or edge attributes are provided, the degree of each node
+    is used as its initial label.
+    Otherwise, node and/or edge labels are used to compute the hash.
+
+    Parameters
+    ----------
+    G : graph
+        The graph to be hashed.
+        Can have node and/or edge attributes. Can also have no attributes.
+    edge_attr : string, optional (default=None)
+        The key in edge attribute dictionary to be used for hashing.
+        If None, edge labels are ignored.
+    node_attr : string, optional (default=None)
+        The key in node attribute dictionary to be used for hashing.
+        If None, and no edge_attr given, use the degrees of the nodes as labels.
+        If None, and edge_attr is given, each node starts with an identical label.
+    iterations : int, optional (default=3)
+        Number of neighbor aggregations to perform.
+        Should be larger for larger graphs.
+    digest_size : int, optional (default=16)
+        Size (in bytes) of blake2b hash digest to use for hashing node labels.
+        The default size is 16 bytes.
+    include_initial_labels : bool, optional (default=False)
+        If True, include the hashed initial node label as the first subgraph
+        hash for each node.
+
+    Returns
+    -------
+    node_subgraph_hashes : dict
+        A dictionary with each key given by a node in G, and each value given
+        by the subgraph hashes in order of depth from the key node.
+
+    Examples
+    --------
+    Finding similar nodes in different graphs:
+
+    >>> G1 = nx.Graph()
+    >>> G1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 5), (4, 6), (5, 7), (6, 7)])
+    >>> G2 = nx.Graph()
+    >>> G2.add_edges_from([(1, 3), (2, 3), (1, 6), (1, 5), (4, 6)])
+    >>> g1_hashes = nx.weisfeiler_lehman_subgraph_hashes(
+    ...     G1, iterations=3, digest_size=8
+    ... )
+    >>> g2_hashes = nx.weisfeiler_lehman_subgraph_hashes(
+    ...     G2, iterations=3, digest_size=8
+    ... )
+
+    Even though G1 and G2 are not isomorphic (they have different numbers of edges),
+    the hash sequence of depth 3 for node 1 in G1 and node 5 in G2 are similar:
+
+    >>> g1_hashes[1]
+    ['a93b64973cfc8897', 'db1b43ae35a1878f', '57872a7d2059c1c0']
+    >>> g2_hashes[5]
+    ['a93b64973cfc8897', 'db1b43ae35a1878f', '1716d2a4012fa4bc']
+
+    The first 2 WL subgraph hashes match. From this we can conclude that it's very
+    likely the neighborhood of 2 hops around these nodes are isomorphic.
+
+    However the 3-hop neighborhoods of ``G1`` and ``G2`` are not isomorphic since the
+    3rd hashes in the lists above are not equal.
+
+    These nodes may be candidates to be classified together since their local topology
+    is similar.
+
+    Notes
+    -----
+    To hash the full graph when subgraph hashes are not needed, use
+    `weisfeiler_lehman_graph_hash` for efficiency.
+
+    Similarity between hashes does not imply similarity between graphs.
+
+    References
+    ----------
+    .. [1] Shervashidze, Nino, Pascal Schweitzer, Erik Jan Van Leeuwen,
+       Kurt Mehlhorn, and Karsten M. Borgwardt. Weisfeiler Lehman
+       Graph Kernels. Journal of Machine Learning Research. 2011.
+       http://www.jmlr.org/papers/volume12/shervashidze11a/shervashidze11a.pdf
+    .. [2] Annamalai Narayanan, Mahinthan Chandramohan, Rajasekar Venkatesan,
+       Lihui Chen, Yang Liu and Shantanu Jaiswa. graph2vec: Learning
+       Distributed Representations of Graphs. arXiv. 2017
+       https://arxiv.org/pdf/1707.05005.pdf
+
+    See also
+    --------
+    weisfeiler_lehman_graph_hash
+    """
+
+    def weisfeiler_lehman_step(G, labels, node_subgraph_hashes, edge_attr=None):
+        """
+        Apply neighborhood aggregation to each node
+        in the graph.
+        Computes a dictionary with labels for each node.
+        Appends the new hashed label to the dictionary of subgraph hashes
+        originating from and indexed by each node in G
+        """
+        new_labels = {}
+        for node in G.nodes():
+            label = _neighborhood_aggregate(G, node, labels, edge_attr=edge_attr)
+            hashed_label = _hash_label(label, digest_size)
+            new_labels[node] = hashed_label
+            node_subgraph_hashes[node].append(hashed_label)
+        return new_labels
+
+    node_labels = _init_node_labels(G, edge_attr, node_attr)
+    if include_initial_labels:
+        node_subgraph_hashes = {
+            k: [_hash_label(v, digest_size)] for k, v in node_labels.items()
+        }
+    else:
+        node_subgraph_hashes = defaultdict(list)
+
+    for _ in range(iterations):
+        node_labels = weisfeiler_lehman_step(
+            G, node_labels, node_subgraph_hashes, edge_attr
+        )
+
+    return dict(node_subgraph_hashes)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/graphical.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/graphical.py
new file mode 100644
index 00000000..d5d82ded
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/graphical.py
@@ -0,0 +1,483 @@
+"""Test sequences for graphiness."""
+
+import heapq
+
+import networkx as nx
+
+__all__ = [
+    "is_graphical",
+    "is_multigraphical",
+    "is_pseudographical",
+    "is_digraphical",
+    "is_valid_degree_sequence_erdos_gallai",
+    "is_valid_degree_sequence_havel_hakimi",
+]
+
+
+@nx._dispatchable(graphs=None)
+def is_graphical(sequence, method="eg"):
+    """Returns True if sequence is a valid degree sequence.
+
+    A degree sequence is valid if some graph can realize it.
+
+    Parameters
+    ----------
+    sequence : list or iterable container
+        A sequence of integer node degrees
+
+    method : "eg" | "hh"  (default: 'eg')
+        The method used to validate the degree sequence.
+        "eg" corresponds to the Erdős-Gallai algorithm
+        [EG1960]_, [choudum1986]_, and
+        "hh" to the Havel-Hakimi algorithm
+        [havel1955]_, [hakimi1962]_, [CL1996]_.
+
+    Returns
+    -------
+    valid : bool
+        True if the sequence is a valid degree sequence and False if not.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4)
+    >>> sequence = (d for n, d in G.degree())
+    >>> nx.is_graphical(sequence)
+    True
+
+    To test a non-graphical sequence:
+    >>> sequence_list = [d for n, d in G.degree()]
+    >>> sequence_list[-1] += 1
+    >>> nx.is_graphical(sequence_list)
+    False
+
+    References
+    ----------
+    .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960.
+    .. [choudum1986] S.A. Choudum. "A simple proof of the Erdős-Gallai theorem on
+       graph sequences." Bulletin of the Australian Mathematical Society, 33,
+       pp 67-70, 1986. https://doi.org/10.1017/S0004972700002872
+    .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs"
+       Casopis Pest. Mat. 80, 477-480, 1955.
+    .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as
+       Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962.
+    .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs",
+       Chapman and Hall/CRC, 1996.
+    """
+    if method == "eg":
+        valid = is_valid_degree_sequence_erdos_gallai(list(sequence))
+    elif method == "hh":
+        valid = is_valid_degree_sequence_havel_hakimi(list(sequence))
+    else:
+        msg = "`method` must be 'eg' or 'hh'"
+        raise nx.NetworkXException(msg)
+    return valid
+
+
+def _basic_graphical_tests(deg_sequence):
+    # Sort and perform some simple tests on the sequence
+    deg_sequence = nx.utils.make_list_of_ints(deg_sequence)
+    p = len(deg_sequence)
+    num_degs = [0] * p
+    dmax, dmin, dsum, n = 0, p, 0, 0
+    for d in deg_sequence:
+        # Reject if degree is negative or larger than the sequence length
+        if d < 0 or d >= p:
+            raise nx.NetworkXUnfeasible
+        # Process only the non-zero integers
+        elif d > 0:
+            dmax, dmin, dsum, n = max(dmax, d), min(dmin, d), dsum + d, n + 1
+            num_degs[d] += 1
+    # Reject sequence if it has odd sum or is oversaturated
+    if dsum % 2 or dsum > n * (n - 1):
+        raise nx.NetworkXUnfeasible
+    return dmax, dmin, dsum, n, num_degs
+
+
+@nx._dispatchable(graphs=None)
+def is_valid_degree_sequence_havel_hakimi(deg_sequence):
+    r"""Returns True if deg_sequence can be realized by a simple graph.
+
+    The validation proceeds using the Havel-Hakimi theorem
+    [havel1955]_, [hakimi1962]_, [CL1996]_.
+    Worst-case run time is $O(s)$ where $s$ is the sum of the sequence.
+
+    Parameters
+    ----------
+    deg_sequence : list
+        A list of integers where each element specifies the degree of a node
+        in a graph.
+
+    Returns
+    -------
+    valid : bool
+        True if deg_sequence is graphical and False if not.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
+    >>> sequence = (d for _, d in G.degree())
+    >>> nx.is_valid_degree_sequence_havel_hakimi(sequence)
+    True
+
+    To test a non-valid sequence:
+    >>> sequence_list = [d for _, d in G.degree()]
+    >>> sequence_list[-1] += 1
+    >>> nx.is_valid_degree_sequence_havel_hakimi(sequence_list)
+    False
+
+    Notes
+    -----
+    The ZZ condition says that for the sequence d if
+
+    .. math::
+        |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4\min(d)}
+
+    then d is graphical.  This was shown in Theorem 6 in [1]_.
+
+    References
+    ----------
+    .. [1] I.E. Zverovich and V.E. Zverovich. "Contributions to the theory
+       of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992).
+    .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs"
+       Casopis Pest. Mat. 80, 477-480, 1955.
+    .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as
+       Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962.
+    .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs",
+       Chapman and Hall/CRC, 1996.
+    """
+    try:
+        dmax, dmin, dsum, n, num_degs = _basic_graphical_tests(deg_sequence)
+    except nx.NetworkXUnfeasible:
+        return False
+    # Accept if sequence has no non-zero degrees or passes the ZZ condition
+    if n == 0 or 4 * dmin * n >= (dmax + dmin + 1) * (dmax + dmin + 1):
+        return True
+
+    modstubs = [0] * (dmax + 1)
+    # Successively reduce degree sequence by removing the maximum degree
+    while n > 0:
+        # Retrieve the maximum degree in the sequence
+        while num_degs[dmax] == 0:
+            dmax -= 1
+        # If there are not enough stubs to connect to, then the sequence is
+        # not graphical
+        if dmax > n - 1:
+            return False
+
+        # Remove largest stub in list
+        num_degs[dmax], n = num_degs[dmax] - 1, n - 1
+        # Reduce the next dmax largest stubs
+        mslen = 0
+        k = dmax
+        for i in range(dmax):
+            while num_degs[k] == 0:
+                k -= 1
+            num_degs[k], n = num_degs[k] - 1, n - 1
+            if k > 1:
+                modstubs[mslen] = k - 1
+                mslen += 1
+        # Add back to the list any non-zero stubs that were removed
+        for i in range(mslen):
+            stub = modstubs[i]
+            num_degs[stub], n = num_degs[stub] + 1, n + 1
+    return True
+
+
+@nx._dispatchable(graphs=None)
+def is_valid_degree_sequence_erdos_gallai(deg_sequence):
+    r"""Returns True if deg_sequence can be realized by a simple graph.
+
+    The validation is done using the Erdős-Gallai theorem [EG1960]_.
+
+    Parameters
+    ----------
+    deg_sequence : list
+        A list of integers
+
+    Returns
+    -------
+    valid : bool
+        True if deg_sequence is graphical and False if not.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
+    >>> sequence = (d for _, d in G.degree())
+    >>> nx.is_valid_degree_sequence_erdos_gallai(sequence)
+    True
+
+    To test a non-valid sequence:
+    >>> sequence_list = [d for _, d in G.degree()]
+    >>> sequence_list[-1] += 1
+    >>> nx.is_valid_degree_sequence_erdos_gallai(sequence_list)
+    False
+
+    Notes
+    -----
+
+    This implementation uses an equivalent form of the Erdős-Gallai criterion.
+    Worst-case run time is $O(n)$ where $n$ is the length of the sequence.
+
+    Specifically, a sequence d is graphical if and only if the
+    sum of the sequence is even and for all strong indices k in the sequence,
+
+     .. math::
+
+       \sum_{i=1}^{k} d_i \leq k(k-1) + \sum_{j=k+1}^{n} \min(d_j,k)
+             = k(n-1) - ( k \sum_{j=0}^{k-1} n_j - \sum_{j=0}^{k-1} j n_j )
+
+    A strong index k is any index where d_k >= k and the value n_j is the
+    number of occurrences of j in d.  The maximal strong index is called the
+    Durfee index.
+
+    This particular rearrangement comes from the proof of Theorem 3 in [2]_.
+
+    The ZZ condition says that for the sequence d if
+
+    .. math::
+        |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4\min(d)}
+
+    then d is graphical.  This was shown in Theorem 6 in [2]_.
+
+    References
+    ----------
+    .. [1] A. Tripathi and S. Vijay. "A note on a theorem of Erdős & Gallai",
+       Discrete Mathematics, 265, pp. 417-420 (2003).
+    .. [2] I.E. Zverovich and V.E. Zverovich. "Contributions to the theory
+       of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992).
+    .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960.
+    """
+    try:
+        dmax, dmin, dsum, n, num_degs = _basic_graphical_tests(deg_sequence)
+    except nx.NetworkXUnfeasible:
+        return False
+    # Accept if sequence has no non-zero degrees or passes the ZZ condition
+    if n == 0 or 4 * dmin * n >= (dmax + dmin + 1) * (dmax + dmin + 1):
+        return True
+
+    # Perform the EG checks using the reformulation of Zverovich and Zverovich
+    k, sum_deg, sum_nj, sum_jnj = 0, 0, 0, 0
+    for dk in range(dmax, dmin - 1, -1):
+        if dk < k + 1:  # Check if already past Durfee index
+            return True
+        if num_degs[dk] > 0:
+            run_size = num_degs[dk]  # Process a run of identical-valued degrees
+            if dk < k + run_size:  # Check if end of run is past Durfee index
+                run_size = dk - k  # Adjust back to Durfee index
+            sum_deg += run_size * dk
+            for v in range(run_size):
+                sum_nj += num_degs[k + v]
+                sum_jnj += (k + v) * num_degs[k + v]
+            k += run_size
+            if sum_deg > k * (n - 1) - k * sum_nj + sum_jnj:
+                return False
+    return True
+
+
+@nx._dispatchable(graphs=None)
+def is_multigraphical(sequence):
+    """Returns True if some multigraph can realize the sequence.
+
+    Parameters
+    ----------
+    sequence : list
+        A list of integers
+
+    Returns
+    -------
+    valid : bool
+        True if deg_sequence is a multigraphic degree sequence and False if not.
+
+    Examples
+    --------
+    >>> G = nx.MultiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
+    >>> sequence = (d for _, d in G.degree())
+    >>> nx.is_multigraphical(sequence)
+    True
+
+    To test a non-multigraphical sequence:
+    >>> sequence_list = [d for _, d in G.degree()]
+    >>> sequence_list[-1] += 1
+    >>> nx.is_multigraphical(sequence_list)
+    False
+
+    Notes
+    -----
+    The worst-case run time is $O(n)$ where $n$ is the length of the sequence.
+
+    References
+    ----------
+    .. [1] S. L. Hakimi. "On the realizability of a set of integers as
+       degrees of the vertices of a linear graph", J. SIAM, 10, pp. 496-506
+       (1962).
+    """
+    try:
+        deg_sequence = nx.utils.make_list_of_ints(sequence)
+    except nx.NetworkXError:
+        return False
+    dsum, dmax = 0, 0
+    for d in deg_sequence:
+        if d < 0:
+            return False
+        dsum, dmax = dsum + d, max(dmax, d)
+    if dsum % 2 or dsum < 2 * dmax:
+        return False
+    return True
+
+
@nx._dispatchable(graphs=None)
def is_pseudographical(sequence):
    """Returns True if some pseudograph can realize the sequence.

    Every nonnegative integer sequence with an even sum is pseudographical
    (see [1]_).

    Parameters
    ----------
    sequence : list or iterable container
        A sequence of integer node degrees

    Returns
    -------
    valid : bool
      True if the sequence is a pseudographic degree sequence and False if not.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_pseudographical(sequence)
    True

    To test a non-pseudographical sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_pseudographical(sequence_list)
    False

    Notes
    -----
    The worst-case run time is $O(n)$ where n is the length of the sequence.

    References
    ----------
    .. [1] F. Boesch and F. Harary. "Line removal algorithms for graphs
       and their degree lists", IEEE Trans. Circuits and Systems, CAS-23(12),
       pp. 778-782 (1976).
    """
    try:
        deg_sequence = nx.utils.make_list_of_ints(sequence)
    except nx.NetworkXError:
        return False
    # The empty sequence is realized by the null pseudograph.  Guard it
    # explicitly: ``min([])`` below would raise ValueError, whereas the
    # sibling tests (e.g. ``is_multigraphical``) return True for it.
    if not deg_sequence:
        return True
    # Pseudographs allow loops and multiedges, so the only requirements are
    # an even degree sum and no negative degrees.
    return sum(deg_sequence) % 2 == 0 and min(deg_sequence) >= 0
+
+
@nx._dispatchable(graphs=None)
def is_digraphical(in_sequence, out_sequence):
    r"""Returns True if some directed graph can realize the in- and out-degree
    sequences.

    Parameters
    ----------
    in_sequence : list or iterable container
        A sequence of integer node in-degrees

    out_sequence : list or iterable container
        A sequence of integer node out-degrees

    Returns
    -------
    valid : bool
      True if in and out-sequences are digraphic False if not.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> in_seq = (d for n, d in G.in_degree())
    >>> out_seq = (d for n, d in G.out_degree())
    >>> nx.is_digraphical(in_seq, out_seq)
    True

    To test a non-digraphical scenario:
    >>> in_seq_list = [d for n, d in G.in_degree()]
    >>> in_seq_list[-1] += 1
    >>> nx.is_digraphical(in_seq_list, out_seq)
    False

    Notes
    -----
    This algorithm is from Kleitman and Wang [1]_.
    The worst case runtime is $O(s \times \log n)$ where $s$ and $n$ are the
    sum and length of the sequences respectively.

    References
    ----------
    .. [1] D.J. Kleitman and D.L. Wang
       Algorithms for Constructing Graphs and Digraphs with Given Valences
       and Factors, Discrete Mathematics, 6(1), pp. 79-88 (1973)
    """
    # Non-integer entries make the pair trivially non-digraphical.
    try:
        in_deg_sequence = nx.utils.make_list_of_ints(in_sequence)
        out_deg_sequence = nx.utils.make_list_of_ints(out_sequence)
    except nx.NetworkXError:
        return False
    # Process the sequences and form two heaps to store degree pairs with
    # either zero or non-zero out degrees
    sumin, sumout, nin, nout = 0, 0, len(in_deg_sequence), len(out_deg_sequence)
    # A shorter sequence is padded with zeros: iterate up to the longer one.
    maxn = max(nin, nout)
    maxin = 0
    if maxn == 0:
        return True
    # Degrees are negated before being pushed so that Python's min-heaps
    # behave as max-heaps (largest magnitude degree pops first).
    stubheap, zeroheap = [], []
    for n in range(maxn):
        in_deg, out_deg = 0, 0
        if n < nout:
            out_deg = out_deg_sequence[n]
        if n < nin:
            in_deg = in_deg_sequence[n]
        if in_deg < 0 or out_deg < 0:
            return False
        sumin, sumout, maxin = sumin + in_deg, sumout + out_deg, max(maxin, in_deg)
        if in_deg > 0:
            stubheap.append((-1 * out_deg, -1 * in_deg))
        elif out_deg > 0:
            zeroheap.append(-1 * out_deg)
    # Every out-stub must be matched by an in-stub somewhere.
    if sumin != sumout:
        return False
    heapq.heapify(stubheap)
    heapq.heapify(zeroheap)

    # Scratch buffer, sized by the largest in-degree, reused every round to
    # avoid reallocating a list per iteration.
    modstubs = [(0, 0)] * (maxin + 1)
    # Successively reduce degree sequence by removing the maximum out degree
    while stubheap:
        # Take the first value in the sequence with non-zero in degree
        (freeout, freein) = heapq.heappop(stubheap)
        freein *= -1
        # Not enough other nodes left to absorb this node's in-stubs.
        if freein > len(stubheap) + len(zeroheap):
            return False

        # Attach out stubs to the nodes with the most in stubs
        mslen = 0
        for i in range(freein):
            # Prefer the zero-in-degree node when its (negated) out-degree
            # beats the best remaining stub pair.
            if zeroheap and (not stubheap or stubheap[0][0] > zeroheap[0]):
                stubout = heapq.heappop(zeroheap)
                stubin = 0
            else:
                (stubout, stubin) = heapq.heappop(stubheap)
            # A node with no out-stubs left cannot accept another edge.
            if stubout == 0:
                return False
            # Check if target is now totally connected
            if stubout + 1 < 0 or stubin < 0:
                modstubs[mslen] = (stubout + 1, stubin)
                mslen += 1

        # Add back the nodes to the heap that still have available stubs
        for i in range(mslen):
            stub = modstubs[i]
            if stub[1] < 0:
                heapq.heappush(stubheap, stub)
            else:
                heapq.heappush(zeroheap, stub[0])
        if freeout < 0:
            heapq.heappush(zeroheap, freeout)
    return True
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/hierarchy.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/hierarchy.py
new file mode 100644
index 00000000..d5a05525
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/hierarchy.py
@@ -0,0 +1,57 @@
+"""
+Flow Hierarchy.
+"""
+
+import networkx as nx
+
+__all__ = ["flow_hierarchy"]
+
+
@nx._dispatchable(edge_attrs="weight")
def flow_hierarchy(G, weight=None):
    """Returns the flow hierarchy of a directed network.

    The flow hierarchy of a directed graph is the fraction of its edges that
    do not participate in any cycle [1]_.

    Parameters
    ----------
    G : DiGraph or MultiDiGraph
       A directed graph

    weight : string, optional (default=None)
       Attribute to use for edge weights. If None the weight defaults to 1.

    Returns
    -------
    h : float
       Flow hierarchy value

    Raises
    ------
    NetworkXError
       If `G` is not a directed graph or if `G` has no edges.

    Notes
    -----
    Instead of exponentiating the adjacency matrix as proposed in [1]_, this
    implementation relies on the fact that an edge lies on a cycle exactly
    when both endpoints belong to the same strongly connected component,
    which Tarjan's algorithm finds in $O(m)$ time.

    References
    ----------
    .. [1] Luo, J.; Magee, C.L. (2011),
       Detecting evolving patterns of self-organizing networks by flow
       hierarchy measurement, Complexity, Volume 16 Issue 6 53-61.
       DOI: 10.1002/cplx.20368
       http://web.mit.edu/~cmagee/www/documents/28-DetectingEvolvingPatterns_FlowHierarchy.pdf
    """
    # An empty graph would make the fraction below undefined.
    if nx.is_empty(G):
        raise nx.NetworkXError("flow_hierarchy not applicable to empty graphs")
    if not G.is_directed():
        raise nx.NetworkXError("G must be a digraph in flow_hierarchy")
    # Total (possibly weighted) edge count of the graph.
    total = G.size(weight)
    # Edges inside a strongly connected component are exactly the cyclic ones.
    cyclic = sum(
        G.subgraph(component).size(weight)
        for component in nx.strongly_connected_components(G)
    )
    return 1 - cyclic / total
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/hybrid.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/hybrid.py
new file mode 100644
index 00000000..9d3dd307
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/hybrid.py
@@ -0,0 +1,196 @@
+"""
+Provides functions for finding and testing for locally `(k, l)`-connected
+graphs.
+
+"""
+
+import copy
+
+import networkx as nx
+
+__all__ = ["kl_connected_subgraph", "is_kl_connected"]
+
+
@nx._dispatchable(returns_graph=True)
def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False):
    """Returns the maximum locally `(k, l)`-connected subgraph of `G`.

    A graph is locally `(k, l)`-connected if for each edge `(u, v)` in the
    graph there are at least `l` edge-disjoint paths of length at most `k`
    joining `u` to `v`.

    Parameters
    ----------
    G : NetworkX graph
        The graph in which to find a maximum locally `(k, l)`-connected
        subgraph.

    k : integer
        The maximum length of paths to consider. A higher number means a looser
        connectivity requirement.

    l : integer
        The number of edge-disjoint paths. A higher number means a stricter
        connectivity requirement.

    low_memory : bool
        If this is True, this function uses an algorithm that uses slightly
        more time but less memory.

    same_as_graph : bool
        If True then return a tuple of the form `(H, is_same)`,
        where `H` is the maximum locally `(k, l)`-connected subgraph and
        `is_same` is a Boolean representing whether `G` is locally `(k,
        l)`-connected (and hence, whether `H` is simply a copy of the input
        graph `G`).

    Returns
    -------
    NetworkX graph or two-tuple
        If `same_as_graph` is True, then this function returns a
        two-tuple as described above. Otherwise, it returns only the maximum
        locally `(k, l)`-connected subgraph.

    See also
    --------
    is_kl_connected

    References
    ----------
    .. [1] Chung, Fan and Linyuan Lu. "The Small World Phenomenon in Hybrid
           Power Law Graphs." *Complex Networks*. Springer Berlin Heidelberg,
           2004. 89--104.

    """
    # Start from a full copy and repeatedly delete edges that fail the local
    # connectivity test, until a full pass removes nothing.
    H = copy.deepcopy(G)

    unchanged_input = True  # stays True only if no edge is ever removed
    removed_any = True  # force entry into the first pass
    while removed_any:
        removed_any = False
        # Snapshot the edges: the body mutates H, and iterating a live view
        # while deleting would raise a RuntimeError.
        for u, v in list(H.edges()):
            # Build the graph searched for alternate u-v paths.
            if low_memory:
                # Only the k-hop neighborhood of the edge can matter.
                neighborhood = {u, v}
                for _ in range(k):
                    for node in neighborhood.copy():
                        neighborhood.update(G[node])
                G2 = G.subgraph(neighborhood).copy()
            else:
                G2 = copy.deepcopy(G)
            # Count edge-disjoint u-v paths, removing each found path's edges
            # before searching for the next one.
            path = [u, v]
            found = 0
            enough_paths = False
            while path:
                found += 1  # one more path located
                if found >= l:
                    enough_paths = True
                    break
                prev = u
                for node in path:
                    if prev != node:
                        G2.remove_edge(prev, node)
                        prev = node
                try:
                    path = nx.shortest_path(G2, u, v)  # ??? should "Cutoff" be k+1?
                except nx.NetworkXNoPath:
                    path = None
            if not enough_paths:
                # Fewer than l disjoint paths: the edge cannot stay.
                H.remove_edge(u, v)
                removed_any = True
                unchanged_input = False
    # A full sweep removed nothing, so H is the maximal locally
    # (k, l)-connected subgraph of G.
    if same_as_graph:
        return (H, unchanged_input)
    return H
+
+
@nx._dispatchable
def is_kl_connected(G, k, l, low_memory=False):
    """Returns True if and only if `G` is locally `(k, l)`-connected.

    A graph is locally `(k, l)`-connected if for each edge `(u, v)` in the
    graph there are at least `l` edge-disjoint paths of length at most `k`
    joining `u` to `v`.

    Parameters
    ----------
    G : NetworkX graph
        The graph to test for local `(k, l)`-connectedness.

    k : integer
        The maximum length of paths to consider. A higher number means a looser
        connectivity requirement.

    l : integer
        The number of edge-disjoint paths. A higher number means a stricter
        connectivity requirement.

    low_memory : bool
        If this is True, this function uses an algorithm that uses slightly
        more time but less memory.

    Returns
    -------
    bool
        Whether the graph is locally `(k, l)`-connected subgraph.

    See also
    --------
    kl_connected_subgraph

    References
    ----------
    .. [1] Chung, Fan and Linyuan Lu. "The Small World Phenomenon in Hybrid
           Power Law Graphs." *Complex Networks*. Springer Berlin Heidelberg,
           2004. 89--104.

    """
    for u, v in G.edges():
        # Build the graph in which alternate u-v paths are searched.
        if low_memory:
            # Restrict the search to the k-hop neighborhood of the edge.
            verts = {u, v}
            for _ in range(k):
                for w in verts.copy():
                    verts.update(G.neighbors(w))
            # ``subgraph`` returns a read-only view; copy it because the
            # path search below removes edges from G2 (matching the
            # behavior of ``kl_connected_subgraph``).
            G2 = G.subgraph(verts).copy()
        else:
            G2 = copy.deepcopy(G)
        # Count edge-disjoint u-v paths, deleting each found path's edges
        # before looking for the next.
        path = [u, v]
        cnt = 0
        accept = 0
        while path:
            cnt += 1  # Found a path
            if cnt >= l:
                accept = 1
                break
            # Remove the edges of the path just found.
            prev = u
            for w in path:
                if w != prev:
                    G2.remove_edge(prev, w)
                    prev = w
            try:
                path = nx.shortest_path(G2, u, v)  # ??? should "Cutoff" be k+1?
            except nx.NetworkXNoPath:
                path = False
        if accept == 0:
            # One failing edge is enough to reject the whole graph.
            return False
    return True
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isolate.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isolate.py
new file mode 100644
index 00000000..1ea8abe9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isolate.py
@@ -0,0 +1,107 @@
+"""
+Functions for identifying isolate (degree zero) nodes.
+"""
+
+import networkx as nx
+
+__all__ = ["is_isolate", "isolates", "number_of_isolates"]
+
+
@nx._dispatchable
def is_isolate(G, n):
    """Determine whether node `n` is an isolate in graph `G`.

    An *isolate* is a node without any neighbors, i.e. a node of degree
    zero. In a directed graph this means both zero in-neighbors and zero
    out-neighbors.

    Parameters
    ----------
    G : NetworkX graph

    n : node
        A node in `G`.

    Returns
    -------
    is_isolate : bool
       True if and only if `n` has no neighbors.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_edge(1, 2)
    >>> G.add_node(3)
    >>> nx.is_isolate(G, 2)
    False
    >>> nx.is_isolate(G, 3)
    True
    """
    degree = G.degree(n)
    return degree == 0
+
+
@nx._dispatchable
def isolates(G):
    """Iterate over the isolates of `G`.

    An *isolate* is a node without any neighbors, i.e. a node of degree
    zero. In a directed graph this means both zero in-neighbors and zero
    out-neighbors.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    iterator
        An iterator over the isolates of `G`.

    Examples
    --------
    To get a list of all isolates of a graph, use the :class:`list`
    constructor::

        >>> G = nx.Graph()
        >>> G.add_edge(1, 2)
        >>> G.add_node(3)
        >>> list(nx.isolates(G))
        [3]

    To remove all isolates in the graph, first create a list of the
    isolates, then use :meth:`Graph.remove_nodes_from`::

        >>> G.remove_nodes_from(list(nx.isolates(G)))
        >>> list(G)
        [1, 2]

    For digraphs, isolates have zero in-degree and zero out-degree::

        >>> G = nx.DiGraph([(0, 1), (1, 2)])
        >>> G.add_node(3)
        >>> list(nx.isolates(G))
        [3]

    """
    for node, degree in G.degree():
        if degree == 0:
            yield node
+
+
@nx._dispatchable
def number_of_isolates(G):
    """Count the isolates in the graph `G`.

    An *isolate* is a node without any neighbors, i.e. a node of degree
    zero. In a directed graph this means both zero in-neighbors and zero
    out-neighbors.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    int
        The number of degree zero nodes in the graph `G`.

    """
    # Consume the isolates iterator, counting one per isolate found.
    return sum(1 for _ in isolates(G))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__init__.py
new file mode 100644
index 00000000..58c22688
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__init__.py
@@ -0,0 +1,7 @@
+from networkx.algorithms.isomorphism.isomorph import *
+from networkx.algorithms.isomorphism.vf2userfunc import *
+from networkx.algorithms.isomorphism.matchhelpers import *
+from networkx.algorithms.isomorphism.temporalisomorphvf2 import *
+from networkx.algorithms.isomorphism.ismags import *
+from networkx.algorithms.isomorphism.tree_isomorphism import *
+from networkx.algorithms.isomorphism.vf2pp import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/ismags.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/ismags.py
new file mode 100644
index 00000000..24819faf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/ismags.py
@@ -0,0 +1,1163 @@
+"""
+ISMAGS Algorithm
+================
+
+Provides a Python implementation of the ISMAGS algorithm. [1]_
+
+It is capable of finding (subgraph) isomorphisms between two graphs, taking the
+symmetry of the subgraph into account. In most cases the VF2 algorithm is
+faster (at least on small graphs) than this implementation, but in some cases
+there is an exponential number of isomorphisms that are symmetrically
+equivalent. In that case, the ISMAGS algorithm will provide only one solution
+per symmetry group.
+
+>>> petersen = nx.petersen_graph()
+>>> ismags = nx.isomorphism.ISMAGS(petersen, petersen)
+>>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=False))
+>>> len(isomorphisms)
+120
+>>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=True))
+>>> answer = [{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9}]
+>>> answer == isomorphisms
+True
+
+In addition, this implementation also provides an interface to find the
+largest common induced subgraph [2]_ between any two graphs, again taking
+symmetry into account. Given `graph` and `subgraph` the algorithm will remove
+nodes from the `subgraph` until `subgraph` is isomorphic to a subgraph of
+`graph`. Since only the symmetry of `subgraph` is taken into account it is
+worth thinking about how you provide your graphs:
+
+>>> graph1 = nx.path_graph(4)
+>>> graph2 = nx.star_graph(3)
+>>> ismags = nx.isomorphism.ISMAGS(graph1, graph2)
+>>> ismags.is_isomorphic()
+False
+>>> largest_common_subgraph = list(ismags.largest_common_subgraph())
+>>> answer = [{1: 0, 0: 1, 2: 2}, {2: 0, 1: 1, 3: 2}]
+>>> answer == largest_common_subgraph
+True
+>>> ismags2 = nx.isomorphism.ISMAGS(graph2, graph1)
+>>> largest_common_subgraph = list(ismags2.largest_common_subgraph())
+>>> answer = [
+...     {1: 0, 0: 1, 2: 2},
+...     {1: 0, 0: 1, 3: 2},
+...     {2: 0, 0: 1, 1: 2},
+...     {2: 0, 0: 1, 3: 2},
+...     {3: 0, 0: 1, 1: 2},
+...     {3: 0, 0: 1, 2: 2},
+... ]
+>>> answer == largest_common_subgraph
+True
+
+However, when not taking symmetry into account, it doesn't matter:
+
+>>> largest_common_subgraph = list(ismags.largest_common_subgraph(symmetry=False))
+>>> answer = [
+...     {1: 0, 0: 1, 2: 2},
+...     {1: 0, 2: 1, 0: 2},
+...     {2: 0, 1: 1, 3: 2},
+...     {2: 0, 3: 1, 1: 2},
+...     {1: 0, 0: 1, 2: 3},
+...     {1: 0, 2: 1, 0: 3},
+...     {2: 0, 1: 1, 3: 3},
+...     {2: 0, 3: 1, 1: 3},
+...     {1: 0, 0: 2, 2: 3},
+...     {1: 0, 2: 2, 0: 3},
+...     {2: 0, 1: 2, 3: 3},
+...     {2: 0, 3: 2, 1: 3},
+... ]
+>>> answer == largest_common_subgraph
+True
+>>> largest_common_subgraph = list(ismags2.largest_common_subgraph(symmetry=False))
+>>> answer = [
+...     {1: 0, 0: 1, 2: 2},
+...     {1: 0, 0: 1, 3: 2},
+...     {2: 0, 0: 1, 1: 2},
+...     {2: 0, 0: 1, 3: 2},
+...     {3: 0, 0: 1, 1: 2},
+...     {3: 0, 0: 1, 2: 2},
+...     {1: 1, 0: 2, 2: 3},
+...     {1: 1, 0: 2, 3: 3},
+...     {2: 1, 0: 2, 1: 3},
+...     {2: 1, 0: 2, 3: 3},
+...     {3: 1, 0: 2, 1: 3},
+...     {3: 1, 0: 2, 2: 3},
+... ]
+>>> answer == largest_common_subgraph
+True
+
+Notes
+-----
+- The current implementation works for undirected graphs only. The algorithm
+  in general should work for directed graphs as well though.
+- Node keys for both provided graphs need to be fully orderable as well as
+  hashable.
+- Node and edge equality is assumed to be transitive: if A is equal to B, and
+  B is equal to C, then A is equal to C.
+
+References
+----------
+.. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
+   M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
+   Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
+   Enumeration", PLoS One 9(5): e97896, 2014.
+   https://doi.org/10.1371/journal.pone.0097896
+.. [2] https://en.wikipedia.org/wiki/Maximum_common_induced_subgraph
+"""
+
+__all__ = ["ISMAGS"]
+
+import itertools
+from collections import Counter, defaultdict
+from functools import reduce, wraps
+
+
def are_all_equal(iterable):
    """
    Returns ``True`` if and only if all elements in `iterable` are equal; and
    ``False`` otherwise.

    Parameters
    ----------
    iterable: collections.abc.Iterable
        The container whose elements will be checked.

    Returns
    -------
    bool
        ``True`` iff all elements in `iterable` compare equal, ``False``
        otherwise. An empty iterable yields ``True``.

    Raises
    ------
    NotImplementedError
        If `iterable` exposes a multidimensional ``shape`` attribute
        (e.g. a 2-D numpy array), since element-wise comparison would be
        ambiguous there.
    """
    # Arrays with a multidimensional ``shape`` would compare row-wise and
    # produce ambiguous truth values, so reject them explicitly.
    try:
        shape = iterable.shape
    except AttributeError:
        pass
    else:
        if len(shape) > 1:
            # Fixed grammar of the user-facing message ("does not works").
            message = "The function does not work on multidimensional arrays."
            raise NotImplementedError(message) from None

    # Compare every remaining element against the first one; ``None`` as the
    # default makes the empty iterable vacuously equal.
    iterator = iter(iterable)
    first = next(iterator, None)
    return all(item == first for item in iterator)
+
+
def make_partitions(items, test):
    """
    Partitions items into sets based on the outcome of ``test(item1, item2)``.
    Pairs of items for which `test` returns `True` end up in the same set.

    Parameters
    ----------
    items : collections.abc.Iterable[collections.abc.Hashable]
        Items to partition
    test : collections.abc.Callable[collections.abc.Hashable, collections.abc.Hashable]
        A function that will be called with 2 arguments, taken from items.
        Should return `True` if those 2 items need to end up in the same
        partition, and `False` otherwise.

    Returns
    -------
    list[set]
        A list of sets, with each set containing part of the items in `items`,
        such that ``all(test(*pair) for pair in  itertools.combinations(set, 2))
        == True``

    Notes
    -----
    The function `test` is assumed to be transitive: if ``test(a, b)`` and
    ``test(b, c)`` return ``True``, then ``test(a, c)`` must also be ``True``.
    """
    groups = []
    for element in items:
        # Transitivity lets us compare against a single representative of
        # each existing group; take the first group that matches.
        home = next(
            (group for group in groups if test(element, next(iter(group)))), None
        )
        if home is None:
            groups.append({element})
        else:
            home.add(element)
    return groups
+
+
def partition_to_color(partitions):
    """
    Creates a dictionary that maps each item in each partition to the index of
    the partition to which it belongs.

    Parameters
    ----------
    partitions: collections.abc.Sequence[collections.abc.Iterable]
        As returned by :func:`make_partitions`.

    Returns
    -------
    dict
        Mapping of item -> index of its partition in `partitions`.
    """
    return {
        member: index
        for index, partition in enumerate(partitions)
        for member in partition
    }
+
+
def intersect(collection_of_sets):
    """
    Given a collection of sets, returns the intersection of those sets.

    Parameters
    ----------
    collection_of_sets: collections.abc.Collection[set]
        A collection of sets.

    Returns
    -------
    set
        An intersection of all sets in `collection_of_sets`. Will have the same
        type as the item initially taken from `collection_of_sets`.
    """
    remaining = list(collection_of_sets)
    # Keep one member aside: its type determines the type of the result.
    template = remaining.pop()
    common = set(template)
    for current in remaining:
        common &= current
    return type(template)(common)
+
+
+class ISMAGS:
+    """
+    Implements the ISMAGS subgraph matching algorithm. [1]_ ISMAGS stands for
+    "Index-based Subgraph Matching Algorithm with General Symmetries". As the
+    name implies, it is symmetry aware and will only generate non-symmetric
+    isomorphisms.
+
+    Notes
+    -----
+    The implementation imposes additional conditions compared to the VF2
+    algorithm on the graphs provided and the comparison functions
+    (:attr:`node_equality` and :attr:`edge_equality`):
+
+     - Node keys in both graphs must be orderable as well as hashable.
+     - Equality must be transitive: if A is equal to B, and B is equal to C,
+       then A must be equal to C.
+
+    Attributes
+    ----------
+    graph: networkx.Graph
+    subgraph: networkx.Graph
+    node_equality: collections.abc.Callable
+        The function called to see if two nodes should be considered equal.
+        It's signature looks like this:
+        ``f(graph1: networkx.Graph, node1, graph2: networkx.Graph, node2) -> bool``.
+        `node1` is a node in `graph1`, and `node2` a node in `graph2`.
+        Constructed from the argument `node_match`.
+    edge_equality: collections.abc.Callable
+        The function called to see if two edges should be considered equal.
+        It's signature looks like this:
+        ``f(graph1: networkx.Graph, edge1, graph2: networkx.Graph, edge2) -> bool``.
+        `edge1` is an edge in `graph1`, and `edge2` an edge in `graph2`.
+        Constructed from the argument `edge_match`.
+
+    References
+    ----------
+    .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
+       M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
+       Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
+       Enumeration", PLoS One 9(5): e97896, 2014.
+       https://doi.org/10.1371/journal.pone.0097896
+    """
+
+    def __init__(self, graph, subgraph, node_match=None, edge_match=None, cache=None):
+        """
+        Parameters
+        ----------
+        graph: networkx.Graph
+        subgraph: networkx.Graph
+        node_match: collections.abc.Callable or None
+            Function used to determine whether two nodes are equivalent. Its
+            signature should look like ``f(n1: dict, n2: dict) -> bool``, with
+            `n1` and `n2` node property dicts. See also
+            :func:`~networkx.algorithms.isomorphism.categorical_node_match` and
+            friends.
+            If `None`, all nodes are considered equal.
+        edge_match: collections.abc.Callable or None
+            Function used to determine whether two edges are equivalent. Its
+            signature should look like ``f(e1: dict, e2: dict) -> bool``, with
+            `e1` and `e2` edge property dicts. See also
+            :func:`~networkx.algorithms.isomorphism.categorical_edge_match` and
+            friends.
+            If `None`, all edges are considered equal.
+        cache: collections.abc.Mapping
+            A cache used for caching graph symmetries.
+        """
+        # TODO: graph and subgraph setter methods that invalidate the caches.
+        # TODO: allow for precomputed partitions and colors
+        self.graph = graph
+        self.subgraph = subgraph
+        self._symmetry_cache = cache
+        # Naming conventions are taken from the original paper. For your
+        # sanity:
+        #   sg: subgraph
+        #   g: graph
+        #   e: edge(s)
+        #   n: node(s)
+        # So: sgn means "subgraph nodes".
+        self._sgn_partitions_ = None
+        self._sge_partitions_ = None
+
+        self._sgn_colors_ = None
+        self._sge_colors_ = None
+
+        self._gn_partitions_ = None
+        self._ge_partitions_ = None
+
+        self._gn_colors_ = None
+        self._ge_colors_ = None
+
+        self._node_compat_ = None
+        self._edge_compat_ = None
+
+        if node_match is None:
+            self.node_equality = self._node_match_maker(lambda n1, n2: True)
+            self._sgn_partitions_ = [set(self.subgraph.nodes)]
+            self._gn_partitions_ = [set(self.graph.nodes)]
+            self._node_compat_ = {0: 0}
+        else:
+            self.node_equality = self._node_match_maker(node_match)
+        if edge_match is None:
+            self.edge_equality = self._edge_match_maker(lambda e1, e2: True)
+            self._sge_partitions_ = [set(self.subgraph.edges)]
+            self._ge_partitions_ = [set(self.graph.edges)]
+            self._edge_compat_ = {0: 0}
+        else:
+            self.edge_equality = self._edge_match_maker(edge_match)
+
+    @property
+    def _sgn_partitions(self):
+        if self._sgn_partitions_ is None:
+
+            def nodematch(node1, node2):
+                return self.node_equality(self.subgraph, node1, self.subgraph, node2)
+
+            self._sgn_partitions_ = make_partitions(self.subgraph.nodes, nodematch)
+        return self._sgn_partitions_
+
+    @property
+    def _sge_partitions(self):
+        if self._sge_partitions_ is None:
+
+            def edgematch(edge1, edge2):
+                return self.edge_equality(self.subgraph, edge1, self.subgraph, edge2)
+
+            self._sge_partitions_ = make_partitions(self.subgraph.edges, edgematch)
+        return self._sge_partitions_
+
+    @property
+    def _gn_partitions(self):
+        if self._gn_partitions_ is None:
+
+            def nodematch(node1, node2):
+                return self.node_equality(self.graph, node1, self.graph, node2)
+
+            self._gn_partitions_ = make_partitions(self.graph.nodes, nodematch)
+        return self._gn_partitions_
+
+    @property
+    def _ge_partitions(self):
+        if self._ge_partitions_ is None:
+
+            def edgematch(edge1, edge2):
+                return self.edge_equality(self.graph, edge1, self.graph, edge2)
+
+            self._ge_partitions_ = make_partitions(self.graph.edges, edgematch)
+        return self._ge_partitions_
+
+    @property
+    def _sgn_colors(self):
+        if self._sgn_colors_ is None:
+            self._sgn_colors_ = partition_to_color(self._sgn_partitions)
+        return self._sgn_colors_
+
+    @property
+    def _sge_colors(self):
+        if self._sge_colors_ is None:
+            self._sge_colors_ = partition_to_color(self._sge_partitions)
+        return self._sge_colors_
+
+    @property
+    def _gn_colors(self):
+        if self._gn_colors_ is None:
+            self._gn_colors_ = partition_to_color(self._gn_partitions)
+        return self._gn_colors_
+
+    @property
+    def _ge_colors(self):
+        if self._ge_colors_ is None:
+            self._ge_colors_ = partition_to_color(self._ge_partitions)
+        return self._ge_colors_
+
+    @property
+    def _node_compatibility(self):
+        if self._node_compat_ is not None:
+            return self._node_compat_
+        self._node_compat_ = {}
+        for sgn_part_color, gn_part_color in itertools.product(
+            range(len(self._sgn_partitions)), range(len(self._gn_partitions))
+        ):
+            sgn = next(iter(self._sgn_partitions[sgn_part_color]))
+            gn = next(iter(self._gn_partitions[gn_part_color]))
+            if self.node_equality(self.subgraph, sgn, self.graph, gn):
+                self._node_compat_[sgn_part_color] = gn_part_color
+        return self._node_compat_
+
+    @property
+    def _edge_compatibility(self):
+        if self._edge_compat_ is not None:
+            return self._edge_compat_
+        self._edge_compat_ = {}
+        for sge_part_color, ge_part_color in itertools.product(
+            range(len(self._sge_partitions)), range(len(self._ge_partitions))
+        ):
+            sge = next(iter(self._sge_partitions[sge_part_color]))
+            ge = next(iter(self._ge_partitions[ge_part_color]))
+            if self.edge_equality(self.subgraph, sge, self.graph, ge):
+                self._edge_compat_[sge_part_color] = ge_part_color
+        return self._edge_compat_
+
+    @staticmethod
+    def _node_match_maker(cmp):
+        """
+        Adapt a node-attribute comparator ``cmp(attrs1, attrs2)`` to the
+        ``(graph1, node1, graph2, node2)`` signature used internally.
+        """
+
+        @wraps(cmp)
+        def comparer(graph1, node1, graph2, node2):
+            return cmp(graph1.nodes[node1], graph2.nodes[node2])
+
+        return comparer
+
+    @staticmethod
+    def _edge_match_maker(cmp):
+        """
+        Adapt an edge-attribute comparator ``cmp(attrs1, attrs2)`` to the
+        ``(graph1, edge1, graph2, edge2)`` signature used internally.
+        """
+
+        @wraps(cmp)
+        def comparer(graph1, edge1, graph2, edge2):
+            return cmp(graph1.edges[edge1], graph2.edges[edge2])
+
+        return comparer
+
+    def find_isomorphisms(self, symmetry=True):
+        """Find all subgraph isomorphisms between subgraph and graph
+
+        Finds isomorphisms where :attr:`subgraph` <= :attr:`graph`.
+
+        Parameters
+        ----------
+        symmetry: bool
+            Whether symmetry should be taken into account. If False, found
+            isomorphisms may be symmetrically equivalent.
+
+        Yields
+        ------
+        dict
+            The found isomorphism mappings of {graph_node: subgraph_node}.
+        """
+        # The networkx VF2 algorithm is slightly funny in when it yields an
+        # empty dict and when not.
+        if not self.subgraph:
+            yield {}
+            return
+        elif not self.graph:
+            return
+        elif len(self.graph) < len(self.subgraph):
+            # A larger subgraph can never fit inside graph.
+            return
+
+        if symmetry:
+            # Symmetry constraints prune mappings that only differ by an
+            # automorphism of the subgraph.
+            _, cosets = self.analyze_symmetry(
+                self.subgraph, self._sgn_partitions, self._sge_colors
+            )
+            constraints = self._make_constraints(cosets)
+        else:
+            constraints = []
+
+        candidates = self._find_nodecolor_candidates()
+        la_candidates = self._get_lookahead_candidates()
+        # Tighten the per-node candidate sets with the one-edge lookahead.
+        for sgn in self.subgraph:
+            extra_candidates = la_candidates[sgn]
+            if extra_candidates:
+                candidates[sgn] = candidates[sgn] | {frozenset(extra_candidates)}
+
+        if any(candidates.values()):
+            # Start from the subgraph node with the fewest candidates: this
+            # keeps the search tree narrow near the root.
+            start_sgn = min(candidates, key=lambda n: min(candidates[n], key=len))
+            candidates[start_sgn] = (intersect(candidates[start_sgn]),)
+            yield from self._map_nodes(start_sgn, candidates, constraints)
+        else:
+            return
+
+    @staticmethod
+    def _find_neighbor_color_count(graph, node, node_color, edge_color):
+        """
+        For `node` in `graph`, count the number of edges of a specific color
+        it has to nodes of a specific color.
+
+        Returns a Counter keyed by ``(edge color, neighbor node color)``.
+        """
+        counts = Counter()
+        neighbors = graph[node]
+        for neighbor in neighbors:
+            n_color = node_color[neighbor]
+            # Edge colors are stored for one orientation only, so try both.
+            if (node, neighbor) in edge_color:
+                e_color = edge_color[node, neighbor]
+            else:
+                e_color = edge_color[neighbor, node]
+            counts[e_color, n_color] += 1
+        return counts
+
+    def _get_lookahead_candidates(self):
+        """
+        Returns a mapping of {subgraph node: collection of graph nodes} for
+        which the graph nodes are feasible candidates for the subgraph node, as
+        determined by looking ahead one edge.
+        """
+        # Per graph node: how many edges of each (edge color, node color) it
+        # has to its neighbors.
+        g_counts = {}
+        for gn in self.graph:
+            g_counts[gn] = self._find_neighbor_color_count(
+                self.graph, gn, self._gn_colors, self._ge_colors
+            )
+        candidates = defaultdict(set)
+        for sgn in self.subgraph:
+            sg_count = self._find_neighbor_color_count(
+                self.subgraph, sgn, self._sgn_colors, self._sge_colors
+            )
+            # Translate subgraph colors into the corresponding graph colors.
+            # Incompatible colors (KeyError) are dropped: no graph edge/node
+            # can ever match them.
+            new_sg_count = Counter()
+            for (sge_color, sgn_color), count in sg_count.items():
+                try:
+                    ge_color = self._edge_compatibility[sge_color]
+                    gn_color = self._node_compatibility[sgn_color]
+                except KeyError:
+                    pass
+                else:
+                    new_sg_count[ge_color, gn_color] = count
+
+            # gn is feasible for sgn only if it has at least as many edges of
+            # every color combination as sgn requires.
+            for gn, g_count in g_counts.items():
+                if all(new_sg_count[x] <= g_count[x] for x in new_sg_count):
+                    # Valid candidate
+                    candidates[sgn].add(gn)
+        return candidates
+
+    def largest_common_subgraph(self, symmetry=True):
+        """
+        Find the largest common induced subgraphs between :attr:`subgraph` and
+        :attr:`graph`.
+
+        Parameters
+        ----------
+        symmetry: bool
+            Whether symmetry should be taken into account. If False, found
+            largest common subgraphs may be symmetrically equivalent.
+
+        Yields
+        ------
+        dict
+            The found isomorphism mappings of {graph_node: subgraph_node}.
+        """
+        # The networkx VF2 algorithm is slightly funny in when it yields an
+        # empty dict and when not.
+        if not self.subgraph:
+            yield {}
+            return
+        elif not self.graph:
+            return
+
+        if symmetry:
+            # Symmetry constraints prune mappings that only differ by an
+            # automorphism of the subgraph.
+            _, cosets = self.analyze_symmetry(
+                self.subgraph, self._sgn_partitions, self._sge_colors
+            )
+            constraints = self._make_constraints(cosets)
+        else:
+            constraints = []
+
+        candidates = self._find_nodecolor_candidates()
+
+        if any(candidates.values()):
+            yield from self._largest_common_subgraph(candidates, constraints)
+        else:
+            return
+
+    def analyze_symmetry(self, graph, node_partitions, edge_colors):
+        """
+        Find a minimal set of permutations and corresponding co-sets that
+        describe the symmetry of `graph`, given the node and edge equalities
+        given by `node_partitions` and `edge_colors`, respectively.
+
+        Parameters
+        ----------
+        graph : networkx.Graph
+            The graph whose symmetry should be analyzed.
+        node_partitions : list of sets
+            A list of sets containing node keys. Node keys in the same set
+            are considered equivalent. Every node key in `graph` should be in
+            exactly one of the sets. If all nodes are equivalent, this should
+            be ``[set(graph.nodes)]``.
+        edge_colors : dict mapping edges to their colors
+            A dict mapping every edge in `graph` to its corresponding color.
+            Edges with the same color are considered equivalent. If all edges
+            are equivalent, this should be ``{e: 0 for e in graph.edges}``.
+
+
+        Returns
+        -------
+        set[frozenset]
+            The found permutations. This is a set of frozensets of pairs of node
+            keys which can be exchanged without changing :attr:`subgraph`.
+        dict[collections.abc.Hashable, set[collections.abc.Hashable]]
+            The found co-sets. The co-sets is a dictionary of
+            ``{node key: set of node keys}``.
+            Every key-value pair describes which ``values`` can be interchanged
+            without changing nodes less than ``key``.
+        """
+        if self._symmetry_cache is not None:
+            # Cache key covers everything the result depends on: node and
+            # edge sets, the partitioning, and the edge coloring.
+            key = hash(
+                (
+                    tuple(graph.nodes),
+                    tuple(graph.edges),
+                    tuple(map(tuple, node_partitions)),
+                    tuple(edge_colors.items()),
+                )
+            )
+            if key in self._symmetry_cache:
+                return self._symmetry_cache[key]
+        node_partitions = list(
+            self._refine_node_partitions(graph, node_partitions, edge_colors)
+        )
+        # Without branching, refinement yields exactly one partitioning.
+        assert len(node_partitions) == 1
+        node_partitions = node_partitions[0]
+        permutations, cosets = self._process_ordered_pair_partitions(
+            graph, node_partitions, node_partitions, edge_colors
+        )
+        if self._symmetry_cache is not None:
+            self._symmetry_cache[key] = permutations, cosets
+        return permutations, cosets
+
+    def is_isomorphic(self, symmetry=False):
+        """
+        Returns True if :attr:`graph` is isomorphic to :attr:`subgraph` and
+        False otherwise.
+
+        Parameters
+        ----------
+        symmetry: bool
+            Whether symmetry should be taken into account.
+
+        Returns
+        -------
+        bool
+        """
+        # Equal node counts turn the subgraph-isomorphism test into a full
+        # isomorphism test.
+        return len(self.subgraph) == len(self.graph) and self.subgraph_is_isomorphic(
+            symmetry
+        )
+
+    def subgraph_is_isomorphic(self, symmetry=False):
+        """
+        Returns True if a subgraph of :attr:`graph` is isomorphic to
+        :attr:`subgraph` and False otherwise.
+
+        Parameters
+        ----------
+        symmetry: bool
+            Whether symmetry should be taken into account.
+
+        Returns
+        -------
+        bool
+        """
+        # symmetry=False, since we only need to know whether there is any
+        # example; figuring out all symmetry elements probably costs more time
+        # than it gains.
+        isom = next(self.subgraph_isomorphisms_iter(symmetry=symmetry), None)
+        return isom is not None
+
+    def isomorphisms_iter(self, symmetry=True):
+        """
+        Does the same as :meth:`find_isomorphisms` if :attr:`graph` and
+        :attr:`subgraph` have the same number of nodes.
+
+        Yields nothing when the node counts differ (no full isomorphism can
+        exist then).
+        """
+        if len(self.graph) == len(self.subgraph):
+            yield from self.subgraph_isomorphisms_iter(symmetry=symmetry)
+
+    def subgraph_isomorphisms_iter(self, symmetry=True):
+        """Alternative name for :meth:`find_isomorphisms`."""
+        return self.find_isomorphisms(symmetry)
+
+    def _find_nodecolor_candidates(self):
+        """
+        Per node in subgraph find all nodes in graph that have the same color.
+
+        Returns {subgraph node: frozenset of frozensets of graph nodes}; the
+        nested frozensets are candidate sets that later get intersected.
+        """
+        candidates = defaultdict(set)
+        for sgn in self.subgraph.nodes:
+            sgn_color = self._sgn_colors[sgn]
+            if sgn_color in self._node_compatibility:
+                gn_color = self._node_compatibility[sgn_color]
+                candidates[sgn].add(frozenset(self._gn_partitions[gn_color]))
+            else:
+                # No compatible color in graph: this subgraph node can never
+                # be mapped, record an empty candidate set.
+                candidates[sgn].add(frozenset())
+        candidates = dict(candidates)
+        # Freeze the outer sets so they can be combined immutably later on.
+        for sgn, options in candidates.items():
+            candidates[sgn] = frozenset(options)
+        return candidates
+
+    @staticmethod
+    def _make_constraints(cosets):
+        """
+        Turn cosets into constraints.
+
+        Returns a list of ``(node_i, node_t)`` pairs meaning the graph node
+        mapped to ``node_i`` must be smaller than the one mapped to ``node_t``.
+        """
+        constraints = []
+        for node_i, node_ts in cosets.items():
+            for node_t in node_ts:
+                if node_i != node_t:
+                    # Node i must be smaller than node t.
+                    constraints.append((node_i, node_t))
+        return constraints
+
+    @staticmethod
+    def _find_node_edge_color(graph, node_colors, edge_colors):
+        """
+        For every node in graph, come up with a color that combines 1) the
+        color of the node, and 2) the number of edges of a color to each type
+        of node.
+
+        Returns {node: (node color, set of ((edge color, neighbor color),
+        count) pairs)}.
+        """
+        counts = defaultdict(lambda: defaultdict(int))
+        for node1, node2 in graph.edges:
+            # Edge colors are stored for one orientation only, so try both.
+            if (node1, node2) in edge_colors:
+                # FIXME directed graphs
+                ecolor = edge_colors[node1, node2]
+            else:
+                ecolor = edge_colors[node2, node1]
+            # Count per node how many edges it has of what color to nodes of
+            # what color
+            counts[node1][ecolor, node_colors[node2]] += 1
+            counts[node2][ecolor, node_colors[node1]] += 1
+
+        node_edge_colors = {}
+        for node in graph.nodes:
+            node_edge_colors[node] = node_colors[node], set(counts[node].items())
+
+        return node_edge_colors
+
+    @staticmethod
+    def _get_permutations_by_length(items):
+        """
+        Get all permutations of items, but only permute items with the same
+        length.
+
+        >>> found = list(ISMAGS._get_permutations_by_length([[1], [2], [3, 4], [4, 5]]))
+        >>> answer = [
+        ...     (([1], [2]), ([3, 4], [4, 5])),
+        ...     (([1], [2]), ([4, 5], [3, 4])),
+        ...     (([2], [1]), ([3, 4], [4, 5])),
+        ...     (([2], [1]), ([4, 5], [3, 4])),
+        ... ]
+        >>> found == answer
+        True
+        """
+        # Group items by length, then take the cartesian product of the
+        # per-length permutations, visiting lengths in sorted order.
+        by_len = defaultdict(list)
+        for item in items:
+            by_len[len(item)].append(item)
+
+        yield from itertools.product(
+            *(itertools.permutations(by_len[l]) for l in sorted(by_len))
+        )
+
+    @classmethod
+    def _refine_node_partitions(cls, graph, node_partitions, edge_colors, branch=False):
+        """
+        Given a partition of nodes in graph, make the partitions smaller such
+        that all nodes in a partition have 1) the same color, and 2) the same
+        number of edges to specific other partitions.
+
+        Yields one refined partition list, or several when ``branch`` is True
+        and the refinement is order-ambiguous.
+        """
+
+        def equal_color(node1, node2):
+            return node_edge_colors[node1] == node_edge_colors[node2]
+
+        node_partitions = list(node_partitions)
+        node_colors = partition_to_color(node_partitions)
+        node_edge_colors = cls._find_node_edge_color(graph, node_colors, edge_colors)
+        # Fixed point reached: every partition is already homogeneous.
+        if all(
+            are_all_equal(node_edge_colors[node] for node in partition)
+            for partition in node_partitions
+        ):
+            yield node_partitions
+            return
+
+        new_partitions = []
+        output = [new_partitions]
+        for partition in node_partitions:
+            if not are_all_equal(node_edge_colors[node] for node in partition):
+                refined = make_partitions(partition, equal_color)
+                if (
+                    branch
+                    and len(refined) != 1
+                    and len({len(r) for r in refined}) != len([len(r) for r in refined])
+                ):
+                    # This is where it breaks. There are multiple new cells
+                    # in refined with the same length, and their order
+                    # matters.
+                    # So option 1) Hit it with a big hammer and simply make all
+                    # orderings.
+                    permutations = cls._get_permutations_by_length(refined)
+                    new_output = []
+                    for n_p in output:
+                        for permutation in permutations:
+                            new_output.append(n_p + list(permutation[0]))
+                    output = new_output
+                else:
+                    # Unambiguous: append the refined cells sorted by size.
+                    for n_p in output:
+                        n_p.extend(sorted(refined, key=len))
+            else:
+                # Partition already homogeneous; keep it unchanged.
+                for n_p in output:
+                    n_p.append(partition)
+        # Recurse until refinement reaches a fixed point.
+        for n_p in output:
+            yield from cls._refine_node_partitions(graph, n_p, edge_colors, branch)
+
+    def _edges_of_same_color(self, sgn1, sgn2):
+        """
+        Returns all edges in :attr:`graph` that have the same color as the
+        edge between sgn1 and sgn2 in :attr:`subgraph`.
+        """
+        # Edge colors are stored for one orientation only, so try both.
+        if (sgn1, sgn2) in self._sge_colors:
+            # FIXME directed graphs
+            sge_color = self._sge_colors[sgn1, sgn2]
+        else:
+            sge_color = self._sge_colors[sgn2, sgn1]
+        if sge_color in self._edge_compatibility:
+            ge_color = self._edge_compatibility[sge_color]
+            g_edges = self._ge_partitions[ge_color]
+        else:
+            # No graph edge is compatible with this subgraph edge color.
+            g_edges = []
+        return g_edges
+
+    def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=None):
+        """
+        Find all subgraph isomorphisms honoring constraints.
+
+        Recursively tries to map subgraph node ``sgn`` to every viable graph
+        node, yielding complete {graph node: subgraph node} mappings.
+        """
+        if mapping is None:
+            mapping = {}
+        else:
+            # Copy so sibling branches don't see each other's assignments.
+            mapping = mapping.copy()
+        if to_be_mapped is None:
+            to_be_mapped = set(self.subgraph.nodes)
+
+        # Note, we modify candidates here. Doesn't seem to affect results, but
+        # remember this.
+        # candidates = candidates.copy()
+        sgn_candidates = intersect(candidates[sgn])
+        candidates[sgn] = frozenset([sgn_candidates])
+        for gn in sgn_candidates:
+            # We're going to try to map sgn to gn.
+            if gn in mapping.values() or sgn not in to_be_mapped:
+                # gn is already mapped to something
+                continue  # pragma: no cover
+
+            # REDUCTION and COMBINATION
+            mapping[sgn] = gn
+            # BASECASE
+            if to_be_mapped == set(mapping.keys()):
+                # All requested nodes mapped; yield inverted as
+                # {graph node: subgraph node}.
+                yield {v: k for k, v in mapping.items()}
+                continue
+            left_to_map = to_be_mapped - set(mapping.keys())
+
+            new_candidates = candidates.copy()
+            sgn_nbrs = set(self.subgraph[sgn])
+            not_gn_nbrs = set(self.graph.nodes) - set(self.graph[gn])
+            for sgn2 in left_to_map:
+                if sgn2 not in sgn_nbrs:
+                    # Non-neighbors of sgn must map to non-neighbors of gn
+                    # (induced subgraph semantics).
+                    gn2_options = not_gn_nbrs
+                else:
+                    # Get all edges to gn of the right color:
+                    g_edges = self._edges_of_same_color(sgn, sgn2)
+                    # FIXME directed graphs
+                    # And all nodes involved in those which are connected to gn
+                    gn2_options = {n for e in g_edges for n in e if gn in e}
+                # Node color compatibility should be taken care of by the
+                # initial candidate lists made by find_subgraphs
+
+                # Add gn2_options to the right collection. Since new_candidates
+                # is a dict of frozensets of frozensets of node indices it's
+                # a bit clunky. We can't do .add, and + also doesn't work. We
+                # could do |, but I deem union to be clearer.
+                new_candidates[sgn2] = new_candidates[sgn2].union(
+                    [frozenset(gn2_options)]
+                )
+
+                # Symmetry constraints: enforce the node ordering recorded in
+                # the cosets to avoid yielding symmetric duplicates.
+                if (sgn, sgn2) in constraints:
+                    gn2_options = {gn2 for gn2 in self.graph if gn2 > gn}
+                elif (sgn2, sgn) in constraints:
+                    gn2_options = {gn2 for gn2 in self.graph if gn2 < gn}
+                else:
+                    continue  # pragma: no cover
+                new_candidates[sgn2] = new_candidates[sgn2].union(
+                    [frozenset(gn2_options)]
+                )
+
+            # The next node is the one that is unmapped and has fewest
+            # candidates
+            next_sgn = min(left_to_map, key=lambda n: min(new_candidates[n], key=len))
+            yield from self._map_nodes(
+                next_sgn,
+                new_candidates,
+                constraints,
+                mapping=mapping,
+                to_be_mapped=to_be_mapped,
+            )
+            # Unmap sgn-gn. Strictly not necessary since it'd get overwritten
+            # when making a new mapping for sgn.
+            # del mapping[sgn]
+
+    def _largest_common_subgraph(self, candidates, constraints, to_be_mapped=None):
+        """
+        Find all largest common subgraphs honoring constraints.
+
+        ``to_be_mapped`` is a set of frozensets of subgraph nodes, each a
+        candidate node set for the common subgraph at the current size.
+        """
+        if to_be_mapped is None:
+            to_be_mapped = {frozenset(self.subgraph.nodes)}
+
+        # The LCS problem is basically a repeated subgraph isomorphism problem
+        # with smaller and smaller subgraphs. We store the nodes that are
+        # "part of" the subgraph in to_be_mapped, and we make it a little
+        # smaller every iteration.
+
+        # All sets in to_be_mapped have the same size, so any one gives it.
+        current_size = len(next(iter(to_be_mapped), []))
+
+        found_iso = False
+        if current_size <= len(self.graph):
+            # There's no point in trying to find isomorphisms of
+            # graph >= subgraph if subgraph has more nodes than graph.
+
+            # Try the isomorphism first with the nodes with lowest ID. So sort
+            # them. Those are more likely to be part of the final
+            # correspondence. This makes finding the first answer(s) faster. In
+            # theory.
+            for nodes in sorted(to_be_mapped, key=sorted):
+                # Find the isomorphism between subgraph[to_be_mapped] <= graph
+                next_sgn = min(nodes, key=lambda n: min(candidates[n], key=len))
+                isomorphs = self._map_nodes(
+                    next_sgn, candidates, constraints, to_be_mapped=nodes
+                )
+
+                # This is effectively `yield from isomorphs`, except that we look
+                # whether an item was yielded.
+                try:
+                    item = next(isomorphs)
+                except StopIteration:
+                    pass
+                else:
+                    yield item
+                    yield from isomorphs
+                    found_iso = True
+
+        # BASECASE
+        if found_iso or current_size == 1:
+            # Shrinking has no point because either 1) we end up with a smaller
+            # common subgraph (and we want the largest), or 2) there'll be no
+            # more subgraph.
+            return
+
+        left_to_be_mapped = set()
+        for nodes in to_be_mapped:
+            for sgn in nodes:
+                # We're going to remove sgn from to_be_mapped, but subject to
+                # symmetry constraints. We know that for every constraint we
+                # have those subgraph nodes are equal. So whenever we would
+                # remove the lower part of a constraint, remove the higher
+                # instead. This is all dealt with by _remove_node. And because
+                # left_to_be_mapped is a set, we don't do double work.
+
+                # And finally, make the subgraph one node smaller.
+                # REDUCTION
+                new_nodes = self._remove_node(sgn, nodes, constraints)
+                left_to_be_mapped.add(new_nodes)
+        # COMBINATION
+        yield from self._largest_common_subgraph(
+            candidates, constraints, to_be_mapped=left_to_be_mapped
+        )
+
+    @staticmethod
+    def _remove_node(node, nodes, constraints):
+        """
+        Returns a new set where node has been removed from nodes, subject to
+        symmetry constraints. We know that for every constraint we have
+        those subgraph nodes are equal. So whenever we would remove the
+        lower part of a constraint, remove the higher instead.
+        """
+        # Follow constraint chains upward until no constraint forces a
+        # different node to be removed.
+        while True:
+            for low, high in constraints:
+                if low == node and high in nodes:
+                    node = high
+                    break
+            else:  # no break, couldn't find node in constraints
+                break
+        return frozenset(nodes - {node})
+
+    @staticmethod
+    def _find_permutations(top_partitions, bottom_partitions):
+        """
+        Return the pairs of top/bottom partitions where the partitions are
+        different. Ensures that all partitions in both top and bottom
+        partitions have size 1.
+
+        Returns a set of frozensets, each pairing a top node with the bottom
+        node it is exchanged with.
+        """
+        # Find permutations
+        permutations = set()
+        for top, bot in zip(top_partitions, bottom_partitions):
+            # top and bot have only one element
+            if len(top) != 1 or len(bot) != 1:
+                raise IndexError(
+                    "Not all nodes are coupled. This is"
+                    f" impossible: {top_partitions}, {bottom_partitions}"
+                )
+            if top != bot:
+                permutations.add(frozenset((next(iter(top)), next(iter(bot)))))
+        return permutations
+
+    @staticmethod
+    def _update_orbits(orbits, permutations):
+        """
+        Update orbits based on permutations. Orbits is modified in place.
+        For every pair of items in permutations their respective orbits are
+        merged.
+        """
+        for permutation in permutations:
+            node, node2 = permutation
+            # Find the orbits that contain node and node2, and replace the
+            # orbit containing node with the union
+            first = second = None
+            for idx, orbit in enumerate(orbits):
+                if first is not None and second is not None:
+                    # Both orbits located; stop scanning early.
+                    break
+                if node in orbit:
+                    first = idx
+                if node2 in orbit:
+                    second = idx
+            if first != second:
+                # Merge the two orbits; drop the now-redundant second one.
+                orbits[first].update(orbits[second])
+                del orbits[second]
+
+    def _couple_nodes(
+        self,
+        top_partitions,
+        bottom_partitions,
+        pair_idx,
+        t_node,
+        b_node,
+        graph,
+        edge_colors,
+    ):
+        """
+        Generate new partitions from top and bottom_partitions where t_node is
+        coupled to b_node. pair_idx is the index of the partitions where t_ and
+        b_node can be found.
+
+        Yields ``(top partitions, bottom partitions)`` pairs, one per branch
+        of the (possibly ambiguous) bottom refinement.
+        """
+        t_partition = top_partitions[pair_idx]
+        b_partition = bottom_partitions[pair_idx]
+        assert t_node in t_partition and b_node in b_partition
+        # Couple node to node2. This means they get their own partition
+        new_top_partitions = [top.copy() for top in top_partitions]
+        new_bottom_partitions = [bot.copy() for bot in bottom_partitions]
+        new_t_groups = {t_node}, t_partition - {t_node}
+        new_b_groups = {b_node}, b_partition - {b_node}
+        # Replace the old partitions with the coupled ones
+        del new_top_partitions[pair_idx]
+        del new_bottom_partitions[pair_idx]
+        new_top_partitions[pair_idx:pair_idx] = new_t_groups
+        new_bottom_partitions[pair_idx:pair_idx] = new_b_groups
+
+        # Re-refine both sides; only the bottom may branch into multiple
+        # orderings (branch=True), the top must refine uniquely.
+        new_top_partitions = self._refine_node_partitions(
+            graph, new_top_partitions, edge_colors
+        )
+        new_bottom_partitions = self._refine_node_partitions(
+            graph, new_bottom_partitions, edge_colors, branch=True
+        )
+        new_top_partitions = list(new_top_partitions)
+        assert len(new_top_partitions) == 1
+        new_top_partitions = new_top_partitions[0]
+        for bot in new_bottom_partitions:
+            # Fresh top copy per bottom branch so branches stay independent.
+            yield list(new_top_partitions), bot
+
+    def _process_ordered_pair_partitions(
+        self,
+        graph,
+        top_partitions,
+        bottom_partitions,
+        edge_colors,
+        orbits=None,
+        cosets=None,
+    ):
+        """
+        Processes ordered pair partitions as per the reference paper. Finds and
+        returns all permutations and cosets that leave the graph unchanged.
+
+        Returns a ``(permutations, cosets)`` tuple; ``permutations`` is a list
+        of sets of frozenset node pairs, ``cosets`` maps a node to the orbit
+        of nodes interchangeable with it.
+        """
+        if orbits is None:
+            orbits = [{node} for node in graph.nodes]
+        else:
+            # Note that we don't copy orbits when we are given one. This means
+            # we leak information between the recursive branches. This is
+            # intentional!
+            orbits = orbits
+        if cosets is None:
+            cosets = {}
+        else:
+            cosets = cosets.copy()
+
+        # Top and bottom partitions must stay aligned cell-by-cell.
+        assert all(
+            len(t_p) == len(b_p) for t_p, b_p in zip(top_partitions, bottom_partitions)
+        )
+
+        # BASECASE
+        if all(len(top) == 1 for top in top_partitions):
+            # All nodes are mapped
+            permutations = self._find_permutations(top_partitions, bottom_partitions)
+            self._update_orbits(orbits, permutations)
+            if permutations:
+                return [permutations], cosets
+            else:
+                return [], cosets
+
+        permutations = []
+        # Nodes in any partition cell larger than one are still uncoupled.
+        unmapped_nodes = {
+            (node, idx)
+            for idx, t_partition in enumerate(top_partitions)
+            for node in t_partition
+            if len(t_partition) > 1
+        }
+        node, pair_idx = min(unmapped_nodes)
+        b_partition = bottom_partitions[pair_idx]
+
+        for node2 in sorted(b_partition):
+            if len(b_partition) == 1:
+                # Can never result in symmetry
+                continue
+            if node != node2 and any(
+                node in orbit and node2 in orbit for orbit in orbits
+            ):
+                # Orbit prune branch
+                continue
+            # REDUCTION
+            # Couple node to node2
+            partitions = self._couple_nodes(
+                top_partitions,
+                bottom_partitions,
+                pair_idx,
+                node,
+                node2,
+                graph,
+                edge_colors,
+            )
+            for opp in partitions:
+                new_top_partitions, new_bottom_partitions = opp
+
+                new_perms, new_cosets = self._process_ordered_pair_partitions(
+                    graph,
+                    new_top_partitions,
+                    new_bottom_partitions,
+                    edge_colors,
+                    orbits,
+                    cosets,
+                )
+                # COMBINATION
+                permutations += new_perms
+                cosets.update(new_cosets)
+
+        # Nodes whose singleton top cell equals its bottom cell are fixed.
+        mapped = {
+            k
+            for top, bottom in zip(top_partitions, bottom_partitions)
+            for k in top
+            if len(top) == 1 and top == bottom
+        }
+        ks = {k for k in graph.nodes if k < node}
+        # Have all nodes with ID < node been mapped?
+        find_coset = ks <= mapped and node not in cosets
+        if find_coset:
+            # Find the orbit that contains node
+            for orbit in orbits:
+                if node in orbit:
+                    cosets[node] = orbit.copy()
+        return permutations, cosets
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorph.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorph.py
new file mode 100644
index 00000000..fc3a3fc6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorph.py
@@ -0,0 +1,249 @@
+"""
+Graph isomorphism functions.
+"""
+
+import networkx as nx
+from networkx.exception import NetworkXError
+
+__all__ = [
+    "could_be_isomorphic",
+    "fast_could_be_isomorphic",
+    "faster_could_be_isomorphic",
+    "is_isomorphic",
+]
+
+
+@nx._dispatchable(graphs={"G1": 0, "G2": 1})
+def could_be_isomorphic(G1, G2):
+    """Returns False if graphs are definitely not isomorphic.
+    True does NOT guarantee isomorphism.
+
+    Parameters
+    ----------
+    G1, G2 : graphs
+       The two graphs G1 and G2 must be the same type.
+
+    Notes
+    -----
+    Checks for matching degree, triangle, and number of cliques sequences.
+    The triangle sequence contains the number of triangles each node is part of.
+    The clique sequence contains for each node the number of maximal cliques
+    involving that node.
+
+    """
+
+    # Check global properties
+    if G1.order() != G2.order():
+        return False
+
+    # Check local properties
+    d1 = G1.degree()
+    t1 = nx.triangles(G1)
+    clqs_1 = list(nx.find_cliques(G1))
+    c1 = {n: sum(1 for c in clqs_1 if n in c) for n in G1}  # number of cliques
+    props1 = [[d, t1[v], c1[v]] for v, d in d1]
+    props1.sort()
+
+    d2 = G2.degree()
+    t2 = nx.triangles(G2)
+    clqs_2 = list(nx.find_cliques(G2))
+    c2 = {n: sum(1 for c in clqs_2 if n in c) for n in G2}  # number of cliques
+    props2 = [[d, t2[v], c2[v]] for v, d in d2]
+    props2.sort()
+
+    if props1 != props2:
+        return False
+
+    # OK...
+    return True
+
+
+graph_could_be_isomorphic = could_be_isomorphic
+
+
+@nx._dispatchable(graphs={"G1": 0, "G2": 1})
+def fast_could_be_isomorphic(G1, G2):
+    """Returns False if graphs are definitely not isomorphic.
+
+    True does NOT guarantee isomorphism.
+
+    Parameters
+    ----------
+    G1, G2 : graphs
+       The two graphs G1 and G2 must be the same type.
+
+    Notes
+    -----
+    Checks for matching degree and triangle sequences. The triangle
+    sequence contains the number of triangles each node is part of.
+    """
+    # Check global properties
+    if G1.order() != G2.order():
+        return False
+
+    # Check local properties
+    d1 = G1.degree()
+    t1 = nx.triangles(G1)
+    props1 = [[d, t1[v]] for v, d in d1]
+    props1.sort()
+
+    d2 = G2.degree()
+    t2 = nx.triangles(G2)
+    props2 = [[d, t2[v]] for v, d in d2]
+    props2.sort()
+
+    if props1 != props2:
+        return False
+
+    # OK...
+    return True
+
+
+fast_graph_could_be_isomorphic = fast_could_be_isomorphic
+
+
+@nx._dispatchable(graphs={"G1": 0, "G2": 1})
+def faster_could_be_isomorphic(G1, G2):
+    """Returns False if graphs are definitely not isomorphic.
+
+    True does NOT guarantee isomorphism.
+
+    Parameters
+    ----------
+    G1, G2 : graphs
+       The two graphs G1 and G2 must be the same type.
+
+    Notes
+    -----
+    Checks for matching degree sequences.
+    """
+    # Check global properties
+    if G1.order() != G2.order():
+        return False
+
+    # Check local properties
+    d1 = sorted(d for n, d in G1.degree())
+    d2 = sorted(d for n, d in G2.degree())
+
+    if d1 != d2:
+        return False
+
+    # OK...
+    return True
+
+
+faster_graph_could_be_isomorphic = faster_could_be_isomorphic
+
+
+@nx._dispatchable(
+    graphs={"G1": 0, "G2": 1},
+    preserve_edge_attrs="edge_match",
+    preserve_node_attrs="node_match",
+)
+def is_isomorphic(G1, G2, node_match=None, edge_match=None):
+    """Returns True if the graphs G1 and G2 are isomorphic and False otherwise.
+
+    Parameters
+    ----------
+    G1, G2: graphs
+        The two graphs G1 and G2 must be the same type.
+
+    node_match : callable
+        A function that returns True if node n1 in G1 and n2 in G2 should
+        be considered equal during the isomorphism test.
+        If node_match is not specified then node attributes are not considered.
+
+        The function will be called like
+
+           node_match(G1.nodes[n1], G2.nodes[n2]).
+
+        That is, the function will receive the node attribute dictionaries
+        for n1 and n2 as inputs.
+
+    edge_match : callable
+        A function that returns True if the edge attribute dictionary
+        for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
+        be considered equal during the isomorphism test.  If edge_match is
+        not specified then edge attributes are not considered.
+
+        The function will be called like
+
+           edge_match(G1[u1][v1], G2[u2][v2]).
+
+        That is, the function will receive the edge attribute dictionaries
+        of the edges under consideration.
+
+    Notes
+    -----
+    Uses the vf2 algorithm [1]_.
+
+    Examples
+    --------
+    >>> import networkx.algorithms.isomorphism as iso
+
+    For digraphs G1 and G2, using 'weight' edge attribute (default: 1)
+
+    >>> G1 = nx.DiGraph()
+    >>> G2 = nx.DiGraph()
+    >>> nx.add_path(G1, [1, 2, 3, 4], weight=1)
+    >>> nx.add_path(G2, [10, 20, 30, 40], weight=2)
+    >>> em = iso.numerical_edge_match("weight", 1)
+    >>> nx.is_isomorphic(G1, G2)  # no weights considered
+    True
+    >>> nx.is_isomorphic(G1, G2, edge_match=em)  # match weights
+    False
+
+    For multidigraphs G1 and G2, using 'fill' node attribute (default: '')
+
+    >>> G1 = nx.MultiDiGraph()
+    >>> G2 = nx.MultiDiGraph()
+    >>> G1.add_nodes_from([1, 2, 3], fill="red")
+    >>> G2.add_nodes_from([10, 20, 30, 40], fill="red")
+    >>> nx.add_path(G1, [1, 2, 3, 4], weight=3, linewidth=2.5)
+    >>> nx.add_path(G2, [10, 20, 30, 40], weight=3)
+    >>> nm = iso.categorical_node_match("fill", "red")
+    >>> nx.is_isomorphic(G1, G2, node_match=nm)
+    True
+
+    For multidigraphs G1 and G2, using 'weight' edge attribute (default: 7)
+
+    >>> G1.add_edge(1, 2, weight=7)
+    1
+    >>> G2.add_edge(10, 20)
+    1
+    >>> em = iso.numerical_multiedge_match("weight", 7, rtol=1e-6)
+    >>> nx.is_isomorphic(G1, G2, edge_match=em)
+    True
+
+    For multigraphs G1 and G2, using 'weight' and 'linewidth' edge attributes
+    with default values 7 and 2.5. Also using 'fill' node attribute with
+    default value 'red'.
+
+    >>> em = iso.numerical_multiedge_match(["weight", "linewidth"], [7, 2.5])
+    >>> nm = iso.categorical_node_match("fill", "red")
+    >>> nx.is_isomorphic(G1, G2, edge_match=em, node_match=nm)
+    True
+
+    See Also
+    --------
+    numerical_node_match, numerical_edge_match, numerical_multiedge_match
+    categorical_node_match, categorical_edge_match, categorical_multiedge_match
+
+    References
+    ----------
+    .. [1]  L. P. Cordella, P. Foggia, C. Sansone, M. Vento,
+       "An Improved Algorithm for Matching Large Graphs",
+       3rd IAPR-TC15 Workshop  on Graph-based Representations in
+       Pattern Recognition, Cuen, pp. 149-159, 2001.
+       https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
+    """
+    if G1.is_directed() and G2.is_directed():
+        GM = nx.algorithms.isomorphism.DiGraphMatcher
+    elif (not G1.is_directed()) and (not G2.is_directed()):
+        GM = nx.algorithms.isomorphism.GraphMatcher
+    else:
+        raise NetworkXError("Graphs G1 and G2 are not of the same type.")
+
+    gm = GM(G1, G2, node_match=node_match, edge_match=edge_match)
+
+    return gm.is_isomorphic()
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py
new file mode 100644
index 00000000..cb2f1e8f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py
@@ -0,0 +1,1238 @@
+"""
+*************
+VF2 Algorithm
+*************
+
+An implementation of VF2 algorithm for graph isomorphism testing.
+
+The simplest interface to use this module is to call the
+:func:`is_isomorphic <networkx.algorithms.isomorphism.is_isomorphic>`
+function.
+
+Introduction
+------------
+
+The GraphMatcher and DiGraphMatcher are responsible for matching
+graphs or directed graphs in a predetermined manner.  This
+usually means a check for an isomorphism, though other checks
+are also possible.  For example, a subgraph of one graph
+can be checked for isomorphism to a second graph.
+
+Matching is done via syntactic feasibility. It is also possible
+to check for semantic feasibility. Feasibility, then, is defined
+as the logical AND of the two functions.
+
+To include a semantic check, the (Di)GraphMatcher class should be
+subclassed, and the
+:meth:`semantic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.semantic_feasibility>`
+function should be redefined.  By default, the semantic feasibility function always
+returns ``True``.  The effect of this is that semantics are not
+considered in the matching of G1 and G2.
+
+Examples
+--------
+
+Suppose G1 and G2 are isomorphic graphs. Verification is as follows:
+
+>>> from networkx.algorithms import isomorphism
+>>> G1 = nx.path_graph(4)
+>>> G2 = nx.path_graph(4)
+>>> GM = isomorphism.GraphMatcher(G1, G2)
+>>> GM.is_isomorphic()
+True
+
+GM.mapping stores the isomorphism mapping from G1 to G2.
+
+>>> GM.mapping
+{0: 0, 1: 1, 2: 2, 3: 3}
+
+
+Suppose G1 and G2 are isomorphic directed graphs.
+Verification is as follows:
+
+>>> G1 = nx.path_graph(4, create_using=nx.DiGraph)
+>>> G2 = nx.path_graph(4, create_using=nx.DiGraph)
+>>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
+>>> DiGM.is_isomorphic()
+True
+
+DiGM.mapping stores the isomorphism mapping from G1 to G2.
+
+>>> DiGM.mapping
+{0: 0, 1: 1, 2: 2, 3: 3}
+
+
+
+Subgraph Isomorphism
+--------------------
+Graph theory literature can be ambiguous about the meaning of the
+above statement, and we seek to clarify it now.
+
+In the VF2 literature, a mapping ``M`` is said to be a graph-subgraph
+isomorphism iff ``M`` is an isomorphism between ``G2`` and a subgraph of ``G1``.
+Thus, to say that ``G1`` and ``G2`` are graph-subgraph isomorphic is to say
+that a subgraph of ``G1`` is isomorphic to ``G2``.
+
+Other literature uses the phrase 'subgraph isomorphic' as in '``G1`` does
+not have a subgraph isomorphic to ``G2``'.  Another use is as an in adverb
+for isomorphic.  Thus, to say that ``G1`` and ``G2`` are subgraph isomorphic
+is to say that a subgraph of ``G1`` is isomorphic to ``G2``.
+
+Finally, the term 'subgraph' can have multiple meanings. In this
+context, 'subgraph' always means a 'node-induced subgraph'. Edge-induced
+subgraph isomorphisms are not directly supported, but one should be
+able to perform the check by making use of
+:func:`line_graph <networkx.generators.line.line_graph>`. For
+subgraphs which are not induced, the term 'monomorphism' is preferred
+over 'isomorphism'.
+
+Let ``G = (N, E)`` be a graph with a set of nodes ``N`` and set of edges ``E``.
+
+If ``G' = (N', E')`` is a subgraph, then:
+    ``N'`` is a subset of ``N`` and
+    ``E'`` is a subset of ``E``.
+
+If ``G' = (N', E')`` is a node-induced subgraph, then:
+    ``N'`` is a subset of ``N`` and
+    ``E'`` is the subset of edges in ``E`` relating nodes in ``N'``.
+
+If ``G' = (N', E')`` is an edge-induced subgraph, then:
+    ``N'`` is the subset of nodes in ``N`` related by edges in ``E'`` and
+    ``E'`` is a subset of ``E``.
+
+If ``G' = (N', E')`` is a monomorphism, then:
+    ``N'`` is a subset of ``N`` and
+    ``E'`` is a subset of the set of edges in ``E`` relating nodes in ``N'``.
+
+Note that if ``G'`` is a node-induced subgraph of ``G``, then it is always a
+subgraph monomorphism of ``G``, but the opposite is not always true, as a
+monomorphism can have fewer edges.
+
+References
+----------
+[1]   Luigi P. Cordella, Pasquale Foggia, Carlo Sansone, Mario Vento,
+      "A (Sub)Graph Isomorphism Algorithm for Matching Large Graphs",
+      IEEE Transactions on Pattern Analysis and Machine Intelligence,
+      vol. 26,  no. 10,  pp. 1367-1372,  Oct.,  2004.
+      http://ieeexplore.ieee.org/iel5/34/29305/01323804.pdf
+
+[2]   L. P. Cordella, P. Foggia, C. Sansone, M. Vento, "An Improved
+      Algorithm for Matching Large Graphs", 3rd IAPR-TC15 Workshop
+      on Graph-based Representations in Pattern Recognition, Cuen,
+      pp. 149-159, 2001.
+      https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
+
+See Also
+--------
+:meth:`semantic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.semantic_feasibility>`
+:meth:`syntactic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.syntactic_feasibility>`
+
+Notes
+-----
+
+The implementation handles both directed and undirected graphs as well
+as multigraphs.
+
+In general, the subgraph isomorphism problem is NP-complete whereas the
+graph isomorphism problem is most likely not NP-complete (although no
+polynomial-time algorithm is known to exist).
+
+"""
+
+# This work was originally coded by Christopher Ellison
+# as part of the Computational Mechanics Python (CMPy) project.
+# James P. Crutchfield, principal investigator.
+# Complexity Sciences Center and Physics Department, UC Davis.
+
+import sys
+
+__all__ = ["GraphMatcher", "DiGraphMatcher"]
+
+
+class GraphMatcher:
+    """Implementation of VF2 algorithm for matching undirected graphs.
+
+    Suitable for Graph and MultiGraph instances.
+    """
+
+    def __init__(self, G1, G2):
+        """Initialize GraphMatcher.
+
+        Parameters
+        ----------
+        G1,G2: NetworkX Graph or MultiGraph instances.
+           The two graphs to check for isomorphism or monomorphism.
+
+        Examples
+        --------
+        To create a GraphMatcher which checks for syntactic feasibility:
+
+        >>> from networkx.algorithms import isomorphism
+        >>> G1 = nx.path_graph(4)
+        >>> G2 = nx.path_graph(4)
+        >>> GM = isomorphism.GraphMatcher(G1, G2)
+        """
+        self.G1 = G1
+        self.G2 = G2
+        self.G1_nodes = set(G1.nodes())
+        self.G2_nodes = set(G2.nodes())
+        self.G2_node_order = {n: i for i, n in enumerate(G2)}
+
+        # Set recursion limit.
+        self.old_recursion_limit = sys.getrecursionlimit()
+        expected_max_recursion_level = len(self.G2)
+        if self.old_recursion_limit < 1.5 * expected_max_recursion_level:
+            # Give some breathing room.
+            sys.setrecursionlimit(int(1.5 * expected_max_recursion_level))
+
+        # Declare that we will be searching for a graph-graph isomorphism.
+        self.test = "graph"
+
+        # Initialize state
+        self.initialize()
+
+    def reset_recursion_limit(self):
+        """Restores the recursion limit."""
+        # TODO:
+        # Currently, we use recursion and set the recursion level higher.
+        # It would be nice to restore the level, but because the
+        # (Di)GraphMatcher classes make use of cyclic references, garbage
+        # collection will never happen when we define __del__() to
+        # restore the recursion level. The result is a memory leak.
+        # So for now, we do not automatically restore the recursion level,
+        # and instead provide a method to do this manually. Eventually,
+        # we should turn this into a non-recursive implementation.
+        sys.setrecursionlimit(self.old_recursion_limit)
+
+    def candidate_pairs_iter(self):
+        """Iterator over candidate pairs of nodes in G1 and G2."""
+
+        # All computations are done using the current state!
+
+        G1_nodes = self.G1_nodes
+        G2_nodes = self.G2_nodes
+        min_key = self.G2_node_order.__getitem__
+
+        # First we compute the inout-terminal sets.
+        T1_inout = [node for node in self.inout_1 if node not in self.core_1]
+        T2_inout = [node for node in self.inout_2 if node not in self.core_2]
+
+        # If T1_inout and T2_inout are both nonempty.
+        # P(s) = T1_inout x {min T2_inout}
+        if T1_inout and T2_inout:
+            node_2 = min(T2_inout, key=min_key)
+            for node_1 in T1_inout:
+                yield node_1, node_2
+
+        else:
+            # If T1_inout and T2_inout were both empty....
+            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
+            # if not (T1_inout or T2_inout):  # as suggested by  [2], incorrect
+            if 1:  # as inferred from [1], correct
+                # First we determine the candidate node for G2
+                other_node = min(G2_nodes - set(self.core_2), key=min_key)
+                for node in self.G1:
+                    if node not in self.core_1:
+                        yield node, other_node
+
+        # For all other cases, we don't have any candidate pairs.
+
+    def initialize(self):
+        """Reinitializes the state of the algorithm.
+
+        This method should be redefined if using something other than GMState.
+        If only subclassing GraphMatcher, a redefinition is not necessary.
+
+        """
+
+        # core_1[n] contains the index of the node paired with n, which is m,
+        #           provided n is in the mapping.
+        # core_2[m] contains the index of the node paired with m, which is n,
+        #           provided m is in the mapping.
+        self.core_1 = {}
+        self.core_2 = {}
+
+        # See the paper for definitions of M_x and T_x^{y}
+
+        # inout_1[n]  is non-zero if n is in M_1 or in T_1^{inout}
+        # inout_2[m]  is non-zero if m is in M_2 or in T_2^{inout}
+        #
+        # The value stored is the depth of the SSR tree when the node became
+        # part of the corresponding set.
+        self.inout_1 = {}
+        self.inout_2 = {}
+        # Practically, these sets simply store the nodes in the subgraph.
+
+        self.state = GMState(self)
+
+        # Provide a convenient way to access the isomorphism mapping.
+        self.mapping = self.core_1.copy()
+
+    def is_isomorphic(self):
+        """Returns True if G1 and G2 are isomorphic graphs."""
+
+        # Let's do two very quick checks!
+        # QUESTION: Should we call faster_graph_could_be_isomorphic(G1,G2)?
+        # For now, I just copy the code.
+
+        # Check global properties
+        if self.G1.order() != self.G2.order():
+            return False
+
+        # Check local properties
+        d1 = sorted(d for n, d in self.G1.degree())
+        d2 = sorted(d for n, d in self.G2.degree())
+        if d1 != d2:
+            return False
+
+        try:
+            x = next(self.isomorphisms_iter())
+            return True
+        except StopIteration:
+            return False
+
+    def isomorphisms_iter(self):
+        """Generator over isomorphisms between G1 and G2."""
+        # Declare that we are looking for a graph-graph isomorphism.
+        self.test = "graph"
+        self.initialize()
+        yield from self.match()
+
+    def match(self):
+        """Extends the isomorphism mapping.
+
+        This function is called recursively to determine if a complete
+        isomorphism can be found between G1 and G2.  It cleans up the class
+        variables after each recursive call. If an isomorphism is found,
+        we yield the mapping.
+
+        """
+        if len(self.core_1) == len(self.G2):
+            # Save the final mapping, otherwise garbage collection deletes it.
+            self.mapping = self.core_1.copy()
+            # The mapping is complete.
+            yield self.mapping
+        else:
+            for G1_node, G2_node in self.candidate_pairs_iter():
+                if self.syntactic_feasibility(G1_node, G2_node):
+                    if self.semantic_feasibility(G1_node, G2_node):
+                        # Recursive call, adding the feasible state.
+                        newstate = self.state.__class__(self, G1_node, G2_node)
+                        yield from self.match()
+
+                        # restore data structures
+                        newstate.restore()
+
+    def semantic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is semantically feasible.
+
+        The semantic feasibility function should return True if it is
+        acceptable to add the candidate pair (G1_node, G2_node) to the current
+        partial isomorphism mapping.   The logic should focus on semantic
+        information contained in the edge data or a formalized node class.
+
+        By acceptable, we mean that the subsequent mapping can still become a
+        complete isomorphism mapping.  Thus, if adding the candidate pair
+        definitely makes it so that the subsequent mapping cannot become a
+        complete isomorphism mapping, then this function must return False.
+
+        The default semantic feasibility function always returns True. The
+        effect is that semantics are not considered in the matching of G1
+        and G2.
+
+        The semantic checks might differ based on the what type of test is
+        being performed.  A keyword description of the test is stored in
+        self.test.  Here is a quick description of the currently implemented
+        tests::
+
+          test='graph'
+            Indicates that the graph matcher is looking for a graph-graph
+            isomorphism.
+
+          test='subgraph'
+            Indicates that the graph matcher is looking for a subgraph-graph
+            isomorphism such that a subgraph of G1 is isomorphic to G2.
+
+          test='mono'
+            Indicates that the graph matcher is looking for a subgraph-graph
+            monomorphism such that a subgraph of G1 is monomorphic to G2.
+
+        Any subclass which redefines semantic_feasibility() must maintain
+        the above form to keep the match() method functional. Implementations
+        should consider multigraphs.
+        """
+        return True
+
+    def subgraph_is_isomorphic(self):
+        """Returns `True` if a subgraph of ``G1`` is isomorphic to ``G2``.
+
+        Examples
+        --------
+        When creating the `GraphMatcher`, the order of the arguments is important
+
+        >>> G = nx.Graph([("A", "B"), ("B", "C"), ("A", "C")])
+        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2), (1, 3), (0, 4)])
+
+        Check whether a subgraph of G is isomorphic to H:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
+        >>> isomatcher.subgraph_is_isomorphic()
+        False
+
+        Check whether a subgraph of H is isomorphic to G:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
+        >>> isomatcher.subgraph_is_isomorphic()
+        True
+        """
+        try:
+            x = next(self.subgraph_isomorphisms_iter())
+            return True
+        except StopIteration:
+            return False
+
+    def subgraph_is_monomorphic(self):
+        """Returns `True` if a subgraph of ``G1`` is monomorphic to ``G2``.
+
+        Examples
+        --------
+        When creating the `GraphMatcher`, the order of the arguments is important.
+
+        >>> G = nx.Graph([("A", "B"), ("B", "C")])
+        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2)])
+
+        Check whether a subgraph of G is monomorphic to H:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
+        >>> isomatcher.subgraph_is_monomorphic()
+        False
+
+        Check whether a subgraph of H is isomorphic to G:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
+        >>> isomatcher.subgraph_is_monomorphic()
+        True
+        """
+        try:
+            x = next(self.subgraph_monomorphisms_iter())
+            return True
+        except StopIteration:
+            return False
+
+    def subgraph_isomorphisms_iter(self):
+        """Generator over isomorphisms between a subgraph of ``G1`` and ``G2``.
+
+        Examples
+        --------
+        When creating the `GraphMatcher`, the order of the arguments is important
+
+        >>> G = nx.Graph([("A", "B"), ("B", "C"), ("A", "C")])
+        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2), (1, 3), (0, 4)])
+
+        Yield isomorphic mappings between ``H`` and subgraphs of ``G``:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
+        >>> list(isomatcher.subgraph_isomorphisms_iter())
+        []
+
+        Yield isomorphic mappings  between ``G`` and subgraphs of ``H``:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
+        >>> next(isomatcher.subgraph_isomorphisms_iter())
+        {0: 'A', 1: 'B', 2: 'C'}
+
+        """
+        # Declare that we are looking for graph-subgraph isomorphism.
+        self.test = "subgraph"
+        self.initialize()
+        yield from self.match()
+
+    def subgraph_monomorphisms_iter(self):
+        """Generator over monomorphisms between a subgraph of ``G1`` and ``G2``.
+
+        Examples
+        --------
+        When creating the `GraphMatcher`, the order of the arguments is important.
+
+        >>> G = nx.Graph([("A", "B"), ("B", "C")])
+        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2)])
+
+        Yield monomorphic mappings between ``H`` and subgraphs of ``G``:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
+        >>> list(isomatcher.subgraph_monomorphisms_iter())
+        []
+
+        Yield monomorphic mappings  between ``G`` and subgraphs of ``H``:
+
+        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
+        >>> next(isomatcher.subgraph_monomorphisms_iter())
+        {0: 'A', 1: 'B', 2: 'C'}
+        """
+        # Declare that we are looking for graph-subgraph monomorphism.
+        self.test = "mono"
+        self.initialize()
+        yield from self.match()
+
+    def syntactic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is syntactically feasible.
+
+        This function returns True if it is adding the candidate pair
+        to the current partial isomorphism/monomorphism mapping is allowable.
+        The addition is allowable if the inclusion of the candidate pair does
+        not make it impossible for an isomorphism/monomorphism to be found.
+        """
+
+        # The VF2 algorithm was designed to work with graphs having, at most,
+        # one edge connecting any two nodes.  This is not the case when
+        # dealing with an MultiGraphs.
+        #
+        # Basically, when we test the look-ahead rules R_neighbor, we will
+        # make sure that the number of edges are checked. We also add
+        # a R_self check to verify that the number of selfloops is acceptable.
+        #
+        # Users might be comparing Graph instances with MultiGraph instances.
+        # So the generic GraphMatcher class must work with MultiGraphs.
+        # Care must be taken since the value in the innermost dictionary is a
+        # singlet for Graph instances.  For MultiGraphs, the value in the
+        # innermost dictionary is a list.
+
+        ###
+        # Test at each step to get a return value as soon as possible.
+        ###
+
+        # Look ahead 0
+
+        # R_self
+
+        # The number of selfloops for G1_node must equal the number of
+        # self-loops for G2_node. Without this check, we would fail on
+        # R_neighbor at the next recursion level. But it is good to prune the
+        # search tree now.
+
+        if self.test == "mono":
+            if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
+                G2_node, G2_node
+            ):
+                return False
+        else:
+            if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
+                G2_node, G2_node
+            ):
+                return False
+
+        # R_neighbor
+
+        # For each neighbor n' of n in the partial mapping, the corresponding
+        # node m' is a neighbor of m, and vice versa. Also, the number of
+        # edges must be equal.
+        if self.test != "mono":
+            for neighbor in self.G1[G1_node]:
+                if neighbor in self.core_1:
+                    if self.core_1[neighbor] not in self.G2[G2_node]:
+                        return False
+                    elif self.G1.number_of_edges(
+                        neighbor, G1_node
+                    ) != self.G2.number_of_edges(self.core_1[neighbor], G2_node):
+                        return False
+
+        for neighbor in self.G2[G2_node]:
+            if neighbor in self.core_2:
+                if self.core_2[neighbor] not in self.G1[G1_node]:
+                    return False
+                elif self.test == "mono":
+                    if self.G1.number_of_edges(
+                        self.core_2[neighbor], G1_node
+                    ) < self.G2.number_of_edges(neighbor, G2_node):
+                        return False
+                else:
+                    if self.G1.number_of_edges(
+                        self.core_2[neighbor], G1_node
+                    ) != self.G2.number_of_edges(neighbor, G2_node):
+                        return False
+
+        if self.test != "mono":
+            # Look ahead 1
+
+            # R_terminout
+            # The number of neighbors of n in T_1^{inout} is equal to the
+            # number of neighbors of m that are in T_2^{inout}, and vice versa.
+            num1 = 0
+            for neighbor in self.G1[G1_node]:
+                if (neighbor in self.inout_1) and (neighbor not in self.core_1):
+                    num1 += 1
+            num2 = 0
+            for neighbor in self.G2[G2_node]:
+                if (neighbor in self.inout_2) and (neighbor not in self.core_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+            # Look ahead 2
+
+            # R_new
+
+            # The number of neighbors of n that are neither in the core_1 nor
+            # T_1^{inout} is equal to the number of neighbors of m
+            # that are neither in core_2 nor T_2^{inout}.
+            num1 = 0
+            for neighbor in self.G1[G1_node]:
+                if neighbor not in self.inout_1:
+                    num1 += 1
+            num2 = 0
+            for neighbor in self.G2[G2_node]:
+                if neighbor not in self.inout_2:
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+        # Otherwise, this node pair is syntactically feasible!
+        return True
+
+
+class DiGraphMatcher(GraphMatcher):
+    """Implementation of VF2 algorithm for matching directed graphs.
+
+    Suitable for DiGraph and MultiDiGraph instances.
+    """
+
+    def __init__(self, G1, G2):
+        """Initialize DiGraphMatcher.
+
+        G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.
+
+        Examples
+        --------
+        To create a GraphMatcher which checks for syntactic feasibility:
+
+        >>> from networkx.algorithms import isomorphism
+        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
+        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
+        >>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
+        """
+        super().__init__(G1, G2)
+
+    def candidate_pairs_iter(self):
+        """Iterator over candidate pairs of nodes in G1 and G2."""
+
+        # All computations are done using the current state!
+
+        G1_nodes = self.G1_nodes
+        G2_nodes = self.G2_nodes
+        min_key = self.G2_node_order.__getitem__
+
+        # First we compute the out-terminal sets.
+        T1_out = [node for node in self.out_1 if node not in self.core_1]
+        T2_out = [node for node in self.out_2 if node not in self.core_2]
+
+        # If T1_out and T2_out are both nonempty.
+        # P(s) = T1_out x {min T2_out}
+        if T1_out and T2_out:
+            node_2 = min(T2_out, key=min_key)
+            for node_1 in T1_out:
+                yield node_1, node_2
+
+        # If T1_out and T2_out were both empty....
+        # We compute the in-terminal sets.
+
+        # elif not (T1_out or T2_out):   # as suggested by [2], incorrect
+        else:  # as suggested by [1], correct
+            T1_in = [node for node in self.in_1 if node not in self.core_1]
+            T2_in = [node for node in self.in_2 if node not in self.core_2]
+
+            # If T1_in and T2_in are both nonempty.
+            # P(s) = T1_in x {min T2_in}
+            if T1_in and T2_in:
+                node_2 = min(T2_in, key=min_key)
+                for node_1 in T1_in:
+                    yield node_1, node_2
+
+            # If all terminal sets are empty...
+            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
+
+            # elif not (T1_in or T2_in):   # as suggested by  [2], incorrect
+            else:  # as inferred from [1], correct
+                node_2 = min(G2_nodes - set(self.core_2), key=min_key)
+                for node_1 in G1_nodes:
+                    if node_1 not in self.core_1:
+                        yield node_1, node_2
+
+        # For all other cases, we don't have any candidate pairs.
+
+    def initialize(self):
+        """Reinitializes the state of the algorithm.
+
+        This method should be redefined if using something other than DiGMState.
+        If only subclassing GraphMatcher, a redefinition is not necessary.
+        """
+
+        # core_1[n] contains the index of the node paired with n, which is m,
+        #           provided n is in the mapping.
+        # core_2[m] contains the index of the node paired with m, which is n,
+        #           provided m is in the mapping.
+        self.core_1 = {}
+        self.core_2 = {}
+
+        # See the paper for definitions of M_x and T_x^{y}
+
+        # in_1[n]  is non-zero if n is in M_1 or in T_1^{in}
+        # out_1[n] is non-zero if n is in M_1 or in T_1^{out}
+        #
+        # in_2[m]  is non-zero if m is in M_2 or in T_2^{in}
+        # out_2[m] is non-zero if m is in M_2 or in T_2^{out}
+        #
+        # The value stored is the depth of the search tree when the node became
+        # part of the corresponding set.
+        self.in_1 = {}
+        self.in_2 = {}
+        self.out_1 = {}
+        self.out_2 = {}
+
+        self.state = DiGMState(self)
+
+        # Provide a convenient way to access the isomorphism mapping.
+        self.mapping = self.core_1.copy()
+
+    def syntactic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is syntactically feasible.
+
+        This function returns True if adding the candidate pair
+        to the current partial isomorphism/monomorphism mapping is allowable.
+        The addition is allowable if the inclusion of the candidate pair does
+        not make it impossible for an isomorphism/monomorphism to be found.
+        """
+
+        # The VF2 algorithm was designed to work with graphs having, at most,
+        # one edge connecting any two nodes.  This is not the case when
+        # dealing with MultiDiGraphs.
+        #
+        # Basically, when we test the look-ahead rules R_pred and R_succ, we
+        # will make sure that the number of edges are checked.  We also add
+        # a R_self check to verify that the number of selfloops is acceptable.
+
+        # Users might be comparing DiGraph instances with MultiDiGraph
+        # instances. So the generic DiGraphMatcher class must work with
+        # MultiDiGraphs. Care must be taken since the value in the innermost
+        # dictionary is a singlet for DiGraph instances.  For MultiDiGraphs,
+        # the value in the innermost dictionary is a list.
+
+        ###
+        # Test at each step to get a return value as soon as possible.
+        ###
+
+        # Look ahead 0
+
+        # R_self
+
+        # The number of selfloops for G1_node must equal the number of
+        # self-loops for G2_node. Without this check, we would fail on R_pred
+        # at the next recursion level. This should prune the tree even further.
+        if self.test == "mono":
+            if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
+                G2_node, G2_node
+            ):
+                return False
+        else:
+            if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
+                G2_node, G2_node
+            ):
+                return False
+
+        # R_pred
+
+        # For each predecessor n' of n in the partial mapping, the
+        # corresponding node m' is a predecessor of m, and vice versa. Also,
+        # the number of edges must be equal
+        if self.test != "mono":
+            for predecessor in self.G1.pred[G1_node]:
+                if predecessor in self.core_1:
+                    if self.core_1[predecessor] not in self.G2.pred[G2_node]:
+                        return False
+                    elif self.G1.number_of_edges(
+                        predecessor, G1_node
+                    ) != self.G2.number_of_edges(self.core_1[predecessor], G2_node):
+                        return False
+
+        for predecessor in self.G2.pred[G2_node]:
+            if predecessor in self.core_2:
+                if self.core_2[predecessor] not in self.G1.pred[G1_node]:
+                    return False
+                elif self.test == "mono":
+                    if self.G1.number_of_edges(
+                        self.core_2[predecessor], G1_node
+                    ) < self.G2.number_of_edges(predecessor, G2_node):
+                        return False
+                else:
+                    if self.G1.number_of_edges(
+                        self.core_2[predecessor], G1_node
+                    ) != self.G2.number_of_edges(predecessor, G2_node):
+                        return False
+
+        # R_succ
+
+        # For each successor n' of n in the partial mapping, the corresponding
+        # node m' is a successor of m, and vice versa. Also, the number of
+        # edges must be equal.
+        if self.test != "mono":
+            for successor in self.G1[G1_node]:
+                if successor in self.core_1:
+                    if self.core_1[successor] not in self.G2[G2_node]:
+                        return False
+                    elif self.G1.number_of_edges(
+                        G1_node, successor
+                    ) != self.G2.number_of_edges(G2_node, self.core_1[successor]):
+                        return False
+
+        for successor in self.G2[G2_node]:
+            if successor in self.core_2:
+                if self.core_2[successor] not in self.G1[G1_node]:
+                    return False
+                elif self.test == "mono":
+                    if self.G1.number_of_edges(
+                        G1_node, self.core_2[successor]
+                    ) < self.G2.number_of_edges(G2_node, successor):
+                        return False
+                else:
+                    if self.G1.number_of_edges(
+                        G1_node, self.core_2[successor]
+                    ) != self.G2.number_of_edges(G2_node, successor):
+                        return False
+
+        if self.test != "mono":
+            # Look ahead 1
+
+            # R_termin
+            # The number of predecessors of n that are in T_1^{in} is equal to the
+            # number of predecessors of m that are in T_2^{in}.
+            num1 = 0
+            for predecessor in self.G1.pred[G1_node]:
+                if (predecessor in self.in_1) and (predecessor not in self.core_1):
+                    num1 += 1
+            num2 = 0
+            for predecessor in self.G2.pred[G2_node]:
+                if (predecessor in self.in_2) and (predecessor not in self.core_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+            # The number of successors of n that are in T_1^{in} is equal to the
+            # number of successors of m that are in T_2^{in}.
+            num1 = 0
+            for successor in self.G1[G1_node]:
+                if (successor in self.in_1) and (successor not in self.core_1):
+                    num1 += 1
+            num2 = 0
+            for successor in self.G2[G2_node]:
+                if (successor in self.in_2) and (successor not in self.core_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+            # R_termout
+
+            # The number of predecessors of n that are in T_1^{out} is equal to the
+            # number of predecessors of m that are in T_2^{out}.
+            num1 = 0
+            for predecessor in self.G1.pred[G1_node]:
+                if (predecessor in self.out_1) and (predecessor not in self.core_1):
+                    num1 += 1
+            num2 = 0
+            for predecessor in self.G2.pred[G2_node]:
+                if (predecessor in self.out_2) and (predecessor not in self.core_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+            # The number of successors of n that are in T_1^{out} is equal to the
+            # number of successors of m that are in T_2^{out}.
+            num1 = 0
+            for successor in self.G1[G1_node]:
+                if (successor in self.out_1) and (successor not in self.core_1):
+                    num1 += 1
+            num2 = 0
+            for successor in self.G2[G2_node]:
+                if (successor in self.out_2) and (successor not in self.core_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+            # Look ahead 2
+
+            # R_new
+
+            # The number of predecessors of n that are neither in the core_1 nor
+            # T_1^{in} nor T_1^{out} is equal to the number of predecessors of m
+            # that are neither in core_2 nor T_2^{in} nor T_2^{out}.
+            num1 = 0
+            for predecessor in self.G1.pred[G1_node]:
+                if (predecessor not in self.in_1) and (predecessor not in self.out_1):
+                    num1 += 1
+            num2 = 0
+            for predecessor in self.G2.pred[G2_node]:
+                if (predecessor not in self.in_2) and (predecessor not in self.out_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+            # The number of successors of n that are neither in the core_1 nor
+            # T_1^{in} nor T_1^{out} is equal to the number of successors of m
+            # that are neither in core_2 nor T_2^{in} nor T_2^{out}.
+            num1 = 0
+            for successor in self.G1[G1_node]:
+                if (successor not in self.in_1) and (successor not in self.out_1):
+                    num1 += 1
+            num2 = 0
+            for successor in self.G2[G2_node]:
+                if (successor not in self.in_2) and (successor not in self.out_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+        # Otherwise, this node pair is syntactically feasible!
+        return True
+
+    def subgraph_is_isomorphic(self):
+        """Returns `True` if a subgraph of ``G1`` is isomorphic to ``G2``.
+
+        Examples
+        --------
+        When creating the `DiGraphMatcher`, the order of the arguments is important
+
+        >>> G = nx.DiGraph([("A", "B"), ("B", "A"), ("B", "C"), ("C", "B")])
+        >>> H = nx.DiGraph(nx.path_graph(5))
+
+        Check whether a subgraph of G is isomorphic to H:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
+        >>> isomatcher.subgraph_is_isomorphic()
+        False
+
+        Check whether a subgraph of H is isomorphic to G:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
+        >>> isomatcher.subgraph_is_isomorphic()
+        True
+        """
+        return super().subgraph_is_isomorphic()
+
+    def subgraph_is_monomorphic(self):
+        """Returns `True` if a subgraph of ``G1`` is monomorphic to ``G2``.
+
+        Examples
+        --------
+        When creating the `DiGraphMatcher`, the order of the arguments is important.
+
+        >>> G = nx.DiGraph([("A", "B"), ("C", "B"), ("D", "C")])
+        >>> H = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 2)])
+
+        Check whether a subgraph of G is monomorphic to H:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
+        >>> isomatcher.subgraph_is_monomorphic()
+        False
+
+        Check whether a subgraph of H is monomorphic to G:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
+        >>> isomatcher.subgraph_is_monomorphic()
+        True
+        """
+        return super().subgraph_is_monomorphic()
+
+    def subgraph_isomorphisms_iter(self):
+        """Generator over isomorphisms between a subgraph of ``G1`` and ``G2``.
+
+        Examples
+        --------
+        When creating the `DiGraphMatcher`, the order of the arguments is important
+
+        >>> G = nx.DiGraph([("B", "C"), ("C", "B"), ("C", "D"), ("D", "C")])
+        >>> H = nx.DiGraph(nx.path_graph(5))
+
+        Yield isomorphic mappings between ``H`` and subgraphs of ``G``:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
+        >>> list(isomatcher.subgraph_isomorphisms_iter())
+        []
+
+        Yield isomorphic mappings between ``G`` and subgraphs of ``H``:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
+        >>> next(isomatcher.subgraph_isomorphisms_iter())
+        {0: 'B', 1: 'C', 2: 'D'}
+        """
+        return super().subgraph_isomorphisms_iter()
+
+    def subgraph_monomorphisms_iter(self):
+        """Generator over monomorphisms between a subgraph of ``G1`` and ``G2``.
+
+        Examples
+        --------
+        When creating the `DiGraphMatcher`, the order of the arguments is important.
+
+        >>> G = nx.DiGraph([("A", "B"), ("C", "B"), ("D", "C")])
+        >>> H = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 2)])
+
+        Yield monomorphic mappings between ``H`` and subgraphs of ``G``:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
+        >>> list(isomatcher.subgraph_monomorphisms_iter())
+        []
+
+        Yield monomorphic mappings between ``G`` and subgraphs of ``H``:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
+        >>> next(isomatcher.subgraph_monomorphisms_iter())
+        {3: 'A', 2: 'B', 1: 'C', 0: 'D'}
+        """
+        return super().subgraph_monomorphisms_iter()
+
+
+class GMState:
+    """Internal representation of state for the GraphMatcher class.
+
+    This class is used internally by the GraphMatcher class.  It is used
+    only to store state specific data. There will be at most G2.order() of
+    these objects in memory at a time, due to the depth-first search
+    strategy employed by the VF2 algorithm.
+    """
+
+    def __init__(self, GM, G1_node=None, G2_node=None):
+        """Initializes GMState object.
+
+        Pass in the GraphMatcher to which this GMState belongs and the
+        new node pair that will be added to the GraphMatcher's current
+        isomorphism mapping.
+        """
+        self.GM = GM
+
+        # Initialize the last stored node pair.
+        self.G1_node = None
+        self.G2_node = None
+        self.depth = len(GM.core_1)
+
+        # Constructing a state with no node pair acts as a full reset of
+        # the matcher's search state.
+        if G1_node is None or G2_node is None:
+            # Then we reset the matcher's state variables
+            GM.core_1 = {}
+            GM.core_2 = {}
+            GM.inout_1 = {}
+            GM.inout_2 = {}
+
+        # Watch out! G1_node == 0 should evaluate to True.
+        if G1_node is not None and G2_node is not None:
+            # Add the node pair to the isomorphism mapping.
+            GM.core_1[G1_node] = G2_node
+            GM.core_2[G2_node] = G1_node
+
+            # Store the node that was added last.
+            self.G1_node = G1_node
+            self.G2_node = G2_node
+
+            # Now we must update the other two vectors.
+            # We will add only if it is not in there already!
+            self.depth = len(GM.core_1)
+
+            # First we add the new nodes...
+            if G1_node not in GM.inout_1:
+                GM.inout_1[G1_node] = self.depth
+            if G2_node not in GM.inout_2:
+                GM.inout_2[G2_node] = self.depth
+
+            # Now we add every other node...
+
+            # Updates for T_1^{inout}
+            new_nodes = set()
+            for node in GM.core_1:
+                new_nodes.update(
+                    [neighbor for neighbor in GM.G1[node] if neighbor not in GM.core_1]
+                )
+            for node in new_nodes:
+                if node not in GM.inout_1:
+                    GM.inout_1[node] = self.depth
+
+            # Updates for T_2^{inout}
+            new_nodes = set()
+            for node in GM.core_2:
+                new_nodes.update(
+                    [neighbor for neighbor in GM.G2[node] if neighbor not in GM.core_2]
+                )
+            for node in new_nodes:
+                if node not in GM.inout_2:
+                    GM.inout_2[node] = self.depth
+
+    def restore(self):
+        """Deletes the GMState object and restores the matcher's state variables."""
+        # First we remove the node that was added from the core vectors.
+        # Watch out! G1_node == 0 should evaluate to True.
+        if self.G1_node is not None and self.G2_node is not None:
+            del self.GM.core_1[self.G1_node]
+            del self.GM.core_2[self.G2_node]
+
+        # Now we revert the other two vectors.
+        # Thus, we delete all entries which have this depth level.
+        for vector in (self.GM.inout_1, self.GM.inout_2):
+            for node in list(vector.keys()):
+                if vector[node] == self.depth:
+                    del vector[node]
+
+
+class DiGMState:
+    """Internal representation of state for the DiGraphMatcher class.
+
+    This class is used internally by the DiGraphMatcher class.  It is used
+    only to store state specific data. There will be at most G2.order() of
+    these objects in memory at a time, due to the depth-first search
+    strategy employed by the VF2 algorithm.
+
+    """
+
+    def __init__(self, GM, G1_node=None, G2_node=None):
+        """Initializes DiGMState object.
+
+        Pass in the DiGraphMatcher to which this DiGMState belongs and the
+        new node pair that will be added to the GraphMatcher's current
+        isomorphism mapping.
+        """
+        self.GM = GM
+
+        # Initialize the last stored node pair.
+        self.G1_node = None
+        self.G2_node = None
+        self.depth = len(GM.core_1)
+
+        # Constructing a state with no node pair acts as a full reset of
+        # the matcher's search state.
+        if G1_node is None or G2_node is None:
+            # Then we reset the matcher's state variables
+            GM.core_1 = {}
+            GM.core_2 = {}
+            GM.in_1 = {}
+            GM.in_2 = {}
+            GM.out_1 = {}
+            GM.out_2 = {}
+
+        # Watch out! G1_node == 0 should evaluate to True.
+        if G1_node is not None and G2_node is not None:
+            # Add the node pair to the isomorphism mapping.
+            GM.core_1[G1_node] = G2_node
+            GM.core_2[G2_node] = G1_node
+
+            # Store the node that was added last.
+            self.G1_node = G1_node
+            self.G2_node = G2_node
+
+            # Now we must update the other four vectors.
+            # We will add only if it is not in there already!
+            self.depth = len(GM.core_1)
+
+            # First we add the new nodes...
+            for vector in (GM.in_1, GM.out_1):
+                if G1_node not in vector:
+                    vector[G1_node] = self.depth
+            for vector in (GM.in_2, GM.out_2):
+                if G2_node not in vector:
+                    vector[G2_node] = self.depth
+
+            # Now we add every other node...
+
+            # Updates for T_1^{in}
+            new_nodes = set()
+            for node in GM.core_1:
+                new_nodes.update(
+                    [
+                        predecessor
+                        for predecessor in GM.G1.predecessors(node)
+                        if predecessor not in GM.core_1
+                    ]
+                )
+            for node in new_nodes:
+                if node not in GM.in_1:
+                    GM.in_1[node] = self.depth
+
+            # Updates for T_2^{in}
+            new_nodes = set()
+            for node in GM.core_2:
+                new_nodes.update(
+                    [
+                        predecessor
+                        for predecessor in GM.G2.predecessors(node)
+                        if predecessor not in GM.core_2
+                    ]
+                )
+            for node in new_nodes:
+                if node not in GM.in_2:
+                    GM.in_2[node] = self.depth
+
+            # Updates for T_1^{out}
+            new_nodes = set()
+            for node in GM.core_1:
+                new_nodes.update(
+                    [
+                        successor
+                        for successor in GM.G1.successors(node)
+                        if successor not in GM.core_1
+                    ]
+                )
+            for node in new_nodes:
+                if node not in GM.out_1:
+                    GM.out_1[node] = self.depth
+
+            # Updates for T_2^{out}
+            new_nodes = set()
+            for node in GM.core_2:
+                new_nodes.update(
+                    [
+                        successor
+                        for successor in GM.G2.successors(node)
+                        if successor not in GM.core_2
+                    ]
+                )
+            for node in new_nodes:
+                if node not in GM.out_2:
+                    GM.out_2[node] = self.depth
+
+    def restore(self):
+        """Deletes the DiGMState object and restores the matcher's state variables."""
+
+        # First we remove the node that was added from the core vectors.
+        # Watch out! G1_node == 0 should evaluate to True.
+        if self.G1_node is not None and self.G2_node is not None:
+            del self.GM.core_1[self.G1_node]
+            del self.GM.core_2[self.G2_node]
+
+        # Now we revert the other four vectors.
+        # Thus, we delete all entries which have this depth level.
+        for vector in (self.GM.in_1, self.GM.in_2, self.GM.out_1, self.GM.out_2):
+            for node in list(vector.keys()):
+                if vector[node] == self.depth:
+                    del vector[node]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/matchhelpers.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/matchhelpers.py
new file mode 100644
index 00000000..b48820d4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/matchhelpers.py
@@ -0,0 +1,352 @@
+"""Functions which help end users define customize node_match and
+edge_match functions to use during isomorphism checks.
+"""
+
+import math
+import types
+from itertools import permutations
+
+# Public API of this module; everything else is an implementation detail.
+__all__ = [
+    "categorical_node_match",
+    "categorical_edge_match",
+    "categorical_multiedge_match",
+    "numerical_node_match",
+    "numerical_edge_match",
+    "numerical_multiedge_match",
+    "generic_node_match",
+    "generic_edge_match",
+    "generic_multiedge_match",
+]
+
+
+def copyfunc(f, name=None):
+    """Returns a copy of a function.
+
+    The new function object shares the original's code, globals, defaults
+    and closure (it is not a deep copy); only the name may differ.  Used
+    below to clone the *node_match* factories into *edge_match* variants.
+    """
+    return types.FunctionType(
+        f.__code__, f.__globals__, name or f.__name__, f.__defaults__, f.__closure__
+    )
+
+
+def allclose(x, y, rtol=1.0000000000000001e-05, atol=1e-08):
+    """Returns True if x and y are sufficiently close, elementwise.
+
+    Parameters
+    ----------
+    x, y : iterable of float
+        The sequences to compare pairwise.  They are zipped together, so
+        any excess elements in the longer input are ignored.
+    rtol : float
+        The relative error tolerance.
+    atol : float
+        The absolute error tolerance.
+
+    """
+    # assume finite weights, see numpy.allclose() for reference
+    return all(math.isclose(xi, yi, rel_tol=rtol, abs_tol=atol) for xi, yi in zip(x, y))
+
+
+# Shared docstring template; attached to the categorical_* matchers below
+# (with "node"/"edge" substitutions applied where appropriate).
+categorical_doc = """
+Returns a comparison function for a categorical node attribute.
+
+The value(s) of the attr(s) must be hashable and comparable via the ==
+operator since they are placed into a set([]) object.  If the sets from
+G1 and G2 are the same, then the constructed function returns True.
+
+Parameters
+----------
+attr : string | list
+    The categorical node attribute to compare, or a list of categorical
+    node attributes to compare.
+default : value | list
+    The default value for the categorical node attribute, or a list of
+    default values for the categorical node attributes.
+
+Returns
+-------
+match : function
+    The customized, categorical `node_match` function.
+
+Examples
+--------
+>>> import networkx.algorithms.isomorphism as iso
+>>> nm = iso.categorical_node_match("size", 1)
+>>> nm = iso.categorical_node_match(["color", "size"], ["red", 2])
+
+"""
+
+
+def categorical_node_match(attr, default):
+    # Single attribute: compare one value per node.  List of attributes:
+    # compare tuples of values.  Missing attributes fall back to `default`.
+    if isinstance(attr, str):
+
+        def match(data1, data2):
+            return data1.get(attr, default) == data2.get(attr, default)
+
+    else:
+        attrs = list(zip(attr, default))  # Python 3
+
+        def match(data1, data2):
+            return all(data1.get(attr, d) == data2.get(attr, d) for attr, d in attrs)
+
+    return match
+
+
+# Edge variant shares the node implementation; only the function name differs.
+categorical_edge_match = copyfunc(categorical_node_match, "categorical_edge_match")
+
+
+def categorical_multiedge_match(attr, default):
+    # datasets1/datasets2 map edge keys to attribute dicts (MultiGraph
+    # inner dicts).  Values are collected into sets, so parallel edges
+    # with duplicate attribute values collapse to one entry.
+    if isinstance(attr, str):
+
+        def match(datasets1, datasets2):
+            values1 = {data.get(attr, default) for data in datasets1.values()}
+            values2 = {data.get(attr, default) for data in datasets2.values()}
+            return values1 == values2
+
+    else:
+        attrs = list(zip(attr, default))  # Python 3
+
+        def match(datasets1, datasets2):
+            # Compare sets of per-edge attribute tuples.
+            values1 = set()
+            for data1 in datasets1.values():
+                x = tuple(data1.get(attr, d) for attr, d in attrs)
+                values1.add(x)
+            values2 = set()
+            for data2 in datasets2.values():
+                x = tuple(data2.get(attr, d) for attr, d in attrs)
+                values2.add(x)
+            return values1 == values2
+
+    return match
+
+
+# Docstrings for categorical functions.  The copied/derived functions get
+# the shared template with "node" rewritten to "edge" as appropriate.
+categorical_node_match.__doc__ = categorical_doc
+categorical_edge_match.__doc__ = categorical_doc.replace("node", "edge")
+tmpdoc = categorical_doc.replace("node", "edge")
+tmpdoc = tmpdoc.replace("categorical_edge_match", "categorical_multiedge_match")
+categorical_multiedge_match.__doc__ = tmpdoc
+
+
+# Shared docstring template; attached to the numerical_* matchers below.
+numerical_doc = """
+Returns a comparison function for a numerical node attribute.
+
+The value(s) of the attr(s) must be numerical and sortable.  If the
+sorted list of values from G1 and G2 are the same within some
+tolerance, then the constructed function returns True.
+
+Parameters
+----------
+attr : string | list
+    The numerical node attribute to compare, or a list of numerical
+    node attributes to compare.
+default : value | list
+    The default value for the numerical node attribute, or a list of
+    default values for the numerical node attributes.
+rtol : float
+    The relative error tolerance.
+atol : float
+    The absolute error tolerance.
+
+Returns
+-------
+match : function
+    The customized, numerical `node_match` function.
+
+Examples
+--------
+>>> import networkx.algorithms.isomorphism as iso
+>>> nm = iso.numerical_node_match("weight", 1.0)
+>>> nm = iso.numerical_node_match(["weight", "linewidth"], [0.25, 0.5])
+
+"""
+
+
+def numerical_node_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
+    # Single attribute: one isclose() comparison.  List of attributes:
+    # elementwise allclose() over the per-node value lists.
+    if isinstance(attr, str):
+
+        def match(data1, data2):
+            return math.isclose(
+                data1.get(attr, default),
+                data2.get(attr, default),
+                rel_tol=rtol,
+                abs_tol=atol,
+            )
+
+    else:
+        attrs = list(zip(attr, default))  # Python 3
+
+        def match(data1, data2):
+            values1 = [data1.get(attr, d) for attr, d in attrs]
+            values2 = [data2.get(attr, d) for attr, d in attrs]
+            return allclose(values1, values2, rtol=rtol, atol=atol)
+
+    return match
+
+
+# Edge variant shares the node implementation; only the function name differs.
+numerical_edge_match = copyfunc(numerical_node_match, "numerical_edge_match")
+
+
+def numerical_multiedge_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
+    # datasets1/datasets2 map edge keys to attribute dicts.  Per-edge
+    # values are sorted so that parallel edges are compared order-free.
+    if isinstance(attr, str):
+
+        def match(datasets1, datasets2):
+            values1 = sorted(data.get(attr, default) for data in datasets1.values())
+            values2 = sorted(data.get(attr, default) for data in datasets2.values())
+            return allclose(values1, values2, rtol=rtol, atol=atol)
+
+    else:
+        attrs = list(zip(attr, default))  # Python 3
+
+        def match(datasets1, datasets2):
+            # Compare sorted lists of per-edge attribute tuples, pairwise
+            # within tolerance.
+            values1 = []
+            for data1 in datasets1.values():
+                x = tuple(data1.get(attr, d) for attr, d in attrs)
+                values1.append(x)
+            values2 = []
+            for data2 in datasets2.values():
+                x = tuple(data2.get(attr, d) for attr, d in attrs)
+                values2.append(x)
+            values1.sort()
+            values2.sort()
+            for xi, yi in zip(values1, values2):
+                if not allclose(xi, yi, rtol=rtol, atol=atol):
+                    return False
+            else:
+                return True
+
+    return match
+
+
+# Docstrings for numerical functions.  The copied/derived functions get
+# the shared template with "node" rewritten to "edge" as appropriate.
+numerical_node_match.__doc__ = numerical_doc
+numerical_edge_match.__doc__ = numerical_doc.replace("node", "edge")
+tmpdoc = numerical_doc.replace("node", "edge")
+tmpdoc = tmpdoc.replace("numerical_edge_match", "numerical_multiedge_match")
+numerical_multiedge_match.__doc__ = tmpdoc
+
+
+# Shared docstring template; attached to the generic_* matchers below.
+generic_doc = """
+Returns a comparison function for a generic attribute.
+
+The value(s) of the attr(s) are compared using the specified
+operators. If all the attributes are equal, then the constructed
+function returns True.
+
+Parameters
+----------
+attr : string | list
+    The node attribute to compare, or a list of node attributes
+    to compare.
+default : value | list
+    The default value for the node attribute, or a list of
+    default values for the node attributes.
+op : callable | list
+    The operator to use when comparing attribute values, or a list
+    of operators to use when comparing values for each attribute.
+
+Returns
+-------
+match : function
+    The customized, generic `node_match` function.
+
+Examples
+--------
+>>> from operator import eq
+>>> from math import isclose
+>>> from networkx.algorithms.isomorphism import generic_node_match
+>>> nm = generic_node_match("weight", 1.0, isclose)
+>>> nm = generic_node_match("color", "red", eq)
+>>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [isclose, eq])
+
+"""
+
+
+def generic_node_match(attr, default, op):
+    # Single attribute: one operator call.  List of attributes: every
+    # (attr, default, op) triple must succeed for a match.
+    if isinstance(attr, str):
+
+        def match(data1, data2):
+            return op(data1.get(attr, default), data2.get(attr, default))
+
+    else:
+        attrs = list(zip(attr, default, op))  # Python 3
+
+        def match(data1, data2):
+            for attr, d, operator in attrs:
+                if not operator(data1.get(attr, d), data2.get(attr, d)):
+                    return False
+            else:
+                return True
+
+    return match
+
+
+# Edge variant shares the node implementation; only the function name differs.
+generic_edge_match = copyfunc(generic_node_match, "generic_edge_match")
+
+
+def generic_multiedge_match(attr, default, op):
+    """Returns a comparison function for a generic attribute.
+
+    The value(s) of the attr(s) are compared using the specified
+    operators. If all the attributes are equal, then the constructed
+    function returns True. Potentially, the constructed edge_match
+    function can be slow since it must verify that no isomorphism
+    exists between the multiedges before it returns False.
+
+    Parameters
+    ----------
+    attr : string | list
+        The edge attribute to compare, or a list of edge attributes
+        to compare.
+    default : value | list
+        The default value for the edge attribute, or a list of
+        default values for the edge attributes.
+    op : callable | list
+        The operator to use when comparing attribute values, or a list
+        of operators to use when comparing values for each attribute.
+
+    Returns
+    -------
+    match : function
+        The customized, generic `edge_match` function.
+
+    Examples
+    --------
+    >>> from operator import eq
+    >>> from math import isclose
+    >>> from networkx.algorithms.isomorphism import generic_node_match
+    >>> nm = generic_node_match("weight", 1.0, isclose)
+    >>> nm = generic_node_match("color", "red", eq)
+    >>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [isclose, eq])
+
+    """
+
+    # This is slow, but generic.
+    # We must test every possible isomorphism between the edges.
+    if isinstance(attr, str):
+        # Normalize the scalar form to the list form.
+        attr = [attr]
+        default = [default]
+        op = [op]
+    attrs = list(zip(attr, default))  # Python 3
+
+    def match(datasets1, datasets2):
+        # Build the per-edge attribute tuples for both edge sets.
+        values1 = []
+        for data1 in datasets1.values():
+            x = tuple(data1.get(attr, d) for attr, d in attrs)
+            values1.append(x)
+        values2 = []
+        for data2 in datasets2.values():
+            x = tuple(data2.get(attr, d) for attr, d in attrs)
+            values2.append(x)
+        # Try every pairing of the second edge set against the first;
+        # succeed as soon as one permutation satisfies all operators.
+        for vals2 in permutations(values2):
+            for xi, yi in zip(values1, vals2):
+                if not all(map(lambda x, y, z: z(x, y), xi, yi, op)):
+                    # This is not an isomorphism, go to next permutation.
+                    break
+            else:
+                # Then we found an isomorphism.
+                return True
+        else:
+            # Then there are no isomorphisms between the multiedges.
+            return False
+
+    return match
+
+
+# Docstrings for generic functions.
+generic_node_match.__doc__ = generic_doc
+generic_edge_match.__doc__ = generic_doc.replace("node", "edge")
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py
new file mode 100644
index 00000000..62cacc77
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py
@@ -0,0 +1,308 @@
+"""
+*****************************
+Time-respecting VF2 Algorithm
+*****************************
+
+An extension of the VF2 algorithm for time-respecting graph isomorphism
+testing in temporal graphs.
+
+A temporal graph is one in which edges contain a datetime attribute,
+denoting when interaction occurred between the incident nodes. A
+time-respecting subgraph of a temporal graph is a subgraph such that
+all interactions incident to a node occurred within a time threshold,
+delta, of each other. A directed time-respecting subgraph has the
+added constraint that incoming interactions to a node must precede
+outgoing interactions from the same node - this enforces a sense of
+directed flow.
+
+Introduction
+------------
+
+The TimeRespectingGraphMatcher and TimeRespectingDiGraphMatcher
+extend the GraphMatcher and DiGraphMatcher classes, respectively,
+to include temporal constraints on matches. This is achieved through
+a semantic check, via the semantic_feasibility() function.
+
+As well as including G1 (the graph in which to seek embeddings) and
+G2 (the subgraph structure of interest), the name of the temporal
+attribute on the edges and the time threshold, delta, must be supplied
+as arguments to the matching constructors.
+
+A delta of zero is the strictest temporal constraint on the match -
+only embeddings in which all interactions occur at the same time will
+be returned. A delta of one day will allow embeddings in which
+adjacent interactions occur up to a day apart.
+
+Examples
+--------
+
+Examples will be provided when the datetime type has been incorporated.
+
+
+Temporal Subgraph Isomorphism
+-----------------------------
+
+A brief discussion of the somewhat diverse current literature will be
+included here.
+
+References
+----------
+
+[1] Redmond, U. and Cunningham, P. Temporal subgraph isomorphism. In:
+The 2013 IEEE/ACM International Conference on Advances in Social
+Networks Analysis and Mining (ASONAM). Niagara Falls, Canada; 2013:
+pages 1451 - 1452. [65]
+
+For a discussion of the literature on temporal networks:
+
+[3] P. Holme and J. Saramaki. Temporal networks. Physics Reports,
+519(3):97–125, 2012.
+
+Notes
+-----
+
+Handles directed and undirected graphs and graphs with parallel edges.
+
+"""
+
+import networkx as nx
+
+from .isomorphvf2 import DiGraphMatcher, GraphMatcher
+
+__all__ = ["TimeRespectingGraphMatcher", "TimeRespectingDiGraphMatcher"]
+
+
+class TimeRespectingGraphMatcher(GraphMatcher):
+    def __init__(self, G1, G2, temporal_attribute_name, delta):
+        """Initialize TimeRespectingGraphMatcher.
+
+        G1 and G2 should be nx.Graph or nx.MultiGraph instances.
+
+        Examples
+        --------
+        To create a TimeRespectingGraphMatcher which checks for
+        syntactic and semantic feasibility:
+
+        >>> from networkx.algorithms import isomorphism
+        >>> from datetime import timedelta
+        >>> G1 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))
+
+        >>> G2 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))
+
+        >>> GM = isomorphism.TimeRespectingGraphMatcher(
+        ...     G1, G2, "date", timedelta(days=1)
+        ... )
+        """
+        self.temporal_attribute_name = temporal_attribute_name
+        self.delta = delta
+        super().__init__(G1, G2)
+
+    def one_hop(self, Gx, Gx_node, neighbors):
+        """
+        Edges one hop out from a node in the mapping should be
+        time-respecting with respect to each other.
+        """
+        dates = []
+        for n in neighbors:
+            if isinstance(Gx, nx.Graph):  # Graph G[u][v] returns the data dictionary.
+                dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
+            else:  # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
+                for edge in Gx[Gx_node][
+                    n
+                ].values():  # Iterates all edges between node pair.
+                    dates.append(edge[self.temporal_attribute_name])
+        if any(x is None for x in dates):
+            raise ValueError("Datetime not supplied for at least one edge.")
+        return not dates or max(dates) - min(dates) <= self.delta
+
+    def two_hop(self, Gx, core_x, Gx_node, neighbors):
+        """
+        Paths of length 2 from Gx_node should be time-respecting.
+        """
+        return all(
+            self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node])
+            for v in neighbors
+        )
+
+    def semantic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is semantically
+        feasible.
+
+        Any subclass which redefines semantic_feasibility() must
+        maintain the self.tests if needed, to keep the match() method
+        functional. Implementations should consider multigraphs.
+        """
+        neighbors = [n for n in self.G1[G1_node] if n in self.core_1]
+        if not self.one_hop(self.G1, G1_node, neighbors):  # Fail fast on first node.
+            return False
+        if not self.two_hop(self.G1, self.core_1, G1_node, neighbors):
+            return False
+        # Otherwise, this node is semantically feasible!
+        return True
+
+
+class TimeRespectingDiGraphMatcher(DiGraphMatcher):
+    def __init__(self, G1, G2, temporal_attribute_name, delta):
+        """Initialize TimeRespectingDiGraphMatcher.
+
+        G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.
+
+        Examples
+        --------
+        To create a TimeRespectingDiGraphMatcher which checks for
+        syntactic and semantic feasibility:
+
+        >>> from networkx.algorithms import isomorphism
+        >>> from datetime import timedelta
+        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
+
+        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
+
+        >>> GM = isomorphism.TimeRespectingDiGraphMatcher(
+        ...     G1, G2, "date", timedelta(days=1)
+        ... )
+        """
+        self.temporal_attribute_name = temporal_attribute_name
+        self.delta = delta
+        super().__init__(G1, G2)
+
+    def get_pred_dates(self, Gx, Gx_node, core_x, pred):
+        """
+        Get the dates of edges from predecessors.
+        """
+        pred_dates = []
+        if isinstance(Gx, nx.DiGraph):  # Graph G[u][v] returns the data dictionary.
+            for n in pred:
+                pred_dates.append(Gx[n][Gx_node][self.temporal_attribute_name])
+        else:  # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
+            for n in pred:
+                for edge in Gx[n][
+                    Gx_node
+                ].values():  # Iterates all edge data between node pair.
+                    pred_dates.append(edge[self.temporal_attribute_name])
+        return pred_dates
+
+    def get_succ_dates(self, Gx, Gx_node, core_x, succ):
+        """
+        Get the dates of edges to successors.
+        """
+        succ_dates = []
+        if isinstance(Gx, nx.DiGraph):  # Graph G[u][v] returns the data dictionary.
+            for n in succ:
+                succ_dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
+        else:  # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
+            for n in succ:
+                for edge in Gx[Gx_node][
+                    n
+                ].values():  # Iterates all edge data between node pair.
+                    succ_dates.append(edge[self.temporal_attribute_name])
+        return succ_dates
+
+    def one_hop(self, Gx, Gx_node, core_x, pred, succ):
+        """
+        The ego node.
+        """
+        pred_dates = self.get_pred_dates(Gx, Gx_node, core_x, pred)
+        succ_dates = self.get_succ_dates(Gx, Gx_node, core_x, succ)
+        return self.test_one(pred_dates, succ_dates) and self.test_two(
+            pred_dates, succ_dates
+        )
+
+    def two_hop_pred(self, Gx, Gx_node, core_x, pred):
+        """
+        The predecessors of the ego node.
+        """
+        return all(
+            self.one_hop(
+                Gx,
+                p,
+                core_x,
+                self.preds(Gx, core_x, p),
+                self.succs(Gx, core_x, p, Gx_node),
+            )
+            for p in pred
+        )
+
+    def two_hop_succ(self, Gx, Gx_node, core_x, succ):
+        """
+        The successors of the ego node.
+        """
+        return all(
+            self.one_hop(
+                Gx,
+                s,
+                core_x,
+                self.preds(Gx, core_x, s, Gx_node),
+                self.succs(Gx, core_x, s),
+            )
+            for s in succ
+        )
+
+    def preds(self, Gx, core_x, v, Gx_node=None):
+        pred = [n for n in Gx.predecessors(v) if n in core_x]
+        if Gx_node:
+            pred.append(Gx_node)
+        return pred
+
+    def succs(self, Gx, core_x, v, Gx_node=None):
+        succ = [n for n in Gx.successors(v) if n in core_x]
+        if Gx_node:
+            succ.append(Gx_node)
+        return succ
+
+    def test_one(self, pred_dates, succ_dates):
+        """
+        Edges one hop out from Gx_node in the mapping should be
+        time-respecting with respect to each other, regardless of
+        direction.
+        """
+        time_respecting = True
+        dates = pred_dates + succ_dates
+
+        if any(x is None for x in dates):
+            raise ValueError("Date or datetime not supplied for at least one edge.")
+
+        dates.sort()  # Small to large.
+        if 0 < len(dates) and not (dates[-1] - dates[0] <= self.delta):
+            time_respecting = False
+        return time_respecting
+
+    def test_two(self, pred_dates, succ_dates):
+        """
+        Edges from a dual Gx_node in the mapping should be ordered in
+        a time-respecting manner.
+        """
+        time_respecting = True
+        pred_dates.sort()
+        succ_dates.sort()
+        # First out before last in; negative of the necessary condition for time-respect.
+        if (
+            0 < len(succ_dates)
+            and 0 < len(pred_dates)
+            and succ_dates[0] < pred_dates[-1]
+        ):
+            time_respecting = False
+        return time_respecting
+
+    def semantic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is semantically
+        feasible.
+
+        Any subclass which redefines semantic_feasibility() must
+        maintain the self.tests if needed, to keep the match() method
+        functional. Implementations should consider multigraphs.
+        """
+        pred, succ = (
+            [n for n in self.G1.predecessors(G1_node) if n in self.core_1],
+            [n for n in self.G1.successors(G1_node) if n in self.core_1],
+        )
+        if not self.one_hop(
+            self.G1, G1_node, self.core_1, pred, succ
+        ):  # Fail fast on first node.
+            return False
+        if not self.two_hop_pred(self.G1, G1_node, self.core_1, pred):
+            return False
+        if not self.two_hop_succ(self.G1, G1_node, self.core_1, succ):
+            return False
+        # Otherwise, this node is semantically feasible!
+        return True
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99 b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99
new file mode 100644
index 00000000..dac54f00
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99
Binary files differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99 b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99
new file mode 100644
index 00000000..6c6af680
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99
Binary files differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99 b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99
new file mode 100644
index 00000000..60c3a3ce
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99
Binary files differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99 b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99
new file mode 100644
index 00000000..02368720
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99
Binary files differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py
new file mode 100644
index 00000000..bc4070ac
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py
@@ -0,0 +1,327 @@
+"""
+Tests for ISMAGS isomorphism algorithm.
+"""
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms import isomorphism as iso
+
+
+def _matches_to_sets(matches):
+    """
+    Helper function to facilitate comparing collections of dictionaries in
+    which order does not matter.
+    """
+    return {frozenset(m.items()) for m in matches}
+
+
+class TestSelfIsomorphism:
+    data = [
+        (
+            [
+                (0, {"name": "a"}),
+                (1, {"name": "a"}),
+                (2, {"name": "b"}),
+                (3, {"name": "b"}),
+                (4, {"name": "a"}),
+                (5, {"name": "a"}),
+            ],
+            [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)],
+        ),
+        (range(1, 5), [(1, 2), (2, 4), (4, 3), (3, 1)]),
+        (
+            [],
+            [
+                (0, 1),
+                (1, 2),
+                (2, 3),
+                (3, 4),
+                (4, 5),
+                (5, 0),
+                (0, 6),
+                (6, 7),
+                (2, 8),
+                (8, 9),
+                (4, 10),
+                (10, 11),
+            ],
+        ),
+        ([], [(0, 1), (1, 2), (1, 4), (2, 3), (3, 5), (3, 6)]),
+    ]
+
+    def test_self_isomorphism(self):
+        """
+        For some small, symmetric graphs, make sure that 1) they are isomorphic
+        to themselves, and 2) that only the identity mapping is found.
+        """
+        for node_data, edge_data in self.data:
+            graph = nx.Graph()
+            graph.add_nodes_from(node_data)
+            graph.add_edges_from(edge_data)
+
+            ismags = iso.ISMAGS(
+                graph, graph, node_match=iso.categorical_node_match("name", None)
+            )
+            assert ismags.is_isomorphic()
+            assert ismags.subgraph_is_isomorphic()
+            assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [
+                {n: n for n in graph.nodes}
+            ]
+
+    def test_edgecase_self_isomorphism(self):
+        """
+        This edgecase is one of the cases in which it is hard to find all
+        symmetry elements.
+        """
+        graph = nx.Graph()
+        nx.add_path(graph, range(5))
+        graph.add_edges_from([(2, 5), (5, 6)])
+
+        ismags = iso.ISMAGS(graph, graph)
+        ismags_answer = list(ismags.find_isomorphisms(True))
+        assert ismags_answer == [{n: n for n in graph.nodes}]
+
+        graph = nx.relabel_nodes(graph, {0: 0, 1: 1, 2: 2, 3: 3, 4: 6, 5: 4, 6: 5})
+        ismags = iso.ISMAGS(graph, graph)
+        ismags_answer = list(ismags.find_isomorphisms(True))
+        assert ismags_answer == [{n: n for n in graph.nodes}]
+
+    def test_directed_self_isomorphism(self):
+        """
+        For some small, directed, symmetric graphs, make sure that 1) they are
+        isomorphic to themselves, and 2) that only the identity mapping is
+        found.
+        """
+        for node_data, edge_data in self.data:
+            graph = nx.Graph()
+            graph.add_nodes_from(node_data)
+            graph.add_edges_from(edge_data)
+
+            ismags = iso.ISMAGS(
+                graph, graph, node_match=iso.categorical_node_match("name", None)
+            )
+            assert ismags.is_isomorphic()
+            assert ismags.subgraph_is_isomorphic()
+            assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [
+                {n: n for n in graph.nodes}
+            ]
+
+
+class TestSubgraphIsomorphism:
+    def test_isomorphism(self):
+        g1 = nx.Graph()
+        nx.add_cycle(g1, range(4))
+
+        g2 = nx.Graph()
+        nx.add_cycle(g2, range(4))
+        g2.add_edges_from(list(zip(g2, range(4, 8))))
+        ismags = iso.ISMAGS(g2, g1)
+        assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [
+            {n: n for n in g1.nodes}
+        ]
+
+    def test_isomorphism2(self):
+        g1 = nx.Graph()
+        nx.add_path(g1, range(3))
+
+        g2 = g1.copy()
+        g2.add_edge(1, 3)
+
+        ismags = iso.ISMAGS(g2, g1)
+        matches = ismags.subgraph_isomorphisms_iter(symmetry=True)
+        expected_symmetric = [
+            {0: 0, 1: 1, 2: 2},
+            {0: 0, 1: 1, 3: 2},
+            {2: 0, 1: 1, 3: 2},
+        ]
+        assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric)
+
+        matches = ismags.subgraph_isomorphisms_iter(symmetry=False)
+        expected_asymmetric = [
+            {0: 2, 1: 1, 2: 0},
+            {0: 2, 1: 1, 3: 0},
+            {2: 2, 1: 1, 3: 0},
+        ]
+        assert _matches_to_sets(matches) == _matches_to_sets(
+            expected_symmetric + expected_asymmetric
+        )
+
+    def test_labeled_nodes(self):
+        g1 = nx.Graph()
+        nx.add_cycle(g1, range(3))
+        g1.nodes[1]["attr"] = True
+
+        g2 = g1.copy()
+        g2.add_edge(1, 3)
+        ismags = iso.ISMAGS(g2, g1, node_match=lambda x, y: x == y)
+        matches = ismags.subgraph_isomorphisms_iter(symmetry=True)
+        expected_symmetric = [{0: 0, 1: 1, 2: 2}]
+        assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric)
+
+        matches = ismags.subgraph_isomorphisms_iter(symmetry=False)
+        expected_asymmetric = [{0: 2, 1: 1, 2: 0}]
+        assert _matches_to_sets(matches) == _matches_to_sets(
+            expected_symmetric + expected_asymmetric
+        )
+
+    def test_labeled_edges(self):
+        g1 = nx.Graph()
+        nx.add_cycle(g1, range(3))
+        g1.edges[1, 2]["attr"] = True
+
+        g2 = g1.copy()
+        g2.add_edge(1, 3)
+        ismags = iso.ISMAGS(g2, g1, edge_match=lambda x, y: x == y)
+        matches = ismags.subgraph_isomorphisms_iter(symmetry=True)
+        expected_symmetric = [{0: 0, 1: 1, 2: 2}]
+        assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric)
+
+        matches = ismags.subgraph_isomorphisms_iter(symmetry=False)
+        expected_asymmetric = [{1: 2, 0: 0, 2: 1}]
+        assert _matches_to_sets(matches) == _matches_to_sets(
+            expected_symmetric + expected_asymmetric
+        )
+
+
+class TestWikipediaExample:
+    # Nodes 'a', 'b', 'c' and 'd' form a column.
+    # Nodes 'g', 'h', 'i' and 'j' form a column.
+    g1edges = [
+        ["a", "g"],
+        ["a", "h"],
+        ["a", "i"],
+        ["b", "g"],
+        ["b", "h"],
+        ["b", "j"],
+        ["c", "g"],
+        ["c", "i"],
+        ["c", "j"],
+        ["d", "h"],
+        ["d", "i"],
+        ["d", "j"],
+    ]
+
+    # Nodes 1,2,3,4 form the clockwise corners of a large square.
+    # Nodes 5,6,7,8 form the clockwise corners of a small square
+    g2edges = [
+        [1, 2],
+        [2, 3],
+        [3, 4],
+        [4, 1],
+        [5, 6],
+        [6, 7],
+        [7, 8],
+        [8, 5],
+        [1, 5],
+        [2, 6],
+        [3, 7],
+        [4, 8],
+    ]
+
+    def test_graph(self):
+        g1 = nx.Graph()
+        g2 = nx.Graph()
+        g1.add_edges_from(self.g1edges)
+        g2.add_edges_from(self.g2edges)
+        gm = iso.ISMAGS(g1, g2)
+        assert gm.is_isomorphic()
+
+
+class TestLargestCommonSubgraph:
+    def test_mcis(self):
+        # Example graphs from DOI: 10.1002/spe.588
+        graph1 = nx.Graph()
+        graph1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 4), (4, 5)])
+        graph1.nodes[1]["color"] = 0
+
+        graph2 = nx.Graph()
+        graph2.add_edges_from(
+            [(1, 2), (2, 3), (2, 4), (3, 4), (3, 5), (5, 6), (5, 7), (6, 7)]
+        )
+        graph2.nodes[1]["color"] = 1
+        graph2.nodes[6]["color"] = 2
+        graph2.nodes[7]["color"] = 2
+
+        ismags = iso.ISMAGS(
+            graph1, graph2, node_match=iso.categorical_node_match("color", None)
+        )
+        assert list(ismags.subgraph_isomorphisms_iter(True)) == []
+        assert list(ismags.subgraph_isomorphisms_iter(False)) == []
+        found_mcis = _matches_to_sets(ismags.largest_common_subgraph())
+        expected = _matches_to_sets(
+            [{2: 2, 3: 4, 4: 3, 5: 5}, {2: 4, 3: 2, 4: 3, 5: 5}]
+        )
+        assert expected == found_mcis
+
+        ismags = iso.ISMAGS(
+            graph2, graph1, node_match=iso.categorical_node_match("color", None)
+        )
+        assert list(ismags.subgraph_isomorphisms_iter(True)) == []
+        assert list(ismags.subgraph_isomorphisms_iter(False)) == []
+        found_mcis = _matches_to_sets(ismags.largest_common_subgraph())
+        # Same answer, but reversed.
+        expected = _matches_to_sets(
+            [{2: 2, 3: 4, 4: 3, 5: 5}, {4: 2, 2: 3, 3: 4, 5: 5}]
+        )
+        assert expected == found_mcis
+
+    def test_symmetry_mcis(self):
+        graph1 = nx.Graph()
+        nx.add_path(graph1, range(4))
+
+        graph2 = nx.Graph()
+        nx.add_path(graph2, range(3))
+        graph2.add_edge(1, 3)
+
+        # Only the symmetry of graph2 is taken into account here.
+        ismags1 = iso.ISMAGS(
+            graph1, graph2, node_match=iso.categorical_node_match("color", None)
+        )
+        assert list(ismags1.subgraph_isomorphisms_iter(True)) == []
+        found_mcis = _matches_to_sets(ismags1.largest_common_subgraph())
+        expected = _matches_to_sets([{0: 0, 1: 1, 2: 2}, {1: 0, 3: 2, 2: 1}])
+        assert expected == found_mcis
+
+        # Only the symmetry of graph1 is taken into account here.
+        ismags2 = iso.ISMAGS(
+            graph2, graph1, node_match=iso.categorical_node_match("color", None)
+        )
+        assert list(ismags2.subgraph_isomorphisms_iter(True)) == []
+        found_mcis = _matches_to_sets(ismags2.largest_common_subgraph())
+        expected = _matches_to_sets(
+            [
+                {3: 2, 0: 0, 1: 1},
+                {2: 0, 0: 2, 1: 1},
+                {3: 0, 0: 2, 1: 1},
+                {3: 0, 1: 1, 2: 2},
+                {0: 0, 1: 1, 2: 2},
+                {2: 0, 3: 2, 1: 1},
+            ]
+        )
+
+        assert expected == found_mcis
+
+        found_mcis1 = _matches_to_sets(ismags1.largest_common_subgraph(False))
+        found_mcis2 = ismags2.largest_common_subgraph(False)
+        found_mcis2 = [{v: k for k, v in d.items()} for d in found_mcis2]
+        found_mcis2 = _matches_to_sets(found_mcis2)
+
+        expected = _matches_to_sets(
+            [
+                {3: 2, 1: 3, 2: 1},
+                {2: 0, 0: 2, 1: 1},
+                {1: 2, 3: 3, 2: 1},
+                {3: 0, 1: 3, 2: 1},
+                {0: 2, 2: 3, 1: 1},
+                {3: 0, 1: 2, 2: 1},
+                {2: 0, 0: 3, 1: 1},
+                {0: 0, 2: 3, 1: 1},
+                {1: 0, 3: 3, 2: 1},
+                {1: 0, 3: 2, 2: 1},
+                {0: 3, 1: 1, 2: 2},
+                {0: 0, 1: 1, 2: 2},
+            ]
+        )
+        assert expected == found_mcis1
+        assert expected == found_mcis2
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py
new file mode 100644
index 00000000..548af808
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py
@@ -0,0 +1,48 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms import isomorphism as iso
+
+
+class TestIsomorph:
+    @classmethod
+    def setup_class(cls):
+        cls.G1 = nx.Graph()
+        cls.G2 = nx.Graph()
+        cls.G3 = nx.Graph()
+        cls.G4 = nx.Graph()
+        cls.G5 = nx.Graph()
+        cls.G6 = nx.Graph()
+        cls.G1.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 3]])
+        cls.G2.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50]])
+        cls.G3.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 5]])
+        cls.G4.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 4]])
+        cls.G5.add_edges_from([[1, 2], [1, 3]])
+        cls.G6.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50], [20, 50]])
+
+    def test_could_be_isomorphic(self):
+        assert iso.could_be_isomorphic(self.G1, self.G2)
+        assert iso.could_be_isomorphic(self.G1, self.G3)
+        assert not iso.could_be_isomorphic(self.G1, self.G4)
+        assert iso.could_be_isomorphic(self.G3, self.G2)
+        assert not iso.could_be_isomorphic(self.G1, self.G6)
+
+    def test_fast_could_be_isomorphic(self):
+        assert iso.fast_could_be_isomorphic(self.G3, self.G2)
+        assert not iso.fast_could_be_isomorphic(self.G3, self.G5)
+        assert not iso.fast_could_be_isomorphic(self.G1, self.G6)
+
+    def test_faster_could_be_isomorphic(self):
+        assert iso.faster_could_be_isomorphic(self.G3, self.G2)
+        assert not iso.faster_could_be_isomorphic(self.G3, self.G5)
+        assert not iso.faster_could_be_isomorphic(self.G1, self.G6)
+
+    def test_is_isomorphic(self):
+        assert iso.is_isomorphic(self.G1, self.G2)
+        assert not iso.is_isomorphic(self.G1, self.G4)
+        assert iso.is_isomorphic(self.G1.to_directed(), self.G2.to_directed())
+        assert not iso.is_isomorphic(self.G1.to_directed(), self.G4.to_directed())
+        with pytest.raises(
+            nx.NetworkXError, match="Graphs G1 and G2 are not of the same type."
+        ):
+            iso.is_isomorphic(self.G1.to_directed(), self.G1)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py
new file mode 100644
index 00000000..413dfaf3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py
@@ -0,0 +1,410 @@
+"""
+Tests for VF2 isomorphism algorithm.
+"""
+
+import importlib.resources
+import os
+import random
+import struct
+
+import networkx as nx
+from networkx.algorithms import isomorphism as iso
+
+
+class TestWikipediaExample:
+    # Source: https://en.wikipedia.org/wiki/Graph_isomorphism
+
+    # Nodes 'a', 'b', 'c' and 'd' form a column.
+    # Nodes 'g', 'h', 'i' and 'j' form a column.
+    g1edges = [
+        ["a", "g"],
+        ["a", "h"],
+        ["a", "i"],
+        ["b", "g"],
+        ["b", "h"],
+        ["b", "j"],
+        ["c", "g"],
+        ["c", "i"],
+        ["c", "j"],
+        ["d", "h"],
+        ["d", "i"],
+        ["d", "j"],
+    ]
+
+    # Nodes 1,2,3,4 form the clockwise corners of a large square.
+    # Nodes 5,6,7,8 form the clockwise corners of a small square
+    g2edges = [
+        [1, 2],
+        [2, 3],
+        [3, 4],
+        [4, 1],
+        [5, 6],
+        [6, 7],
+        [7, 8],
+        [8, 5],
+        [1, 5],
+        [2, 6],
+        [3, 7],
+        [4, 8],
+    ]
+
+    def test_graph(self):
+        g1 = nx.Graph()
+        g2 = nx.Graph()
+        g1.add_edges_from(self.g1edges)
+        g2.add_edges_from(self.g2edges)
+        gm = iso.GraphMatcher(g1, g2)
+        assert gm.is_isomorphic()
+        # Just testing some cases
+        assert gm.subgraph_is_monomorphic()
+
+        mapping = sorted(gm.mapping.items())
+
+    # this mapping is only one of the possibilities
+    # so this test needs to be reconsidered
+    #        isomap = [('a', 1), ('b', 6), ('c', 3), ('d', 8),
+    #                  ('g', 2), ('h', 5), ('i', 4), ('j', 7)]
+    #        assert_equal(mapping, isomap)
+
+    def test_subgraph(self):
+        g1 = nx.Graph()
+        g2 = nx.Graph()
+        g1.add_edges_from(self.g1edges)
+        g2.add_edges_from(self.g2edges)
+        g3 = g2.subgraph([1, 2, 3, 4])
+        gm = iso.GraphMatcher(g1, g3)
+        assert gm.subgraph_is_isomorphic()
+
+    def test_subgraph_mono(self):
+        g1 = nx.Graph()
+        g2 = nx.Graph()
+        g1.add_edges_from(self.g1edges)
+        g2.add_edges_from([[1, 2], [2, 3], [3, 4]])
+        gm = iso.GraphMatcher(g1, g2)
+        assert gm.subgraph_is_monomorphic()
+
+
+class TestVF2GraphDB:
+    # https://web.archive.org/web/20090303210205/http://amalfi.dis.unina.it/graph/db/
+
+    @staticmethod
+    def create_graph(filename):
+        """Creates a Graph instance from the filename."""
+
+        # The file is assumed to be in the format from the VF2 graph database.
+        # Each file is composed of 16-bit numbers (unsigned short int).
+        # So we will want to read 2 bytes at a time.
+
+        # We can read the number as follows:
+        #   number = struct.unpack('<H', file.read(2))
+        # This says, expect the data in little-endian encoding
+        # as an unsigned short int and unpack 2 bytes from the file.
+
+        fh = open(filename, mode="rb")
+
+        # Grab the number of nodes.
+        # Node numeration is 0-based, so the first node has index 0.
+        nodes = struct.unpack("<H", fh.read(2))[0]
+
+        graph = nx.Graph()
+        for from_node in range(nodes):
+            # Get the number of edges.
+            edges = struct.unpack("<H", fh.read(2))[0]
+            for edge in range(edges):
+                # Get the terminal node.
+                to_node = struct.unpack("<H", fh.read(2))[0]
+                graph.add_edge(from_node, to_node)
+
+        fh.close()
+        return graph
+
+    def test_graph(self):
+        head = importlib.resources.files("networkx.algorithms.isomorphism.tests")
+        g1 = self.create_graph(head / "iso_r01_s80.A99")
+        g2 = self.create_graph(head / "iso_r01_s80.B99")
+        gm = iso.GraphMatcher(g1, g2)
+        assert gm.is_isomorphic()
+
+    def test_subgraph(self):
+        # A is the subgraph
+        # B is the full graph
+        head = importlib.resources.files("networkx.algorithms.isomorphism.tests")
+        subgraph = self.create_graph(head / "si2_b06_m200.A99")
+        graph = self.create_graph(head / "si2_b06_m200.B99")
+        gm = iso.GraphMatcher(graph, subgraph)
+        assert gm.subgraph_is_isomorphic()
+        # Just testing some cases
+        assert gm.subgraph_is_monomorphic()
+
+    # There isn't a similar test implemented for subgraph monomorphism,
+    # feel free to create one.
+
+
+class TestAtlas:
+    @classmethod
+    def setup_class(cls):
+        global atlas
+        from networkx.generators import atlas
+
+        cls.GAG = atlas.graph_atlas_g()
+
+    def test_graph_atlas(self):
+        # Atlas = nx.graph_atlas_g()[0:208] # 208, 6 nodes or less
+        Atlas = self.GAG[0:100]
+        alphabet = list(range(26))
+        for graph in Atlas:
+            nlist = list(graph)
+            labels = alphabet[: len(nlist)]
+            for s in range(10):
+                random.shuffle(labels)
+                d = dict(zip(nlist, labels))
+                relabel = nx.relabel_nodes(graph, d)
+                gm = iso.GraphMatcher(graph, relabel)
+                assert gm.is_isomorphic()
+
+
+def test_multiedge():
+    # Simple test for multigraphs
+    # Need something much more rigorous
+    edges = [
+        (0, 1),
+        (1, 2),
+        (2, 3),
+        (3, 4),
+        (4, 5),
+        (5, 6),
+        (6, 7),
+        (7, 8),
+        (8, 9),
+        (9, 10),
+        (10, 11),
+        (10, 11),
+        (11, 12),
+        (11, 12),
+        (12, 13),
+        (12, 13),
+        (13, 14),
+        (13, 14),
+        (14, 15),
+        (14, 15),
+        (15, 16),
+        (15, 16),
+        (16, 17),
+        (16, 17),
+        (17, 18),
+        (17, 18),
+        (18, 19),
+        (18, 19),
+        (19, 0),
+        (19, 0),
+    ]
+    nodes = list(range(20))
+
+    for g1 in [nx.MultiGraph(), nx.MultiDiGraph()]:
+        g1.add_edges_from(edges)
+        for _ in range(10):
+            new_nodes = list(nodes)
+            random.shuffle(new_nodes)
+            d = dict(zip(nodes, new_nodes))
+            g2 = nx.relabel_nodes(g1, d)
+            if not g1.is_directed():
+                gm = iso.GraphMatcher(g1, g2)
+            else:
+                gm = iso.DiGraphMatcher(g1, g2)
+            assert gm.is_isomorphic()
+            # Testing if monomorphism works in multigraphs
+            assert gm.subgraph_is_monomorphic()
+
+
+def test_selfloop():
+    # Simple test for graphs with selfloops
+    edges = [
+        (0, 1),
+        (0, 2),
+        (1, 2),
+        (1, 3),
+        (2, 2),
+        (2, 4),
+        (3, 1),
+        (3, 2),
+        (4, 2),
+        (4, 5),
+        (5, 4),
+    ]
+    nodes = list(range(6))
+
+    for g1 in [nx.Graph(), nx.DiGraph()]:
+        g1.add_edges_from(edges)
+        for _ in range(100):
+            new_nodes = list(nodes)
+            random.shuffle(new_nodes)
+            d = dict(zip(nodes, new_nodes))
+            g2 = nx.relabel_nodes(g1, d)
+            if not g1.is_directed():
+                gm = iso.GraphMatcher(g1, g2)
+            else:
+                gm = iso.DiGraphMatcher(g1, g2)
+            assert gm.is_isomorphic()
+
+
+def test_selfloop_mono():
+    # Simple test for graphs with selfloops
+    edges0 = [
+        (0, 1),
+        (0, 2),
+        (1, 2),
+        (1, 3),
+        (2, 4),
+        (3, 1),
+        (3, 2),
+        (4, 2),
+        (4, 5),
+        (5, 4),
+    ]
+    edges = edges0 + [(2, 2)]
+    nodes = list(range(6))
+
+    for g1 in [nx.Graph(), nx.DiGraph()]:
+        g1.add_edges_from(edges)
+        for _ in range(100):
+            new_nodes = list(nodes)
+            random.shuffle(new_nodes)
+            d = dict(zip(nodes, new_nodes))
+            g2 = nx.relabel_nodes(g1, d)
+            g2.remove_edges_from(nx.selfloop_edges(g2))
+            if not g1.is_directed():
+                gm = iso.GraphMatcher(g2, g1)
+            else:
+                gm = iso.DiGraphMatcher(g2, g1)
+            assert not gm.subgraph_is_monomorphic()
+
+
+def test_isomorphism_iter1():
+    # As described in:
+    # http://groups.google.com/group/networkx-discuss/browse_thread/thread/2ff65c67f5e3b99f/d674544ebea359bb?fwc=1
+    g1 = nx.DiGraph()
+    g2 = nx.DiGraph()
+    g3 = nx.DiGraph()
+    g1.add_edge("A", "B")
+    g1.add_edge("B", "C")
+    g2.add_edge("Y", "Z")
+    g3.add_edge("Z", "Y")
+    gm12 = iso.DiGraphMatcher(g1, g2)
+    gm13 = iso.DiGraphMatcher(g1, g3)
+    x = list(gm12.subgraph_isomorphisms_iter())
+    y = list(gm13.subgraph_isomorphisms_iter())
+    assert {"A": "Y", "B": "Z"} in x
+    assert {"B": "Y", "C": "Z"} in x
+    assert {"A": "Z", "B": "Y"} in y
+    assert {"B": "Z", "C": "Y"} in y
+    assert len(x) == len(y)
+    assert len(x) == 2
+
+
+def test_monomorphism_iter1():
+    g1 = nx.DiGraph()
+    g2 = nx.DiGraph()
+    g1.add_edge("A", "B")
+    g1.add_edge("B", "C")
+    g1.add_edge("C", "A")
+    g2.add_edge("X", "Y")
+    g2.add_edge("Y", "Z")
+    gm12 = iso.DiGraphMatcher(g1, g2)
+    x = list(gm12.subgraph_monomorphisms_iter())
+    assert {"A": "X", "B": "Y", "C": "Z"} in x
+    assert {"A": "Y", "B": "Z", "C": "X"} in x
+    assert {"A": "Z", "B": "X", "C": "Y"} in x
+    assert len(x) == 3
+    gm21 = iso.DiGraphMatcher(g2, g1)
+    # Check if StopIteration exception returns False
+    assert not gm21.subgraph_is_monomorphic()
+
+
+def test_isomorphism_iter2():
+    # Path
+    for L in range(2, 10):
+        g1 = nx.path_graph(L)
+        gm = iso.GraphMatcher(g1, g1)
+        s = len(list(gm.isomorphisms_iter()))
+        assert s == 2
+    # Cycle
+    for L in range(3, 10):
+        g1 = nx.cycle_graph(L)
+        gm = iso.GraphMatcher(g1, g1)
+        s = len(list(gm.isomorphisms_iter()))
+        assert s == 2 * L
+
+
+def test_multiple():
+    # Verify that we can use the graph matcher multiple times
+    edges = [("A", "B"), ("B", "A"), ("B", "C")]
+    for g1, g2 in [(nx.Graph(), nx.Graph()), (nx.DiGraph(), nx.DiGraph())]:
+        g1.add_edges_from(edges)
+        g2.add_edges_from(edges)
+        g3 = nx.subgraph(g2, ["A", "B"])
+        if not g1.is_directed():
+            gmA = iso.GraphMatcher(g1, g2)
+            gmB = iso.GraphMatcher(g1, g3)
+        else:
+            gmA = iso.DiGraphMatcher(g1, g2)
+            gmB = iso.DiGraphMatcher(g1, g3)
+        assert gmA.is_isomorphic()
+        g2.remove_node("C")
+        if not g1.is_directed():
+            gmA = iso.GraphMatcher(g1, g2)
+        else:
+            gmA = iso.DiGraphMatcher(g1, g2)
+        assert gmA.subgraph_is_isomorphic()
+        assert gmB.subgraph_is_isomorphic()
+        assert gmA.subgraph_is_monomorphic()
+        assert gmB.subgraph_is_monomorphic()
+
+
+#        for m in [gmB.mapping, gmB.mapping]:
+#            assert_true(m['A'] == 'A')
+#            assert_true(m['B'] == 'B')
+#            assert_true('C' not in m)
+
+
+def test_noncomparable_nodes():
+    node1 = object()
+    node2 = object()
+    node3 = object()
+
+    # Graph
+    G = nx.path_graph([node1, node2, node3])
+    gm = iso.GraphMatcher(G, G)
+    assert gm.is_isomorphic()
+    # Just testing some cases
+    assert gm.subgraph_is_monomorphic()
+
+    # DiGraph
+    G = nx.path_graph([node1, node2, node3], create_using=nx.DiGraph)
+    H = nx.path_graph([node3, node2, node1], create_using=nx.DiGraph)
+    dgm = iso.DiGraphMatcher(G, H)
+    assert dgm.is_isomorphic()
+    # Just testing some cases
+    assert gm.subgraph_is_monomorphic()
+
+
+def test_monomorphism_edge_match():
+    G = nx.DiGraph()
+    G.add_node(1)
+    G.add_node(2)
+    G.add_edge(1, 2, label="A")
+    G.add_edge(2, 1, label="B")
+    G.add_edge(2, 2, label="C")
+
+    SG = nx.DiGraph()
+    SG.add_node(5)
+    SG.add_node(6)
+    SG.add_edge(5, 6, label="A")
+
+    gm = iso.DiGraphMatcher(G, SG, edge_match=iso.categorical_edge_match("label", None))
+    assert gm.subgraph_is_monomorphic()
+
+
+def test_isomorphvf2pp_multidigraphs():
+    g = nx.MultiDiGraph({0: [1, 1, 2, 2, 3], 1: [2, 3, 3], 2: [3]})
+    h = nx.MultiDiGraph({0: [1, 1, 2, 2, 3], 1: [2, 3, 3], 3: [2]})
+    assert not (nx.vf2pp_is_isomorphic(g, h))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_match_helpers.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_match_helpers.py
new file mode 100644
index 00000000..4d70347f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_match_helpers.py
@@ -0,0 +1,64 @@
+from operator import eq
+
+import networkx as nx
+from networkx.algorithms import isomorphism as iso
+
+
+def test_categorical_node_match():
+    nm = iso.categorical_node_match(["x", "y", "z"], [None] * 3)
+    assert nm({"x": 1, "y": 2, "z": 3}, {"x": 1, "y": 2, "z": 3})
+    assert not nm({"x": 1, "y": 2, "z": 2}, {"x": 1, "y": 2, "z": 1})
+
+
+class TestGenericMultiEdgeMatch:
+    def setup_method(self):
+        self.G1 = nx.MultiDiGraph()
+        self.G2 = nx.MultiDiGraph()
+        self.G3 = nx.MultiDiGraph()
+        self.G4 = nx.MultiDiGraph()
+        attr_dict1 = {"id": "edge1", "minFlow": 0, "maxFlow": 10}
+        attr_dict2 = {"id": "edge2", "minFlow": -3, "maxFlow": 7}
+        attr_dict3 = {"id": "edge3", "minFlow": 13, "maxFlow": 117}
+        attr_dict4 = {"id": "edge4", "minFlow": 13, "maxFlow": 117}
+        attr_dict5 = {"id": "edge5", "minFlow": 8, "maxFlow": 12}
+        attr_dict6 = {"id": "edge6", "minFlow": 8, "maxFlow": 12}
+        for attr_dict in [
+            attr_dict1,
+            attr_dict2,
+            attr_dict3,
+            attr_dict4,
+            attr_dict5,
+            attr_dict6,
+        ]:
+            self.G1.add_edge(1, 2, **attr_dict)
+        for attr_dict in [
+            attr_dict5,
+            attr_dict3,
+            attr_dict6,
+            attr_dict1,
+            attr_dict4,
+            attr_dict2,
+        ]:
+            self.G2.add_edge(2, 3, **attr_dict)
+        for attr_dict in [attr_dict3, attr_dict5]:
+            self.G3.add_edge(3, 4, **attr_dict)
+        for attr_dict in [attr_dict6, attr_dict4]:
+            self.G4.add_edge(4, 5, **attr_dict)
+
+    def test_generic_multiedge_match(self):
+        full_match = iso.generic_multiedge_match(
+            ["id", "flowMin", "flowMax"], [None] * 3, [eq] * 3
+        )
+        flow_match = iso.generic_multiedge_match(
+            ["flowMin", "flowMax"], [None] * 2, [eq] * 2
+        )
+        min_flow_match = iso.generic_multiedge_match("flowMin", None, eq)
+        id_match = iso.generic_multiedge_match("id", None, eq)
+        assert flow_match(self.G1[1][2], self.G2[2][3])
+        assert min_flow_match(self.G1[1][2], self.G2[2][3])
+        assert id_match(self.G1[1][2], self.G2[2][3])
+        assert full_match(self.G1[1][2], self.G2[2][3])
+        assert flow_match(self.G3[3][4], self.G4[4][5])
+        assert min_flow_match(self.G3[3][4], self.G4[4][5])
+        assert not id_match(self.G3[3][4], self.G4[4][5])
+        assert not full_match(self.G3[3][4], self.G4[4][5])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py
new file mode 100644
index 00000000..1fe70a42
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py
@@ -0,0 +1,212 @@
+"""
+Tests for the temporal aspect of the Temporal VF2 isomorphism algorithm.
+"""
+
+from datetime import date, datetime, timedelta
+
+import networkx as nx
+from networkx.algorithms import isomorphism as iso
+
+
+def provide_g1_edgelist():
+    return [(0, 1), (0, 2), (1, 2), (2, 4), (1, 3), (3, 4), (4, 5)]
+
+
+def put_same_time(G, att_name):
+    for e in G.edges(data=True):
+        e[2][att_name] = date(2015, 1, 1)
+    return G
+
+
+def put_same_datetime(G, att_name):
+    for e in G.edges(data=True):
+        e[2][att_name] = datetime(2015, 1, 1)
+    return G
+
+
+def put_sequence_time(G, att_name):
+    current_date = date(2015, 1, 1)
+    for e in G.edges(data=True):
+        current_date += timedelta(days=1)
+        e[2][att_name] = current_date
+    return G
+
+
+def put_time_config_0(G, att_name):
+    G[0][1][att_name] = date(2015, 1, 2)
+    G[0][2][att_name] = date(2015, 1, 2)
+    G[1][2][att_name] = date(2015, 1, 3)
+    G[1][3][att_name] = date(2015, 1, 1)
+    G[2][4][att_name] = date(2015, 1, 1)
+    G[3][4][att_name] = date(2015, 1, 3)
+    G[4][5][att_name] = date(2015, 1, 3)
+    return G
+
+
+def put_time_config_1(G, att_name):
+    G[0][1][att_name] = date(2015, 1, 2)
+    G[0][2][att_name] = date(2015, 1, 1)
+    G[1][2][att_name] = date(2015, 1, 3)
+    G[1][3][att_name] = date(2015, 1, 1)
+    G[2][4][att_name] = date(2015, 1, 2)
+    G[3][4][att_name] = date(2015, 1, 4)
+    G[4][5][att_name] = date(2015, 1, 3)
+    return G
+
+
+def put_time_config_2(G, att_name):
+    G[0][1][att_name] = date(2015, 1, 1)
+    G[0][2][att_name] = date(2015, 1, 1)
+    G[1][2][att_name] = date(2015, 1, 3)
+    G[1][3][att_name] = date(2015, 1, 2)
+    G[2][4][att_name] = date(2015, 1, 2)
+    G[3][4][att_name] = date(2015, 1, 3)
+    G[4][5][att_name] = date(2015, 1, 2)
+    return G
+
+
+class TestTimeRespectingGraphMatcher:
+    """
+    A test class for the undirected temporal graph matcher.
+    """
+
+    def provide_g1_topology(self):
+        G1 = nx.Graph()
+        G1.add_edges_from(provide_g1_edgelist())
+        return G1
+
+    def provide_g2_path_3edges(self):
+        G2 = nx.Graph()
+        G2.add_edges_from([(0, 1), (1, 2), (2, 3)])
+        return G2
+
+    def test_timdelta_zero_timeRespecting_returnsTrue(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_same_time(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta()
+        gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
+        assert gm.subgraph_is_isomorphic()
+
+    def test_timdelta_zero_datetime_timeRespecting_returnsTrue(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_same_datetime(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta()
+        gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
+        assert gm.subgraph_is_isomorphic()
+
+    def test_attNameStrange_timdelta_zero_timeRespecting_returnsTrue(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "strange_name"
+        G1 = put_same_time(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta()
+        gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
+        assert gm.subgraph_is_isomorphic()
+
+    def test_notTimeRespecting_returnsFalse(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_sequence_time(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta()
+        gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
+        assert not gm.subgraph_is_isomorphic()
+
+    def test_timdelta_one_config0_returns_no_embeddings(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_time_config_0(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta(days=1)
+        gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
+        count_match = len(list(gm.subgraph_isomorphisms_iter()))
+        assert count_match == 0
+
+    def test_timdelta_one_config1_returns_four_embedding(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_time_config_1(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta(days=1)
+        gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
+        count_match = len(list(gm.subgraph_isomorphisms_iter()))
+        assert count_match == 4
+
+    def test_timdelta_one_config2_returns_ten_embeddings(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_time_config_2(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta(days=1)
+        gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
+        L = list(gm.subgraph_isomorphisms_iter())
+        count_match = len(list(gm.subgraph_isomorphisms_iter()))
+        assert count_match == 10
+
+
+class TestDiTimeRespectingGraphMatcher:
+    """
+    A test class for the directed time-respecting graph matcher.
+    """
+
+    def provide_g1_topology(self):
+        G1 = nx.DiGraph()
+        G1.add_edges_from(provide_g1_edgelist())
+        return G1
+
+    def provide_g2_path_3edges(self):
+        G2 = nx.DiGraph()
+        G2.add_edges_from([(0, 1), (1, 2), (2, 3)])
+        return G2
+
+    def test_timdelta_zero_same_dates_returns_true(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_same_time(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta()
+        gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
+        assert gm.subgraph_is_isomorphic()
+
+    def test_attNameStrange_timdelta_zero_same_dates_returns_true(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "strange"
+        G1 = put_same_time(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta()
+        gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
+        assert gm.subgraph_is_isomorphic()
+
+    def test_timdelta_one_config0_returns_no_embeddings(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_time_config_0(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta(days=1)
+        gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
+        count_match = len(list(gm.subgraph_isomorphisms_iter()))
+        assert count_match == 0
+
+    def test_timdelta_one_config1_returns_one_embedding(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_time_config_1(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta(days=1)
+        gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
+        count_match = len(list(gm.subgraph_isomorphisms_iter()))
+        assert count_match == 1
+
+    def test_timdelta_one_config2_returns_two_embeddings(self):
+        G1 = self.provide_g1_topology()
+        temporal_name = "date"
+        G1 = put_time_config_2(G1, temporal_name)
+        G2 = self.provide_g2_path_3edges()
+        d = timedelta(days=1)
+        gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
+        count_match = len(list(gm.subgraph_isomorphisms_iter()))
+        assert count_match == 2
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py
new file mode 100644
index 00000000..fa1ab9bb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py
@@ -0,0 +1,292 @@
+import random
+import time
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms.isomorphism.tree_isomorphism import (
+    rooted_tree_isomorphism,
+    tree_isomorphism,
+)
+from networkx.classes.function import is_directed
+
+
+@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph))
+def test_tree_isomorphism_raises_on_directed_and_multigraphs(graph_constructor):
+    t1 = graph_constructor([(0, 1)])
+    t2 = graph_constructor([(1, 2)])
+    with pytest.raises(nx.NetworkXNotImplemented):
+        nx.isomorphism.tree_isomorphism(t1, t2)
+
+
+# have this work for graph
+# given two trees (either the directed or undirected)
+# transform t2 according to the isomorphism
+# and confirm it is identical to t1
+# randomize the order of the edges when constructing
+def check_isomorphism(t1, t2, isomorphism):
+    # get the name of t1, given the name in t2
+    mapping = {v2: v1 for (v1, v2) in isomorphism}
+
+    # these should be the same
+    d1 = is_directed(t1)
+    d2 = is_directed(t2)
+    assert d1 == d2
+
+    edges_1 = []
+    for u, v in t1.edges():
+        if d1:
+            edges_1.append((u, v))
+        else:
+            # if not directed, then need to
+            # put the edge in a consistent direction
+            if u < v:
+                edges_1.append((u, v))
+            else:
+                edges_1.append((v, u))
+
+    edges_2 = []
+    for u, v in t2.edges():
+        # translate to names for t1
+        u = mapping[u]
+        v = mapping[v]
+        if d2:
+            edges_2.append((u, v))
+        else:
+            if u < v:
+                edges_2.append((u, v))
+            else:
+                edges_2.append((v, u))
+
+    return sorted(edges_1) == sorted(edges_2)
+
+
+def test_hardcoded():
+    print("hardcoded test")
+
+    # define a test problem
+    edges_1 = [
+        ("a", "b"),
+        ("a", "c"),
+        ("a", "d"),
+        ("b", "e"),
+        ("b", "f"),
+        ("e", "j"),
+        ("e", "k"),
+        ("c", "g"),
+        ("c", "h"),
+        ("g", "m"),
+        ("d", "i"),
+        ("f", "l"),
+    ]
+
+    edges_2 = [
+        ("v", "y"),
+        ("v", "z"),
+        ("u", "x"),
+        ("q", "u"),
+        ("q", "v"),
+        ("p", "t"),
+        ("n", "p"),
+        ("n", "q"),
+        ("n", "o"),
+        ("o", "r"),
+        ("o", "s"),
+        ("s", "w"),
+    ]
+
+    # there are two possible correct isomorphisms
+    # it currently returns isomorphism1
+    # but the second is also correct
+    isomorphism1 = [
+        ("a", "n"),
+        ("b", "q"),
+        ("c", "o"),
+        ("d", "p"),
+        ("e", "v"),
+        ("f", "u"),
+        ("g", "s"),
+        ("h", "r"),
+        ("i", "t"),
+        ("j", "y"),
+        ("k", "z"),
+        ("l", "x"),
+        ("m", "w"),
+    ]
+
+    # could swap y and z
+    isomorphism2 = [
+        ("a", "n"),
+        ("b", "q"),
+        ("c", "o"),
+        ("d", "p"),
+        ("e", "v"),
+        ("f", "u"),
+        ("g", "s"),
+        ("h", "r"),
+        ("i", "t"),
+        ("j", "z"),
+        ("k", "y"),
+        ("l", "x"),
+        ("m", "w"),
+    ]
+
+    t1 = nx.Graph()
+    t1.add_edges_from(edges_1)
+    root1 = "a"
+
+    t2 = nx.Graph()
+    t2.add_edges_from(edges_2)
+    root2 = "n"
+
+    isomorphism = sorted(rooted_tree_isomorphism(t1, root1, t2, root2))
+
+    # is correct by hand
+    assert isomorphism in (isomorphism1, isomorphism2)
+
+    # check algorithmically
+    assert check_isomorphism(t1, t2, isomorphism)
+
+    # try again as digraph
+    t1 = nx.DiGraph()
+    t1.add_edges_from(edges_1)
+    root1 = "a"
+
+    t2 = nx.DiGraph()
+    t2.add_edges_from(edges_2)
+    root2 = "n"
+
+    isomorphism = sorted(rooted_tree_isomorphism(t1, root1, t2, root2))
+
+    # is correct by hand
+    assert isomorphism in (isomorphism1, isomorphism2)
+
+    # check algorithmically
+    assert check_isomorphism(t1, t2, isomorphism)
+
+
+# randomly swap a tuple (a,b)
+def random_swap(t):
+    (a, b) = t
+    if random.randint(0, 1) == 1:
+        return (a, b)
+    else:
+        return (b, a)
+
+
+# given a tree t1, create a new tree t2
+# that is isomorphic to t1, with a known isomorphism
+# and test that our algorithm found the right one
+def positive_single_tree(t1):
+    assert nx.is_tree(t1)
+
+    nodes1 = list(t1.nodes())
+    # get a random permutation of this
+    nodes2 = nodes1.copy()
+    random.shuffle(nodes2)
+
+    # this is one isomorphism, however they may be multiple
+    # so we don't necessarily get this one back
+    someisomorphism = list(zip(nodes1, nodes2))
+
+    # map from old to new
+    map1to2 = dict(someisomorphism)
+
+    # get the edges with the transformed names
+    edges2 = [random_swap((map1to2[u], map1to2[v])) for (u, v) in t1.edges()]
+    # randomly permute, to ensure we're not relying on edge order somehow
+    random.shuffle(edges2)
+
+    # so t2 is isomorphic to t1
+    t2 = nx.Graph()
+    t2.add_edges_from(edges2)
+
+    # lets call our code to see if t1 and t2 are isomorphic
+    isomorphism = tree_isomorphism(t1, t2)
+
+    # make sure we got a correct solution
+    # although not necessarily someisomorphism
+    assert len(isomorphism) > 0
+    assert check_isomorphism(t1, t2, isomorphism)
+
+
+# run positive_single_tree over all the
+# non-isomorphic trees for k from 4 to maxk
+# k = 4 is the first level that has more than 1 non-isomorphic tree
+# k = 13 takes about 2.86 seconds to run on my laptop
+# larger values run slow down significantly
+# as the number of trees grows rapidly
+def test_positive(maxk=14):
+    print("positive test")
+
+    for k in range(2, maxk + 1):
+        start_time = time.time()
+        trial = 0
+        for t in nx.nonisomorphic_trees(k):
+            positive_single_tree(t)
+            trial += 1
+        print(k, trial, time.time() - start_time)
+
+
+# test the trivial case of a single node in each tree
+# note that nonisomorphic_trees doesn't work for k = 1
+def test_trivial():
+    print("trivial test")
+
+    # back to an undirected graph
+    t1 = nx.Graph()
+    t1.add_node("a")
+    root1 = "a"
+
+    t2 = nx.Graph()
+    t2.add_node("n")
+    root2 = "n"
+
+    isomorphism = rooted_tree_isomorphism(t1, root1, t2, root2)
+
+    assert isomorphism == [("a", "n")]
+
+    assert check_isomorphism(t1, t2, isomorphism)
+
+
+# test another trivial case where the two graphs have
+# different numbers of nodes
+def test_trivial_2():
+    print("trivial test 2")
+
+    edges_1 = [("a", "b"), ("a", "c")]
+
+    edges_2 = [("v", "y")]
+
+    t1 = nx.Graph()
+    t1.add_edges_from(edges_1)
+
+    t2 = nx.Graph()
+    t2.add_edges_from(edges_2)
+
+    isomorphism = tree_isomorphism(t1, t2)
+
+    # they cannot be isomorphic,
+    # since they have different numbers of nodes
+    assert isomorphism == []
+
+
+# the function nonisomorphic_trees generates all the non-isomorphic
+# trees of a given size.  Take each pair of these and verify that
+# they are not isomorphic
+# k = 4 is the first level that has more than 1 non-isomorphic tree
+# k = 11 takes about 4.76 seconds to run on my laptop
+# larger values run slow down significantly
+# as the number of trees grows rapidly
+def test_negative(maxk=11):
+    print("negative test")
+
+    for k in range(4, maxk + 1):
+        test_trees = list(nx.nonisomorphic_trees(k))
+        start_time = time.time()
+        trial = 0
+        for i in range(len(test_trees) - 1):
+            for j in range(i + 1, len(test_trees)):
+                trial += 1
+                assert tree_isomorphism(test_trees[i], test_trees[j]) == []
+        print(k, trial, time.time() - start_time)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py
new file mode 100644
index 00000000..5f3fb901
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py
@@ -0,0 +1,1608 @@
+import itertools as it
+
+import pytest
+
+import networkx as nx
+from networkx import vf2pp_is_isomorphic, vf2pp_isomorphism
+
+labels_same = ["blue"]
+
+labels_many = [
+    "white",
+    "red",
+    "blue",
+    "green",
+    "orange",
+    "black",
+    "purple",
+    "yellow",
+    "brown",
+    "cyan",
+    "solarized",
+    "pink",
+    "none",
+]
+
+
+class TestPreCheck:
+    def test_first_graph_empty(self):
+        G1 = nx.Graph()
+        G2 = nx.Graph([(0, 1), (1, 2)])
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_second_graph_empty(self):
+        G1 = nx.Graph([(0, 1), (1, 2)])
+        G2 = nx.Graph()
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_order1(self):
+        G1 = nx.path_graph(5)
+        G2 = nx.path_graph(6)
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_order2(self):
+        G1 = nx.barbell_graph(100, 20)
+        G2 = nx.barbell_graph(101, 20)
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_order3(self):
+        G1 = nx.complete_graph(7)
+        G2 = nx.complete_graph(8)
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_degree_sequences1(self):
+        G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (0, 4)])
+        G2 = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (0, 4), (2, 5)])
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+        G2.remove_node(3)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label")
+
+        assert vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_degree_sequences2(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (0, 2),
+                (2, 3),
+                (3, 4),
+                (4, 5),
+                (5, 6),
+                (6, 3),
+                (4, 7),
+                (7, 8),
+                (8, 3),
+            ]
+        )
+        G2 = G1.copy()
+        G2.add_edge(8, 0)
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+        G1.add_edge(6, 1)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label")
+
+        assert vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_degree_sequences3(self):
+        G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)])
+        G2 = nx.Graph(
+            [(0, 1), (0, 6), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)]
+        )
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+        G1.add_edge(3, 5)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label")
+
+        assert vf2pp_is_isomorphic(G1, G2)
+
+    def test_label_distribution(self):
+        G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)])
+        G2 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)])
+
+        colors1 = ["blue", "blue", "blue", "yellow", "black", "purple", "purple"]
+        colors2 = ["blue", "blue", "yellow", "yellow", "black", "purple", "purple"]
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(colors1[::-1]))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(colors2[::-1]))), "label")
+
+        assert not vf2pp_is_isomorphic(G1, G2, node_label="label")
+        G2.nodes[3]["label"] = "blue"
+        assert vf2pp_is_isomorphic(G1, G2, node_label="label")
+
+
+class TestAllGraphTypesEdgeCases:
+    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
+    def test_both_graphs_empty(self, graph_type):
+        G = graph_type()
+        H = graph_type()
+        assert vf2pp_isomorphism(G, H) is None
+
+        G.add_node(0)
+
+        assert vf2pp_isomorphism(G, H) is None
+        assert vf2pp_isomorphism(H, G) is None
+
+        H.add_node(0)
+        assert vf2pp_isomorphism(G, H) == {0: 0}
+
+    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
+    def test_first_graph_empty(self, graph_type):
+        G = graph_type()
+        H = graph_type([(0, 1)])
+        assert vf2pp_isomorphism(G, H) is None
+
+    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
+    def test_second_graph_empty(self, graph_type):
+        G = graph_type([(0, 1)])
+        H = graph_type()
+        assert vf2pp_isomorphism(G, H) is None
+
+
+class TestGraphISOVF2pp:
+    def test_custom_graph1_same_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
+        edges1 = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 6), (3, 4), (5, 1), (5, 2)]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Add edge making G1 symmetrical
+        G1.add_edge(3, 7)
+        G1.nodes[7]["label"] = "blue"
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Make G2 isomorphic to G1
+        G2.add_edges_from([(mapped[3], "X"), (mapped[6], mapped[5])])
+        G1.add_edge(4, 7)
+        G2.nodes["X"]["label"] = "blue"
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Re-structure maintaining isomorphism
+        G1.remove_edges_from([(1, 4), (1, 3)])
+        G2.remove_edges_from([(mapped[1], mapped[5]), (mapped[1], mapped[2])])
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+    def test_custom_graph1_different_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
+        edges1 = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 6), (3, 4), (5, 1), (5, 2)]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+    def test_custom_graph2_same_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
+        edges1 = [(1, 2), (1, 5), (5, 6), (2, 3), (2, 4), (3, 4), (4, 5), (2, 7)]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Obtain two isomorphic subgraphs from the graph
+        G2.remove_edge(mapped[1], mapped[2])
+        G2.add_edge(mapped[1], mapped[4])
+        H1 = nx.Graph(G1.subgraph([2, 3, 4, 7]))
+        H2 = nx.Graph(G2.subgraph([mapped[1], mapped[4], mapped[5], mapped[6]]))
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+        # Add edges maintaining isomorphism
+        H1.add_edges_from([(3, 7), (4, 7)])
+        H2.add_edges_from([(mapped[1], mapped[6]), (mapped[4], mapped[6])])
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+    def test_custom_graph2_different_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
+        edges1 = [(1, 2), (1, 5), (5, 6), (2, 3), (2, 4), (3, 4), (4, 5), (2, 7)]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+
+        # Adding new nodes
+        G1.add_node(0)
+        G2.add_node("Z")
+        G1.nodes[0]["label"] = G1.nodes[1]["label"]
+        G2.nodes["Z"]["label"] = G1.nodes[1]["label"]
+        mapped.update({0: "Z"})
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Change the color of one of the nodes
+        G2.nodes["Z"]["label"] = G1.nodes[2]["label"]
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Add an extra edge
+        G1.nodes[0]["label"] = "blue"
+        G2.nodes["Z"]["label"] = "blue"
+        G1.add_edge(0, 1)
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Add extra edge to both
+        G2.add_edge("Z", "A")
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+    def test_custom_graph3_same_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (2, 3),
+            (3, 4),
+            (4, 5),
+            (4, 7),
+            (4, 9),
+            (5, 8),
+            (8, 9),
+            (5, 6),
+            (6, 7),
+            (5, 2),
+        ]
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect nodes maintaining symmetry
+        G1.add_edges_from([(6, 9), (7, 8)])
+        G2.add_edges_from([(mapped[6], mapped[8]), (mapped[7], mapped[9])])
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Make isomorphic
+        G1.add_edges_from([(6, 8), (7, 9)])
+        G2.add_edges_from([(mapped[6], mapped[9]), (mapped[7], mapped[8])])
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect more nodes
+        G1.add_edges_from([(2, 7), (3, 6)])
+        G2.add_edges_from([(mapped[2], mapped[7]), (mapped[3], mapped[6])])
+        G1.add_node(10)
+        G2.add_node("Z")
+        G1.nodes[10]["label"] = "blue"
+        G2.nodes["Z"]["label"] = "blue"
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect the newly added node to opposite sides of the graph
+        G1.add_edges_from([(10, 1), (10, 5), (10, 8)])
+        G2.add_edges_from([("Z", mapped[1]), ("Z", mapped[4]), ("Z", mapped[9])])
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Get two subgraphs that are not isomorphic but are easy to make
+        H1 = nx.Graph(G1.subgraph([2, 3, 4, 5, 6, 7, 10]))
+        H2 = nx.Graph(
+            G2.subgraph(
+                [mapped[4], mapped[5], mapped[6], mapped[7], mapped[8], mapped[9], "Z"]
+            )
+        )
+        assert vf2pp_isomorphism(H1, H2, node_label="label") is None
+
+        # Restructure both to make them isomorphic
+        H1.add_edges_from([(10, 2), (10, 6), (3, 6), (2, 7), (2, 6), (3, 7)])
+        H2.add_edges_from(
+            [("Z", mapped[7]), (mapped[6], mapped[9]), (mapped[7], mapped[8])]
+        )
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+        # Add edges with opposite direction in each Graph
+        H1.add_edge(3, 5)
+        H2.add_edge(mapped[5], mapped[7])
+        assert vf2pp_isomorphism(H1, H2, node_label="label") is None
+
+    def test_custom_graph3_different_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (2, 3),
+            (3, 4),
+            (4, 5),
+            (4, 7),
+            (4, 9),
+            (5, 8),
+            (8, 9),
+            (5, 6),
+            (6, 7),
+            (5, 2),
+        ]
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Add extra edge to G1
+        G1.add_edge(1, 7)
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Compensate in G2
+        G2.add_edge(9, 1)
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Add extra node
+        G1.add_node("A")
+        G2.add_node("K")
+        G1.nodes["A"]["label"] = "green"
+        G2.nodes["K"]["label"] = "green"
+        mapped.update({"A": "K"})
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Connect A to one side of G1 and K to the opposite
+        G1.add_edge("A", 6)
+        G2.add_edge("K", 5)
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Make the graphs symmetrical
+        G1.add_edge(1, 5)
+        G1.add_edge(2, 9)
+        G2.add_edge(9, 3)
+        G2.add_edge(8, 4)
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Assign same colors so the two opposite sides are identical
+        for node in G1.nodes():
+            color = "red"
+            G1.nodes[node]["label"] = color
+            G2.nodes[mapped[node]]["label"] = color
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+    def test_custom_graph4_different_labels(self):
+        G1 = nx.Graph()
+        edges1 = [
+            (1, 2),
+            (2, 3),
+            (3, 8),
+            (3, 4),
+            (4, 5),
+            (4, 6),
+            (3, 6),
+            (8, 7),
+            (8, 9),
+            (5, 9),
+            (10, 11),
+            (11, 12),
+            (12, 13),
+            (11, 13),
+        ]
+
+        mapped = {
+            1: "n",
+            2: "m",
+            3: "l",
+            4: "j",
+            5: "k",
+            6: "i",
+            7: "g",
+            8: "h",
+            9: "f",
+            10: "b",
+            11: "a",
+            12: "d",
+            13: "e",
+        }
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+    def test_custom_graph4_same_labels(self):
+        G1 = nx.Graph()
+        edges1 = [
+            (1, 2),
+            (2, 3),
+            (3, 8),
+            (3, 4),
+            (4, 5),
+            (4, 6),
+            (3, 6),
+            (8, 7),
+            (8, 9),
+            (5, 9),
+            (10, 11),
+            (11, 12),
+            (12, 13),
+            (11, 13),
+        ]
+
+        mapped = {
+            1: "n",
+            2: "m",
+            3: "l",
+            4: "j",
+            5: "k",
+            6: "i",
+            7: "g",
+            8: "h",
+            9: "f",
+            10: "b",
+            11: "a",
+            12: "d",
+            13: "e",
+        }
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Add nodes of different label
+        G1.add_node(0)
+        G2.add_node("z")
+        G1.nodes[0]["label"] = "green"
+        G2.nodes["z"]["label"] = "blue"
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Make the labels identical
+        G2.nodes["z"]["label"] = "green"
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Change the structure of the graphs, keeping them isomorphic
+        G1.add_edge(2, 5)
+        G2.remove_edge("i", "l")
+        G2.add_edge("g", "l")
+        G2.add_edge("m", "f")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Change the structure of the disconnected sub-graph, keeping it isomorphic
+        G1.remove_node(13)
+        G2.remove_node("d")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect the newly added node to the disconnected graph, which now is just a path of size 3
+        G1.add_edge(0, 10)
+        G2.add_edge("e", "z")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect the two disconnected sub-graphs, forming a single graph
+        G1.add_edge(11, 3)
+        G1.add_edge(0, 8)
+        G2.add_edge("a", "l")
+        G2.add_edge("z", "j")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+    def test_custom_graph5_same_labels(self):
+        G1 = nx.Graph()
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (3, 4),
+            (3, 7),
+            (4, 8),
+            (5, 8),
+            (5, 6),
+            (6, 7),
+            (7, 8),
+        ]
+        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Add different edges in each graph, maintaining symmetry
+        G1.add_edges_from([(3, 6), (2, 7), (2, 5), (1, 3), (4, 7), (6, 8)])
+        G2.add_edges_from(
+            [
+                (mapped[6], mapped[3]),
+                (mapped[2], mapped[7]),
+                (mapped[1], mapped[6]),
+                (mapped[5], mapped[7]),
+                (mapped[3], mapped[8]),
+                (mapped[2], mapped[4]),
+            ]
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Obtain two different but isomorphic subgraphs from G1 and G2
+        H1 = nx.Graph(G1.subgraph([1, 5, 8, 6, 7, 3]))
+        H2 = nx.Graph(
+            G2.subgraph(
+                [mapped[1], mapped[4], mapped[8], mapped[7], mapped[3], mapped[5]]
+            )
+        )
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+        # Delete corresponding node from the two graphs
+        H1.remove_node(8)
+        H2.remove_node(mapped[7])
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+        # Re-orient, maintaining isomorphism
+        H1.add_edge(1, 6)
+        H1.remove_edge(3, 6)
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+    def test_custom_graph5_different_labels(self):
+        G1 = nx.Graph()
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (3, 4),
+            (3, 7),
+            (4, 8),
+            (5, 8),
+            (5, 6),
+            (6, 7),
+            (7, 8),
+        ]
+        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        colors = ["red", "blue", "grey", "none", "brown", "solarized", "yellow", "pink"]
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Assign different colors to matching nodes
+        c = 0
+        for node in G1.nodes():
+            color1 = colors[c]
+            color2 = colors[(c + 3) % len(colors)]
+            G1.nodes[node]["label"] = color1
+            G2.nodes[mapped[node]]["label"] = color2
+            c += 1
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Get symmetrical sub-graphs of G1,G2 and compare them
+        H1 = G1.subgraph([1, 5])
+        H2 = G2.subgraph(["i", "c"])
+        c = 0
+        for node1, node2 in zip(H1.nodes(), H2.nodes()):
+            H1.nodes[node1]["label"] = "red"
+            H2.nodes[node2]["label"] = "red"
+            c += 1
+
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+    def test_disconnected_graph_all_same_labels(self):
+        G1 = nx.Graph()
+        G1.add_nodes_from(list(range(10)))
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+    def test_disconnected_graph_all_different_labels(self):
+        G1 = nx.Graph()
+        G1.add_nodes_from(list(range(10)))
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+    def test_disconnected_graph_some_same_labels(self):
+        G1 = nx.Graph()
+        G1.add_nodes_from(list(range(10)))
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        colors = [
+            "white",
+            "white",
+            "white",
+            "purple",
+            "purple",
+            "red",
+            "red",
+            "pink",
+            "pink",
+            "pink",
+        ]
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(colors))), "label")
+        nx.set_node_attributes(
+            G2, dict(zip([mapped[n] for n in G1], it.cycle(colors))), "label"
+        )
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+
+class TestMultiGraphISOVF2pp:
+    def test_custom_multigraph1_same_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (1, 4),
+            (1, 4),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (2, 6),
+            (3, 4),
+            (3, 4),
+            (5, 1),
+            (5, 1),
+            (5, 2),
+            (5, 2),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Transfer the 2-clique to the right side of G1
+        G1.remove_edges_from([(2, 6), (2, 6)])
+        G1.add_edges_from([(3, 6), (3, 6)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Delete edges, making the graphs symmetrical, so the position of the 2-clique doesn't matter
+        G2.remove_edge(mapped[1], mapped[4])
+        G1.remove_edge(1, 4)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add self-loops
+        G1.add_edges_from([(5, 5), (5, 5), (1, 1)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Compensate in G2
+        G2.add_edges_from(
+            [(mapped[1], mapped[1]), (mapped[4], mapped[4]), (mapped[4], mapped[4])]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_custom_multigraph1_different_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (1, 4),
+            (1, 4),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (2, 6),
+            (3, 4),
+            (3, 4),
+            (5, 1),
+            (5, 1),
+            (5, 2),
+            (5, 2),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Re-structure G1, maintaining the degree sequence
+        G1.remove_edge(1, 4)
+        G1.add_edge(1, 5)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Restructure G2, making it isomorphic to G1
+        G2.remove_edge("A", "D")
+        G2.add_edge("A", "Z")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Add edge from node to itself
+        G1.add_edges_from([(6, 6), (6, 6), (6, 6)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Same for G2
+        G2.add_edges_from([("E", "E"), ("E", "E"), ("E", "E")])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+    def test_custom_multigraph2_same_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
+        edges1 = [
+            (1, 2),
+            (1, 2),
+            (1, 5),
+            (1, 5),
+            (1, 5),
+            (5, 6),
+            (2, 3),
+            (2, 3),
+            (2, 4),
+            (3, 4),
+            (3, 4),
+            (4, 5),
+            (4, 5),
+            (4, 5),
+            (2, 7),
+            (2, 7),
+            (2, 7),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Obtain two non-isomorphic subgraphs from the graph
+        G2.remove_edges_from([(mapped[1], mapped[2]), (mapped[1], mapped[2])])
+        G2.add_edge(mapped[1], mapped[4])
+        H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 7]))
+        H2 = nx.MultiGraph(G2.subgraph([mapped[1], mapped[4], mapped[5], mapped[6]]))
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Make them isomorphic
+        H1.remove_edge(3, 4)
+        H1.add_edges_from([(2, 3), (2, 4), (2, 4)])
+        H2.add_edges_from([(mapped[5], mapped[6]), (mapped[5], mapped[6])])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Remove triangle edge
+        H1.remove_edges_from([(2, 3), (2, 3), (2, 3)])
+        H2.remove_edges_from([(mapped[5], mapped[4])] * 3)
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Change the edge orientation such that H1 is a rotation of H2
+        H1.remove_edges_from([(2, 7), (2, 7)])
+        H1.add_edges_from([(3, 4), (3, 4)])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Add extra edges maintaining degree sequence, but in a non-symmetrical manner
+        H2.add_edge(mapped[5], mapped[1])
+        H1.add_edge(3, 4)
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+    def test_custom_multigraph2_different_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
+        edges1 = [
+            (1, 2),
+            (1, 2),
+            (1, 5),
+            (1, 5),
+            (1, 5),
+            (5, 6),
+            (2, 3),
+            (2, 3),
+            (2, 4),
+            (3, 4),
+            (3, 4),
+            (4, 5),
+            (4, 5),
+            (4, 5),
+            (2, 7),
+            (2, 7),
+            (2, 7),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Re-structure G1
+        G1.remove_edge(2, 7)
+        G1.add_edge(5, 6)
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Same for G2
+        G2.remove_edge("B", "C")
+        G2.add_edge("G", "F")
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Delete node from G1 and G2, keeping them isomorphic
+        G1.remove_node(3)
+        G2.remove_node("D")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Change G1 edges
+        G1.remove_edge(1, 2)
+        G1.remove_edge(2, 7)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make G2 identical to G1, but with different edge orientation and different labels
+        G2.add_edges_from([("A", "C"), ("C", "E"), ("C", "E")])
+        G2.remove_edges_from(
+            [("A", "G"), ("A", "G"), ("F", "G"), ("E", "G"), ("E", "G")]
+        )
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make all labels the same, so G1 and G2 are also isomorphic
+        for n1, n2 in zip(G1.nodes(), G2.nodes()):
+            G1.nodes[n1]["label"] = "blue"
+            G2.nodes[n2]["label"] = "blue"
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_custom_multigraph3_same_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (1, 3),
+            (2, 3),
+            (2, 3),
+            (3, 4),
+            (4, 5),
+            (4, 7),
+            (4, 9),
+            (4, 9),
+            (4, 9),
+            (5, 8),
+            (5, 8),
+            (8, 9),
+            (8, 9),
+            (5, 6),
+            (6, 7),
+            (6, 7),
+            (6, 7),
+            (5, 2),
+        ]
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Connect nodes maintaining symmetry
+        G1.add_edges_from([(6, 9), (7, 8), (5, 8), (4, 9), (4, 9)])
+        G2.add_edges_from(
+            [
+                (mapped[6], mapped[8]),
+                (mapped[7], mapped[9]),
+                (mapped[5], mapped[8]),
+                (mapped[4], mapped[9]),
+                (mapped[4], mapped[9]),
+            ]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make isomorphic
+        G1.add_edges_from([(6, 8), (6, 8), (7, 9), (7, 9), (7, 9)])
+        G2.add_edges_from(
+            [
+                (mapped[6], mapped[8]),
+                (mapped[6], mapped[9]),
+                (mapped[7], mapped[8]),
+                (mapped[7], mapped[9]),
+                (mapped[7], mapped[9]),
+            ]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Connect more nodes
+        G1.add_edges_from([(2, 7), (2, 7), (3, 6), (3, 6)])
+        G2.add_edges_from(
+            [
+                (mapped[2], mapped[7]),
+                (mapped[2], mapped[7]),
+                (mapped[3], mapped[6]),
+                (mapped[3], mapped[6]),
+            ]
+        )
+        G1.add_node(10)
+        G2.add_node("Z")
+        G1.nodes[10]["label"] = "blue"
+        G2.nodes["Z"]["label"] = "blue"
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Connect the newly added node, to opposite sides of the graph
+        G1.add_edges_from([(10, 1), (10, 5), (10, 8), (10, 10), (10, 10)])
+        G2.add_edges_from(
+            [
+                ("Z", mapped[1]),
+                ("Z", mapped[4]),
+                ("Z", mapped[9]),
+                ("Z", "Z"),
+                ("Z", "Z"),
+            ]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # We connected the new node to opposite sides, so G1 must be symmetrical to G2. Re-structure them to be so
+        G1.remove_edges_from([(1, 3), (4, 9), (4, 9), (7, 9)])
+        G2.remove_edges_from(
+            [
+                (mapped[1], mapped[3]),
+                (mapped[4], mapped[9]),
+                (mapped[4], mapped[9]),
+                (mapped[7], mapped[9]),
+            ]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Get two subgraphs that are not isomorphic but are easy to make
+        H1 = nx.Graph(G1.subgraph([2, 3, 4, 5, 6, 7, 10]))
+        H2 = nx.Graph(
+            G2.subgraph(
+                [mapped[4], mapped[5], mapped[6], mapped[7], mapped[8], mapped[9], "Z"]
+            )
+        )
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Restructure both to make them isomorphic
+        H1.add_edges_from([(10, 2), (10, 6), (3, 6), (2, 7), (2, 6), (3, 7)])
+        H2.add_edges_from(
+            [("Z", mapped[7]), (mapped[6], mapped[9]), (mapped[7], mapped[8])]
+        )
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Remove one self-loop in H2
+        H2.remove_edge("Z", "Z")
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Compensate in H1
+        H1.remove_edge(10, 10)
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+    def test_custom_multigraph3_different_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (1, 3),
+            (2, 3),
+            (2, 3),
+            (3, 4),
+            (4, 5),
+            (4, 7),
+            (4, 9),
+            (4, 9),
+            (4, 9),
+            (5, 8),
+            (5, 8),
+            (8, 9),
+            (8, 9),
+            (5, 6),
+            (6, 7),
+            (6, 7),
+            (6, 7),
+            (5, 2),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Delete edge maintaining isomorphism
+        G1.remove_edge(4, 9)
+        G2.remove_edge(4, 6)
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Change edge orientation such that G1 mirrors G2
+        G1.add_edges_from([(4, 9), (1, 2), (1, 2)])
+        G1.remove_edges_from([(1, 3), (1, 3)])
+        G2.add_edges_from([(3, 5), (7, 9)])
+        G2.remove_edge(8, 9)
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make all labels the same, so G1 and G2 are also isomorphic
+        for n1, n2 in zip(G1.nodes(), G2.nodes()):
+            G1.nodes[n1]["label"] = "blue"
+            G2.nodes[n2]["label"] = "blue"
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        G1.add_node(10)
+        G2.add_node("Z")
+        G1.nodes[10]["label"] = "green"
+        G2.nodes["Z"]["label"] = "green"
+
+        # Add different number of edges between the new nodes and themselves
+        G1.add_edges_from([(10, 10), (10, 10)])
+        G2.add_edges_from([("Z", "Z")])
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make the number of self-edges equal
+        G1.remove_edge(10, 10)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Connect the new node to the graph
+        G1.add_edges_from([(10, 3), (10, 4)])
+        G2.add_edges_from([("Z", 8), ("Z", 3)])
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Remove central node
+        G1.remove_node(4)
+        G2.remove_node(3)
+        G1.add_edges_from([(5, 6), (5, 6), (5, 7)])
+        G2.add_edges_from([(1, 6), (1, 6), (6, 2)])
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_custom_multigraph4_same_labels(self):
+        G1 = nx.MultiGraph()
+        edges1 = [
+            (1, 2),
+            (1, 2),
+            (2, 2),
+            (2, 3),
+            (3, 8),
+            (3, 8),
+            (3, 4),
+            (4, 5),
+            (4, 5),
+            (4, 5),
+            (4, 6),
+            (3, 6),
+            (3, 6),
+            (6, 6),
+            (8, 7),
+            (7, 7),
+            (8, 9),
+            (9, 9),
+            (8, 9),
+            (8, 9),
+            (5, 9),
+            (10, 11),
+            (11, 12),
+            (12, 13),
+            (11, 13),
+            (10, 10),
+            (10, 11),
+            (11, 13),
+        ]
+
+        mapped = {
+            1: "n",
+            2: "m",
+            3: "l",
+            4: "j",
+            5: "k",
+            6: "i",
+            7: "g",
+            8: "h",
+            9: "f",
+            10: "b",
+            11: "a",
+            12: "d",
+            13: "e",
+        }
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add extra but corresponding edges to both graphs
+        G1.add_edges_from([(2, 2), (2, 3), (2, 8), (3, 4)])
+        G2.add_edges_from([("m", "m"), ("m", "l"), ("m", "h"), ("l", "j")])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Obtain subgraphs
+        H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 6, 10, 11, 12, 13]))
+        H2 = nx.MultiGraph(
+            G2.subgraph(
+                [
+                    mapped[2],
+                    mapped[3],
+                    mapped[8],
+                    mapped[9],
+                    mapped[10],
+                    mapped[11],
+                    mapped[12],
+                    mapped[13],
+                ]
+            )
+        )
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Make them isomorphic
+        H2.remove_edges_from(
+            [(mapped[3], mapped[2]), (mapped[9], mapped[8]), (mapped[2], mapped[2])]
+        )
+        H2.add_edges_from([(mapped[9], mapped[9]), (mapped[2], mapped[8])])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Re-structure the disconnected sub-graph
+        H1.remove_node(12)
+        H2.remove_node(mapped[12])
+        H1.add_edge(13, 13)
+        H2.add_edge(mapped[13], mapped[13])
+
+        # Connect the two disconnected components, forming a single graph
+        H1.add_edges_from([(3, 13), (6, 11)])
+        H2.add_edges_from([(mapped[8], mapped[10]), (mapped[2], mapped[11])])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Change orientation of self-loops in one graph, maintaining the degree sequence
+        H1.remove_edges_from([(2, 2), (3, 6)])
+        H1.add_edges_from([(6, 6), (2, 3)])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+    def test_custom_multigraph4_different_labels(self):
+        G1 = nx.MultiGraph()
+        edges1 = [
+            (1, 2),
+            (1, 2),
+            (2, 2),
+            (2, 3),
+            (3, 8),
+            (3, 8),
+            (3, 4),
+            (4, 5),
+            (4, 5),
+            (4, 5),
+            (4, 6),
+            (3, 6),
+            (3, 6),
+            (6, 6),
+            (8, 7),
+            (7, 7),
+            (8, 9),
+            (9, 9),
+            (8, 9),
+            (8, 9),
+            (5, 9),
+            (10, 11),
+            (11, 12),
+            (12, 13),
+            (11, 13),
+        ]
+
+        mapped = {
+            1: "n",
+            2: "m",
+            3: "l",
+            4: "j",
+            5: "k",
+            6: "i",
+            7: "g",
+            8: "h",
+            9: "f",
+            10: "b",
+            11: "a",
+            12: "d",
+            13: "e",
+        }
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m == mapped
+
+        # Add extra but corresponding edges to both graphs
+        G1.add_edges_from([(2, 2), (2, 3), (2, 8), (3, 4)])
+        G2.add_edges_from([("m", "m"), ("m", "l"), ("m", "h"), ("l", "j")])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m == mapped
+
+        # Obtain isomorphic subgraphs
+        H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 6]))
+        H2 = nx.MultiGraph(G2.subgraph(["m", "l", "j", "i"]))
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Delete the 3-clique, keeping only the path-graph. Also, H1 mirrors H2
+        H1.remove_node(4)
+        H2.remove_node("j")
+        H1.remove_edges_from([(2, 2), (2, 3), (6, 6)])
+        H2.remove_edges_from([("l", "i"), ("m", "m"), ("m", "m")])
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Assign the same labels so that mirroring means isomorphic
+        for n1, n2 in zip(H1.nodes(), H2.nodes()):
+            H1.nodes[n1]["label"] = "red"
+            H2.nodes[n2]["label"] = "red"
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Leave only one node with self-loop
+        H1.remove_nodes_from([3, 6])
+        H2.remove_nodes_from(["m", "l"])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Remove one self-loop from H1
+        H1.remove_edge(2, 2)
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Same for H2
+        H2.remove_edge("i", "i")
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Compose H1 with the disconnected sub-graph of G1. Same for H2
+        S1 = nx.compose(H1, nx.MultiGraph(G1.subgraph([10, 11, 12, 13])))
+        S2 = nx.compose(H2, nx.MultiGraph(G2.subgraph(["a", "b", "d", "e"])))
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Connect the two components
+        S1.add_edges_from([(13, 13), (13, 13), (2, 13)])
+        S2.add_edges_from([("a", "a"), ("a", "a"), ("i", "e")])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+    def test_custom_multigraph5_same_labels(self):
+        G1 = nx.MultiGraph()
+
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (3, 4),
+            (3, 7),
+            (4, 8),
+            (5, 8),
+            (5, 6),
+            (6, 7),
+            (7, 8),
+        ]
+        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add multiple edges and self-loops, maintaining isomorphism
+        G1.add_edges_from(
+            [(1, 2), (1, 2), (3, 7), (8, 8), (8, 8), (7, 8), (2, 3), (5, 6)]
+        )
+        G2.add_edges_from(
+            [
+                ("a", "h"),
+                ("a", "h"),
+                ("d", "j"),
+                ("c", "c"),
+                ("c", "c"),
+                ("j", "c"),
+                ("d", "h"),
+                ("g", "b"),
+            ]
+        )
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Make G2 to be the rotated G1
+        G2.remove_edges_from(
+            [
+                ("a", "h"),
+                ("a", "h"),
+                ("d", "j"),
+                ("c", "c"),
+                ("c", "c"),
+                ("j", "c"),
+                ("d", "h"),
+                ("g", "b"),
+            ]
+        )
+        G2.add_edges_from(
+            [
+                ("d", "i"),
+                ("a", "h"),
+                ("g", "b"),
+                ("g", "b"),
+                ("i", "i"),
+                ("i", "i"),
+                ("b", "j"),
+                ("d", "j"),
+            ]
+        )
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_disconnected_multigraph_all_same_labels(self):
+        G1 = nx.MultiGraph()
+        G1.add_nodes_from(list(range(10)))
+        G1.add_edges_from([(i, i) for i in range(10)])
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add self-loops to non-mapped nodes. Should be the same, as the graph is disconnected.
+        G1.add_edges_from([(i, i) for i in range(5, 8)] * 3)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Compensate in G2
+        G2.add_edges_from([(i, i) for i in range(3)] * 3)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add one more self-loop in G2
+        G2.add_edges_from([(0, 0), (1, 1), (1, 1)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Compensate in G1
+        G1.add_edges_from([(5, 5), (7, 7), (7, 7)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_disconnected_multigraph_all_different_labels(self):
+        G1 = nx.MultiGraph()
+        G1.add_nodes_from(list(range(10)))
+        G1.add_edges_from([(i, i) for i in range(10)])
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Add self-loops to non-mapped nodes. Now it is not the same, as there are different labels
+        G1.add_edges_from([(i, i) for i in range(5, 8)] * 3)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Add self-loops to non mapped nodes in G2 as well
+        G2.add_edges_from([(mapped[i], mapped[i]) for i in range(3)] * 7)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Add self-loops to mapped nodes in G2
+        G2.add_edges_from([(mapped[i], mapped[i]) for i in range(5, 8)] * 3)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Add self-loops to G1 so that they are even in both graphs
+        G1.add_edges_from([(i, i) for i in range(3)] * 7)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+
+class TestDiGraphISOVF2pp:
+    def test_wikipedia_graph(self):
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (1, 4),
+            (3, 2),
+            (6, 2),
+            (3, 4),
+            (7, 3),
+            (4, 8),
+            (5, 8),
+            (6, 5),
+            (6, 7),
+            (7, 8),
+        ]
+        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
+
+        G1 = nx.DiGraph(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        assert vf2pp_isomorphism(G1, G2) == mapped
+
+        # Change the direction of an edge
+        G1.remove_edge(1, 5)
+        G1.add_edge(5, 1)
+        assert vf2pp_isomorphism(G1, G2) is None
+
+    def test_non_isomorphic_same_degree_sequence(self):
+        r"""
+                G1                           G2
+        x--------------x              x--------------x
+        | \            |              | \            |
+        |  x-------x   |              |  x-------x   |
+        |  |       |   |              |  |       |   |
+        |  x-------x   |              |  x-------x   |
+        | /            |              |            \ |
+        x--------------x              x--------------x
+        """
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (4, 1),
+            (3, 2),
+            (3, 4),
+            (4, 8),
+            (5, 8),
+            (6, 5),
+            (6, 7),
+            (7, 8),
+        ]
+        edges2 = [
+            (1, 5),
+            (1, 2),
+            (4, 1),
+            (3, 2),
+            (4, 3),
+            (5, 8),
+            (6, 5),
+            (6, 7),
+            (3, 7),
+            (8, 7),
+        ]
+
+        G1 = nx.DiGraph(edges1)
+        G2 = nx.DiGraph(edges2)
+        assert vf2pp_isomorphism(G1, G2) is None
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py
new file mode 100644
index 00000000..0e29b1be
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py
@@ -0,0 +1,3106 @@
+import itertools as it
+
+import pytest
+
+import networkx as nx
+from networkx import vf2pp_is_isomorphic, vf2pp_isomorphism
+from networkx.algorithms.isomorphism.vf2pp import (
+    _consistent_PT,
+    _cut_PT,
+    _feasibility,
+    _find_candidates,
+    _find_candidates_Di,
+    _GraphParameters,
+    _initialize_parameters,
+    _matching_order,
+    _restore_Tinout,
+    _restore_Tinout_Di,
+    _StateParameters,
+    _update_Tinout,
+)
+
+labels_same = ["blue"]
+
+labels_many = [
+    "white",
+    "red",
+    "blue",
+    "green",
+    "orange",
+    "black",
+    "purple",
+    "yellow",
+    "brown",
+    "cyan",
+    "solarized",
+    "pink",
+    "none",
+]
+
+
+class TestNodeOrdering:
+    def test_empty_graph(self):
+        G1 = nx.Graph()
+        G2 = nx.Graph()
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        assert len(set(_matching_order(gparams))) == 0
+
+    def test_single_node(self):
+        G1 = nx.Graph()
+        G2 = nx.Graph()
+        G1.add_node(1)
+        G2.add_node(1)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip(G2, it.cycle(labels_many))),
+            "label",
+        )
+        l1, l2 = (
+            nx.get_node_attributes(G1, "label"),
+            nx.get_node_attributes(G2, "label"),
+        )
+
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+        m = _matching_order(gparams)
+        assert m == [1]
+
+    def test_matching_order(self):
+        labels = [
+            "blue",
+            "blue",
+            "red",
+            "red",
+            "red",
+            "red",
+            "green",
+            "green",
+            "green",
+            "yellow",
+            "purple",
+            "purple",
+            "blue",
+            "blue",
+        ]
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (0, 2),
+                (1, 2),
+                (2, 5),
+                (2, 4),
+                (1, 3),
+                (1, 4),
+                (3, 6),
+                (4, 6),
+                (6, 7),
+                (7, 8),
+                (9, 10),
+                (9, 11),
+                (11, 12),
+                (11, 13),
+                (12, 13),
+                (10, 13),
+            ]
+        )
+        G2 = G1.copy()
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip(G2, it.cycle(labels))),
+            "label",
+        )
+        l1, l2 = (
+            nx.get_node_attributes(G1, "label"),
+            nx.get_node_attributes(G2, "label"),
+        )
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        expected = [9, 11, 10, 13, 12, 1, 2, 4, 0, 3, 6, 5, 7, 8]
+        assert _matching_order(gparams) == expected
+
+    def test_matching_order_all_branches(self):
+        G1 = nx.Graph(
+            [(0, 1), (0, 2), (0, 3), (0, 4), (1, 2), (1, 3), (1, 4), (2, 4), (3, 4)]
+        )
+        G1.add_node(5)
+        G2 = G1.copy()
+
+        G1.nodes[0]["label"] = "black"
+        G1.nodes[1]["label"] = "blue"
+        G1.nodes[2]["label"] = "blue"
+        G1.nodes[3]["label"] = "red"
+        G1.nodes[4]["label"] = "red"
+        G1.nodes[5]["label"] = "blue"
+
+        G2.nodes[0]["label"] = "black"
+        G2.nodes[1]["label"] = "blue"
+        G2.nodes[2]["label"] = "blue"
+        G2.nodes[3]["label"] = "red"
+        G2.nodes[4]["label"] = "red"
+        G2.nodes[5]["label"] = "blue"
+
+        l1, l2 = (
+            nx.get_node_attributes(G1, "label"),
+            nx.get_node_attributes(G2, "label"),
+        )
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        expected = [0, 4, 1, 3, 2, 5]
+        assert _matching_order(gparams) == expected
+
+
+class TestGraphCandidateSelection:
+    G1_edges = [
+        (1, 2),
+        (1, 4),
+        (1, 5),
+        (2, 3),
+        (2, 4),
+        (3, 4),
+        (4, 5),
+        (1, 6),
+        (6, 7),
+        (6, 8),
+        (8, 9),
+        (7, 9),
+    ]
+    mapped = {
+        0: "x",
+        1: "a",
+        2: "b",
+        3: "c",
+        4: "d",
+        5: "e",
+        6: "f",
+        7: "g",
+        8: "h",
+        9: "i",
+    }
+
+    def test_no_covered_neighbors_no_labels(self):
+        G1 = nx.Graph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = dict(G1.degree)
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1 = {7, 8, 2, 4, 5}
+        T1_tilde = {0, 3, 6}
+        T2 = {"g", "h", "b", "d", "e"}
+        T2_tilde = {"x", "c", "f"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 3
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 0
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        m.pop(9)
+        m_rev.pop(self.mapped[9])
+
+        T1 = {2, 4, 5, 6}
+        T1_tilde = {0, 3, 7, 8, 9}
+        T2 = {"g", "h", "b", "d", "e", "f"}
+        T2_tilde = {"x", "c", "g", "h", "i"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 7
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {
+            self.mapped[u],
+            self.mapped[8],
+            self.mapped[3],
+            self.mapped[9],
+        }
+
+    def test_no_covered_neighbors_with_labels(self):
+        G1 = nx.Graph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = dict(G1.degree)
+        nx.set_node_attributes(
+            G1,
+            dict(zip(G1, it.cycle(labels_many))),
+            "label",
+        )
+        nx.set_node_attributes(
+            G2,
+            dict(
+                zip(
+                    [self.mapped[n] for n in G1],
+                    it.cycle(labels_many),
+                )
+            ),
+            "label",
+        )
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1 = {7, 8, 2, 4, 5, 6}
+        T1_tilde = {0, 3}
+        T2 = {"g", "h", "b", "d", "e", "f"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 3
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 0
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Change label of disconnected node
+        G1.nodes[u]["label"] = "blue"
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        # No candidate
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == set()
+
+        m.pop(9)
+        m_rev.pop(self.mapped[9])
+
+        T1 = {2, 4, 5, 6}
+        T1_tilde = {0, 3, 7, 8, 9}
+        T2 = {"b", "d", "e", "f"}
+        T2_tilde = {"x", "c", "g", "h", "i"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 7
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        G1.nodes[8]["label"] = G1.nodes[7]["label"]
+        G2.nodes[self.mapped[8]]["label"] = G1.nodes[7]["label"]
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[8]}
+
+    def test_covered_neighbors_no_labels(self):
+        G1 = nx.Graph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = dict(G1.degree)
+        l1 = dict(G1.nodes(data=None, default=-1))
+        l2 = dict(G2.nodes(data=None, default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1 = {7, 8, 2, 4, 5, 6}
+        T1_tilde = {0, 3}
+        T2 = {"g", "h", "b", "d", "e", "f"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 5
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 6
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[2]}
+
+    def test_covered_neighbors_with_labels(self):
+        G1 = nx.Graph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = dict(G1.degree)
+        nx.set_node_attributes(
+            G1,
+            dict(zip(G1, it.cycle(labels_many))),
+            "label",
+        )
+        nx.set_node_attributes(
+            G2,
+            dict(
+                zip(
+                    [self.mapped[n] for n in G1],
+                    it.cycle(labels_many),
+                )
+            ),
+            "label",
+        )
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1 = {7, 8, 2, 4, 5, 6}
+        T1_tilde = {0, 3}
+        T2 = {"g", "h", "b", "d", "e", "f"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 5
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 6
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Assign to 2, the same label as 6
+        G1.nodes[2]["label"] = G1.nodes[u]["label"]
+        G2.nodes[self.mapped[2]]["label"] = G1.nodes[u]["label"]
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[2]}
+
+
+class TestDiGraphCandidateSelection:
+    G1_edges = [
+        (1, 2),
+        (1, 4),
+        (5, 1),
+        (2, 3),
+        (4, 2),
+        (3, 4),
+        (4, 5),
+        (1, 6),
+        (6, 7),
+        (6, 8),
+        (8, 9),
+        (7, 9),
+    ]
+    mapped = {
+        0: "x",
+        1: "a",
+        2: "b",
+        3: "c",
+        4: "d",
+        5: "e",
+        6: "f",
+        7: "g",
+        8: "h",
+        9: "i",
+    }
+
+    def test_no_covered_neighbors_no_labels(self):
+        G1 = nx.DiGraph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
+        }
+
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1_out = {2, 4, 6}
+        T1_in = {5, 7, 8}
+        T1_tilde = {0, 3}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e", "g", "h"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 3
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 0
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        m.pop(9)
+        m_rev.pop(self.mapped[9])
+
+        T1_out = {2, 4, 6}
+        T1_in = {5}
+        T1_tilde = {0, 3, 7, 8, 9}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e"}
+        T2_tilde = {"x", "c", "g", "h", "i"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 7
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[8], self.mapped[3]}
+
+    def test_no_covered_neighbors_with_labels(self):
+        G1 = nx.DiGraph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
+        }
+        nx.set_node_attributes(
+            G1,
+            dict(zip(G1, it.cycle(labels_many))),
+            "label",
+        )
+        nx.set_node_attributes(
+            G2,
+            dict(
+                zip(
+                    [self.mapped[n] for n in G1],
+                    it.cycle(labels_many),
+                )
+            ),
+            "label",
+        )
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1_out = {2, 4, 6}
+        T1_in = {5, 7, 8}
+        T1_tilde = {0, 3}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e", "g", "h"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 3
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 0
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Change label of disconnected node
+        G1.nodes[u]["label"] = "blue"
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        # No candidate
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == set()
+
+        m.pop(9)
+        m_rev.pop(self.mapped[9])
+
+        T1_out = {2, 4, 6}
+        T1_in = {5}
+        T1_tilde = {0, 3, 7, 8, 9}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e"}
+        T2_tilde = {"x", "c", "g", "h", "i"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 7
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        G1.nodes[8]["label"] = G1.nodes[7]["label"]
+        G2.nodes[self.mapped[8]]["label"] = G1.nodes[7]["label"]
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[8]}
+
+    def test_covered_neighbors_no_labels(self):
+        G1 = nx.DiGraph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
+        }
+
+        l1 = dict(G1.nodes(data=None, default=-1))
+        l2 = dict(G2.nodes(data=None, default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1_out = {2, 4, 6}
+        T1_in = {5, 7, 8}
+        T1_tilde = {0, 3}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e", "g", "h"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 5
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 6
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Change the direction of an edge to make the degree orientation the same as that of the first candidate of u.
+        G1.remove_edge(4, 2)
+        G1.add_edge(2, 4)
+        G2.remove_edge("d", "b")
+        G2.add_edge("b", "d")
+
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[2]}
+
+    def test_covered_neighbors_with_labels(self):
+        G1 = nx.DiGraph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1.remove_edge(4, 2)
+        G1.add_edge(2, 4)
+        G2.remove_edge("d", "b")
+        G2.add_edge("b", "d")
+
+        G1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
+        }
+
+        nx.set_node_attributes(
+            G1,
+            dict(zip(G1, it.cycle(labels_many))),
+            "label",
+        )
+        nx.set_node_attributes(
+            G2,
+            dict(
+                zip(
+                    [self.mapped[n] for n in G1],
+                    it.cycle(labels_many),
+                )
+            ),
+            "label",
+        )
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1_out = {2, 4, 6}
+        T1_in = {5, 7, 8}
+        T1_tilde = {0, 3}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e", "g", "h"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 5
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 6
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Assign to 2, the same label as 6
+        G1.nodes[2]["label"] = G1.nodes[u]["label"]
+        G2.nodes[self.mapped[2]]["label"] = G1.nodes[u]["label"]
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[2]}
+
+        # Change the direction of an edge to make the degree orientation the same as that of the first candidate of u.
+        G1.remove_edge(2, 4)
+        G1.add_edge(4, 2)
+        G2.remove_edge("b", "d")
+        G2.add_edge("d", "b")
+
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+    def test_same_in_out_degrees_no_candidate(self):
+        g1 = nx.DiGraph([(4, 1), (4, 2), (3, 4), (5, 4), (6, 4)])
+        g2 = nx.DiGraph([(1, 4), (2, 4), (3, 4), (4, 5), (4, 6)])
+
+        l1 = dict(g1.nodes(data=None, default=-1))
+        l2 = dict(g2.nodes(data=None, default=-1))
+        gparams = _GraphParameters(
+            g1,
+            g2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        g2.in_degree(), g2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        g1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(g1.in_degree, g1.out_degree)
+        }
+
+        m = {1: 1, 2: 2, 3: 3}
+        m_rev = m.copy()
+
+        T1_out = {4}
+        T1_in = {4}
+        T1_tilde = {5, 6}
+        T2_out = {4}
+        T2_in = {4}
+        T2_tilde = {5, 6}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 4
+        # despite the same in and out degree, there's no candidate for u=4
+        candidates = _find_candidates_Di(u, gparams, sparams, g1_degree)
+        assert candidates == set()
+        # Notice how the regular candidate selection method returns a wrong result.
+        assert _find_candidates(u, gparams, sparams, g1_degree) == {4}
+
+
+class TestGraphISOFeasibility:
+    def test_const_covered_neighbors(self):
+        G1 = nx.Graph([(0, 1), (1, 2), (3, 0), (3, 2)])
+        G2 = nx.Graph([("a", "b"), ("b", "c"), ("k", "a"), ("k", "c")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_no_covered_neighbors(self):
+        G1 = nx.Graph([(0, 1), (1, 2), (3, 4), (3, 5)])
+        G2 = nx.Graph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "z")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_mixed_covered_uncovered_neighbors(self):
+        G1 = nx.Graph([(0, 1), (1, 2), (3, 0), (3, 2), (3, 4), (3, 5)])
+        G2 = nx.Graph(
+            [("a", "b"), ("b", "c"), ("k", "a"), ("k", "c"), ("k", "w"), ("k", "z")]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_fail_cases(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        G2 = nx.Graph(
+            [
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("k", "f"),
+                ("k", "g"),
+                ("e", "b"),
+                ("f", "d"),
+            ]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 10, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Delete one uncovered neighbor of u. Notice how it still passes the test.
+        # Two reasons for this:
+        #   1. If u, v had different degrees from the beginning, they wouldn't
+        #      be selected as candidates in the first place.
+        #   2. Even if they are selected, consistency is basically 1-look-ahead,
+        #      meaning that we take into consideration the relation of the
+        #      candidates with their mapped neighbors. The node we deleted is
+        #      not a covered neighbor.
+        #      Such nodes will be checked by the cut_PT function, which is
+        #      basically the 2-look-ahead, checking the relation of the
+        #      candidates with T1, T2 (to which the node we just deleted belongs).
+        G1.remove_node(6)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of u in G1
+        G1.add_edge(u, 2)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.add_edge(v, "c")
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of v in G2
+        G2.add_edge(v, "x")
+        G1.add_node(7)
+        sparams.mapping.update({7: "x"})
+        sparams.reverse_mapping.update({"x": 7})
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.add_edge(u, 7)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph))
+    def test_cut_inconsistent_labels(self, graph_type):
+        G1 = graph_type(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        G2 = graph_type(
+            [
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("k", "f"),
+                ("k", "g"),
+                ("e", "b"),
+                ("f", "d"),
+            ]
+        )
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        l1.update({6: "green"})  # Change the label of one neighbor of u
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+
+        u, v = 10, "k"
+        assert _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_consistent_labels(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        G2 = nx.Graph(
+            [
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("k", "f"),
+                ("k", "g"),
+                ("e", "b"),
+                ("f", "d"),
+            ]
+        )
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5},
+            None,
+            {6},
+            None,
+            {"e", "f"},
+            None,
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_same_labels(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5},
+            None,
+            {6},
+            None,
+            {"e", "f"},
+            None,
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G1[u] and T1, so it's not the same as the one between G2[v] and T2
+        G1.remove_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.remove_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde
+        G2.remove_edge(v, mapped[6])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.remove_edge(u, 6)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add disconnected nodes, which will form the new Ti_tilde
+        G1.add_nodes_from([6, 7, 8])
+        G2.add_nodes_from(["g", "y", "z"])
+        sparams.T1_tilde.update({6, 7, 8})
+        sparams.T2_tilde.update({"g", "y", "z"})
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add some new nodes to the mapping
+        sparams.mapping.update({6: "g", 7: "y"})
+        sparams.reverse_mapping.update({"g": 6, "y": 7})
+
+        # Add more nodes to T1, T2.
+        G1.add_edges_from([(6, 20), (7, 20), (6, 21)])
+        G2.add_edges_from([("g", "i"), ("g", "j"), ("y", "j")])
+
+        sparams.mapping.update({20: "j", 21: "i"})
+        sparams.reverse_mapping.update({"j": 20, "i": 21})
+        sparams.T1.update({20, 21})
+        sparams.T2.update({"i", "j"})
+        sparams.T1_tilde.difference_update({6, 7})
+        sparams.T2_tilde.difference_update({"g", "y"})
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add nodes from the new T1 and T2, as neighbors of u and v respectively
+        G1.add_edges_from([(u, 20), (u, 21)])
+        G2.add_edges_from([(v, "i"), (v, "j")])
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the edges, maintaining the G1[u]-T1 intersection
+        G1.remove_edge(u, 20)
+        G1.add_edge(u, 4)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Connect u to 8 which is still in T1_tilde
+        G1.add_edge(u, 8)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for v and z, so that inters(G1[u], T1_tilde) == inters(G2[v], T2_tilde)
+        G2.add_edge(v, "z")
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_different_labels(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 3),
+                (20, 5),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                3: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the orientation of the labels on neighbors of u compared to neighbors of v, leaving the structure intact.
+        l1.update({9: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # compensate in G2
+        l2.update({mapped[9]: "red"})
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G1[u] and T1
+        G1.add_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G2[v] and T2
+        G2.add_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G2[v] and T2_tilde
+        G2.remove_edge(v, mapped[8])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G1[u] and T1_tilde
+        G1.remove_edge(u, 8)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Place 8 and mapped[8] in T1 and T2 respectively, by connecting it to covered nodes
+        G1.add_edge(8, 3)
+        G2.add_edge(mapped[8], mapped[3])
+        sparams.T1.add(8)
+        sparams.T2.add(mapped[8])
+        sparams.T1_tilde.remove(8)
+        sparams.T2_tilde.remove(mapped[8])
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove neighbor of u from T1
+        G1.remove_node(5)
+        l1.pop(5)
+        sparams.T1.remove(5)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same in G2
+        G2.remove_node(mapped[5])
+        l2.pop(mapped[5])
+        sparams.T2.remove(mapped[5])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_feasibility_same_labels(self):
+        """Uniformly-labeled Graph: consistency and cutting rules fail independently.
+
+        Builds isomorphic G1/G2 (all nodes "blue"), then mutates edges so that
+        first only _consistent_PT fails, and finally only _cut_PT fails.
+        """
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 2),
+                (20, 5),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        # Single shared label, so label grouping never rules anything out.
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {mapped[n]: "blue" for n in G1.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change structure in G2 such that, ONLY consistency is harmed
+        G2.remove_edge(mapped[20], mapped[2])
+        G2.add_edge(mapped[20], mapped[3])
+
+        # Consistency check fails, while the cutting rules are satisfied!
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1 and make it consistent
+        G1.remove_edge(20, 2)
+        G1.add_edge(20, 3)
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # ONLY fail the cutting check
+        G2.add_edge(v, mapped[10])
+        assert _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_feasibility_different_labels(self):
+        """Multi-labeled Graph: consistency and cutting rules fail independently.
+
+        Same topology as the same-labels test, but several nodes carry distinct
+        labels, so label updates must accompany the structural mutations.
+        """
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 2),
+                (20, 5),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        # Default label "none"; selected nodes get colors below.
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                2: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change structure in G2 such that, ONLY consistency is harmed
+        G2.remove_edge(mapped[20], mapped[2])
+        G2.add_edge(mapped[20], mapped[3])
+        l2.update({mapped[3]: "green"})
+
+        # Consistency check fails, while the cutting rules are satisfied!
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1 and make it consistent
+        G1.remove_edge(20, 2)
+        G1.add_edge(20, 3)
+        l1.update({3: "green"})
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # ONLY fail the cutting check: make the labels of u's and v's
+        # neighborhoods disagree without touching the structure.
+        l1.update({5: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+
+class TestMultiGraphISOFeasibility:
+    def test_const_covered_neighbors(self):
+        """Consistency holds when u's covered neighbors match v's, edge
+        multiplicities included (MultiGraph)."""
+        G1 = nx.MultiGraph(
+            [(0, 1), (0, 1), (1, 2), (3, 0), (3, 0), (3, 0), (3, 2), (3, 2)]
+        )
+        G2 = nx.MultiGraph(
+            [
+                ("a", "b"),
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "a"),
+                ("k", "a"),
+                ("k", "c"),
+                ("k", "c"),
+            ]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_no_covered_neighbors(self):
+        """Consistency holds trivially when u and v have no covered neighbors
+        (MultiGraph)."""
+        G1 = nx.MultiGraph([(0, 1), (0, 1), (1, 2), (3, 4), (3, 4), (3, 5)])
+        G2 = nx.MultiGraph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "w"), ("k", "z")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_mixed_covered_uncovered_neighbors(self):
+        """Consistency only inspects covered neighbors; extra uncovered
+        neighbors (w, z) are ignored (MultiGraph)."""
+        G1 = nx.MultiGraph(
+            [(0, 1), (1, 2), (3, 0), (3, 0), (3, 0), (3, 2), (3, 2), (3, 4), (3, 5)]
+        )
+        G2 = nx.MultiGraph(
+            [
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "a"),
+                ("k", "a"),
+                ("k", "c"),
+                ("k", "c"),
+                ("k", "w"),
+                ("k", "z"),
+            ]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_fail_cases(self):
+        """Each covered-neighbor/multiplicity mismatch breaks consistency, and
+        each compensating mutation restores it (MultiGraph)."""
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 0),
+                (10, 0),
+                (10, 3),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 10, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Delete one uncovered neighbor of u. Notice how it still passes the test. Two reasons for this:
+        # 1. If u, v had different degrees from the beginning, they wouldn't be selected as candidates in the first
+        #    place.
+        # 2. Even if they are selected, consistency is basically 1-look-ahead, meaning that we take into consideration
+        #    the relation of the candidates with their mapped neighbors. The node we deleted is not a covered neighbor.
+        #    Such nodes will be checked by the cut_PT function, which is basically the 2-look-ahead, checking the
+        #    relation of the candidates with T1, T2 (in which belongs the node we just deleted).
+        G1.remove_node(6)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of u in G1
+        G1.add_edge(u, 2)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.add_edge(v, "c")
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of v in G2
+        G2.add_edge(v, "x")
+        G1.add_node(7)
+        sparams.mapping.update({7: "x"})
+        sparams.reverse_mapping.update({"x": 7})
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.add_edge(u, 7)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Delete an edge between u and a covered neighbor
+        G1.remove_edges_from([(u, 0), (u, 0)])
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.remove_edges_from([(v, mapped[0]), (v, mapped[0])])
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Remove an edge between v and a covered neighbor
+        G2.remove_edge(v, mapped[3])
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.remove_edge(u, 3)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_cut_same_labels(self):
+        """Cutting rules on a uniformly-labeled MultiGraph: every asymmetric
+        edit of u's/v's neighborhood multiplicities triggers a cut until the
+        mirror edit is applied to the other graph."""
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 0),
+                (10, 0),
+                (10, 3),
+                (10, 3),
+                (10, 4),
+                (10, 4),
+                (10, 5),
+                (10, 5),
+                (10, 5),
+                (10, 5),
+                (10, 6),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5},
+            None,
+            {6},
+            None,
+            {"e", "f"},
+            None,
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove one of the multiple edges between u and a neighbor
+        G1.remove_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate: drop the remaining (u, 4) edge in G1 and both
+        # (v, mapped[4]) edges in G2 so multiplicities agree again
+        G1.remove_edge(u, 4)
+        G2.remove_edges_from([(v, mapped[4]), (v, mapped[4])])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde
+        G2.remove_edge(v, mapped[6])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.remove_edge(u, 6)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add more edges between u and neighbor which belongs in T1_tilde
+        G1.add_edges_from([(u, 5), (u, 5), (u, 5)])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.add_edges_from([(v, mapped[5]), (v, mapped[5]), (v, mapped[5])])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add disconnected nodes, which will form the new Ti_out
+        G1.add_nodes_from([6, 7, 8])
+        G2.add_nodes_from(["g", "y", "z"])
+        G1.add_edges_from([(u, 6), (u, 6), (u, 6), (u, 8)])
+        G2.add_edges_from([(v, "g"), (v, "g"), (v, "g"), (v, "z")])
+
+        sparams.T1_tilde.update({6, 7, 8})
+        sparams.T2_tilde.update({"g", "y", "z"})
+
+        # Rebuild the label maps/groups to cover the newly added nodes
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add some new nodes to the mapping
+        sparams.mapping.update({6: "g", 7: "y"})
+        sparams.reverse_mapping.update({"g": 6, "y": 7})
+
+        # Add more nodes to T1, T2.
+        G1.add_edges_from([(6, 20), (7, 20), (6, 21)])
+        G2.add_edges_from([("g", "i"), ("g", "j"), ("y", "j")])
+
+        sparams.T1.update({20, 21})
+        sparams.T2.update({"i", "j"})
+        sparams.T1_tilde.difference_update({6, 7})
+        sparams.T2_tilde.difference_update({"g", "y"})
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove some edges
+        G2.remove_edge(v, "g")
+        assert _cut_PT(u, v, gparams, sparams)
+
+        G1.remove_edge(u, 6)
+        G1.add_edge(u, 8)
+        G2.add_edge(v, "z")
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add nodes from the new T1 and T2, as neighbors of u and v respectively
+        G1.add_edges_from([(u, 20), (u, 20), (u, 20), (u, 21)])
+        G2.add_edges_from([(v, "i"), (v, "i"), (v, "i"), (v, "j")])
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the edges
+        G1.remove_edge(u, 20)
+        G1.add_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        G2.remove_edge(v, "i")
+        G2.add_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_different_labels(self):
+        """Cutting rules on a multi-labeled MultiGraph: label mismatches and
+        multiplicity mismatches each trigger a cut independently."""
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (0, 1),
+                (1, 2),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 9),
+                (20, 9),
+                (20, 15),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (20, 11),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 8),
+                (20, 3),
+                (20, 3),
+                (20, 5),
+                (20, 5),
+                (20, 5),
+                (20, 0),
+                (20, 0),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        # Default label "none"; selected nodes get colors below.
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                3: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the orientation of the labels on neighbors of u compared to neighbors of v. Leave the structure intact
+        l1.update({9: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # compensate in G2
+        l2.update({mapped[9]: "red"})
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G1[u] and T1
+        G1.add_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G2[v] and T2
+        G2.add_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Delete one from the multiple edges
+        G2.remove_edge(v, mapped[8])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G1[u] and T1_tilde
+        G1.remove_edge(u, 8)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Place 8 and mapped[8] in T1 and T2 respectively, by connecting it to covered nodes
+        G1.add_edges_from([(8, 3), (8, 3), (8, u)])
+        G2.add_edges_from([(mapped[8], mapped[3]), (mapped[8], mapped[3])])
+        sparams.T1.add(8)
+        sparams.T2.add(mapped[8])
+        sparams.T1_tilde.remove(8)
+        sparams.T2_tilde.remove(mapped[8])
+
+        # Cut fires: G1 got an extra (8, u) edge that G2 does not mirror
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Fix uneven edges
+        G1.remove_edge(8, u)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove neighbor of u from T1
+        G1.remove_node(5)
+        l1.pop(5)
+        sparams.T1.remove(5)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same in G2
+        G2.remove_node(mapped[5])
+        l2.pop(mapped[5])
+        sparams.T2.remove(mapped[5])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_feasibility_same_labels(self):
+        """Uniformly-labeled MultiGraph: consistency and cutting rules fail
+        independently; multi-edges must be mutated in pairs."""
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (0, 1),
+                (1, 2),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 9),
+                (20, 9),
+                (20, 15),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (20, 11),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 8),
+                (20, 3),
+                (20, 3),
+                (20, 5),
+                (20, 5),
+                (20, 5),
+                (20, 0),
+                (20, 0),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {mapped[n]: "blue" for n in G1.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change structure in G2 such that, ONLY consistency is harmed
+        G2.remove_edges_from([(mapped[20], mapped[3]), (mapped[20], mapped[3])])
+        G2.add_edges_from([(mapped[20], mapped[2]), (mapped[20], mapped[2])])
+
+        # Consistency check fails, while the cutting rules are satisfied!
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1 and make it consistent
+        G1.remove_edges_from([(20, 3), (20, 3)])
+        G1.add_edges_from([(20, 2), (20, 2)])
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # ONLY fail the cutting check
+        G2.add_edges_from([(v, mapped[10])] * 5)
+        assert _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Pass all tests
+        G1.add_edges_from([(u, 10)] * 5)
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_feasibility_different_labels(self):
+        """Multi-labeled MultiGraph: consistency and cutting rules fail
+        independently; label updates must accompany structural changes."""
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (0, 1),
+                (1, 2),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 9),
+                (20, 9),
+                (20, 15),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (20, 11),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 8),
+                (20, 2),
+                (20, 2),
+                (20, 5),
+                (20, 5),
+                (20, 5),
+                (20, 0),
+                (20, 0),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+        # Default label "none"; selected nodes get colors below.
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                2: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change structure in G2 such that, ONLY consistency is harmed
+        G2.remove_edges_from([(mapped[20], mapped[2]), (mapped[20], mapped[2])])
+        G2.add_edges_from([(mapped[20], mapped[3]), (mapped[20], mapped[3])])
+        l2.update({mapped[3]: "green"})
+
+        # Consistency check fails, while the cutting rules are satisfied!
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1 and make it consistent
+        G1.remove_edges_from([(20, 2), (20, 2)])
+        G1.add_edges_from([(20, 3), (20, 3)])
+        l1.update({3: "green"})
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # ONLY fail the cutting check: relabel a neighbor of u without
+        # touching the structure
+        l1.update({5: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+
+class TestDiGraphISOFeasibility:
+    def test_const_covered_neighbors(self):
+        """Consistency holds when covered predecessors/successors of u and v
+        correspond (DiGraph)."""
+        G1 = nx.DiGraph([(0, 1), (1, 2), (0, 3), (2, 3)])
+        G2 = nx.DiGraph([("a", "b"), ("b", "c"), ("a", "k"), ("c", "k")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_no_covered_neighbors(self):
+        """Consistency holds trivially when u and v have no covered neighbors
+        (DiGraph)."""
+        G1 = nx.DiGraph([(0, 1), (1, 2), (3, 4), (3, 5)])
+        G2 = nx.DiGraph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "z")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_mixed_covered_uncovered_neighbors(self):
+        """Consistency only inspects covered neighbors; extra uncovered
+        successors (w, z) are ignored (DiGraph)."""
+        G1 = nx.DiGraph([(0, 1), (1, 2), (3, 0), (3, 2), (3, 4), (3, 5)])
+        G2 = nx.DiGraph(
+            [("a", "b"), ("b", "c"), ("k", "a"), ("k", "c"), ("k", "w"), ("k", "z")]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_fail_cases(self):
+        """Each covered-neighbor mismatch breaks consistency, and each
+        compensating mutation restores it (DiGraph)."""
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (2, 1),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (5, 10),
+                (10, 6),
+                (1, 4),
+                (5, 3),
+            ]
+        )
+        G2 = nx.DiGraph(
+            [
+                ("a", "b"),
+                ("c", "b"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("f", "k"),
+                ("k", "g"),
+                ("b", "e"),
+                ("f", "d"),
+            ]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 10, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Delete one uncovered neighbor of u. Notice how it still passes the
+        # test. Two reasons for this:
+        #   1. If u, v had different degrees from the beginning, they wouldn't
+        #      be selected as candidates in the first place.
+        #   2. Even if they are selected, consistency is basically
+        #      1-look-ahead, meaning that we take into consideration the
+        #      relation of the candidates with their mapped neighbors.
+        #      The node we deleted is not a covered neighbor.
+        #      Such nodes will be checked by the cut_PT function, which is
+        #      basically the 2-look-ahead, checking the relation of the
+        #      candidates with T1, T2 (in which belongs the node we just deleted).
+        G1.remove_node(6)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of u in G1
+        G1.add_edge(u, 2)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.add_edge(v, "c")
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of v in G2
+        G2.add_edge(v, "x")
+        G1.add_node(7)
+        sparams.mapping.update({7: "x"})
+        sparams.reverse_mapping.update({"x": 7})
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.add_edge(u, 7)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_cut_inconsistent_labels(self):
+        """A single mismatched neighbor label is enough for _cut_PT to prune
+        the candidate pair (DiGraph)."""
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (2, 1),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (5, 10),
+                (10, 6),
+                (1, 4),
+                (5, 3),
+            ]
+        )
+        G2 = nx.DiGraph(
+            [
+                ("a", "b"),
+                ("c", "b"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("f", "k"),
+                ("k", "g"),
+                ("b", "e"),
+                ("f", "d"),
+            ]
+        )
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        l1.update({5: "green"})  # Change the label of one neighbor of u
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+
+        u, v = 10, "k"
+        assert _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_consistent_labels(self):
+        """With matching labels and matching T1/T2, T1_in/T2_in, T1_tilde/
+        T2_tilde intersections, _cut_PT does not prune (DiGraph)."""
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (2, 1),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (5, 10),
+                (10, 6),
+                (1, 4),
+                (5, 3),
+            ]
+        )
+        G2 = nx.DiGraph(
+            [
+                ("a", "b"),
+                ("c", "b"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("f", "k"),
+                ("k", "g"),
+                ("b", "e"),
+                ("f", "d"),
+            ]
+        )
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4},
+            {5, 10},
+            {6},
+            None,
+            {"e"},
+            {"f", "k"},
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_same_labels(self):
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (2, 1),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (5, 10),
+                (10, 6),
+                (1, 4),
+                (5, 3),
+            ]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4},
+            {5, 10},
+            {6},
+            None,
+            {"e"},
+            {"f", "k"},
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G1[u] and T1_out, so it's not the same as the one between G2[v] and T2_out
+        G1.remove_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.remove_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G1[u] and T1_in, so it's not the same as the one between G2[v] and T2_in
+        G1.remove_edge(5, u)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.remove_edge(mapped[5], v)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde
+        G2.remove_edge(v, mapped[6])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.remove_edge(u, 6)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add disconnected nodes, which will form the new Ti_tilde
+        G1.add_nodes_from([6, 7, 8])
+        G2.add_nodes_from(["g", "y", "z"])
+        sparams.T1_tilde.update({6, 7, 8})
+        sparams.T2_tilde.update({"g", "y", "z"})
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_different_labels(self):
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (1, 2),
+                (14, 1),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (10, 4),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 3),
+                (20, 5),
+                (0, 20),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                3: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 20},
+            {14, 20},
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "x"},
+            {"o", "x"},
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the orientation of the labels on neighbors of u compared to neighbors of v. Leave the structure intact
+        l1.update({9: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # compensate in G2
+        l2.update({mapped[9]: "red"})
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G1[u] and T1_out
+        G1.add_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G2[v] and T2_out
+        G2.add_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G1[u] and T1_in
+        G1.add_edge(u, 14)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G2[v] and T2_in
+        G2.add_edge(v, mapped[14])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G2[v] and T2_tilde
+        G2.remove_edge(v, mapped[8])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G1[u] and T1_tilde
+        G1.remove_edge(u, 8)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Place 8 and mapped[8] in T1 and T2 respectively, by connecting it to covered nodes
+        G1.add_edge(8, 3)
+        G2.add_edge(mapped[8], mapped[3])
+        sparams.T1.add(8)
+        sparams.T2.add(mapped[8])
+        sparams.T1_tilde.remove(8)
+        sparams.T2_tilde.remove(mapped[8])
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove neighbor of u from T1
+        G1.remove_node(5)
+        l1.pop(5)
+        sparams.T1.remove(5)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same in G2
+        G2.remove_node(mapped[5])
+        l2.pop(mapped[5])
+        sparams.T2.remove(mapped[5])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_predecessor_T1_in_fail(self):
+        G1 = nx.DiGraph(
+            [(0, 1), (0, 3), (4, 0), (1, 5), (5, 2), (3, 6), (4, 6), (6, 5)]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g"}
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            {3, 5},
+            {4, 5},
+            {6},
+            None,
+            {"d", "f"},
+            {"f"},  # mapped[4] is missing from T2_in
+            {"g"},
+            None,
+        )
+
+        u, v = 6, "g"
+        assert _cut_PT(u, v, gparams, sparams)
+
+        sparams.T2_in.add("e")
+        assert not _cut_PT(u, v, gparams, sparams)
+
+
+class TestGraphTinoutUpdating:
+    edges = [
+        (1, 3),
+        (2, 3),
+        (3, 4),
+        (4, 9),
+        (4, 5),
+        (3, 9),
+        (5, 8),
+        (5, 7),
+        (8, 7),
+        (6, 7),
+    ]
+    mapped = {
+        0: "x",
+        1: "a",
+        2: "b",
+        3: "c",
+        4: "d",
+        5: "e",
+        6: "f",
+        7: "g",
+        8: "h",
+        9: "i",
+    }
+    G1 = nx.Graph()
+    G1.add_edges_from(edges)
+    G1.add_node(0)
+    G2 = nx.relabel_nodes(G1, mapping=mapped)
+
+    def test_updating(self):
+        G2_degree = dict(self.G2.degree)
+        gparams, sparams = _initialize_parameters(self.G1, self.G2, G2_degree)
+        m, m_rev, T1, _, T1_tilde, _, T2, _, T2_tilde, _ = sparams
+
+        # Add node to the mapping
+        m[4] = self.mapped[4]
+        m_rev[self.mapped[4]] = 4
+        _update_Tinout(4, self.mapped[4], gparams, sparams)
+
+        assert T1 == {3, 5, 9}
+        assert T2 == {"c", "i", "e"}
+        assert T1_tilde == {0, 1, 2, 6, 7, 8}
+        assert T2_tilde == {"x", "a", "b", "f", "g", "h"}
+
+        # Add node to the mapping
+        m[5] = self.mapped[5]
+        m_rev.update({self.mapped[5]: 5})
+        _update_Tinout(5, self.mapped[5], gparams, sparams)
+
+        assert T1 == {3, 9, 8, 7}
+        assert T2 == {"c", "i", "h", "g"}
+        assert T1_tilde == {0, 1, 2, 6}
+        assert T2_tilde == {"x", "a", "b", "f"}
+
+        # Add node to the mapping
+        m[6] = self.mapped[6]
+        m_rev.update({self.mapped[6]: 6})
+        _update_Tinout(6, self.mapped[6], gparams, sparams)
+
+        assert T1 == {3, 9, 8, 7}
+        assert T2 == {"c", "i", "h", "g"}
+        assert T1_tilde == {0, 1, 2}
+        assert T2_tilde == {"x", "a", "b"}
+
+        # Add node to the mapping
+        m[3] = self.mapped[3]
+        m_rev.update({self.mapped[3]: 3})
+        _update_Tinout(3, self.mapped[3], gparams, sparams)
+
+        assert T1 == {1, 2, 9, 8, 7}
+        assert T2 == {"a", "b", "i", "h", "g"}
+        assert T1_tilde == {0}
+        assert T2_tilde == {"x"}
+
+        # Add node to the mapping
+        m[0] = self.mapped[0]
+        m_rev.update({self.mapped[0]: 0})
+        _update_Tinout(0, self.mapped[0], gparams, sparams)
+
+        assert T1 == {1, 2, 9, 8, 7}
+        assert T2 == {"a", "b", "i", "h", "g"}
+        assert T1_tilde == set()
+        assert T2_tilde == set()
+
+    def test_restoring(self):
+        m = {0: "x", 3: "c", 4: "d", 5: "e", 6: "f"}
+        m_rev = {"x": 0, "c": 3, "d": 4, "e": 5, "f": 6}
+
+        T1 = {1, 2, 7, 9, 8}
+        T2 = {"a", "b", "g", "i", "h"}
+        T1_tilde = set()
+        T2_tilde = set()
+
+        gparams = _GraphParameters(self.G1, self.G2, {}, {}, {}, {}, {})
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        # Remove a node from the mapping
+        m.pop(0)
+        m_rev.pop("x")
+        _restore_Tinout(0, self.mapped[0], gparams, sparams)
+
+        assert T1 == {1, 2, 7, 9, 8}
+        assert T2 == {"a", "b", "g", "i", "h"}
+        assert T1_tilde == {0}
+        assert T2_tilde == {"x"}
+
+        # Remove a node from the mapping
+        m.pop(6)
+        m_rev.pop("f")
+        _restore_Tinout(6, self.mapped[6], gparams, sparams)
+
+        assert T1 == {1, 2, 7, 9, 8}
+        assert T2 == {"a", "b", "g", "i", "h"}
+        assert T1_tilde == {0, 6}
+        assert T2_tilde == {"x", "f"}
+
+        # Remove a node from the mapping
+        m.pop(3)
+        m_rev.pop("c")
+        _restore_Tinout(3, self.mapped[3], gparams, sparams)
+
+        assert T1 == {7, 9, 8, 3}
+        assert T2 == {"g", "i", "h", "c"}
+        assert T1_tilde == {0, 6, 1, 2}
+        assert T2_tilde == {"x", "f", "a", "b"}
+
+        # Remove a node from the mapping
+        m.pop(5)
+        m_rev.pop("e")
+        _restore_Tinout(5, self.mapped[5], gparams, sparams)
+
+        assert T1 == {9, 3, 5}
+        assert T2 == {"i", "c", "e"}
+        assert T1_tilde == {0, 6, 1, 2, 7, 8}
+        assert T2_tilde == {"x", "f", "a", "b", "g", "h"}
+
+        # Remove a node from the mapping
+        m.pop(4)
+        m_rev.pop("d")
+        _restore_Tinout(4, self.mapped[4], gparams, sparams)
+
+        assert T1 == set()
+        assert T2 == set()
+        assert T1_tilde == set(self.G1.nodes())
+        assert T2_tilde == set(self.G2.nodes())
+
+
+class TestDiGraphTinoutUpdating:
+    edges = [
+        (1, 3),
+        (3, 2),
+        (3, 4),
+        (4, 9),
+        (4, 5),
+        (3, 9),
+        (5, 8),
+        (5, 7),
+        (8, 7),
+        (7, 6),
+    ]
+    mapped = {
+        0: "x",
+        1: "a",
+        2: "b",
+        3: "c",
+        4: "d",
+        5: "e",
+        6: "f",
+        7: "g",
+        8: "h",
+        9: "i",
+    }
+    G1 = nx.DiGraph(edges)
+    G1.add_node(0)
+    G2 = nx.relabel_nodes(G1, mapping=mapped)
+
+    def test_updating(self):
+        G2_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(
+                self.G2.in_degree, self.G2.out_degree
+            )
+        }
+        gparams, sparams = _initialize_parameters(self.G1, self.G2, G2_degree)
+        m, m_rev, T1_out, T1_in, T1_tilde, _, T2_out, T2_in, T2_tilde, _ = sparams
+
+        # Add node to the mapping
+        m[4] = self.mapped[4]
+        m_rev[self.mapped[4]] = 4
+        _update_Tinout(4, self.mapped[4], gparams, sparams)
+
+        assert T1_out == {5, 9}
+        assert T1_in == {3}
+        assert T2_out == {"i", "e"}
+        assert T2_in == {"c"}
+        assert T1_tilde == {0, 1, 2, 6, 7, 8}
+        assert T2_tilde == {"x", "a", "b", "f", "g", "h"}
+
+        # Add node to the mapping
+        m[5] = self.mapped[5]
+        m_rev[self.mapped[5]] = 5
+        _update_Tinout(5, self.mapped[5], gparams, sparams)
+
+        assert T1_out == {9, 8, 7}
+        assert T1_in == {3}
+        assert T2_out == {"i", "g", "h"}
+        assert T2_in == {"c"}
+        assert T1_tilde == {0, 1, 2, 6}
+        assert T2_tilde == {"x", "a", "b", "f"}
+
+        # Add node to the mapping
+        m[6] = self.mapped[6]
+        m_rev[self.mapped[6]] = 6
+        _update_Tinout(6, self.mapped[6], gparams, sparams)
+
+        assert T1_out == {9, 8, 7}
+        assert T1_in == {3, 7}
+        assert T2_out == {"i", "g", "h"}
+        assert T2_in == {"c", "g"}
+        assert T1_tilde == {0, 1, 2}
+        assert T2_tilde == {"x", "a", "b"}
+
+        # Add node to the mapping
+        m[3] = self.mapped[3]
+        m_rev[self.mapped[3]] = 3
+        _update_Tinout(3, self.mapped[3], gparams, sparams)
+
+        assert T1_out == {9, 8, 7, 2}
+        assert T1_in == {7, 1}
+        assert T2_out == {"i", "g", "h", "b"}
+        assert T2_in == {"g", "a"}
+        assert T1_tilde == {0}
+        assert T2_tilde == {"x"}
+
+        # Add node to the mapping
+        m[0] = self.mapped[0]
+        m_rev[self.mapped[0]] = 0
+        _update_Tinout(0, self.mapped[0], gparams, sparams)
+
+        assert T1_out == {9, 8, 7, 2}
+        assert T1_in == {7, 1}
+        assert T2_out == {"i", "g", "h", "b"}
+        assert T2_in == {"g", "a"}
+        assert T1_tilde == set()
+        assert T2_tilde == set()
+
+    def test_restoring(self):
+        m = {0: "x", 3: "c", 4: "d", 5: "e", 6: "f"}
+        m_rev = {"x": 0, "c": 3, "d": 4, "e": 5, "f": 6}
+
+        T1_out = {2, 7, 9, 8}
+        T1_in = {1, 7}
+        T2_out = {"b", "g", "i", "h"}
+        T2_in = {"a", "g"}
+        T1_tilde = set()
+        T2_tilde = set()
+
+        gparams = _GraphParameters(self.G1, self.G2, {}, {}, {}, {}, {})
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        # Remove a node from the mapping
+        m.pop(0)
+        m_rev.pop("x")
+        _restore_Tinout_Di(0, self.mapped[0], gparams, sparams)
+
+        assert T1_out == {2, 7, 9, 8}
+        assert T1_in == {1, 7}
+        assert T2_out == {"b", "g", "i", "h"}
+        assert T2_in == {"a", "g"}
+        assert T1_tilde == {0}
+        assert T2_tilde == {"x"}
+
+        # Remove a node from the mapping
+        m.pop(6)
+        m_rev.pop("f")
+        _restore_Tinout_Di(6, self.mapped[6], gparams, sparams)
+
+        assert T1_out == {2, 9, 8, 7}
+        assert T1_in == {1}
+        assert T2_out == {"b", "i", "h", "g"}
+        assert T2_in == {"a"}
+        assert T1_tilde == {0, 6}
+        assert T2_tilde == {"x", "f"}
+
+        # Remove a node from the mapping
+        m.pop(3)
+        m_rev.pop("c")
+        _restore_Tinout_Di(3, self.mapped[3], gparams, sparams)
+
+        assert T1_out == {9, 8, 7}
+        assert T1_in == {3}
+        assert T2_out == {"i", "h", "g"}
+        assert T2_in == {"c"}
+        assert T1_tilde == {0, 6, 1, 2}
+        assert T2_tilde == {"x", "f", "a", "b"}
+
+        # Remove a node from the mapping
+        m.pop(5)
+        m_rev.pop("e")
+        _restore_Tinout_Di(5, self.mapped[5], gparams, sparams)
+
+        assert T1_out == {9, 5}
+        assert T1_in == {3}
+        assert T2_out == {"i", "e"}
+        assert T2_in == {"c"}
+        assert T1_tilde == {0, 6, 1, 2, 8, 7}
+        assert T2_tilde == {"x", "f", "a", "b", "h", "g"}
+
+        # Remove a node from the mapping
+        m.pop(4)
+        m_rev.pop("d")
+        _restore_Tinout_Di(4, self.mapped[4], gparams, sparams)
+
+        assert T1_out == set()
+        assert T1_in == set()
+        assert T2_out == set()
+        assert T2_in == set()
+        assert T1_tilde == set(self.G1.nodes())
+        assert T2_tilde == set(self.G2.nodes())
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py
new file mode 100644
index 00000000..b44f4588
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py
@@ -0,0 +1,200 @@
+"""
+Tests for VF2 isomorphism algorithm for weighted graphs.
+"""
+
+import math
+from operator import eq
+
+import networkx as nx
+import networkx.algorithms.isomorphism as iso
+
+
+def test_simple():
+    # 16 simple tests
+    w = "weight"
+    edges = [(0, 0, 1), (0, 0, 1.5), (0, 1, 2), (1, 0, 3)]
+    for g1 in [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]:
+        g1.add_weighted_edges_from(edges)
+        g2 = g1.subgraph(g1.nodes())
+        if g1.is_multigraph():
+            em = iso.numerical_multiedge_match("weight", 1)
+        else:
+            em = iso.numerical_edge_match("weight", 1)
+        assert nx.is_isomorphic(g1, g2, edge_match=em)
+
+        for mod1, mod2 in [(False, True), (True, False), (True, True)]:
+            # mod1 tests a regular edge
+            # mod2 tests a selfloop
+            if g2.is_multigraph():
+                if mod1:
+                    data1 = {0: {"weight": 10}}
+                if mod2:
+                    data2 = {0: {"weight": 1}, 1: {"weight": 2.5}}
+            else:
+                if mod1:
+                    data1 = {"weight": 10}
+                if mod2:
+                    data2 = {"weight": 2.5}
+
+            g2 = g1.subgraph(g1.nodes()).copy()
+            if mod1:
+                if not g1.is_directed():
+                    g2._adj[1][0] = data1
+                    g2._adj[0][1] = data1
+                else:
+                    g2._succ[1][0] = data1
+                    g2._pred[0][1] = data1
+            if mod2:
+                if not g1.is_directed():
+                    g2._adj[0][0] = data2
+                else:
+                    g2._succ[0][0] = data2
+                    g2._pred[0][0] = data2
+
+            assert not nx.is_isomorphic(g1, g2, edge_match=em)
+
+
+def test_weightkey():
+    g1 = nx.DiGraph()
+    g2 = nx.DiGraph()
+
+    g1.add_edge("A", "B", weight=1)
+    g2.add_edge("C", "D", weight=0)
+
+    assert nx.is_isomorphic(g1, g2)
+    em = iso.numerical_edge_match("nonexistent attribute", 1)
+    assert nx.is_isomorphic(g1, g2, edge_match=em)
+    em = iso.numerical_edge_match("weight", 1)
+    assert not nx.is_isomorphic(g1, g2, edge_match=em)
+
+    g2 = nx.DiGraph()
+    g2.add_edge("C", "D")
+    assert nx.is_isomorphic(g1, g2, edge_match=em)
+
+
+class TestNodeMatch_Graph:
+    def setup_method(self):
+        self.g1 = nx.Graph()
+        self.g2 = nx.Graph()
+        self.build()
+
+    def build(self):
+        self.nm = iso.categorical_node_match("color", "")
+        self.em = iso.numerical_edge_match("weight", 1)
+
+        self.g1.add_node("A", color="red")
+        self.g2.add_node("C", color="blue")
+
+        self.g1.add_edge("A", "B", weight=1)
+        self.g2.add_edge("C", "D", weight=1)
+
+    def test_noweight_nocolor(self):
+        assert nx.is_isomorphic(self.g1, self.g2)
+
+    def test_color1(self):
+        assert not nx.is_isomorphic(self.g1, self.g2, node_match=self.nm)
+
+    def test_color2(self):
+        self.g1.nodes["A"]["color"] = "blue"
+        assert nx.is_isomorphic(self.g1, self.g2, node_match=self.nm)
+
+    def test_weight1(self):
+        assert nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)
+
+    def test_weight2(self):
+        self.g1.add_edge("A", "B", weight=2)
+        assert not nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)
+
+    def test_colorsandweights1(self):
+        iso = nx.is_isomorphic(self.g1, self.g2, node_match=self.nm, edge_match=self.em)
+        assert not iso
+
+    def test_colorsandweights2(self):
+        self.g1.nodes["A"]["color"] = "blue"
+        iso = nx.is_isomorphic(self.g1, self.g2, node_match=self.nm, edge_match=self.em)
+        assert iso
+
+    def test_colorsandweights3(self):
+        # make the weights disagree
+        self.g1.add_edge("A", "B", weight=2)
+        assert not nx.is_isomorphic(
+            self.g1, self.g2, node_match=self.nm, edge_match=self.em
+        )
+
+
+class TestEdgeMatch_MultiGraph:
+    def setup_method(self):
+        self.g1 = nx.MultiGraph()
+        self.g2 = nx.MultiGraph()
+        self.GM = iso.MultiGraphMatcher
+        self.build()
+
+    def build(self):
+        g1 = self.g1
+        g2 = self.g2
+
+        # We will assume integer weights only.
+        g1.add_edge("A", "B", color="green", weight=0, size=0.5)
+        g1.add_edge("A", "B", color="red", weight=1, size=0.35)
+        g1.add_edge("A", "B", color="red", weight=2, size=0.65)
+
+        g2.add_edge("C", "D", color="green", weight=1, size=0.5)
+        g2.add_edge("C", "D", color="red", weight=0, size=0.45)
+        g2.add_edge("C", "D", color="red", weight=2, size=0.65)
+
+        if g1.is_multigraph():
+            self.em = iso.numerical_multiedge_match("weight", 1)
+            self.emc = iso.categorical_multiedge_match("color", "")
+            self.emcm = iso.categorical_multiedge_match(["color", "weight"], ["", 1])
+            self.emg1 = iso.generic_multiedge_match("color", "red", eq)
+            self.emg2 = iso.generic_multiedge_match(
+                ["color", "weight", "size"],
+                ["red", 1, 0.5],
+                [eq, eq, math.isclose],
+            )
+        else:
+            self.em = iso.numerical_edge_match("weight", 1)
+            self.emc = iso.categorical_edge_match("color", "")
+            self.emcm = iso.categorical_edge_match(["color", "weight"], ["", 1])
+            self.emg1 = iso.generic_multiedge_match("color", "red", eq)
+            self.emg2 = iso.generic_edge_match(
+                ["color", "weight", "size"],
+                ["red", 1, 0.5],
+                [eq, eq, math.isclose],
+            )
+
+    def test_weights_only(self):
+        assert nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)
+
+    def test_colors_only(self):
+        gm = self.GM(self.g1, self.g2, edge_match=self.emc)
+        assert gm.is_isomorphic()
+
+    def test_colorsandweights(self):
+        gm = self.GM(self.g1, self.g2, edge_match=self.emcm)
+        assert not gm.is_isomorphic()
+
+    def test_generic1(self):
+        gm = self.GM(self.g1, self.g2, edge_match=self.emg1)
+        assert gm.is_isomorphic()
+
+    def test_generic2(self):
+        gm = self.GM(self.g1, self.g2, edge_match=self.emg2)
+        assert not gm.is_isomorphic()
+
+
+class TestEdgeMatch_DiGraph(TestNodeMatch_Graph):
+    def setup_method(self):
+        TestNodeMatch_Graph.setup_method(self)
+        self.g1 = nx.DiGraph()
+        self.g2 = nx.DiGraph()
+        self.build()
+
+
+class TestEdgeMatch_MultiDiGraph(TestEdgeMatch_MultiGraph):
+    def setup_method(self):
+        TestEdgeMatch_MultiGraph.setup_method(self)
+        self.g1 = nx.MultiDiGraph()
+        self.g2 = nx.MultiDiGraph()
+        self.GM = iso.MultiDiGraphMatcher
+        self.build()
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tree_isomorphism.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tree_isomorphism.py
new file mode 100644
index 00000000..e409d515
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tree_isomorphism.py
@@ -0,0 +1,284 @@
+"""
+An algorithm for finding if two undirected trees are isomorphic,
+and if so returns an isomorphism between the two sets of nodes.
+
+This algorithm uses a routine to tell if two rooted trees (trees with a
+specified root node) are isomorphic, which may be independently useful.
+
+This implements an algorithm from:
+The Design and Analysis of Computer Algorithms
+by Aho, Hopcroft, and Ullman
+Addison-Wesley Publishing 1974
+Example 3.2 pp. 84-86.
+
+A more understandable version of this algorithm is described in:
+Homework Assignment 5
+McGill University SOCS 308-250B, Winter 2002
+by Matthew Suderman
+http://crypto.cs.mcgill.ca/~crepeau/CS250/2004/HW5+.pdf
+"""
+
+import networkx as nx
+from networkx.utils.decorators import not_implemented_for
+
+__all__ = ["rooted_tree_isomorphism", "tree_isomorphism"]
+
+
+@nx._dispatchable(graphs={"t1": 0, "t2": 2}, returns_graph=True)
+def root_trees(t1, root1, t2, root2):
+    """Create a single digraph dT of free trees t1 and t2
+    #   with roots root1 and root2 respectively
+    # rename the nodes with consecutive integers
+    # so that all nodes get a unique name between both trees
+
+    # our new "fake" root node is 0
+    # t1 is numbers from 1 ... n
+    # t2 is numbered from n+1 to 2n
+    """
+
+    dT = nx.DiGraph()
+
+    newroot1 = 1  # left root will be 1
+    newroot2 = nx.number_of_nodes(t1) + 1  # right will be n+1
+
+    # may be overlap in node names here so need separate maps
+    # given the old name, what is the new
+    namemap1 = {root1: newroot1}
+    namemap2 = {root2: newroot2}
+
+    # add an edge from our new root to root1 and root2
+    dT.add_edge(0, namemap1[root1])
+    dT.add_edge(0, namemap2[root2])
+
+    for i, (v1, v2) in enumerate(nx.bfs_edges(t1, root1)):
+        namemap1[v2] = i + namemap1[root1] + 1
+        dT.add_edge(namemap1[v1], namemap1[v2])
+
+    for i, (v1, v2) in enumerate(nx.bfs_edges(t2, root2)):
+        namemap2[v2] = i + namemap2[root2] + 1
+        dT.add_edge(namemap2[v1], namemap2[v2])
+
+    # now we really want the inverse of namemap1 and namemap2
+    # giving the old name given the new
+    # since the values of namemap1 and namemap2 are unique
+    # there won't be collisions
+    namemap = {}
+    for old, new in namemap1.items():
+        namemap[new] = old
+    for old, new in namemap2.items():
+        namemap[new] = old
+
+    return (dT, namemap, newroot1, newroot2)
+
+
+# figure out the level of each node, with 0 at root
+@nx._dispatchable
+def assign_levels(G, root):
+    level = {}
+    level[root] = 0
+    for v1, v2 in nx.bfs_edges(G, root):
+        level[v2] = level[v1] + 1
+
+    return level
+
+
+# now group the nodes at each level
+def group_by_levels(levels):
+    L = {}
+    for n, lev in levels.items():
+        if lev not in L:
+            L[lev] = []
+        L[lev].append(n)
+
+    return L
+
+
+# now lets get the isomorphism by walking the ordered_children
+def generate_isomorphism(v, w, M, ordered_children):
+    # make sure tree1 comes first
+    assert v < w
+    M.append((v, w))
+    for i, (x, y) in enumerate(zip(ordered_children[v], ordered_children[w])):
+        generate_isomorphism(x, y, M, ordered_children)
+
+
+@nx._dispatchable(graphs={"t1": 0, "t2": 2})
+def rooted_tree_isomorphism(t1, root1, t2, root2):
+    """
+    Given two rooted trees `t1` and `t2`,
+    with roots `root1` and `root2` respectively
+    this routine will determine if they are isomorphic.
+
+    These trees may be either directed or undirected,
+    but if they are directed, all edges should flow from the root.
+
+    It returns the isomorphism, a mapping of the nodes of `t1` onto the nodes
+    of `t2`, such that two trees are then identical.
+
+    Note that two trees may have more than one isomorphism, and this
+    routine just returns one valid mapping.
+
+    Parameters
+    ----------
+    `t1` :  NetworkX graph
+        One of the trees being compared
+
+    `root1` : a node of `t1` which is the root of the tree
+
+    `t2` : undirected NetworkX graph
+        The other tree being compared
+
+    `root2` : a node of `t2` which is the root of the tree
+
+    This is a subroutine used to implement `tree_isomorphism`, but will
+    be somewhat faster if you already have rooted trees.
+
+    Returns
+    -------
+    isomorphism : list
+        A list of pairs in which the left element is a node in `t1`
+        and the right element is a node in `t2`.  The pairs are in
+        arbitrary order.  If the nodes in one tree is mapped to the names in
+        the other, then trees will be identical. Note that an isomorphism
+        will not necessarily be unique.
+
+        If `t1` and `t2` are not isomorphic, then it returns the empty list.
+    """
+
+    assert nx.is_tree(t1)
+    assert nx.is_tree(t2)
+
+    # get the rooted tree formed by combining them
+    # with unique names
+    (dT, namemap, newroot1, newroot2) = root_trees(t1, root1, t2, root2)
+
+    # compute the distance from the root, with 0 for our
+    levels = assign_levels(dT, 0)
+
+    # height
+    h = max(levels.values())
+
+    # collect nodes into a dict by level
+    L = group_by_levels(levels)
+
+    # each node has a label, initially set to 0
+    label = {v: 0 for v in dT}
+    # and also ordered_labels and ordered_children
+    # which will store ordered tuples
+    ordered_labels = {v: () for v in dT}
+    ordered_children = {v: () for v in dT}
+
+    # nothing to do on last level so start on h-1
+    # also nothing to do for our fake level 0, so skip that
+    for i in range(h - 1, 0, -1):
+        # update the ordered_labels and ordered_children
+        # for any children
+        for v in L[i]:
+            # nothing to do if no children
+            if dT.out_degree(v) > 0:
+                # get all the pairs of labels and nodes of children
+                # and sort by labels
+                s = sorted((label[u], u) for u in dT.successors(v))
+
+                # invert to give a list of two tuples
+                # the sorted labels, and the corresponding children
+                ordered_labels[v], ordered_children[v] = list(zip(*s))
+
+        # now collect and sort the sorted ordered_labels
+        # for all nodes in L[i], carrying along the node
+        forlabel = sorted((ordered_labels[v], v) for v in L[i])
+
+        # now assign labels to these nodes, according to the sorted order
+        # starting from 0, where identical ordered_labels get the same label
+        current = 0
+        for i, (ol, v) in enumerate(forlabel):
+            # advance to next label if not 0, and different from previous
+            if (i != 0) and (ol != forlabel[i - 1][0]):
+                current += 1
+            label[v] = current
+
+    # they are isomorphic if the labels of newroot1 and newroot2 are 0
+    isomorphism = []
+    if label[newroot1] == 0 and label[newroot2] == 0:
+        generate_isomorphism(newroot1, newroot2, isomorphism, ordered_children)
+
+        # get the mapping back in terms of the old names
+        # return in sorted order for neatness
+        isomorphism = [(namemap[u], namemap[v]) for (u, v) in isomorphism]
+
+    return isomorphism
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(graphs={"t1": 0, "t2": 1})
+def tree_isomorphism(t1, t2):
+    """
+    Given two undirected (or free) trees `t1` and `t2`,
+    this routine will determine if they are isomorphic.
+    It returns the isomorphism, a mapping of the nodes of `t1` onto the nodes
+    of `t2`, such that two trees are then identical.
+
+    Note that two trees may have more than one isomorphism, and this
+    routine just returns one valid mapping.
+
+    Parameters
+    ----------
+    t1 : undirected NetworkX graph
+        One of the trees being compared
+
+    t2 : undirected NetworkX graph
+        The other tree being compared
+
+    Returns
+    -------
+    isomorphism : list
+        A list of pairs in which the left element is a node in `t1`
+        and the right element is a node in `t2`.  The pairs are in
+        arbitrary order.  If the nodes in one tree is mapped to the names in
+        the other, then trees will be identical. Note that an isomorphism
+        will not necessarily be unique.
+
+        If `t1` and `t2` are not isomorphic, then it returns the empty list.
+
+    Notes
+    -----
+    This runs in O(n*log(n)) time for trees with n nodes.
+    """
+
+    assert nx.is_tree(t1)
+    assert nx.is_tree(t2)
+
+    # To be isomorphic, t1 and t2 must have the same number of nodes.
+    if nx.number_of_nodes(t1) != nx.number_of_nodes(t2):
+        return []
+
+    # Another shortcut is that the sorted degree sequences need to be the same.
+    degree_sequence1 = sorted(d for (n, d) in t1.degree())
+    degree_sequence2 = sorted(d for (n, d) in t2.degree())
+
+    if degree_sequence1 != degree_sequence2:
+        return []
+
+    # A tree can have either 1 or 2 centers.
+    # If the number doesn't match then t1 and t2 are not isomorphic.
+    center1 = nx.center(t1)
+    center2 = nx.center(t2)
+
+    if len(center1) != len(center2):
+        return []
+
+    # If there is only 1 center in each, then use it.
+    if len(center1) == 1:
+        return rooted_tree_isomorphism(t1, center1[0], t2, center2[0])
+
+    # If there both have 2 centers,  then try the first for t1
+    # with the first for t2.
+    attempts = rooted_tree_isomorphism(t1, center1[0], t2, center2[0])
+
+    # If that worked we're done.
+    if len(attempts) > 0:
+        return attempts
+
+    # Otherwise, try center1[0] with the center2[1], and see if that works
+    return rooted_tree_isomorphism(t1, center1[0], t2, center2[1])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2pp.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2pp.py
new file mode 100644
index 00000000..3093d9c9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2pp.py
@@ -0,0 +1,1075 @@
+"""
+***************
+VF2++ Algorithm
+***************
+
+An implementation of the VF2++ algorithm [1]_ for Graph Isomorphism testing.
+
+The simplest interface to use this module is to call:
+
+`vf2pp_is_isomorphic`: to check whether two graphs are isomorphic.
+`vf2pp_isomorphism`: to obtain the node mapping between two graphs,
+in case they are isomorphic.
+`vf2pp_all_isomorphisms`: to generate all possible mappings between two graphs,
+if isomorphic.
+
+Introduction
+------------
+The VF2++ algorithm follows a similar logic to that of VF2, while also
+introducing new easy-to-check cutting rules and determining the optimal access
+order of nodes. It is also implemented in a non-recursive manner, which saves
+both time and space, when compared to its previous counterpart.
+
+The optimal node ordering is obtained after taking into consideration both the
+degree but also the label rarity of each node.
+This way we place the nodes that are more likely to match, first in the order,
+thus examining the most promising branches in the beginning.
+The rules also consider node labels, making it easier to prune unfruitful
+branches early in the process.
+
+Examples
+--------
+
+Suppose G1 and G2 are Isomorphic Graphs. Verification is as follows:
+
+Without node labels:
+
+>>> import networkx as nx
+>>> G1 = nx.path_graph(4)
+>>> G2 = nx.path_graph(4)
+>>> nx.vf2pp_is_isomorphic(G1, G2, node_label=None)
+True
+>>> nx.vf2pp_isomorphism(G1, G2, node_label=None)
+{1: 1, 2: 2, 0: 0, 3: 3}
+
+With node labels:
+
+>>> G1 = nx.path_graph(4)
+>>> G2 = nx.path_graph(4)
+>>> mapped = {1: 1, 2: 2, 3: 3, 0: 0}
+>>> nx.set_node_attributes(
+...     G1, dict(zip(G1, ["blue", "red", "green", "yellow"])), "label"
+... )
+>>> nx.set_node_attributes(
+...     G2,
+...     dict(zip([mapped[u] for u in G1], ["blue", "red", "green", "yellow"])),
+...     "label",
+... )
+>>> nx.vf2pp_is_isomorphic(G1, G2, node_label="label")
+True
+>>> nx.vf2pp_isomorphism(G1, G2, node_label="label")
+{1: 1, 2: 2, 0: 0, 3: 3}
+
+References
+----------
+.. [1] Jüttner, Alpár & Madarasi, Péter. (2018). "VF2++—An improved subgraph
+   isomorphism algorithm". Discrete Applied Mathematics. 242.
+   https://doi.org/10.1016/j.dam.2018.02.018
+
+"""
+
+import collections
+
+import networkx as nx
+
+__all__ = ["vf2pp_isomorphism", "vf2pp_is_isomorphic", "vf2pp_all_isomorphisms"]
+
# Immutable bundle of search-invariant data: the two graphs, their node-label
# dicts, and precomputed reverse lookups (label -> set of nodes for each
# graph, and degree -> set of G2 nodes) used to filter candidate pairs.
_GraphParameters = collections.namedtuple(
    "_GraphParameters",
    [
        "G1",
        "G2",
        "G1_labels",
        "G2_labels",
        "nodes_of_G1Labels",
        "nodes_of_G2Labels",
        "G2_nodes_of_degree",
    ],
)

# Mutable search state, updated/restored as the DFS extends and backtracks:
# the partial mapping (G1 -> G2) and its reverse, plus the frontier sets.
# Ti holds uncovered neighbors (successors, in digraphs) of mapped Gi nodes,
# Ti_in the uncovered predecessors (directed case only), and Ti_tilde the
# nodes that are neither mapped nor adjacent to mapped nodes.
_StateParameters = collections.namedtuple(
    "_StateParameters",
    [
        "mapping",
        "reverse_mapping",
        "T1",
        "T1_in",
        "T1_tilde",
        "T1_tilde_in",
        "T2",
        "T2_in",
        "T2_tilde",
        "T2_tilde_in",
    ],
)
+
+
@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"})
def vf2pp_isomorphism(G1, G2, node_label=None, default_label=None):
    """Return an isomorphic mapping between `G1` and `G2` if it exists.

    Parameters
    ----------
    G1, G2 : NetworkX Graph or MultiGraph instances.
        The two graphs to check for isomorphism.

    node_label : str, optional
        The name of the node attribute to be used when comparing nodes.
        The default is `None`, meaning node attributes are not considered
        in the comparison. Any node that doesn't have the `node_label`
        attribute uses `default_label` instead.

    default_label : scalar
        Default value to use when a node doesn't have an attribute
        named `node_label`. Default is `None`.

    Returns
    -------
    dict or None
        Node mapping if the two graphs are isomorphic. None otherwise.
    """
    # `next` with a default replaces the try/except StopIteration dance:
    # the first yielded mapping (if any) is returned, otherwise None.
    return next(vf2pp_all_isomorphisms(G1, G2, node_label, default_label), None)
+
+
@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"})
def vf2pp_is_isomorphic(G1, G2, node_label=None, default_label=None):
    """Examines whether G1 and G2 are isomorphic.

    Parameters
    ----------
    G1, G2 : NetworkX Graph or MultiGraph instances.
        The two graphs to check for isomorphism.

    node_label : str, optional
        The name of the node attribute to be used when comparing nodes.
        The default is `None`, meaning node attributes are not considered
        in the comparison. Any node that doesn't have the `node_label`
        attribute uses `default_label` instead.

    default_label : scalar
        Default value to use when a node doesn't have an attribute
        named `node_label`. Default is `None`.

    Returns
    -------
    bool
        True if the two graphs are isomorphic, False otherwise.
    """
    # A mapping exists iff the graphs are isomorphic; return that directly
    # instead of the if/return-True/return-False chain.
    return vf2pp_isomorphism(G1, G2, node_label, default_label) is not None
+
+
@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"})
def vf2pp_all_isomorphisms(G1, G2, node_label=None, default_label=None):
    """Yields all the possible mappings between G1 and G2.

    Parameters
    ----------
    G1, G2 : NetworkX Graph or MultiGraph instances.
        The two graphs to check for isomorphism.

    node_label : str, optional
        The name of the node attribute to be used when comparing nodes.
        The default is `None`, meaning node attributes are not considered
        in the comparison. Any node that doesn't have the `node_label`
        attribute uses `default_label` instead.

    default_label : scalar
        Default value to use when a node doesn't have an attribute
        named `node_label`. Default is `None`.

    Yields
    ------
    dict
        Isomorphic mapping between the nodes in `G1` and `G2`.
    """
    # NOTE: this is a generator, so a bare ``return`` simply ends the
    # iteration.  The previous ``return False`` statements only set an
    # unused ``StopIteration.value`` and read misleadingly like a boolean
    # API; the empty/mismatch shortcuts below just stop yielding.
    if G1.number_of_nodes() == 0 or G2.number_of_nodes() == 0:
        return

    # Create the degree dicts and pick the directed/undirected helper
    # implementations in a single branch (the original tested
    # ``G1.is_directed()`` twice with inverted polarity).
    if G1.is_directed():
        G1_degree = {
            n: (in_degree, out_degree)
            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
        }
        G2_degree = {
            n: (in_degree, out_degree)
            for (n, in_degree), (_, out_degree) in zip(G2.in_degree, G2.out_degree)
        }
        find_candidates = _find_candidates_Di
        restore_Tinout = _restore_Tinout_Di
    else:
        G1_degree = dict(G1.degree)
        G2_degree = dict(G2.degree)
        find_candidates = _find_candidates
        restore_Tinout = _restore_Tinout

    # Quick rejections: isomorphic graphs must agree on node count and on
    # the sorted (in/out-)degree sequence.
    if G1.order() != G2.order():
        return
    if sorted(G1_degree.values()) != sorted(G2_degree.values()):
        return

    # Initialize parameters and cache necessary information about degree and labels
    graph_params, state_params = _initialize_parameters(
        G1, G2, G2_degree, node_label, default_label
    )

    # Check if G1 and G2 have the same labels, and that number of nodes per label is equal between the two graphs
    if not _precheck_label_properties(graph_params):
        return

    # Calculate the optimal node ordering
    node_order = _matching_order(graph_params)

    # Initialize the stack: each entry is (node of G1, iterator over its
    # remaining G2 candidates), so backtracking resumes the iterator.
    stack = []
    candidates = iter(
        find_candidates(node_order[0], graph_params, state_params, G1_degree)
    )
    stack.append((node_order[0], candidates))

    mapping = state_params.mapping
    reverse_mapping = state_params.reverse_mapping

    # Index of the node from the order, currently being examined
    matching_node = 1

    while stack:
        current_node, candidate_nodes = stack[-1]

        try:
            candidate = next(candidate_nodes)
        except StopIteration:
            # If no remaining candidates, return to a previous state, and follow another branch
            stack.pop()
            matching_node -= 1
            if stack:
                # Pop the previously added u-v pair, and look for a different candidate _v for u
                popped_node1, _ = stack[-1]
                popped_node2 = mapping[popped_node1]
                mapping.pop(popped_node1)
                reverse_mapping.pop(popped_node2)
                restore_Tinout(popped_node1, popped_node2, graph_params, state_params)
            continue

        if _feasibility(current_node, candidate, graph_params, state_params):
            # Terminate if mapping is extended to its full: yield a copy so
            # callers keep a stable dict while the search state continues
            # mutating ``mapping`` in place.
            if len(mapping) == G2.number_of_nodes() - 1:
                cp_mapping = mapping.copy()
                cp_mapping[current_node] = candidate
                yield cp_mapping
                continue

            # Feasibility rules pass, so extend the mapping and update the parameters
            mapping[current_node] = candidate
            reverse_mapping[candidate] = current_node
            _update_Tinout(current_node, candidate, graph_params, state_params)
            # Append the next node and its candidates to the stack
            candidates = iter(
                find_candidates(
                    node_order[matching_node], graph_params, state_params, G1_degree
                )
            )
            stack.append((node_order[matching_node], candidates))
            matching_node += 1
+
+
def _precheck_label_properties(graph_params):
    """Return True iff G1 and G2 have matching label multisets.

    Every label occurring in G2 must also occur in G1, on exactly the same
    number of nodes.  Combined with the caller's equal-node-count check,
    this makes the comparison effectively symmetric.

    Parameters
    ----------
    graph_params : _GraphParameters
        Only ``nodes_of_G1Labels`` and ``nodes_of_G2Labels`` (dicts mapping
        each label to the collection of nodes carrying it) are consulted.

    Returns
    -------
    bool
        True if the per-label node counts agree, False otherwise.
    """
    G1, G2, G1_labels, G2_labels, nodes_of_G1Labels, nodes_of_G2Labels, _ = graph_params
    # ``all(...)`` expresses the contract directly instead of the
    # ``if any(...): return False / return True`` inversion.
    return all(
        label in nodes_of_G1Labels and len(nodes_of_G1Labels[label]) == len(nodes)
        for label, nodes in nodes_of_G2Labels.items()
    )
+
+
def _initialize_parameters(G1, G2, G2_degree, node_label=None, default_label=-1):
    """Initializes all the necessary parameters for VF2++

    Parameters
    ----------
    G1, G2 : NetworkX Graph or MultiGraph instances.
        The two graphs to check for isomorphism or monomorphism.

    G2_degree : dict
        Degree (or ``(in_degree, out_degree)`` pair for digraphs) of every
        node in G2, precomputed by the caller.

    node_label : str, optional
        Node attribute used for label comparison; None disables labels.

    default_label : scalar
        Label used for nodes missing the ``node_label`` attribute.

    Returns
    -------
    graph_params : _GraphParameters
        The graphs, their label dicts, and the label/degree reverse lookups.

    state_params : _StateParameters
        Fresh search state: empty mapping/reverse mapping, empty frontier
        sets Ti / Ti_in, and Ti_tilde holding every node (nothing has been
        mapped yet, so every node is still "unseen").
    """
    G1_labels = dict(G1.nodes(data=node_label, default=default_label))
    G2_labels = dict(G2.nodes(data=node_label, default=default_label))

    graph_params = _GraphParameters(
        G1,
        G2,
        G1_labels,
        G2_labels,
        nx.utils.groups(G1_labels),
        nx.utils.groups(G2_labels),
        nx.utils.groups(G2_degree),
    )

    # The directed and undirected cases initialize identically (the original
    # branched on G1.is_directed() with two byte-identical arms), so build
    # the state unconditionally.  The *_in / *_tilde_in sets are only
    # populated later, and only for digraphs.
    T1, T1_in = set(), set()
    T2, T2_in = set(), set()
    T1_tilde, T1_tilde_in = set(G1.nodes()), set()
    T2_tilde, T2_tilde_in = set(G2.nodes()), set()

    state_params = _StateParameters(
        {},
        {},
        T1,
        T1_in,
        T1_tilde,
        T1_tilde_in,
        T2,
        T2_in,
        T2_tilde,
        T2_tilde_in,
    )

    return graph_params, state_params
+
+
def _matching_order(graph_params):
    """The node ordering as introduced in VF2++.

    Notes
    -----
    Taking into account the structure of the Graph and the node labeling, the nodes are placed in an order such that,
    most of the unfruitful/infeasible branches of the search space can be pruned on high levels, significantly
    decreasing the number of visited states. The premise is that, the algorithm will be able to recognize
    inconsistencies early, proceeding to go deep into the search tree only if it's needed.

    Parameters
    ----------
    graph_params: namedtuple
        Contains:

            G1,G2: NetworkX Graph or MultiGraph instances.
                The two graphs to check for isomorphism or monomorphism.

            G1_labels,G2_labels: dict
                The label of every node in G1 and G2 respectively.

    Returns
    -------
    node_order: list
        The ordering of the nodes.
    """
    G1, G2, G1_labels, _, _, nodes_of_G2Labels, _ = graph_params
    if not G1 and not G2:
        # Return an empty *list* to match the documented return type and the
        # non-empty path (the original returned ``{}`` here).
        return []

    if G1.is_directed():
        G1 = G1.to_undirected(as_view=True)

    V1_unordered = set(G1.nodes())
    label_rarity = {label: len(nodes) for label, nodes in nodes_of_G2Labels.items()}
    used_degrees = {node: 0 for node in G1}
    node_order = []

    while V1_unordered:
        # Seed each BFS at an unordered node whose label is rarest in G2,
        # breaking ties by largest degree: rare labels constrain the most.
        max_rarity = min(label_rarity[G1_labels[x]] for x in V1_unordered)
        rarest_nodes = [
            n for n in V1_unordered if label_rarity[G1_labels[n]] == max_rarity
        ]
        max_node = max(rarest_nodes, key=G1.degree)

        for dlevel_nodes in nx.bfs_layers(G1, max_node):
            nodes_to_add = dlevel_nodes.copy()
            while nodes_to_add:
                # Within a BFS layer, prefer (1) most connections to
                # already-ordered nodes, then (2) highest degree, then
                # (3) rarest label.
                max_used_degree = max(used_degrees[n] for n in nodes_to_add)
                max_used_degree_nodes = [
                    n for n in nodes_to_add if used_degrees[n] == max_used_degree
                ]
                max_degree = max(G1.degree[n] for n in max_used_degree_nodes)
                max_degree_nodes = [
                    n for n in max_used_degree_nodes if G1.degree[n] == max_degree
                ]
                next_node = min(
                    max_degree_nodes, key=lambda x: label_rarity[G1_labels[x]]
                )

                node_order.append(next_node)
                for node in G1.neighbors(next_node):
                    used_degrees[node] += 1

                nodes_to_add.remove(next_node)
                label_rarity[G1_labels[next_node]] -= 1
                V1_unordered.discard(next_node)

    return node_order
+
+
def _find_candidates(u, graph_params, state_params, G1_degree):
    """Return the set of G2 nodes that may be matched with node `u` of G1.

    Parameters
    ----------
    u : Graph node
        The G1 node whose candidates are sought.

    graph_params : _GraphParameters
        Graph-invariant data; the label and degree reverse lookups are used
        to filter candidates.

    state_params : _StateParameters
        Current search state; the mapping, its reverse, and T2_tilde are
        consulted.

    G1_degree : dict
        Degree of every node in G1.

    Returns
    -------
    set
        The G2 nodes that are candidates for `u`: they must carry `u`'s
        label, share its degree, be unmapped, and (if `u` has mapped
        neighbors) be adjacent to the images of all of them.  For
        multigraphs, self-loop multiplicities must also agree.
    """
    G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params
    mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params

    mapped_nbrs = [nbr for nbr in G1[u] if nbr in mapping]

    if not mapped_nbrs:
        # No covered neighbors: candidates are the still-unseen G2 nodes
        # with matching label and degree.
        cands = set(nodes_of_G2Labels[G1_labels[u]])
        cands.intersection_update(G2_nodes_of_degree[G1_degree[u]])
        cands.intersection_update(T2_tilde)
        cands.difference_update(reverse_mapping)
        if G1.is_multigraph():
            u_loops = G1.number_of_edges(u, u)
            cands = {v for v in cands if G2.number_of_edges(v, v) == u_loops}
        return cands

    # Candidates must be common neighbors of the images of every mapped
    # neighbor of u.
    cands = set.intersection(*(set(G2[mapping[nbr]]) for nbr in mapped_nbrs))

    cands.difference_update(reverse_mapping)
    cands.intersection_update(G2_nodes_of_degree[G1_degree[u]])
    cands.intersection_update(nodes_of_G2Labels[G1_labels[u]])
    if G1.is_multigraph():
        u_loops = G1.number_of_edges(u, u)
        cands = {v for v in cands if G2.number_of_edges(v, v) == u_loops}
    return cands
+
+
def _find_candidates_Di(u, graph_params, state_params, G1_degree):
    """Directed-graph variant of `_find_candidates`.

    Returns the set of G2 nodes that may be matched with node `u` of G1:
    candidates must carry `u`'s label, share its (in, out)-degree pair, be
    unmapped, and be consistent with the images of `u`'s already-mapped
    successors and predecessors.  For multigraphs, self-loop multiplicities
    must also agree.
    """
    G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params
    mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params

    covered_successors = [succ for succ in G1[u] if succ in mapping]
    covered_predecessors = [pred for pred in G1.pred[u] if pred in mapping]

    if not (covered_successors or covered_predecessors):
        # u touches no mapped node: candidates are the still-unseen G2 nodes
        # with matching label and degree pair.
        candidates = set(nodes_of_G2Labels[G1_labels[u]])
        candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]])
        candidates.intersection_update(T2_tilde)
        candidates.difference_update(reverse_mapping)
        if G1.is_multigraph():
            candidates.difference_update(
                {
                    node
                    for node in candidates
                    if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
                }
            )
        return candidates

    # Seed the common-node set from one covered successor (candidates must
    # be predecessors of its image) or, failing that, one covered
    # predecessor (candidates must be successors of its image).  NOTE the
    # pop(): the seeding predecessor is consumed so the loop below only
    # intersects with the *remaining* covered predecessors.
    if covered_successors:
        succ1 = covered_successors[0]
        common_nodes = set(G2.pred[mapping[succ1]])

        for succ1 in covered_successors[1:]:
            common_nodes.intersection_update(G2.pred[mapping[succ1]])
    else:
        pred1 = covered_predecessors.pop()
        common_nodes = set(G2[mapping[pred1]])

    for pred1 in covered_predecessors:
        common_nodes.intersection_update(G2[mapping[pred1]])

    common_nodes.difference_update(reverse_mapping)
    common_nodes.intersection_update(G2_nodes_of_degree[G1_degree[u]])
    common_nodes.intersection_update(nodes_of_G2Labels[G1_labels[u]])
    if G1.is_multigraph():
        # Self-loop multiplicity of u must match that of each candidate.
        common_nodes.difference_update(
            {
                node
                for node in common_nodes
                if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
            }
        )
    return common_nodes
+
+
def _feasibility(node1, node2, graph_params, state_params):
    """Decide whether mapping `node1` (G1) to `node2` (G2) is feasible.

    Applies the VF2++ cutting rules and, for multigraphs, the consistency
    rules that compare parallel-edge multiplicities against already-mapped
    neighbors.

    Parameters
    ----------
    node1, node2 : Graph node
        The candidate pair being checked for matching.

    graph_params : _GraphParameters
        Graph-invariant data for G1 and G2.

    state_params : _StateParameters
        The current search state (mapping, reverse mapping, frontier sets).

    Returns
    -------
    bool
        True if the pair passes every check, False otherwise.
    """
    # The cutting rules prune infeasible branches early.
    if _cut_PT(node1, node2, graph_params, state_params):
        return False

    # Simple graphs need no extra consistency check here; multigraphs must
    # also agree on edge multiplicities with mapped neighbors.
    if graph_params.G1.is_multigraph():
        return _consistent_PT(node1, node2, graph_params, state_params)

    return True
+
+
def _cut_PT(u, v, graph_params, state_params):
    """Implements the cutting rules for the ISO problem.

    Parameters
    ----------
    u, v: Graph node
        The two candidate nodes being examined.

    graph_params: namedtuple
        Contains all the Graph-related parameters:

        G1,G2: NetworkX Graph or MultiGraph instances.
            The two graphs to check for isomorphism or monomorphism

        G1_labels,G2_labels: dict
            The label of every node in G1 and G2 respectively

    state_params: namedtuple
        Contains all the State-related parameters:

        mapping: dict
            The mapping as extended so far. Maps nodes of G1 to nodes of G2

        reverse_mapping: dict
            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed

        T1, T2: set
            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
            neighbors of nodes that are.

        T1_tilde, T2_tilde: set
            Ti_tilde contains all the nodes from Gi, that are neither in the mapping nor in Ti

    Returns
    -------
    True if we should prune this branch, i.e. the node pair failed the cutting checks. False otherwise.
    """
    G1, G2, G1_labels, G2_labels, _, _, _ = state_params if False else graph_params
    (
        _,
        _,
        T1,
        T1_in,
        T1_tilde,
        _,
        T2,
        T2_in,
        T2_tilde,
        _,
    ) = state_params

    # For digraphs, group the predecessors of u and v by label; the label
    # sets themselves must coincide or the pair is immediately cut.
    u_labels_predecessors, v_labels_predecessors = {}, {}
    if G1.is_directed():
        u_labels_predecessors = nx.utils.groups(
            {n1: G1_labels[n1] for n1 in G1.pred[u]}
        )
        v_labels_predecessors = nx.utils.groups(
            {n2: G2_labels[n2] for n2 in G2.pred[v]}
        )

        if set(u_labels_predecessors.keys()) != set(v_labels_predecessors.keys()):
            return True

    # Group the (successor-)neighbors of u and v by label.
    u_labels_successors = nx.utils.groups({n1: G1_labels[n1] for n1 in G1[u]})
    v_labels_successors = nx.utils.groups({n2: G2_labels[n2] for n2 in G2[v]})

    # if the neighbors of u, do not have the same labels as those of v, NOT feasible.
    if set(u_labels_successors.keys()) != set(v_labels_successors.keys()):
        return True

    # Per label, the neighborhoods must agree on (a) sorted parallel-edge
    # multiplicities (multigraphs only) and (b) how many neighbors fall in
    # each frontier class (Ti, Ti_tilde, and Ti_in for digraphs).
    for label, G1_nbh in u_labels_successors.items():
        G2_nbh = v_labels_successors[label]

        if G1.is_multigraph():
            # Check for every neighbor in the neighborhood, if u-nbr1 has same edges as v-nbr2
            u_nbrs_edges = sorted(G1.number_of_edges(u, x) for x in G1_nbh)
            v_nbrs_edges = sorted(G2.number_of_edges(v, x) for x in G2_nbh)
            if any(
                u_nbr_edges != v_nbr_edges
                for u_nbr_edges, v_nbr_edges in zip(u_nbrs_edges, v_nbrs_edges)
            ):
                return True

        if len(T1.intersection(G1_nbh)) != len(T2.intersection(G2_nbh)):
            return True
        if len(T1_tilde.intersection(G1_nbh)) != len(T2_tilde.intersection(G2_nbh)):
            return True
        if G1.is_directed() and len(T1_in.intersection(G1_nbh)) != len(
            T2_in.intersection(G2_nbh)
        ):
            return True

    # Undirected graphs are done; digraphs repeat the counting checks on the
    # label-grouped predecessors.
    if not G1.is_directed():
        return False

    for label, G1_pred in u_labels_predecessors.items():
        G2_pred = v_labels_predecessors[label]

        if G1.is_multigraph():
            # Check for every neighbor in the neighborhood, if u-nbr1 has same edges as v-nbr2
            u_pred_edges = sorted(G1.number_of_edges(u, x) for x in G1_pred)
            v_pred_edges = sorted(G2.number_of_edges(v, x) for x in G2_pred)
            if any(
                u_nbr_edges != v_nbr_edges
                for u_nbr_edges, v_nbr_edges in zip(u_pred_edges, v_pred_edges)
            ):
                return True

        if len(T1.intersection(G1_pred)) != len(T2.intersection(G2_pred)):
            return True
        if len(T1_tilde.intersection(G1_pred)) != len(T2_tilde.intersection(G2_pred)):
            return True
        if len(T1_in.intersection(G1_pred)) != len(T2_in.intersection(G2_pred)):
            return True

    return False
+
+
def _consistent_PT(u, v, graph_params, state_params):
    """Check that mapping `u` to `v` is consistent with the current mapping.

    For every already-mapped neighbor (and, in digraphs, predecessor) on
    either side, the number of parallel edges to `u` in G1 must equal the
    number of parallel edges to `v` in G2 between the corresponding images.

    Parameters
    ----------
    u, v : Graph node
        The candidate pair being examined.

    graph_params : _GraphParameters
        Graph-invariant data for G1 and G2.

    state_params : _StateParameters
        Current search state; only the mapping and its reverse are used.

    Returns
    -------
    bool
        True if every multiplicity agrees, False otherwise.
    """
    G1, G2 = graph_params.G1, graph_params.G2
    mapping, reverse_mapping = state_params.mapping, state_params.reverse_mapping

    # Edge counts u--nbr (G1) must equal v--image(nbr) (G2), checked from
    # both sides so neither graph can hide an extra edge.
    for nbr in G1[u]:
        if nbr in mapping and G1.number_of_edges(u, nbr) != G2.number_of_edges(
            v, mapping[nbr]
        ):
            return False

    for nbr in G2[v]:
        if nbr in reverse_mapping and G2.number_of_edges(
            v, nbr
        ) != G1.number_of_edges(u, reverse_mapping[nbr]):
            return False

    if not G1.is_directed():
        return True

    # Digraphs: repeat the check for incoming edges.
    for pred in G1.pred[u]:
        if pred in mapping and G1.number_of_edges(pred, u) != G2.number_of_edges(
            mapping[pred], v
        ):
            return False

    for pred in G2.pred[v]:
        if pred in reverse_mapping and G2.number_of_edges(
            pred, v
        ) != G1.number_of_edges(reverse_mapping[pred], u):
            return False

    return True
+
+
def _update_Tinout(new_node1, new_node2, graph_params, state_params):
    """Updates the Ti/Ti_tilde (i=1,2) when a new node pair u-v is added to the mapping.

    Notes
    -----
    This function should be called right after the feasibility checks are passed, and node1 is mapped to node2. The
    purpose of this function is to avoid brute force computing of Ti/Ti_tilde by iterating over all nodes of the graph
    and checking which nodes satisfy the necessary conditions. Instead, in every step of the algorithm we focus
    exclusively on the two nodes that are being added to the mapping, incrementally updating Ti/Ti_tilde.

    Parameters
    ----------
    new_node1, new_node2: Graph node
        The two new nodes, added to the mapping.

    graph_params: namedtuple
        Contains all the Graph-related parameters:

        G1,G2: NetworkX Graph or MultiGraph instances.
            The two graphs to check for isomorphism or monomorphism

        G1_labels,G2_labels: dict
            The label of every node in G1 and G2 respectively

    state_params: namedtuple
        Contains all the State-related parameters:

        mapping: dict
            The mapping as extended so far. Maps nodes of G1 to nodes of G2

        reverse_mapping: dict
            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed

        T1, T2: set
            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
            neighbors of nodes that are.

        T1_tilde, T2_tilde: set
            Ti_tilde contains all the nodes from Gi, that are neither in the mapping nor in Ti
    """
    G1, G2, _, _, _, _, _ = graph_params
    (
        mapping,
        reverse_mapping,
        T1,
        T1_in,
        T1_tilde,
        T1_tilde_in,
        T2,
        T2_in,
        T2_tilde,
        T2_tilde_in,
    ) = state_params

    # Unmapped (successor-)neighbors of the newly mapped pair.
    uncovered_successors_G1 = {succ for succ in G1[new_node1] if succ not in mapping}
    uncovered_successors_G2 = {
        succ for succ in G2[new_node2] if succ not in reverse_mapping
    }

    # Add the uncovered neighbors of node1 and node2 in T1 and T2 respectively
    T1.update(uncovered_successors_G1)
    T2.update(uncovered_successors_G2)
    # The mapped nodes themselves leave the frontier.
    T1.discard(new_node1)
    T2.discard(new_node2)

    # Those neighbors are no longer "unseen".
    T1_tilde.difference_update(uncovered_successors_G1)
    T2_tilde.difference_update(uncovered_successors_G2)
    T1_tilde.discard(new_node1)
    T2_tilde.discard(new_node2)

    if not G1.is_directed():
        return

    # Directed case: maintain the incoming frontier (Ti_in) the same way,
    # using unmapped predecessors.
    uncovered_predecessors_G1 = {
        pred for pred in G1.pred[new_node1] if pred not in mapping
    }
    uncovered_predecessors_G2 = {
        pred for pred in G2.pred[new_node2] if pred not in reverse_mapping
    }

    T1_in.update(uncovered_predecessors_G1)
    T2_in.update(uncovered_predecessors_G2)
    T1_in.discard(new_node1)
    T2_in.discard(new_node2)

    T1_tilde.difference_update(uncovered_predecessors_G1)
    T2_tilde.difference_update(uncovered_predecessors_G2)
    # NOTE(review): these two discards already happened above for the
    # undirected part; they are harmless no-ops here.
    T1_tilde.discard(new_node1)
    T2_tilde.discard(new_node2)
+
+
+def _restore_Tinout(popped_node1, popped_node2, graph_params, state_params):
+    """Restores the previous version of Ti/Ti_out when a node pair is deleted from the mapping.
+
+    Parameters
+    ----------
+    popped_node1, popped_node2: Graph node
+        The two nodes deleted from the mapping.
+
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2: NetworkX Graph or MultiGraph instances.
+            The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_tilde, T2_tilde: set
+            Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti
+    """
+    # If the node we want to remove from the mapping has at least one covered neighbor, add it to T1.
+    G1, G2, _, _, _, _, _ = graph_params
+    (
+        mapping,
+        reverse_mapping,
+        T1,
+        T1_in,
+        T1_tilde,
+        T1_tilde_in,
+        T2,
+        T2_in,
+        T2_tilde,
+        T2_tilde_in,
+    ) = state_params
+
+    is_added = False
+    for neighbor in G1[popped_node1]:
+        if neighbor in mapping:
+            # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1
+            is_added = True
+            T1.add(popped_node1)
+        else:
+            # check if its neighbor has another connection with a covered node. If not, only then exclude it from T1
+            if any(nbr in mapping for nbr in G1[neighbor]):
+                continue
+            T1.discard(neighbor)
+            T1_tilde.add(neighbor)
+
+    # Case where the node is present in neither the mapping nor T1. By definition, it should belong to T1_tilde
+    if not is_added:
+        T1_tilde.add(popped_node1)
+
+    is_added = False
+    for neighbor in G2[popped_node2]:
+        if neighbor in reverse_mapping:
+            is_added = True
+            T2.add(popped_node2)
+        else:
+            if any(nbr in reverse_mapping for nbr in G2[neighbor]):
+                continue
+            T2.discard(neighbor)
+            T2_tilde.add(neighbor)
+
+    if not is_added:
+        T2_tilde.add(popped_node2)
+
+
+def _restore_Tinout_Di(popped_node1, popped_node2, graph_params, state_params):
+    # If the node we want to remove from the mapping has at least one covered neighbor, add it to T1.
+    G1, G2, _, _, _, _, _ = graph_params
+    (
+        mapping,
+        reverse_mapping,
+        T1,
+        T1_in,
+        T1_tilde,
+        T1_tilde_in,
+        T2,
+        T2_in,
+        T2_tilde,
+        T2_tilde_in,
+    ) = state_params
+
+    is_added = False
+    for successor in G1[popped_node1]:
+        if successor in mapping:
+            # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1
+            is_added = True
+            T1_in.add(popped_node1)
+        else:
+            # check if its neighbor has another connection with a covered node. If not, only then exclude it from T1
+            if not any(pred in mapping for pred in G1.pred[successor]):
+                T1.discard(successor)
+
+            if not any(succ in mapping for succ in G1[successor]):
+                T1_in.discard(successor)
+
+            if successor not in T1:
+                if successor not in T1_in:
+                    T1_tilde.add(successor)
+
+    for predecessor in G1.pred[popped_node1]:
+        if predecessor in mapping:
+            # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1
+            is_added = True
+            T1.add(popped_node1)
+        else:
+            # check if its neighbor has another connection with a covered node. If not, only then exclude it from T1
+            if not any(pred in mapping for pred in G1.pred[predecessor]):
+                T1.discard(predecessor)
+
+            if not any(succ in mapping for succ in G1[predecessor]):
+                T1_in.discard(predecessor)
+
+            if not (predecessor in T1 or predecessor in T1_in):
+                T1_tilde.add(predecessor)
+
+    # Case where the node is present in neither the mapping nor T1. By definition it should belong to T1_tilde
+    if not is_added:
+        T1_tilde.add(popped_node1)
+
+    is_added = False
+    for successor in G2[popped_node2]:
+        if successor in reverse_mapping:
+            is_added = True
+            T2_in.add(popped_node2)
+        else:
+            if not any(pred in reverse_mapping for pred in G2.pred[successor]):
+                T2.discard(successor)
+
+            if not any(succ in reverse_mapping for succ in G2[successor]):
+                T2_in.discard(successor)
+
+            if successor not in T2:
+                if successor not in T2_in:
+                    T2_tilde.add(successor)
+
+    for predecessor in G2.pred[popped_node2]:
+        if predecessor in reverse_mapping:
+            # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1
+            is_added = True
+            T2.add(popped_node2)
+        else:
+            # check if its neighbor has another connection with a covered node. If not, only then exclude it from T1
+            if not any(pred in reverse_mapping for pred in G2.pred[predecessor]):
+                T2.discard(predecessor)
+
+            if not any(succ in reverse_mapping for succ in G2[predecessor]):
+                T2_in.discard(predecessor)
+
+            if not (predecessor in T2 or predecessor in T2_in):
+                T2_tilde.add(predecessor)
+
+    if not is_added:
+        T2_tilde.add(popped_node2)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py
new file mode 100644
index 00000000..6fcf8a15
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py
@@ -0,0 +1,192 @@
+"""
+Module to simplify the specification of user-defined equality functions for
+node and edge attributes during isomorphism checks.
+
+During the construction of an isomorphism, the algorithm considers two
+candidate nodes n1 in G1 and n2 in G2.  The graphs G1 and G2 are then
+compared with respect to properties involving n1 and n2, and if the outcome
+is good, then the candidate nodes are considered isomorphic. NetworkX
+provides a simple mechanism for users to extend the comparisons to include
+node and edge attributes.
+
+Node attributes are handled by the node_match keyword. When considering
+n1 and n2, the algorithm passes their node attribute dictionaries to
+node_match, and if it returns False, then n1 and n2 cannot be
+considered to be isomorphic.
+
+Edge attributes are handled by the edge_match keyword. When considering
+n1 and n2, the algorithm must verify that outgoing edges from n1 are
+commensurate with the outgoing edges for n2. If the graph is directed,
+then a similar check is also performed for incoming edges.
+
+Focusing only on outgoing edges, we consider pairs of nodes (n1, v1) from
+G1 and (n2, v2) from G2. For graphs and digraphs, there is only one edge
+between (n1, v1) and only one edge between (n2, v2). Those edge attribute
+dictionaries are passed to edge_match, and if it returns False, then
+n1 and n2 cannot be considered isomorphic. For multigraphs and
+multidigraphs, there can be multiple edges between (n1, v1) and also
+multiple edges between (n2, v2).  Now, there must exist an isomorphism
+from "all the edges between (n1, v1)" to "all the edges between (n2, v2)".
+So, all of the edge attribute dictionaries are passed to edge_match, and
+it must determine if there is an isomorphism between the two sets of edges.
+"""
+
+from . import isomorphvf2 as vf2
+
+__all__ = ["GraphMatcher", "DiGraphMatcher", "MultiGraphMatcher", "MultiDiGraphMatcher"]
+
+
+def _semantic_feasibility(self, G1_node, G2_node):
+    """Returns True if mapping G1_node to G2_node is semantically feasible."""
+    # Make sure the nodes match
+    if self.node_match is not None:
+        nm = self.node_match(self.G1.nodes[G1_node], self.G2.nodes[G2_node])
+        if not nm:
+            return False
+
+    # Make sure the edges match
+    if self.edge_match is not None:
+        # Cached lookups
+        G1nbrs = self.G1_adj[G1_node]
+        G2nbrs = self.G2_adj[G2_node]
+        core_1 = self.core_1
+        edge_match = self.edge_match
+
+        for neighbor in G1nbrs:
+            # G1_node is not in core_1, so we must handle R_self separately
+            if neighbor == G1_node:
+                if G2_node in G2nbrs and not edge_match(
+                    G1nbrs[G1_node], G2nbrs[G2_node]
+                ):
+                    return False
+            elif neighbor in core_1:
+                G2_nbr = core_1[neighbor]
+                if G2_nbr in G2nbrs and not edge_match(
+                    G1nbrs[neighbor], G2nbrs[G2_nbr]
+                ):
+                    return False
+        # syntactic check has already verified that neighbors are symmetric
+
+    return True
+
+
+class GraphMatcher(vf2.GraphMatcher):
+    """VF2 isomorphism checker for undirected graphs."""
+
+    def __init__(self, G1, G2, node_match=None, edge_match=None):
+        """Initialize graph matcher.
+
+        Parameters
+        ----------
+        G1, G2: graph
+            The graphs to be tested.
+
+        node_match: callable
+            A function that returns True iff node n1 in G1 and n2 in G2
+            should be considered equal during the isomorphism test. The
+            function will be called like::
+
+               node_match(G1.nodes[n1], G2.nodes[n2])
+
+            That is, the function will receive the node attribute dictionaries
+            of the nodes under consideration. If None, then no attributes are
+            considered when testing for an isomorphism.
+
+        edge_match: callable
+            A function that returns True iff the edge attribute dictionary for
+            the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be
+            considered equal during the isomorphism test. The function will be
+            called like::
+
+               edge_match(G1[u1][v1], G2[u2][v2])
+
+            That is, the function will receive the edge attribute dictionaries
+            of the edges under consideration. If None, then no attributes are
+            considered when testing for an isomorphism.
+
+        """
+        vf2.GraphMatcher.__init__(self, G1, G2)
+
+        self.node_match = node_match
+        self.edge_match = edge_match
+
+        # These will be modified during checks to minimize code repeat.
+        self.G1_adj = self.G1.adj
+        self.G2_adj = self.G2.adj
+
+    semantic_feasibility = _semantic_feasibility
+
+
+class DiGraphMatcher(vf2.DiGraphMatcher):
+    """VF2 isomorphism checker for directed graphs."""
+
+    def __init__(self, G1, G2, node_match=None, edge_match=None):
+        """Initialize graph matcher.
+
+        Parameters
+        ----------
+        G1, G2 : graph
+            The graphs to be tested.
+
+        node_match : callable
+            A function that returns True iff node n1 in G1 and n2 in G2
+            should be considered equal during the isomorphism test. The
+            function will be called like::
+
+               node_match(G1.nodes[n1], G2.nodes[n2])
+
+            That is, the function will receive the node attribute dictionaries
+            of the nodes under consideration. If None, then no attributes are
+            considered when testing for an isomorphism.
+
+        edge_match : callable
+            A function that returns True iff the edge attribute dictionary for
+            the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be
+            considered equal during the isomorphism test. The function will be
+            called like::
+
+               edge_match(G1[u1][v1], G2[u2][v2])
+
+            That is, the function will receive the edge attribute dictionaries
+            of the edges under consideration. If None, then no attributes are
+            considered when testing for an isomorphism.
+
+        """
+        vf2.DiGraphMatcher.__init__(self, G1, G2)
+
+        self.node_match = node_match
+        self.edge_match = edge_match
+
+        # These will be modified during checks to minimize code repeat.
+        self.G1_adj = self.G1.adj
+        self.G2_adj = self.G2.adj
+
+    def semantic_feasibility(self, G1_node, G2_node):
+        """Returns True if mapping G1_node to G2_node is semantically feasible."""
+
+        # Test node_match and also test edge_match on successors
+        feasible = _semantic_feasibility(self, G1_node, G2_node)
+        if not feasible:
+            return False
+
+        # Test edge_match on predecessors
+        self.G1_adj = self.G1.pred
+        self.G2_adj = self.G2.pred
+        feasible = _semantic_feasibility(self, G1_node, G2_node)
+        self.G1_adj = self.G1.adj
+        self.G2_adj = self.G2.adj
+
+        return feasible
+
+
+# The "semantics" of edge_match are different for multi(di)graphs, but
+# the implementation is the same.  So, technically we do not need to
+# provide "multi" versions, but we do so to match NetworkX's base classes.
+
+
+class MultiGraphMatcher(GraphMatcher):
+    """VF2 isomorphism checker for undirected multigraphs."""
+
+
+class MultiDiGraphMatcher(DiGraphMatcher):
+    """VF2 isomorphism checker for directed multigraphs."""
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__init__.py
new file mode 100644
index 00000000..6009f000
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__init__.py
@@ -0,0 +1,2 @@
+from networkx.algorithms.link_analysis.hits_alg import *
+from networkx.algorithms.link_analysis.pagerank_alg import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/hits_alg.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/hits_alg.py
new file mode 100644
index 00000000..d9e3069d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/hits_alg.py
@@ -0,0 +1,337 @@
+"""Hubs and authorities analysis of graph structure."""
+
+import networkx as nx
+
+__all__ = ["hits"]
+
+
+@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}})
+def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True):
+    """Returns HITS hubs and authorities values for nodes.
+
+    The HITS algorithm computes two numbers for a node.
+    Authorities estimates the node value based on the incoming links.
+    Hubs estimates the node value based on outgoing links.
+
+    Parameters
+    ----------
+    G : graph
+      A NetworkX graph
+
+    max_iter : integer, optional
+      Maximum number of iterations in power method.
+
+    tol : float, optional
+      Error tolerance used to check convergence in power method iteration.
+
+    nstart : dictionary, optional
+      Starting value of each node for power method iteration.
+
+    normalized : bool (default=True)
+       Normalize results by the sum of all of the values.
+
+    Returns
+    -------
+    (hubs,authorities) : two-tuple of dictionaries
+       Two dictionaries keyed by node containing the hub and authority
+       values.
+
+    Raises
+    ------
+    PowerIterationFailedConvergence
+        If the algorithm fails to converge to the specified tolerance
+        within the specified number of iterations of the power iteration
+        method.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4)
+    >>> h, a = nx.hits(G)
+
+    Notes
+    -----
+    The eigenvector calculation is done by the power iteration method
+    and has no guarantee of convergence.  The iteration will stop
+    after max_iter iterations or an error tolerance of
+    number_of_nodes(G)*tol has been reached.
+
+    The HITS algorithm was designed for directed graphs but this
+    algorithm does not check if the input graph is directed and will
+    execute on undirected graphs.
+
+    References
+    ----------
+    .. [1] A. Langville and C. Meyer,
+       "A survey of eigenvector methods of web information retrieval."
+       http://citeseer.ist.psu.edu/713792.html
+    .. [2] Jon Kleinberg,
+       Authoritative sources in a hyperlinked environment
+       Journal of the ACM 46 (5): 604-32, 1999.
+       doi:10.1145/324133.324140.
+       http://www.cs.cornell.edu/home/kleinber/auth.pdf.
+    """
+    import numpy as np
+    import scipy as sp
+
+    if len(G) == 0:
+        return {}, {}
+    A = nx.adjacency_matrix(G, nodelist=list(G), dtype=float)
+
+    if nstart is not None:
+        nstart = np.array(list(nstart.values()))
+    if max_iter <= 0:
+        raise nx.PowerIterationFailedConvergence(max_iter)
+    try:
+        _, _, vt = sp.sparse.linalg.svds(A, k=1, v0=nstart, maxiter=max_iter, tol=tol)
+    except sp.sparse.linalg.ArpackNoConvergence as exc:
+        raise nx.PowerIterationFailedConvergence(max_iter) from exc
+
+    a = vt.flatten().real
+    h = A @ a
+    if normalized:
+        h /= h.sum()
+        a /= a.sum()
+    hubs = dict(zip(G, map(float, h)))
+    authorities = dict(zip(G, map(float, a)))
+    return hubs, authorities
+
+
+def _hits_python(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True):
+    if isinstance(G, nx.MultiGraph | nx.MultiDiGraph):
+        raise Exception("hits() not defined for graphs with multiedges.")
+    if len(G) == 0:
+        return {}, {}
+    # choose fixed starting vector if not given
+    if nstart is None:
+        h = dict.fromkeys(G, 1.0 / G.number_of_nodes())
+    else:
+        h = nstart
+        # normalize starting vector
+        s = 1.0 / sum(h.values())
+        for k in h:
+            h[k] *= s
+    for _ in range(max_iter):  # power iteration: make up to max_iter iterations
+        hlast = h
+        h = dict.fromkeys(hlast.keys(), 0)
+        a = dict.fromkeys(hlast.keys(), 0)
+        # this "matrix multiply" looks odd because it is
+        # doing a left multiply a^T=hlast^T*G
+        for n in h:
+            for nbr in G[n]:
+                a[nbr] += hlast[n] * G[n][nbr].get("weight", 1)
+        # now multiply h=Ga
+        for n in h:
+            for nbr in G[n]:
+                h[n] += a[nbr] * G[n][nbr].get("weight", 1)
+        # normalize vector
+        s = 1.0 / max(h.values())
+        for n in h:
+            h[n] *= s
+        # normalize vector
+        s = 1.0 / max(a.values())
+        for n in a:
+            a[n] *= s
+        # check convergence, l1 norm
+        err = sum(abs(h[n] - hlast[n]) for n in h)
+        if err < tol:
+            break
+    else:
+        raise nx.PowerIterationFailedConvergence(max_iter)
+    if normalized:
+        s = 1.0 / sum(a.values())
+        for n in a:
+            a[n] *= s
+        s = 1.0 / sum(h.values())
+        for n in h:
+            h[n] *= s
+    return h, a
+
+
+def _hits_numpy(G, normalized=True):
+    """Returns HITS hubs and authorities values for nodes.
+
+    The HITS algorithm computes two numbers for a node.
+    Authorities estimates the node value based on the incoming links.
+    Hubs estimates the node value based on outgoing links.
+
+    Parameters
+    ----------
+    G : graph
+      A NetworkX graph
+
+    normalized : bool (default=True)
+       Normalize results by the sum of all of the values.
+
+    Returns
+    -------
+    (hubs,authorities) : two-tuple of dictionaries
+       Two dictionaries keyed by node containing the hub and authority
+       values.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4)
+
+    The `hubs` and `authorities` are given by the eigenvectors corresponding to the
+    maximum eigenvalues of the hubs_matrix and the authority_matrix, respectively.
+
+    The ``hubs`` and ``authority`` matrices are computed from the adjacency
+    matrix:
+
+    >>> adj_ary = nx.to_numpy_array(G)
+    >>> hubs_matrix = adj_ary @ adj_ary.T
+    >>> authority_matrix = adj_ary.T @ adj_ary
+
+    `_hits_numpy` maps the eigenvector corresponding to the maximum eigenvalue
+    of the respective matrices to the nodes in `G`:
+
+    >>> from networkx.algorithms.link_analysis.hits_alg import _hits_numpy
+    >>> hubs, authority = _hits_numpy(G)
+
+    Notes
+    -----
+    The eigenvector calculation uses NumPy's interface to LAPACK.
+
+    The HITS algorithm was designed for directed graphs but this
+    algorithm does not check if the input graph is directed and will
+    execute on undirected graphs.
+
+    References
+    ----------
+    .. [1] A. Langville and C. Meyer,
+       "A survey of eigenvector methods of web information retrieval."
+       http://citeseer.ist.psu.edu/713792.html
+    .. [2] Jon Kleinberg,
+       Authoritative sources in a hyperlinked environment
+       Journal of the ACM 46 (5): 604-32, 1999.
+       doi:10.1145/324133.324140.
+       http://www.cs.cornell.edu/home/kleinber/auth.pdf.
+    """
+    import numpy as np
+
+    if len(G) == 0:
+        return {}, {}
+    adj_ary = nx.to_numpy_array(G)
+    # Hub matrix
+    H = adj_ary @ adj_ary.T
+    e, ev = np.linalg.eig(H)
+    h = ev[:, np.argmax(e)]  # eigenvector corresponding to the maximum eigenvalue
+    # Authority matrix
+    A = adj_ary.T @ adj_ary
+    e, ev = np.linalg.eig(A)
+    a = ev[:, np.argmax(e)]  # eigenvector corresponding to the maximum eigenvalue
+    if normalized:
+        h /= h.sum()
+        a /= a.sum()
+    else:
+        h /= h.max()
+        a /= a.max()
+    hubs = dict(zip(G, map(float, h)))
+    authorities = dict(zip(G, map(float, a)))
+    return hubs, authorities
+
+
+def _hits_scipy(G, max_iter=100, tol=1.0e-6, nstart=None, normalized=True):
+    """Returns HITS hubs and authorities values for nodes.
+
+
+    The HITS algorithm computes two numbers for a node.
+    Authorities estimates the node value based on the incoming links.
+    Hubs estimates the node value based on outgoing links.
+
+    Parameters
+    ----------
+    G : graph
+      A NetworkX graph
+
+    max_iter : integer, optional
+      Maximum number of iterations in power method.
+
+    tol : float, optional
+      Error tolerance used to check convergence in power method iteration.
+
+    nstart : dictionary, optional
+      Starting value of each node for power method iteration.
+
+    normalized : bool (default=True)
+       Normalize results by the sum of all of the values.
+
+    Returns
+    -------
+    (hubs,authorities) : two-tuple of dictionaries
+       Two dictionaries keyed by node containing the hub and authority
+       values.
+
+    Examples
+    --------
+    >>> from networkx.algorithms.link_analysis.hits_alg import _hits_scipy
+    >>> G = nx.path_graph(4)
+    >>> h, a = _hits_scipy(G)
+
+    Notes
+    -----
+    This implementation uses SciPy sparse matrices.
+
+    The eigenvector calculation is done by the power iteration method
+    and has no guarantee of convergence.  The iteration will stop
+    after max_iter iterations or an error tolerance of
+    number_of_nodes(G)*tol has been reached.
+
+    The HITS algorithm was designed for directed graphs but this
+    algorithm does not check if the input graph is directed and will
+    execute on undirected graphs.
+
+    Raises
+    ------
+    PowerIterationFailedConvergence
+        If the algorithm fails to converge to the specified tolerance
+        within the specified number of iterations of the power iteration
+        method.
+
+    References
+    ----------
+    .. [1] A. Langville and C. Meyer,
+       "A survey of eigenvector methods of web information retrieval."
+       http://citeseer.ist.psu.edu/713792.html
+    .. [2] Jon Kleinberg,
+       Authoritative sources in a hyperlinked environment
+       Journal of the ACM 46 (5): 604-632, 1999.
+       doi:10.1145/324133.324140.
+       http://www.cs.cornell.edu/home/kleinber/auth.pdf.
+    """
+    import numpy as np
+
+    if len(G) == 0:
+        return {}, {}
+    A = nx.to_scipy_sparse_array(G, nodelist=list(G))
+    (n, _) = A.shape  # should be square
+    ATA = A.T @ A  # authority matrix
+    # choose fixed starting vector if not given
+    if nstart is None:
+        x = np.ones((n, 1)) / n
+    else:
+        x = np.array([nstart.get(n, 0) for n in list(G)], dtype=float)
+        x /= x.sum()
+
+    # power iteration on authority matrix
+    i = 0
+    while True:
+        xlast = x
+        x = ATA @ x
+        x /= x.max()
+        # check convergence, l1 norm
+        err = np.absolute(x - xlast).sum()
+        if err < tol:
+            break
+        if i > max_iter:
+            raise nx.PowerIterationFailedConvergence(max_iter)
+        i += 1
+
+    a = x.flatten()
+    h = A @ a
+    if normalized:
+        h /= h.sum()
+        a /= a.sum()
+    hubs = dict(zip(G, map(float, h)))
+    authorities = dict(zip(G, map(float, a)))
+    return hubs, authorities
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/pagerank_alg.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/pagerank_alg.py
new file mode 100644
index 00000000..de9f95ba
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/pagerank_alg.py
@@ -0,0 +1,500 @@
+"""PageRank analysis of graph structure."""
+
+from warnings import warn
+
+import networkx as nx
+
+__all__ = ["pagerank", "google_matrix"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def pagerank(
+    G,
+    alpha=0.85,
+    personalization=None,
+    max_iter=100,
+    tol=1.0e-6,
+    nstart=None,
+    weight="weight",
+    dangling=None,
+):
+    """Returns the PageRank of the nodes in the graph.
+
+    PageRank computes a ranking of the nodes in the graph G based on
+    the structure of the incoming links. It was originally designed as
+    an algorithm to rank web pages.
+
+    Parameters
+    ----------
+    G : graph
+      A NetworkX graph.  Undirected graphs will be converted to a directed
+      graph with two directed edges for each undirected edge.
+
+    alpha : float, optional
+      Damping parameter for PageRank, default=0.85.
+
+    personalization: dict, optional
+      The "personalization vector" consisting of a dictionary with a
+      key some subset of graph nodes and personalization value each of those.
+      At least one personalization value must be non-zero.
+      If not specified, a node's personalization value will be zero.
+      By default, a uniform distribution is used.
+
+    max_iter : integer, optional
+      Maximum number of iterations in power method eigenvalue solver.
+
+    tol : float, optional
+      Error tolerance used to check convergence in power method solver.
+      The iteration will stop after a tolerance of ``len(G) * tol`` is reached.
+
+    nstart : dictionary, optional
+      Starting value of PageRank iteration for each node.
+
+    weight : key, optional
+      Edge data key to use as weight.  If None weights are set to 1.
+
+    dangling: dict, optional
+      The outedges to be assigned to any "dangling" nodes, i.e., nodes without
+      any outedges. The dict key is the node the outedge points to and the dict
+      value is the weight of that outedge. By default, dangling nodes are given
+      outedges according to the personalization vector (uniform if not
+      specified). This must be selected to result in an irreducible transition
+      matrix (see notes under google_matrix). It may be common to have the
+      dangling dict to be the same as the personalization dict.
+
+
+    Returns
+    -------
+    pagerank : dictionary
+       Dictionary of nodes with PageRank as value
+
+    Examples
+    --------
+    >>> G = nx.DiGraph(nx.path_graph(4))
+    >>> pr = nx.pagerank(G, alpha=0.9)
+
+    Notes
+    -----
+    The eigenvector calculation is done by the power iteration method
+    and has no guarantee of convergence.  The iteration will stop after
+    an error tolerance of ``len(G) * tol`` has been reached. If the
+    number of iterations exceeds `max_iter`, a
+    :exc:`networkx.exception.PowerIterationFailedConvergence` exception
+    is raised.
+
+    The PageRank algorithm was designed for directed graphs but this
+    algorithm does not check if the input graph is directed and will
+    execute on undirected graphs by converting each edge in the
+    directed graph to two edges.
+
+    See Also
+    --------
+    google_matrix
+
+    Raises
+    ------
+    PowerIterationFailedConvergence
+        If the algorithm fails to converge to the specified tolerance
+        within the specified number of iterations of the power iteration
+        method.
+
+    References
+    ----------
+    .. [1] A. Langville and C. Meyer,
+       "A survey of eigenvector methods of web information retrieval."
+       http://citeseer.ist.psu.edu/713792.html
+    .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry,
+       The PageRank citation ranking: Bringing order to the Web. 1999
+       http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
+
+    """
+    return _pagerank_scipy(
+        G, alpha, personalization, max_iter, tol, nstart, weight, dangling
+    )
+
+
+def _pagerank_python(
+    G,
+    alpha=0.85,
+    personalization=None,
+    max_iter=100,
+    tol=1.0e-6,
+    nstart=None,
+    weight="weight",
+    dangling=None,
+):
+    if len(G) == 0:
+        return {}
+
+    D = G.to_directed()
+
+    # Create a copy in (right) stochastic form
+    W = nx.stochastic_graph(D, weight=weight)
+    N = W.number_of_nodes()
+
+    # Choose fixed starting vector if not given
+    if nstart is None:
+        x = dict.fromkeys(W, 1.0 / N)
+    else:
+        # Normalized nstart vector
+        s = sum(nstart.values())
+        x = {k: v / s for k, v in nstart.items()}
+
+    if personalization is None:
+        # Assign uniform personalization vector if not given
+        p = dict.fromkeys(W, 1.0 / N)
+    else:
+        s = sum(personalization.values())
+        p = {k: v / s for k, v in personalization.items()}
+
+    if dangling is None:
+        # Use personalization vector if dangling vector not specified
+        dangling_weights = p
+    else:
+        s = sum(dangling.values())
+        dangling_weights = {k: v / s for k, v in dangling.items()}
+    dangling_nodes = [n for n in W if W.out_degree(n, weight=weight) == 0.0]
+
+    # power iteration: make up to max_iter iterations
+    for _ in range(max_iter):
+        xlast = x
+        x = dict.fromkeys(xlast.keys(), 0)
+        danglesum = alpha * sum(xlast[n] for n in dangling_nodes)
+        for n in x:
+            # this matrix multiply looks odd because it is
+            # doing a left multiply x^T=xlast^T*W
+            for _, nbr, wt in W.edges(n, data=weight):
+                x[nbr] += alpha * xlast[n] * wt
+            x[n] += danglesum * dangling_weights.get(n, 0) + (1.0 - alpha) * p.get(n, 0)
+        # check convergence, l1 norm
+        err = sum(abs(x[n] - xlast[n]) for n in x)
+        if err < N * tol:
+            return x
+    raise nx.PowerIterationFailedConvergence(max_iter)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def google_matrix(
+    G, alpha=0.85, personalization=None, nodelist=None, weight="weight", dangling=None
+):
+    """Returns the Google matrix of the graph.
+
+    Parameters
+    ----------
+    G : graph
+      A NetworkX graph.  Undirected graphs will be converted to a directed
+      graph with two directed edges for each undirected edge.
+
+    alpha : float
+      The damping factor.
+
+    personalization: dict, optional
+      The "personalization vector" consisting of a dictionary with a
+      key some subset of graph nodes and personalization value each of those.
+      At least one personalization value must be non-zero.
+      If not specified, a node's personalization value will be zero.
+      By default, a uniform distribution is used.
+
+    nodelist : list, optional
+      The rows and columns are ordered according to the nodes in nodelist.
+      If nodelist is None, then the ordering is produced by G.nodes().
+
+    weight : key, optional
+      Edge data key to use as weight.  If None weights are set to 1.
+
+    dangling: dict, optional
+      The outedges to be assigned to any "dangling" nodes, i.e., nodes without
+      any outedges. The dict key is the node the outedge points to and the dict
+      value is the weight of that outedge. By default, dangling nodes are given
+      outedges according to the personalization vector (uniform if not
+      specified) This must be selected to result in an irreducible transition
+      matrix (see notes below). It may be common to have the dangling dict to
+      be the same as the personalization dict.
+
+    Returns
+    -------
+    A : 2D NumPy ndarray
+       Google matrix of the graph
+
+    Notes
+    -----
+    The array returned represents the transition matrix that describes the
+    Markov chain used in PageRank. For PageRank to converge to a unique
+    solution (i.e., a unique stationary distribution in a Markov chain), the
+    transition matrix must be irreducible. In other words, it must be that
+    there exists a path between every pair of nodes in the graph, or else there
+    is the potential of "rank sinks."
+
+    This implementation works with Multi(Di)Graphs. For multigraphs the
+    weight between two nodes is set to be the sum of all edge weights
+    between those nodes.
+
+    See Also
+    --------
+    pagerank
+    """
+    import numpy as np
+
+    if nodelist is None:
+        nodelist = list(G)
+
+    A = nx.to_numpy_array(G, nodelist=nodelist, weight=weight)
+    N = len(G)
+    if N == 0:
+        return A
+
+    # Personalization vector
+    if personalization is None:
+        p = np.repeat(1.0 / N, N)
+    else:
+        p = np.array([personalization.get(n, 0) for n in nodelist], dtype=float)
+        if p.sum() == 0:
+            raise ZeroDivisionError
+        p /= p.sum()
+
+    # Dangling nodes
+    if dangling is None:
+        dangling_weights = p
+    else:
+        # Convert the dangling dictionary into an array in nodelist order
+        dangling_weights = np.array([dangling.get(n, 0) for n in nodelist], dtype=float)
+        dangling_weights /= dangling_weights.sum()
+    dangling_nodes = np.where(A.sum(axis=1) == 0)[0]
+
+    # Assign dangling_weights to any dangling nodes (nodes with no out links)
+    A[dangling_nodes] = dangling_weights
+
+    A /= A.sum(axis=1)[:, np.newaxis]  # Normalize rows to sum to 1
+
+    return alpha * A + (1 - alpha) * p
+
+
def _pagerank_numpy(
    G, alpha=0.85, personalization=None, weight="weight", dangling=None
):
    """Returns the PageRank of the nodes in the graph.

    PageRank computes a ranking of the nodes in the graph G based on
    the structure of the incoming links. It was originally designed as
    an algorithm to rank web pages.

    Parameters
    ----------
    G : graph
      A NetworkX graph.  Undirected graphs will be converted to a directed
      graph with two directed edges for each undirected edge.

    alpha : float, optional
      Damping parameter for PageRank, default=0.85.

    personalization: dict, optional
      The "personalization vector" consisting of a dictionary with a
      key some subset of graph nodes and personalization value each of those.
      At least one personalization value must be non-zero.
      If not specified, a nodes personalization value will be zero.
      By default, a uniform distribution is used.

    weight : key, optional
      Edge data key to use as weight.  If None weights are set to 1.

    dangling: dict, optional
      The outedges to be assigned to any "dangling" nodes, i.e., nodes without
      any outedges. The dict key is the node the outedge points to and the dict
      value is the weight of that outedge. By default, dangling nodes are given
      outedges according to the personalization vector (uniform if not
      specified) This must be selected to result in an irreducible transition
      matrix (see notes under google_matrix). It may be common to have the
      dangling dict to be the same as the personalization dict.

    Returns
    -------
    pagerank : dictionary
       Dictionary of nodes with PageRank as value.

    Examples
    --------
    >>> from networkx.algorithms.link_analysis.pagerank_alg import _pagerank_numpy
    >>> G = nx.DiGraph(nx.path_graph(4))
    >>> pr = _pagerank_numpy(G, alpha=0.9)

    Notes
    -----
    The eigenvector calculation uses NumPy's interface to the LAPACK
    eigenvalue solvers.  This will be the fastest and most accurate
    for small graphs.

    This implementation works with Multi(Di)Graphs. For multigraphs the
    weight between two nodes is set to be the sum of all edge weights
    between those nodes.

    See Also
    --------
    pagerank, google_matrix

    References
    ----------
    .. [1] A. Langville and C. Meyer,
       "A survey of eigenvector methods of web information retrieval."
       http://citeseer.ist.psu.edu/713792.html
    .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry,
       The PageRank citation ranking: Bringing order to the Web. 1999
       http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
    """
    import numpy as np

    if len(G) == 0:
        # Empty graph: nothing to rank.
        return {}
    M = google_matrix(
        G, alpha, personalization=personalization, weight=weight, dangling=dangling
    )
    # use numpy LAPACK solver
    # Eigendecompose M.T so that the computed (right) eigenvectors are the
    # left eigenvectors of M — the stationary distribution is a left
    # eigenvector of the transition matrix.
    eigenvalues, eigenvectors = np.linalg.eig(M.T)
    # NOTE(review): eigenvalues are complex; np.argmax on a complex array
    # compares by real part first, then imaginary — presumably relied on to
    # pick the dominant (Perron) eigenvalue. Verify on a numpy upgrade.
    ind = np.argmax(eigenvalues)
    # eigenvector of largest eigenvalue is at ind, normalized
    # Taking .real drops any numerical-noise imaginary component before
    # normalizing the vector to sum to 1 (a probability distribution).
    largest = np.array(eigenvectors[:, ind]).flatten().real
    norm = largest.sum()
    # Map scores back onto nodes in G's iteration order (the same order
    # google_matrix used for its rows/columns).
    return dict(zip(G, map(float, largest / norm)))
+
+
def _pagerank_scipy(
    G,
    alpha=0.85,
    personalization=None,
    max_iter=100,
    tol=1.0e-6,
    nstart=None,
    weight="weight",
    dangling=None,
):
    """Returns the PageRank of the nodes in the graph.

    PageRank computes a ranking of the nodes in the graph G based on
    the structure of the incoming links. It was originally designed as
    an algorithm to rank web pages.

    Parameters
    ----------
    G : graph
      A NetworkX graph.  Undirected graphs will be converted to a directed
      graph with two directed edges for each undirected edge.

    alpha : float, optional
      Damping parameter for PageRank, default=0.85.

    personalization: dict, optional
      The "personalization vector" consisting of a dictionary with a
      key some subset of graph nodes and personalization value each of those.
      At least one personalization value must be non-zero.
      If not specified, a nodes personalization value will be zero.
      By default, a uniform distribution is used.

    max_iter : integer, optional
      Maximum number of iterations in power method eigenvalue solver.

    tol : float, optional
      Error tolerance used to check convergence in power method solver.
      The iteration will stop after a tolerance of ``len(G) * tol`` is reached.

    nstart : dictionary, optional
      Starting value of PageRank iteration for each node.

    weight : key, optional
      Edge data key to use as weight.  If None weights are set to 1.

    dangling: dict, optional
      The outedges to be assigned to any "dangling" nodes, i.e., nodes without
      any outedges. The dict key is the node the outedge points to and the dict
      value is the weight of that outedge. By default, dangling nodes are given
      outedges according to the personalization vector (uniform if not
      specified) This must be selected to result in an irreducible transition
      matrix (see notes under google_matrix). It may be common to have the
      dangling dict to be the same as the personalization dict.

    Returns
    -------
    pagerank : dictionary
       Dictionary of nodes with PageRank as value

    Examples
    --------
    >>> from networkx.algorithms.link_analysis.pagerank_alg import _pagerank_scipy
    >>> G = nx.DiGraph(nx.path_graph(4))
    >>> pr = _pagerank_scipy(G, alpha=0.9)

    Notes
    -----
    The eigenvector calculation uses power iteration with a SciPy
    sparse matrix representation.

    This implementation works with Multi(Di)Graphs. For multigraphs the
    weight between two nodes is set to be the sum of all edge weights
    between those nodes.

    See Also
    --------
    pagerank

    Raises
    ------
    PowerIterationFailedConvergence
        If the algorithm fails to converge to the specified tolerance
        within the specified number of iterations of the power iteration
        method.

    References
    ----------
    .. [1] A. Langville and C. Meyer,
       "A survey of eigenvector methods of web information retrieval."
       http://citeseer.ist.psu.edu/713792.html
    .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry,
       The PageRank citation ranking: Bringing order to the Web. 1999
       http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
    """
    import numpy as np
    import scipy as sp

    N = len(G)
    if N == 0:
        # Empty graph: nothing to rank.
        return {}

    nodelist = list(G)
    A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, dtype=float)
    S = A.sum(axis=1)
    # Invert only the non-zero row sums; dangling rows (S == 0) are left at
    # zero here and handled separately through dangling_weights below.
    S[S != 0] = 1.0 / S[S != 0]
    # TODO: csr_array
    # Build a diagonal matrix of inverse row sums so Q @ A row-normalizes A
    # (each non-dangling row of the result sums to 1).
    Q = sp.sparse.csr_array(sp.sparse.spdiags(S.T, 0, *A.shape))
    A = Q @ A

    # initial vector
    if nstart is None:
        x = np.repeat(1.0 / N, N)
    else:
        # Project the user-supplied start values onto nodelist order and
        # normalize to a probability distribution.
        x = np.array([nstart.get(n, 0) for n in nodelist], dtype=float)
        x /= x.sum()

    # Personalization vector
    if personalization is None:
        p = np.repeat(1.0 / N, N)
    else:
        p = np.array([personalization.get(n, 0) for n in nodelist], dtype=float)
        if p.sum() == 0:
            raise ZeroDivisionError
        p /= p.sum()
    # Dangling nodes
    if dangling is None:
        dangling_weights = p
    else:
        # Convert the dangling dictionary into an array in nodelist order
        dangling_weights = np.array([dangling.get(n, 0) for n in nodelist], dtype=float)
        dangling_weights /= dangling_weights.sum()
    # Indices of rows with zero out-weight (S still holds the raw row sums'
    # zero pattern: inverted entries are non-zero, dangling entries are 0).
    is_dangling = np.where(S == 0)[0]

    # power iteration: make up to max_iter iterations
    for _ in range(max_iter):
        xlast = x
        # One step of x^T <- x^T M for the Google matrix M, computed without
        # materializing M: the dangling mass is redistributed explicitly and
        # the (1 - alpha) teleport term is added directly.
        x = alpha * (x @ A + sum(x[is_dangling]) * dangling_weights) + (1 - alpha) * p
        # check convergence, l1 norm
        err = np.absolute(x - xlast).sum()
        if err < N * tol:
            return dict(zip(nodelist, map(float, x)))
    raise nx.PowerIterationFailedConvergence(max_iter)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_hits.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_hits.py
new file mode 100644
index 00000000..54713eb4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_hits.py
@@ -0,0 +1,78 @@
+import pytest
+
+import networkx as nx
+
+np = pytest.importorskip("numpy")
+sp = pytest.importorskip("scipy")
+
+from networkx.algorithms.link_analysis.hits_alg import (
+    _hits_numpy,
+    _hits_python,
+    _hits_scipy,
+)
+
+# Example from
+# A. Langville and C. Meyer, "A survey of eigenvector methods of web
+# information retrieval."  http://citeseer.ist.psu.edu/713792.html
+
+
class TestHITS:
    """Tests for the HITS implementations against precomputed hub/authority
    values (see the module-level reference comment above)."""

    @classmethod
    def setup_class(cls):
        G = nx.DiGraph()

        edges = [(1, 3), (1, 5), (2, 1), (3, 5), (5, 4), (5, 3), (6, 5)]

        G.add_edges_from(edges, weight=1)
        cls.G = G
        # Expected authority (a) and hub (h) scores, keyed by node in sorted
        # order; stored as attributes on the graph object for convenience.
        cls.G.a = dict(
            zip(sorted(G), [0.000000, 0.000000, 0.366025, 0.133975, 0.500000, 0.000000])
        )
        cls.G.h = dict(
            zip(sorted(G), [0.366025, 0.000000, 0.211325, 0.000000, 0.211325, 0.211325])
        )

    def test_hits_numpy(self):
        # The numpy solver has no tol/nstart parameters, so it is tested
        # separately from the iterative implementations.
        G = self.G
        h, a = _hits_numpy(G)
        for n in G:
            assert h[n] == pytest.approx(G.h[n], abs=1e-4)
        for n in G:
            assert a[n] == pytest.approx(G.a[n], abs=1e-4)

    @pytest.mark.parametrize("hits_alg", (nx.hits, _hits_python, _hits_scipy))
    def test_hits(self, hits_alg):
        # All iterative implementations should agree with the reference
        # values, both from the default start and from a custom nstart.
        G = self.G
        h, a = hits_alg(G, tol=1.0e-08)
        for n in G:
            assert h[n] == pytest.approx(G.h[n], abs=1e-4)
        for n in G:
            assert a[n] == pytest.approx(G.a[n], abs=1e-4)
        nstart = {i: 1.0 / 2 for i in G}
        h, a = hits_alg(G, nstart=nstart)
        for n in G:
            assert h[n] == pytest.approx(G.h[n], abs=1e-4)
        for n in G:
            assert a[n] == pytest.approx(G.a[n], abs=1e-4)

    def test_empty(self):
        # An empty graph yields empty hub and authority dicts.
        G = nx.Graph()
        assert nx.hits(G) == ({}, {})
        assert _hits_numpy(G) == ({}, {})
        assert _hits_python(G) == ({}, {})
        assert _hits_scipy(G) == ({}, {})

    def test_hits_not_convergent(self):
        # With 0 or 1 iterations the power method cannot converge and must
        # raise PowerIterationFailedConvergence.
        G = nx.path_graph(50)
        with pytest.raises(nx.PowerIterationFailedConvergence):
            _hits_scipy(G, max_iter=1)
        with pytest.raises(nx.PowerIterationFailedConvergence):
            _hits_python(G, max_iter=1)
        with pytest.raises(nx.PowerIterationFailedConvergence):
            _hits_scipy(G, max_iter=0)
        with pytest.raises(nx.PowerIterationFailedConvergence):
            _hits_python(G, max_iter=0)
        with pytest.raises(nx.PowerIterationFailedConvergence):
            nx.hits(G, max_iter=0)
        with pytest.raises(nx.PowerIterationFailedConvergence):
            nx.hits(G, max_iter=1)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_pagerank.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_pagerank.py
new file mode 100644
index 00000000..44038fd4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_pagerank.py
@@ -0,0 +1,214 @@
+import random
+
+import pytest
+
+import networkx as nx
+from networkx.classes.tests import dispatch_interface
+
+np = pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+from networkx.algorithms.link_analysis.pagerank_alg import (
+    _pagerank_numpy,
+    _pagerank_python,
+    _pagerank_scipy,
+)
+
+# Example from
+# A. Langville and C. Meyer, "A survey of eigenvector methods of web
+# information retrieval."  http://citeseer.ist.psu.edu/713792.html
+
+
class TestPageRank:
    """Tests for the PageRank implementations against precomputed reference
    values (see the module-level reference comment above)."""

    @classmethod
    def setup_class(cls):
        G = nx.DiGraph()
        edges = [
            (1, 2),
            (1, 3),
            # 2 is a dangling node
            (3, 1),
            (3, 2),
            (3, 5),
            (4, 5),
            (4, 6),
            (5, 4),
            (5, 6),
            (6, 4),
        ]
        G.add_edges_from(edges)
        cls.G = G
        # Expected PageRank scores keyed by node in sorted order, stored as
        # attributes on the graph object for convenience.
        cls.G.pagerank = dict(
            zip(
                sorted(G),
                [
                    0.03721197,
                    0.05395735,
                    0.04150565,
                    0.37508082,
                    0.20599833,
                    0.28624589,
                ],
            )
        )
        # Row index of the dangling node (node 2) in sorted-node order.
        cls.dangling_node_index = 1
        # Replacement out-edge weights for dangling nodes, and the PageRank
        # scores expected when that replacement distribution is used.
        cls.dangling_edges = {1: 2, 2: 3, 3: 0, 4: 0, 5: 0, 6: 0}
        cls.G.dangling_pagerank = dict(
            zip(
                sorted(G),
                [0.10844518, 0.18618601, 0.0710892, 0.2683668, 0.15919783, 0.20671497],
            )
        )

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
    def test_pagerank(self, alg):
        # Reference values must be reproduced from the default start and
        # from a random nstart (the fixed point is independent of nstart).
        G = self.G
        p = alg(G, alpha=0.9, tol=1.0e-08)
        for n in G:
            assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)

        nstart = {n: random.random() for n in G}
        p = alg(G, alpha=0.9, tol=1.0e-08, nstart=nstart)
        for n in G:
            assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
    def test_pagerank_max_iter(self, alg):
        # Zero iterations cannot converge: must raise.
        with pytest.raises(nx.PowerIterationFailedConvergence):
            alg(self.G, max_iter=0)

    def test_numpy_pagerank(self):
        G = self.G
        p = _pagerank_numpy(G, alpha=0.9)
        for n in G:
            assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)

    def test_google_matrix(self):
        # The dominant left eigenvector of the Google matrix, normalized to
        # sum to 1, must equal the PageRank vector.
        G = self.G
        M = nx.google_matrix(G, alpha=0.9, nodelist=sorted(G))
        _, ev = np.linalg.eig(M.T)
        p = ev[:, 0] / ev[:, 0].sum()
        for a, b in zip(p, self.G.pagerank.values()):
            assert a == pytest.approx(b, abs=1e-7)

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, _pagerank_numpy))
    def test_personalization(self, alg):
        G = nx.complete_graph(4)
        personalize = {0: 1, 1: 1, 2: 4, 3: 4}
        answer = {
            0: 0.23246732615667579,
            1: 0.23246732615667579,
            2: 0.267532673843324,
            3: 0.2675326738433241,
        }
        p = alg(G, alpha=0.85, personalization=personalize)
        for n in G:
            assert p[n] == pytest.approx(answer[n], abs=1e-4)

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, nx.google_matrix))
    def test_zero_personalization_vector(self, alg):
        # An all-zero personalization vector cannot be normalized.
        G = nx.complete_graph(4)
        personalize = {0: 0, 1: 0, 2: 0, 3: 0}
        pytest.raises(ZeroDivisionError, alg, G, personalization=personalize)

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
    def test_one_nonzero_personalization_value(self, alg):
        G = nx.complete_graph(4)
        personalize = {0: 0, 1: 0, 2: 0, 3: 1}
        answer = {
            0: 0.22077931820379187,
            1: 0.22077931820379187,
            2: 0.22077931820379187,
            3: 0.3376620453886241,
        }
        p = alg(G, alpha=0.85, personalization=personalize)
        for n in G:
            assert p[n] == pytest.approx(answer[n], abs=1e-4)

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
    def test_incomplete_personalization(self, alg):
        # Nodes absent from the personalization dict get value zero, so
        # {3: 1} must match the explicit all-nodes dict above.
        G = nx.complete_graph(4)
        personalize = {3: 1}
        answer = {
            0: 0.22077931820379187,
            1: 0.22077931820379187,
            2: 0.22077931820379187,
            3: 0.3376620453886241,
        }
        p = alg(G, alpha=0.85, personalization=personalize)
        for n in G:
            assert p[n] == pytest.approx(answer[n], abs=1e-4)

    def test_dangling_matrix(self):
        """
        Tests that the google_matrix doesn't change except for the dangling
        nodes.
        """
        G = self.G
        dangling = self.dangling_edges
        dangling_sum = sum(dangling.values())
        M1 = nx.google_matrix(G, personalization=dangling)
        M2 = nx.google_matrix(G, personalization=dangling, dangling=dangling)
        for i in range(len(G)):
            for j in range(len(G)):
                # Only the dangling node's row may differ; there it must
                # equal the normalized dangling distribution (nodes are
                # 1-based, hence the j + 1 lookup).
                if i == self.dangling_node_index and (j + 1) in dangling:
                    assert M2[i, j] == pytest.approx(
                        dangling[j + 1] / dangling_sum, abs=1e-4
                    )
                else:
                    assert M2[i, j] == pytest.approx(M1[i, j], abs=1e-4)

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, _pagerank_numpy))
    def test_dangling_pagerank(self, alg):
        pr = alg(self.G, dangling=self.dangling_edges)
        for n in self.G:
            assert pr[n] == pytest.approx(self.G.dangling_pagerank[n], abs=1e-4)

    def test_empty(self):
        # An empty graph yields an empty result (and a 0x0 Google matrix).
        G = nx.Graph()
        assert nx.pagerank(G) == {}
        assert _pagerank_python(G) == {}
        assert _pagerank_numpy(G) == {}
        assert nx.google_matrix(G).shape == (0, 0)

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
    def test_multigraph(self, alg):
        # Parallel edges must be summed into a single weight; note the mix
        # of int 3 and str "3" nodes exercises heterogeneous node types.
        G = nx.MultiGraph()
        G.add_edges_from([(1, 2), (1, 2), (1, 2), (2, 3), (2, 3), ("3", 3), ("3", 3)])
        answer = {
            1: 0.21066048614468322,
            2: 0.3395308825985378,
            3: 0.28933951385531687,
            "3": 0.16046911740146227,
        }
        p = alg(G)
        for n in G:
            assert p[n] == pytest.approx(answer[n], abs=1e-4)
+
+
class TestPageRankScipy(TestPageRank):
    """Re-runs the base PageRank suite and adds scipy-specific tests
    (the scipy solver is not in the base class's parametrized lists)."""

    def test_scipy_pagerank(self):
        G = self.G
        p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08)
        for n in G:
            assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)
        # Smoke test only: a random personalization has no reference values.
        personalize = {n: random.random() for n in G}
        p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08, personalization=personalize)

        # A random nstart must still converge to the reference fixed point.
        nstart = {n: random.random() for n in G}
        p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08, nstart=nstart)
        for n in G:
            assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)

    def test_scipy_pagerank_max_iter(self):
        # Zero iterations cannot converge: must raise.
        with pytest.raises(nx.PowerIterationFailedConvergence):
            _pagerank_scipy(self.G, max_iter=0)

    def test_dangling_scipy_pagerank(self):
        pr = _pagerank_scipy(self.G, dangling=self.dangling_edges)
        for n in self.G:
            assert pr[n] == pytest.approx(self.G.dangling_pagerank[n], abs=1e-4)

    def test_empty_scipy(self):
        G = nx.Graph()
        assert _pagerank_scipy(G) == {}
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_prediction.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_prediction.py
new file mode 100644
index 00000000..3615f26d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_prediction.py
@@ -0,0 +1,687 @@
+"""
+Link prediction algorithms.
+"""
+
+from math import log
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = [
+    "resource_allocation_index",
+    "jaccard_coefficient",
+    "adamic_adar_index",
+    "preferential_attachment",
+    "cn_soundarajan_hopcroft",
+    "ra_index_soundarajan_hopcroft",
+    "within_inter_cluster",
+    "common_neighbor_centrality",
+]
+
+
def _apply_prediction(G, func, ebunch=None):
    """Lazily yield ``(u, v, func(u, v))`` for each node pair in `ebunch`.

    `G` is a :class:`networkx.Graph` instance and `func` maps a pair of
    nodes to a link-likelihood score.  When `ebunch` is None, every
    non-edge of `G` is scored; otherwise each supplied pair is validated
    and :exc:`networkx.NodeNotFound` is raised for an unknown endpoint.
    """
    if ebunch is not None:
        # Validate endpoints up front so callers get a clear error before
        # any scores are produced.
        for u, v in ebunch:
            if u not in G:
                raise nx.NodeNotFound(f"Node {u} not in G.")
            if v not in G:
                raise nx.NodeNotFound(f"Node {v} not in G.")
    else:
        ebunch = nx.non_edges(G)
    return ((u, v, func(u, v)) for u, v in ebunch)
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def resource_allocation_index(G, ebunch=None):
    r"""Compute the resource allocation index of all node pairs in ebunch.

    For nodes `u` and `v` the index is

    .. math::

        \sum_{w \in \Gamma(u) \cap \Gamma(v)} \frac{1}{|\Gamma(w)|}

    where $\Gamma(u)$ denotes the set of neighbors of $u$.

    Parameters
    ----------
    G : graph
        A NetworkX undirected graph.

    ebunch : iterable of node pairs, optional (default = None)
        Pairs of nodes, given as 2-tuples (u, v) of nodes in the graph,
        for which the index is computed.  If None, all nonexistent edges
        of the graph are used.

    Returns
    -------
    piter : iterator
        An iterator of 3-tuples (u, v, p) where (u, v) is a node pair and
        p is its resource allocation index.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`.

    NodeNotFound
        If `ebunch` has a node that is not in `G`.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> preds = nx.resource_allocation_index(G, [(0, 1), (2, 3)])
    >>> for u, v, p in preds:
    ...     print(f"({u}, {v}) -> {p:.8f}")
    (0, 1) -> 0.75000000
    (2, 3) -> 0.75000000

    References
    ----------
    .. [1] T. Zhou, L. Lu, Y.-C. Zhang.
       Predicting missing links via local information.
       Eur. Phys. J. B 71 (2009) 623.
       https://arxiv.org/pdf/0901.0553.pdf
    """

    def predict(u, v):
        # Each shared neighbor contributes the reciprocal of its degree.
        degrees = (G.degree(w) for w in nx.common_neighbors(G, u, v))
        return sum(1 / d for d in degrees)

    return _apply_prediction(G, predict, ebunch)
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def jaccard_coefficient(G, ebunch=None):
    r"""Compute the Jaccard coefficient of all node pairs in ebunch.

    For nodes `u` and `v` the coefficient is

    .. math::

        \frac{|\Gamma(u) \cap \Gamma(v)|}{|\Gamma(u) \cup \Gamma(v)|}

    where $\Gamma(u)$ denotes the set of neighbors of $u$.

    Parameters
    ----------
    G : graph
        A NetworkX undirected graph.

    ebunch : iterable of node pairs, optional (default = None)
        Pairs of nodes, given as 2-tuples (u, v) of nodes in the graph,
        for which the coefficient is computed.  If None, all nonexistent
        edges of the graph are used.

    Returns
    -------
    piter : iterator
        An iterator of 3-tuples (u, v, p) where (u, v) is a node pair and
        p is its Jaccard coefficient.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`.

    NodeNotFound
        If `ebunch` has a node that is not in `G`.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> preds = nx.jaccard_coefficient(G, [(0, 1), (2, 3)])
    >>> for u, v, p in preds:
    ...     print(f"({u}, {v}) -> {p:.8f}")
    (0, 1) -> 0.60000000
    (2, 3) -> 0.60000000

    References
    ----------
    .. [1] D. Liben-Nowell, J. Kleinberg.
           The Link Prediction Problem for Social Networks (2004).
           http://www.cs.cornell.edu/home/kleinber/link-pred.pdf
    """

    def predict(u, v):
        union = set(G[u]) | set(G[v])
        # Two isolated nodes have an empty union; define the score as 0.
        if not union:
            return 0
        return len(nx.common_neighbors(G, u, v)) / len(union)

    return _apply_prediction(G, predict, ebunch)
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def adamic_adar_index(G, ebunch=None):
    r"""Compute the Adamic-Adar index of all node pairs in ebunch.

    For nodes `u` and `v` the index is

    .. math::

        \sum_{w \in \Gamma(u) \cap \Gamma(v)} \frac{1}{\log |\Gamma(w)|}

    where $\Gamma(u)$ denotes the set of neighbors of $u$.  This index
    leads to zero-division for nodes only connected via self-loops; it is
    intended for graphs without self-loops.

    Parameters
    ----------
    G : graph
        NetworkX undirected graph.

    ebunch : iterable of node pairs, optional (default = None)
        Pairs of nodes, given as 2-tuples (u, v) of nodes in the graph,
        for which the index is computed.  If None, all nonexistent edges
        of the graph are used.

    Returns
    -------
    piter : iterator
        An iterator of 3-tuples (u, v, p) where (u, v) is a node pair and
        p is its Adamic-Adar index.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`.

    NodeNotFound
        If `ebunch` has a node that is not in `G`.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> preds = nx.adamic_adar_index(G, [(0, 1), (2, 3)])
    >>> for u, v, p in preds:
    ...     print(f"({u}, {v}) -> {p:.8f}")
    (0, 1) -> 2.16404256
    (2, 3) -> 2.16404256

    References
    ----------
    .. [1] D. Liben-Nowell, J. Kleinberg.
           The Link Prediction Problem for Social Networks (2004).
           http://www.cs.cornell.edu/home/kleinber/link-pred.pdf
    """

    def predict(u, v):
        # Without self-loops every common neighbor has degree >= 2, so
        # log(degree) is never zero here.
        score = 0
        for w in nx.common_neighbors(G, u, v):
            score += 1 / log(G.degree(w))
        return score

    return _apply_prediction(G, predict, ebunch)
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def common_neighbor_centrality(G, ebunch=None, alpha=0.8):
    r"""Return the CCPA score for each pair of nodes.

    Compute the Common Neighbor and Centrality based Parameterized
    Algorithm (CCPA) score of all node pairs in ebunch, defined for nodes
    `u` and `v` as

    .. math::

        \alpha \cdot (|\Gamma (u){\cap }^{}\Gamma (v)|)+(1-\alpha )\cdot \frac{N}{{d}_{uv}}

    where $\Gamma(u)$ denotes the set of neighbors of $u$, $\alpha$ is a
    parameter in [0,1], $N$ is the number of nodes in the graph and
    ${d}_{uv}$ is the shortest-path distance between $u$ and $v$.  The
    score thus combines the number of common neighbors with a centrality
    (closeness) term.

    .. seealso::

        :func:`common_neighbors`

    Parameters
    ----------
    G : graph
        NetworkX undirected graph.

    ebunch : iterable of node pairs, optional (default = None)
        Pairs of nodes, given as 2-tuples (u, v) of nodes in the graph,
        for which the score is computed.  If None, all nonexistent edges
        of the graph are used.

    alpha : float, optional (default = 0.8)
        Weight of the common-neighbor term versus the centrality term.
        Should normally be between 0 and 1; the authors report the best
        performance at 0.8 across their datasets.

    Returns
    -------
    piter : iterator
        An iterator of 3-tuples (u, v, p) where (u, v) is a node pair and
        p is its CCPA score.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`.

    NetworkXAlgorithmError
        If self loops exist in `ebunch` or in `G` (if `ebunch` is `None`).

    NodeNotFound
        If `ebunch` has a node that is not in `G`.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> preds = nx.common_neighbor_centrality(G, [(0, 1), (2, 3)])
    >>> for u, v, p in preds:
    ...     print(f"({u}, {v}) -> {p}")
    (0, 1) -> 3.4000000000000004
    (2, 3) -> 3.4000000000000004

    References
    ----------
    .. [1] Ahmad, I., Akhtar, M.U., Noor, S. et al.
           Missing Link Prediction using Common Neighbor and Centrality based Parameterized Algorithm.
           Sci Rep 10, 364 (2020).
           https://doi.org/10.1038/s41598-019-57304-y
    """

    if alpha == 1:
        # Degenerate case: the centrality term vanishes and the score is
        # just the common-neighbor count, so the (expensive) all-pairs
        # shortest paths can be skipped entirely.
        def predict(u, v):
            if u == v:
                raise nx.NetworkXAlgorithmError("Self loops are not supported")

            return len(nx.common_neighbors(G, u, v))

    else:
        # Precompute all-pairs shortest path lengths once, up front.
        distances = dict(nx.shortest_path_length(G))
        no_path = float("inf")

        def predict(u, v):
            if u == v:
                raise nx.NetworkXAlgorithmError("Self loops are not supported")
            # Unreachable pairs get infinite distance -> zero centrality term.
            d_uv = distances[u].get(v, no_path)

            n_common = len(nx.common_neighbors(G, u, v))
            return alpha * n_common + (1 - alpha) * len(G) / d_uv

    return _apply_prediction(G, predict, ebunch)
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def preferential_attachment(G, ebunch=None):
    r"""Compute the preferential attachment score of all node pairs in ebunch.

    The preferential attachment score of nodes `u` and `v` is

    .. math::

        |\Gamma(u)| |\Gamma(v)|

    where $\Gamma(u)$ denotes the set of neighbors of $u$.

    Parameters
    ----------
    G : graph
        NetworkX undirected graph.

    ebunch : iterable of node pairs, optional (default = None)
        Preferential attachment score will be computed for each pair of
        nodes given in the iterable. The pairs must be given as
        2-tuples (u, v) where u and v are nodes in the graph. If ebunch
        is None then all nonexistent edges in the graph will be used.

    Returns
    -------
    piter : iterator
        An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
        pair of nodes and p is their preferential attachment score.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`.

    NodeNotFound
        If `ebunch` has a node that is not in `G`.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> preds = nx.preferential_attachment(G, [(0, 1), (2, 3)])
    >>> for u, v, p in preds:
    ...     print(f"({u}, {v}) -> {p}")
    (0, 1) -> 16
    (2, 3) -> 16

    References
    ----------
    .. [1] D. Liben-Nowell, J. Kleinberg.
           The Link Prediction Problem for Social Networks (2004).
           http://www.cs.cornell.edu/home/kleinber/link-pred.pdf
    """

    def predict(n1, n2):
        # The score is simply the product of the two node degrees.
        return G.degree(n1) * G.degree(n2)

    return _apply_prediction(G, predict, ebunch)
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(node_attrs="community")
def cn_soundarajan_hopcroft(G, ebunch=None, community="community"):
    r"""Count common neighbors of node pairs, adding a same-community bonus.

    For two nodes $u$ and $v$, the score is the number of common
    neighbors plus one extra point for every common neighbor that lies
    in the same community as both $u$ and $v$:

    .. math::

        |\Gamma(u) \cap \Gamma(v)| + \sum_{w \in \Gamma(u) \cap \Gamma(v)} f(w)

    where $f(w)$ equals 1 if $w$ belongs to the same community as $u$
    and $v$ or 0 otherwise and $\Gamma(u)$ denotes the set of
    neighbors of $u$.

    Parameters
    ----------
    G : graph
        A NetworkX undirected graph.

    ebunch : iterable of node pairs, optional (default = None)
        The score will be computed for each pair of nodes given in the
        iterable. The pairs must be given as 2-tuples (u, v) where u
        and v are nodes in the graph. If ebunch is None then all
        nonexistent edges in the graph will be used.

    community : string, optional (default = 'community')
        Nodes attribute name containing the community information.
        G[u][community] identifies which community u belongs to. Each
        node belongs to at most one community.

    Returns
    -------
    piter : iterator
        An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
        pair of nodes and p is their score.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`.

    NetworkXAlgorithmError
        If no community information is available for a node in `ebunch`
        or in `G` (if `ebunch` is `None`).

    NodeNotFound
        If `ebunch` has a node that is not in `G`.

    Examples
    --------
    >>> G = nx.path_graph(3)
    >>> G.nodes[0]["community"] = 0
    >>> G.nodes[1]["community"] = 0
    >>> G.nodes[2]["community"] = 0
    >>> preds = nx.cn_soundarajan_hopcroft(G, [(0, 2)])
    >>> for u, v, p in preds:
    ...     print(f"({u}, {v}) -> {p}")
    (0, 2) -> 2

    References
    ----------
    .. [1] Sucheta Soundarajan and John Hopcroft.
       Using community information to improve the precision of link
       prediction methods.
       In Proceedings of the 21st international conference companion on
       World Wide Web (WWW '12 Companion). ACM, New York, NY, USA, 607-608.
       http://doi.acm.org/10.1145/2187980.2188150
    """

    def predict(u, v):
        comm_u = _community(G, u, community)
        comm_v = _community(G, v, community)
        shared = nx.common_neighbors(G, u, v)
        # The bonus applies only when u and v themselves share a community.
        bonus = 0
        if comm_u == comm_v:
            for w in shared:
                if _community(G, w, community) == comm_u:
                    bonus += 1
        return len(shared) + bonus

    return _apply_prediction(G, predict, ebunch)
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(node_attrs="community")
def ra_index_soundarajan_hopcroft(G, ebunch=None, community="community"):
    r"""Compute a community-aware resource allocation index for node pairs.

    For two nodes $u$ and $v$, only common neighbors that belong to the
    same community as both $u$ and $v$ contribute to the index:

    .. math::

        \sum_{w \in \Gamma(u) \cap \Gamma(v)} \frac{f(w)}{|\Gamma(w)|}

    where $f(w)$ equals 1 if $w$ belongs to the same community as $u$
    and $v$ or 0 otherwise and $\Gamma(u)$ denotes the set of
    neighbors of $u$.

    Parameters
    ----------
    G : graph
        A NetworkX undirected graph.

    ebunch : iterable of node pairs, optional (default = None)
        The score will be computed for each pair of nodes given in the
        iterable. The pairs must be given as 2-tuples (u, v) where u
        and v are nodes in the graph. If ebunch is None then all
        nonexistent edges in the graph will be used.

    community : string, optional (default = 'community')
        Nodes attribute name containing the community information.
        G[u][community] identifies which community u belongs to. Each
        node belongs to at most one community.

    Returns
    -------
    piter : iterator
        An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
        pair of nodes and p is their score.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`.

    NetworkXAlgorithmError
        If no community information is available for a node in `ebunch`
        or in `G` (if `ebunch` is `None`).

    NodeNotFound
        If `ebunch` has a node that is not in `G`.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
    >>> G.nodes[0]["community"] = 0
    >>> G.nodes[1]["community"] = 0
    >>> G.nodes[2]["community"] = 1
    >>> G.nodes[3]["community"] = 0
    >>> preds = nx.ra_index_soundarajan_hopcroft(G, [(0, 3)])
    >>> for u, v, p in preds:
    ...     print(f"({u}, {v}) -> {p:.8f}")
    (0, 3) -> 0.50000000

    References
    ----------
    .. [1] Sucheta Soundarajan and John Hopcroft.
       Using community information to improve the precision of link
       prediction methods.
       In Proceedings of the 21st international conference companion on
       World Wide Web (WWW '12 Companion). ACM, New York, NY, USA, 607-608.
       http://doi.acm.org/10.1145/2187980.2188150
    """

    def predict(u, v):
        comm_u = _community(G, u, community)
        comm_v = _community(G, v, community)
        # Pairs in different communities score zero by definition.
        if comm_u != comm_v:
            return 0
        total = 0
        for w in nx.common_neighbors(G, u, v):
            if _community(G, w, community) == comm_u:
                total += 1 / G.degree(w)
        return total

    return _apply_prediction(G, predict, ebunch)
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(node_attrs="community")
def within_inter_cluster(G, ebunch=None, delta=0.001, community="community"):
    """Compute the WIC measure of all node pairs in ebunch.

    A common neighbor `w` of nodes `u` and `v` is a *within-cluster*
    common neighbor if it belongs to the same community as both of them,
    and an *inter-cluster* common neighbor otherwise. The WIC measure is
    the ratio of the number of within-cluster to inter-cluster common
    neighbors. [1]_

    Parameters
    ----------
    G : graph
        A NetworkX undirected graph.

    ebunch : iterable of node pairs, optional (default = None)
        The WIC measure will be computed for each pair of nodes given in
        the iterable. The pairs must be given as 2-tuples (u, v) where
        u and v are nodes in the graph. If ebunch is None then all
        nonexistent edges in the graph will be used.

    delta : float, optional (default = 0.001)
        Value to prevent division by zero in case there is no
        inter-cluster common neighbor between two nodes. See [1]_ for
        details.

    community : string, optional (default = 'community')
        Nodes attribute name containing the community information.
        G[u][community] identifies which community u belongs to. Each
        node belongs to at most one community.

    Returns
    -------
    piter : iterator
        An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
        pair of nodes and p is their WIC measure.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a `DiGraph`, a `Multigraph` or a `MultiDiGraph`.

    NetworkXAlgorithmError
        - If `delta` is less than or equal to zero.
        - If no community information is available for a node in `ebunch` or in `G` (if `ebunch` is `None`).

    NodeNotFound
        If `ebunch` has a node that is not in `G`.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_edges_from([(0, 1), (0, 2), (0, 3), (1, 4), (2, 4), (3, 4)])
    >>> G.nodes[0]["community"] = 0
    >>> G.nodes[1]["community"] = 1
    >>> G.nodes[2]["community"] = 0
    >>> G.nodes[3]["community"] = 0
    >>> G.nodes[4]["community"] = 0
    >>> preds = nx.within_inter_cluster(G, [(0, 4)])
    >>> for u, v, p in preds:
    ...     print(f"({u}, {v}) -> {p:.8f}")
    (0, 4) -> 1.99800200
    >>> preds = nx.within_inter_cluster(G, [(0, 4)], delta=0.5)
    >>> for u, v, p in preds:
    ...     print(f"({u}, {v}) -> {p:.8f}")
    (0, 4) -> 1.33333333

    References
    ----------
    .. [1] Jorge Carlos Valverde-Rebaza and Alneu de Andrade Lopes.
       Link prediction in complex networks based on cluster information.
       In Proceedings of the 21st Brazilian conference on Advances in
       Artificial Intelligence (SBIA'12)
       https://doi.org/10.1007/978-3-642-34459-6_10
    """
    # Validate delta eagerly so the error surfaces before any iteration.
    if delta <= 0:
        raise nx.NetworkXAlgorithmError("Delta must be greater than zero")

    def predict(u, v):
        comm = _community(G, u, community)
        if _community(G, v, community) != comm:
            return 0
        shared = nx.common_neighbors(G, u, v)
        within = {w for w in shared if _community(G, w, community) == comm}
        inter = shared - within
        # delta keeps the denominator positive when inter is empty.
        return len(within) / (len(inter) + delta)

    return _apply_prediction(G, predict, ebunch)
+
+
def _community(G, u, community):
    """Return node ``u``'s value of the ``community`` node attribute.

    Raises ``NetworkXAlgorithmError`` (chained from the ``KeyError``)
    when the node carries no community information.
    """
    try:
        return G.nodes[u][community]
    except KeyError as err:
        raise nx.NetworkXAlgorithmError(
            f"No community information available for Node {u}"
        ) from err
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/lowest_common_ancestors.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/lowest_common_ancestors.py
new file mode 100644
index 00000000..d580018b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/lowest_common_ancestors.py
@@ -0,0 +1,269 @@
+"""Algorithms for finding the lowest common ancestor of trees and DAGs."""
+
+from collections import defaultdict
+from collections.abc import Mapping, Set
+from itertools import combinations_with_replacement
+
+import networkx as nx
+from networkx.utils import UnionFind, arbitrary_element, not_implemented_for
+
+__all__ = [
+    "all_pairs_lowest_common_ancestor",
+    "tree_all_pairs_lowest_common_ancestor",
+    "lowest_common_ancestor",
+]
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def all_pairs_lowest_common_ancestor(G, pairs=None):
    """Return the lowest common ancestor of all pairs or the provided pairs

    Parameters
    ----------
    G : NetworkX directed graph

    pairs : iterable of pairs of nodes, optional (default: all pairs)
        The pairs of nodes of interest.
        If None, will find the LCA of all pairs of nodes.

    Yields
    ------
    ((node1, node2), lca) : 2-tuple
        Where lca is least common ancestor of node1 and node2.
        Note that for the default case, the order of the node pair is not considered,
        e.g. you will not get both ``(a, b)`` and ``(b, a)``

    Raises
    ------
    NetworkXPointlessConcept
        If `G` is null.
    NetworkXError
        If `G` is not a DAG.

    Examples
    --------
    The default behavior is to yield the lowest common ancestor for all
    possible combinations of nodes in `G`, including self-pairings:

    >>> G = nx.DiGraph([(0, 1), (0, 3), (1, 2)])
    >>> dict(nx.all_pairs_lowest_common_ancestor(G))
    {(0, 0): 0, (0, 1): 0, (0, 3): 0, (0, 2): 0, (1, 1): 1, (1, 3): 0, (1, 2): 1, (3, 3): 3, (3, 2): 0, (2, 2): 2}

    The pairs argument can be used to limit the output to only the
    specified node pairings:

    >>> dict(nx.all_pairs_lowest_common_ancestor(G, pairs=[(1, 2), (2, 3)]))
    {(1, 2): 1, (2, 3): 0}

    Notes
    -----
    Only defined on non-null directed acyclic graphs.

    See Also
    --------
    lowest_common_ancestor
    """
    if not nx.is_directed_acyclic_graph(G):
        raise nx.NetworkXError("LCA only defined on directed acyclic graphs.")
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("LCA meaningless on null graphs.")

    if pairs is None:
        pairs = combinations_with_replacement(G, 2)
    else:
        # Materialize the iterator while dropping duplicates but keeping
        # insertion order, then validate every endpoint against G.
        pairs = dict.fromkeys(pairs)
        nodeset = set(G)
        for pair in pairs:
            extra = set(pair) - nodeset
            if extra:
                raise nx.NodeNotFound(f"Node(s) {extra} from pair {pair} not in G.")

    # Input validation done; the actual work happens lazily in a generator.
    def iter_lcas(G, pairs):
        # Maps node -> its ancestor set (including itself), filled on demand.
        cache = {}

        for v, w in pairs:
            for node in (v, w):
                if node not in cache:
                    cache[node] = nx.ancestors(G, node)
                    cache[node].add(node)

            common = cache[v] & cache[w]
            if not common:
                continue

            # Start from an arbitrary common ancestor and repeatedly step to
            # a successor that is still a common ancestor; when no such
            # successor exists, the current node is lowest.
            current = next(iter(common))
            while True:
                deeper = next(
                    (s for s in G.successors(current) if s in common), None
                )
                if deeper is None:
                    break
                current = deeper
            yield (v, w), current

    return iter_lcas(G, pairs)
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def lowest_common_ancestor(G, node1, node2, default=None):
    """Compute the lowest common ancestor of the given pair of nodes.

    Parameters
    ----------
    G : NetworkX directed graph

    node1, node2 : nodes in the graph.

    default : object
        Returned if no common ancestor between `node1` and `node2`

    Returns
    -------
    The lowest common ancestor of node1 and node2,
    or default if they have no common ancestors.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> nx.add_path(G, (0, 1, 2, 3))
    >>> nx.add_path(G, (0, 4, 3))
    >>> nx.lowest_common_ancestor(G, 2, 4)
    0

    See Also
    --------
    all_pairs_lowest_common_ancestor"""

    # Delegate to the all-pairs routine with a single requested pair; an
    # empty result means the nodes share no ancestor.
    result = list(all_pairs_lowest_common_ancestor(G, pairs=[(node1, node2)]))
    if not result:
        return default
    assert len(result) == 1  # exactly one pair was requested
    return result[0][1]
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def tree_all_pairs_lowest_common_ancestor(G, root=None, pairs=None):
    r"""Yield the lowest common ancestor for sets of pairs in a tree.

    Parameters
    ----------
    G : NetworkX directed graph (must be a tree)

    root : node, optional (default: None)
        The root of the subtree to operate on.
        If None, assume the entire graph has exactly one source and use that.

    pairs : iterable or iterator of pairs of nodes, optional (default: None)
        The pairs of interest. If None, Defaults to all pairs of nodes
        under `root` that have a lowest common ancestor.

    Returns
    -------
    lcas : generator of tuples `((u, v), lca)` where `u` and `v` are nodes
        in `pairs` and `lca` is their lowest common ancestor.

    Examples
    --------
    >>> import pprint
    >>> G = nx.DiGraph([(1, 3), (2, 4), (1, 2)])
    >>> pprint.pprint(dict(nx.tree_all_pairs_lowest_common_ancestor(G)))
    {(1, 1): 1,
     (2, 1): 1,
     (2, 2): 2,
     (3, 1): 1,
     (3, 2): 1,
     (3, 3): 3,
     (3, 4): 1,
     (4, 1): 1,
     (4, 2): 2,
     (4, 4): 4}

    We can also use `pairs` argument to specify the pairs of nodes for which we
    want to compute lowest common ancestors. Here is an example:

    >>> dict(nx.tree_all_pairs_lowest_common_ancestor(G, pairs=[(1, 4), (2, 3)]))
    {(2, 3): 1, (1, 4): 1}

    Notes
    -----
    Only defined on non-null trees represented with directed edges from
    parents to children. Uses Tarjan's off-line lowest-common-ancestors
    algorithm. Runs in time $O(4 \times (V + E + P))$ time, where 4 is the largest
    value of the inverse Ackermann function likely to ever come up in actual
    use, and $P$ is the number of pairs requested (or $V^2$ if all are needed).

    Tarjan, R. E. (1979), "Applications of path compression on balanced trees",
    Journal of the ACM 26 (4): 690-715, doi:10.1145/322154.322161.

    See Also
    --------
    all_pairs_lowest_common_ancestor: similar routine for general DAGs
    lowest_common_ancestor: just a single pair for general DAGs
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("LCA meaningless on null graphs.")

    # Index pairs of interest for efficient lookup from either side.
    if pairs is not None:
        pair_dict = defaultdict(set)
        # See note on all_pairs_lowest_common_ancestor.
        if not isinstance(pairs, Mapping | Set):
            pairs = set(pairs)
        for u, v in pairs:
            for n in (u, v):
                if n not in G:
                    msg = f"The node {str(n)} is not in the digraph."
                    raise nx.NodeNotFound(msg)
            # Record the pair under both endpoints so it is found no matter
            # which endpoint the DFS finishes first.
            pair_dict[u].add(v)
            pair_dict[v].add(u)

    # If root is not specified, find the exactly one node with in degree 0 and
    # use it. Raise an error if none are found, or more than one is. Also check
    # for any nodes with in degree larger than 1, which would imply G is not a
    # tree.
    if root is None:
        for n, deg in G.in_degree:
            if deg == 0:
                if root is not None:
                    msg = "No root specified and tree has multiple sources."
                    raise nx.NetworkXError(msg)
                root = n
            # checking deg>1 is not sufficient for MultiDiGraphs
            elif deg > 1 and len(G.pred[n]) > 1:
                msg = "Tree LCA only defined on trees; use DAG routine."
                raise nx.NetworkXError(msg)
    if root is None:
        raise nx.NetworkXError("Graph contains a cycle.")

    # Iterative implementation of Tarjan's offline lca algorithm
    # as described in CLRS on page 521 (2nd edition)/page 584 (3rd edition)
    uf = UnionFind()
    # ancestors maps each union-find set representative to the subtree root
    # currently being processed for that set.
    ancestors = {}
    for node in G:
        ancestors[node] = uf[node]

    # colors[n] becomes True once n's entire subtree has been processed.
    colors = defaultdict(bool)
    for node in nx.dfs_postorder_nodes(G, root):
        colors[node] = True
        for v in pair_dict[node] if pairs is not None else G:
            if colors[v]:
                # If the user requested both directions of a pair, give it.
                # Otherwise, just give one.
                if pairs is not None and (node, v) in pairs:
                    yield (node, v), ancestors[uf[v]]
                if pairs is None or (v, node) in pairs:
                    yield (v, node), ancestors[uf[v]]
        if node != root:
            # Merge the finished subtree into its parent's set; the merged
            # set's representative now maps to the parent as its LCA anchor.
            parent = arbitrary_element(G.pred[node])
            uf.union(parent, node)
            ancestors[uf[parent]] = parent
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/matching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/matching.py
new file mode 100644
index 00000000..6cfb3c93
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/matching.py
@@ -0,0 +1,1152 @@
+"""Functions for computing and verifying matchings in a graph."""
+
+from collections import Counter
+from itertools import combinations, repeat
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = [
+    "is_matching",
+    "is_maximal_matching",
+    "is_perfect_matching",
+    "max_weight_matching",
+    "min_weight_matching",
+    "maximal_matching",
+]
+
+
@not_implemented_for("multigraph")
@not_implemented_for("directed")
@nx._dispatchable
def maximal_matching(G):
    r"""Find a maximal matching in the graph.

    A matching is a subset of edges in which no node occurs more than once.
    A maximal matching cannot add more edges and still be a matching.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    matching : set
        A maximal matching of the graph.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (2, 4), (3, 5), (4, 5)])
    >>> sorted(nx.maximal_matching(G))
    [(1, 2), (3, 5)]

    Notes
    -----
    The algorithm greedily selects a maximal matching M of the graph G
    (i.e. no superset of M exists). It runs in $O(|E|)$ time.
    """
    matched_edges = set()
    covered = set()
    for u, v in G.edges():
        # Skip self-loops and any edge touching an already-matched node;
        # otherwise greedily take the edge.
        if u == v or u in covered or v in covered:
            continue
        matched_edges.add((u, v))
        covered.add(u)
        covered.add(v)
    return matched_edges
+
+
def matching_dict_to_set(matching):
    """Converts matching dict format to matching set format

    Converts a dictionary representing a matching (as returned by
    :func:`max_weight_matching`) to a set representing a matching (as
    returned by :func:`maximal_matching`).

    The input dictionary is expected to contain mirrored key/value pairs
    (key ``u`` with value ``v`` and key ``v`` with value ``u``); only one
    direction of each pair is kept. Self-loops are invalid in a matching
    and raise :exc:`~networkx.NetworkXError`.
    """
    result = set()
    for u, v in matching.items():
        # Keep only one orientation of each mirrored pair.
        if (u, v) in result or (v, u) in result:
            continue
        if u == v:
            raise nx.NetworkXError(f"Selfloops cannot appear in matchings {(u, v)}")
        result.add((u, v))
    return result
+
+
@nx._dispatchable
def is_matching(G, matching):
    """Return True if ``matching`` is a valid matching of ``G``

    A *matching* in a graph is a set of edges in which no two distinct
    edges share a common endpoint. Each node is incident to at most one
    edge in the matching. The edges are said to be independent.

    Parameters
    ----------
    G : NetworkX graph

    matching : dict or set
        A dictionary or set representing a matching. If a dictionary, it
        must have ``matching[u] == v`` and ``matching[v] == u`` for each
        edge ``(u, v)`` in the matching. If a set, it must have elements
        of the form ``(u, v)``, where ``(u, v)`` is an edge in the
        matching.

    Returns
    -------
    bool
        Whether the given set or dictionary represents a valid matching
        in the graph.

    Raises
    ------
    NetworkXError
        If the proposed matching has an edge to a node not in G.
        Or if the matching is not a collection of 2-tuple edges.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (2, 4), (3, 5), (4, 5)])
    >>> nx.is_maximal_matching(G, {1: 3, 2: 4})  # using dict to represent matching
    True

    >>> nx.is_matching(G, {(1, 3), (2, 4)})  # using set to represent matching
    True

    """
    if isinstance(matching, dict):
        matching = matching_dict_to_set(matching)

    seen = set()
    for edge in matching:
        if len(edge) != 2:
            raise nx.NetworkXError(f"matching has non-2-tuple edge {edge}")
        u, v = edge
        if u not in G or v not in G:
            raise nx.NetworkXError(f"matching contains edge {edge} with node not in G")
        # Invalid if the edge is a self-loop, absent from G, or shares an
        # endpoint with a previously seen matching edge.
        if u == v or not G.has_edge(u, v) or u in seen or v in seen:
            return False
        seen.add(u)
        seen.add(v)
    return True
+
+
@nx._dispatchable
def is_maximal_matching(G, matching):
    """Return True if ``matching`` is a maximal matching of ``G``

    A *maximal matching* in a graph is a matching in which adding any
    edge would cause the set to no longer be a valid matching.

    Parameters
    ----------
    G : NetworkX graph

    matching : dict or set
        A dictionary or set representing a matching. If a dictionary, it
        must have ``matching[u] == v`` and ``matching[v] == u`` for each
        edge ``(u, v)`` in the matching. If a set, it must have elements
        of the form ``(u, v)``, where ``(u, v)`` is an edge in the
        matching.

    Returns
    -------
    bool
        Whether the given set or dictionary represents a valid maximal
        matching in the graph.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (3, 5)])
    >>> nx.is_maximal_matching(G, {(1, 2), (3, 4)})
    True

    """
    if isinstance(matching, dict):
        matching = matching_dict_to_set(matching)

    # First verify the proposal is a matching at all, collecting the
    # matched edges (both orientations) and matched nodes along the way.
    matched_edges = set()
    matched_nodes = set()
    for edge in matching:
        if len(edge) != 2:
            raise nx.NetworkXError(f"matching has non-2-tuple edge {edge}")
        u, v = edge
        if u not in G or v not in G:
            raise nx.NetworkXError(f"matching contains edge {edge} with node not in G")
        if u == v or not G.has_edge(u, v):
            return False
        if u in matched_nodes or v in matched_nodes:
            return False
        matched_nodes.update((u, v))
        matched_edges.update({(u, v), (v, u)})
    # Maximal means no non-loop edge of G can be added without matching
    # some node twice.
    for u, v in G.edges:
        if (u, v) not in matched_edges and u != v:
            if u not in matched_nodes and v not in matched_nodes:
                return False
    return True
+
+
@nx._dispatchable
def is_perfect_matching(G, matching):
    """Return True if ``matching`` is a perfect matching for ``G``

    A *perfect matching* in a graph is a matching in which exactly one edge
    is incident upon each vertex.

    Parameters
    ----------
    G : NetworkX graph

    matching : dict or set
        A dictionary or set representing a matching. If a dictionary, it
        must have ``matching[u] == v`` and ``matching[v] == u`` for each
        edge ``(u, v)`` in the matching. If a set, it must have elements
        of the form ``(u, v)``, where ``(u, v)`` is an edge in the
        matching.

    Returns
    -------
    bool
        Whether the given set or dictionary represents a valid perfect
        matching in the graph.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (2, 4), (3, 5), (4, 5), (4, 6)])
    >>> my_match = {1: 2, 3: 5, 4: 6}
    >>> nx.is_perfect_matching(G, my_match)
    True

    """
    if isinstance(matching, dict):
        matching = matching_dict_to_set(matching)

    covered = set()
    for edge in matching:
        if len(edge) != 2:
            raise nx.NetworkXError(f"matching has non-2-tuple edge {edge}")
        u, v = edge
        if u not in G or v not in G:
            raise nx.NetworkXError(f"matching contains edge {edge} with node not in G")
        if u == v or not G.has_edge(u, v) or u in covered or v in covered:
            return False
        covered.add(u)
        covered.add(v)
    # Perfect means every node of G is matched exactly once.
    return len(covered) == len(G)
+
+
@not_implemented_for("multigraph")
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def min_weight_matching(G, weight="weight"):
    """Computing a minimum-weight maximal matching of G.

    Use the maximum-weight algorithm with edge weights subtracted
    from the maximum weight of all edges.

    A matching is a subset of edges in which no node occurs more than once.
    The weight of a matching is the sum of the weights of its edges.
    A maximal matching cannot add more edges and still be a matching.
    The cardinality of a matching is the number of matched edges.

    This method replaces the edge weights with 1 plus the maximum edge weight
    minus the original edge weight.

    new_weight = (max_weight + 1) - edge_weight

    then runs :func:`max_weight_matching` with the new weights.
    The max weight matching with these new weights corresponds
    to the min weight matching using the original weights.
    Adding 1 to the max edge weight keeps all edge weights positive
    and as integers if they started as integers.

    You might worry that adding 1 to each weight would make the algorithm
    favor matchings with more edges. But we use the parameter
    `maxcardinality=True` in `max_weight_matching` to ensure that the
    number of edges in the competing matchings are the same and thus
    the optimum does not change due to changes in the number of edges.

    Read the documentation of `max_weight_matching` for more information.

    Parameters
    ----------
    G : NetworkX graph
      Undirected graph

    weight: string, optional (default='weight')
       Edge data key corresponding to the edge weight.
       If key not found, uses 1 as weight.

    Returns
    -------
    matching : set
        A minimal weight matching of the graph.

    See Also
    --------
    max_weight_matching
    """
    # Edgeless graphs need no inversion; delegate directly.
    if len(G.edges) == 0:
        return max_weight_matching(G, maxcardinality=True, weight=weight)
    # Edge view with missing weights defaulting to 1; safe to iterate twice.
    weighted_edges = G.edges(data=weight, default=1)
    # Shift by max+1 so every inverted weight stays strictly positive
    # (and integral when the inputs are integral).
    shift = 1 + max(w for _, _, w in weighted_edges)
    inverted = nx.Graph()
    inverted.add_weighted_edges_from(
        ((u, v, shift - w) for u, v, w in weighted_edges), weight=weight
    )
    # Maximizing the inverted weights at fixed (maximum) cardinality
    # minimizes the original weights.
    return max_weight_matching(inverted, maxcardinality=True, weight=weight)
+
+
+@not_implemented_for("multigraph")
+@not_implemented_for("directed")
+@nx._dispatchable(edge_attrs="weight")
+def max_weight_matching(G, maxcardinality=False, weight="weight"):
+    """Compute a maximum-weighted matching of G.
+
+    A matching is a subset of edges in which no node occurs more than once.
+    The weight of a matching is the sum of the weights of its edges.
+    A maximal matching cannot add more edges and still be a matching.
+    The cardinality of a matching is the number of matched edges.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+      Undirected graph
+
+    maxcardinality: bool, optional (default=False)
+       If maxcardinality is True, compute the maximum-cardinality matching
+       with maximum weight among all maximum-cardinality matchings.
+
+    weight: string, optional (default='weight')
+       Edge data key corresponding to the edge weight.
+       If key not found, uses 1 as weight.
+
+
+    Returns
+    -------
+    matching : set
+        A maximal matching of the graph.
+
+     Examples
+    --------
+    >>> G = nx.Graph()
+    >>> edges = [(1, 2, 6), (1, 3, 2), (2, 3, 1), (2, 4, 7), (3, 5, 9), (4, 5, 3)]
+    >>> G.add_weighted_edges_from(edges)
+    >>> sorted(nx.max_weight_matching(G))
+    [(2, 4), (5, 3)]
+
+    Notes
+    -----
+    If G has edges with weight attributes the edge data are used as
+    weight values else the weights are assumed to be 1.
+
+    This function takes time O(number_of_nodes ** 3).
+
+    If all edge weights are integers, the algorithm uses only integer
+    computations.  If floating point weights are used, the algorithm
+    could return a slightly suboptimal matching due to numeric
+    precision errors.
+
+    This method is based on the "blossom" method for finding augmenting
+    paths and the "primal-dual" method for finding a matching of maximum
+    weight, both methods invented by Jack Edmonds [1]_.
+
+    Bipartite graphs can also be matched using the functions present in
+    :mod:`networkx.algorithms.bipartite.matching`.
+
+    References
+    ----------
+    .. [1] "Efficient Algorithms for Finding Maximum Matching in Graphs",
+       Zvi Galil, ACM Computing Surveys, 1986.
+    """
+    #
+    # The algorithm is taken from "Efficient Algorithms for Finding Maximum
+    # Matching in Graphs" by Zvi Galil, ACM Computing Surveys, 1986.
+    # It is based on the "blossom" method for finding augmenting paths and
+    # the "primal-dual" method for finding a matching of maximum weight, both
+    # methods invented by Jack Edmonds.
+    #
+    # A C program for maximum weight matching by Ed Rothberg was used
+    # extensively to validate this new code.
+    #
+    # Many terms used in the code comments are explained in the paper
+    # by Galil. You will probably need the paper to make sense of this code.
+    #
+
+    class NoNode:
+        """Dummy value which is different from any node."""
+
+    class Blossom:
+        """Representation of a non-trivial blossom or sub-blossom."""
+
+        __slots__ = ["childs", "edges", "mybestedges"]
+
+        # b.childs is an ordered list of b's sub-blossoms, starting with
+        # the base and going round the blossom.
+
+        # b.edges is the list of b's connecting edges, such that
+        # b.edges[i] = (v, w) where v is a vertex in b.childs[i]
+        # and w is a vertex in b.childs[wrap(i+1)].
+
+        # If b is a top-level S-blossom,
+        # b.mybestedges is a list of least-slack edges to neighboring
+        # S-blossoms, or None if no such list has been computed yet.
+        # This is used for efficient computation of delta3.
+
+        # Generate the blossom's leaf vertices.
+        def leaves(self):
+            stack = [*self.childs]
+            while stack:
+                t = stack.pop()
+                if isinstance(t, Blossom):
+                    stack.extend(t.childs)
+                else:
+                    yield t
+
+    # Get a list of vertices.
+    gnodes = list(G)
+    if not gnodes:
+        return set()  # don't bother with empty graphs
+
+    # Find the maximum edge weight.
+    maxweight = 0
+    allinteger = True
+    for i, j, d in G.edges(data=True):
+        wt = d.get(weight, 1)
+        if i != j and wt > maxweight:
+            maxweight = wt
+        allinteger = allinteger and (str(type(wt)).split("'")[1] in ("int", "long"))
+
+    # If v is a matched vertex, mate[v] is its partner vertex.
+    # If v is a single vertex, v does not occur as a key in mate.
+    # Initially all vertices are single; updated during augmentation.
+    mate = {}
+
+    # If b is a top-level blossom,
+    # label.get(b) is None if b is unlabeled (free),
+    #                 1 if b is an S-blossom,
+    #                 2 if b is a T-blossom.
+    # The label of a vertex is found by looking at the label of its top-level
+    # containing blossom.
+    # If v is a vertex inside a T-blossom, label[v] is 2 iff v is reachable
+    # from an S-vertex outside the blossom.
+    # Labels are assigned during a stage and reset after each augmentation.
+    label = {}
+
+    # If b is a labeled top-level blossom,
+    # labeledge[b] = (v, w) is the edge through which b obtained its label
+    # such that w is a vertex in b, or None if b's base vertex is single.
+    # If w is a vertex inside a T-blossom and label[w] == 2,
+    # labeledge[w] = (v, w) is an edge through which w is reachable from
+    # outside the blossom.
+    labeledge = {}
+
+    # If v is a vertex, inblossom[v] is the top-level blossom to which v
+    # belongs.
+    # If v is a top-level vertex, inblossom[v] == v since v is itself
+    # a (trivial) top-level blossom.
+    # Initially all vertices are top-level trivial blossoms.
+    inblossom = dict(zip(gnodes, gnodes))
+
+    # If b is a sub-blossom,
+    # blossomparent[b] is its immediate parent (sub-)blossom.
+    # If b is a top-level blossom, blossomparent[b] is None.
+    blossomparent = dict(zip(gnodes, repeat(None)))
+
+    # If b is a (sub-)blossom,
+    # blossombase[b] is its base VERTEX (i.e. recursive sub-blossom).
+    blossombase = dict(zip(gnodes, gnodes))
+
+    # If w is a free vertex (or an unreached vertex inside a T-blossom),
+    # bestedge[w] = (v, w) is the least-slack edge from an S-vertex,
+    # or None if there is no such edge.
+    # If b is a (possibly trivial) top-level S-blossom,
+    # bestedge[b] = (v, w) is the least-slack edge to a different S-blossom
+    # (v inside b), or None if there is no such edge.
+    # This is used for efficient computation of delta2 and delta3.
+    bestedge = {}
+
+    # If v is a vertex,
+    # dualvar[v] = 2 * u(v) where u(v) is the v's variable in the dual
+    # optimization problem (if all edge weights are integers, multiplication
+    # by two ensures that all values remain integers throughout the algorithm).
+    # Initially, u(v) = maxweight / 2.
+    dualvar = dict(zip(gnodes, repeat(maxweight)))
+
+    # If b is a non-trivial blossom,
+    # blossomdual[b] = z(b) where z(b) is b's variable in the dual
+    # optimization problem.
+    blossomdual = {}
+
+    # If (v, w) in allowedge or (w, v) in allowedg, then the edge
+    # (v, w) is known to have zero slack in the optimization problem;
+    # otherwise the edge may or may not have zero slack.
+    allowedge = {}
+
+    # Queue of newly discovered S-vertices.
+    queue = []
+
+    # Return 2 * slack of edge (v, w) (does not work inside blossoms).
+    def slack(v, w):
+        return dualvar[v] + dualvar[w] - 2 * G[v][w].get(weight, 1)
+
+    # Assign label t to the top-level blossom containing vertex w,
+    # coming through an edge from vertex v.
+    def assignLabel(w, t, v):
+        b = inblossom[w]
+        assert label.get(w) is None and label.get(b) is None
+        label[w] = label[b] = t
+        if v is not None:
+            labeledge[w] = labeledge[b] = (v, w)
+        else:
+            labeledge[w] = labeledge[b] = None
+        bestedge[w] = bestedge[b] = None
+        if t == 1:
+            # b became an S-vertex/blossom; add it(s vertices) to the queue.
+            if isinstance(b, Blossom):
+                queue.extend(b.leaves())
+            else:
+                queue.append(b)
+        elif t == 2:
+            # b became a T-vertex/blossom; assign label S to its mate.
+            # (If b is a non-trivial blossom, its base is the only vertex
+            # with an external mate.)
+            base = blossombase[b]
+            assignLabel(mate[base], 1, base)
+
+    # Trace back from vertices v and w to discover either a new blossom
+    # or an augmenting path. Return the base vertex of the new blossom,
+    # or NoNode if an augmenting path was found.
+    def scanBlossom(v, w):
+        # Trace back from v and w, placing breadcrumbs as we go.
+        path = []
+        base = NoNode
+        while v is not NoNode:
+            # Look for a breadcrumb in v's blossom or put a new breadcrumb.
+            b = inblossom[v]
+            if label[b] & 4:
+                base = blossombase[b]
+                break
+            assert label[b] == 1
+            path.append(b)
+            label[b] = 5
+            # Trace one step back.
+            if labeledge[b] is None:
+                # The base of blossom b is single; stop tracing this path.
+                assert blossombase[b] not in mate
+                v = NoNode
+            else:
+                assert labeledge[b][0] == mate[blossombase[b]]
+                v = labeledge[b][0]
+                b = inblossom[v]
+                assert label[b] == 2
+                # b is a T-blossom; trace one more step back.
+                v = labeledge[b][0]
+            # Swap v and w so that we alternate between both paths.
+            if w is not NoNode:
+                v, w = w, v
+        # Remove breadcrumbs.
+        for b in path:
+            label[b] = 1
+        # Return base vertex, if we found one.
+        return base
+
+    # Construct a new blossom with given base, through S-vertices v and w.
+    # Label the new blossom as S; set its dual variable to zero;
+    # relabel its T-vertices to S and add them to the queue.
+    def addBlossom(base, v, w):
+        bb = inblossom[base]
+        bv = inblossom[v]
+        bw = inblossom[w]
+        # Create blossom.
+        b = Blossom()
+        blossombase[b] = base
+        blossomparent[b] = None
+        blossomparent[bb] = b
+        # Make list of sub-blossoms and their interconnecting edge endpoints.
+        b.childs = path = []
+        b.edges = edgs = [(v, w)]
+        # Trace back from v to base.
+        while bv != bb:
+            # Add bv to the new blossom.
+            blossomparent[bv] = b
+            path.append(bv)
+            edgs.append(labeledge[bv])
+            assert label[bv] == 2 or (
+                label[bv] == 1 and labeledge[bv][0] == mate[blossombase[bv]]
+            )
+            # Trace one step back.
+            v = labeledge[bv][0]
+            bv = inblossom[v]
+        # Add base sub-blossom; reverse lists.
+        path.append(bb)
+        path.reverse()
+        edgs.reverse()
+        # Trace back from w to base.
+        while bw != bb:
+            # Add bw to the new blossom.
+            blossomparent[bw] = b
+            path.append(bw)
+            edgs.append((labeledge[bw][1], labeledge[bw][0]))
+            assert label[bw] == 2 or (
+                label[bw] == 1 and labeledge[bw][0] == mate[blossombase[bw]]
+            )
+            # Trace one step back.
+            w = labeledge[bw][0]
+            bw = inblossom[w]
+        # Set label to S.
+        assert label[bb] == 1
+        label[b] = 1
+        labeledge[b] = labeledge[bb]
+        # Set dual variable to zero.
+        blossomdual[b] = 0
+        # Relabel vertices.
+        for v in b.leaves():
+            if label[inblossom[v]] == 2:
+                # This T-vertex now turns into an S-vertex because it becomes
+                # part of an S-blossom; add it to the queue.
+                queue.append(v)
+            inblossom[v] = b
+        # Compute b.mybestedges.
+        bestedgeto = {}
+        for bv in path:
+            if isinstance(bv, Blossom):
+                if bv.mybestedges is not None:
+                    # Walk this subblossom's least-slack edges.
+                    nblist = bv.mybestedges
+                    # The sub-blossom won't need this data again.
+                    bv.mybestedges = None
+                else:
+                    # This subblossom does not have a list of least-slack
+                    # edges; get the information from the vertices.
+                    nblist = [
+                        (v, w) for v in bv.leaves() for w in G.neighbors(v) if v != w
+                    ]
+            else:
+                nblist = [(bv, w) for w in G.neighbors(bv) if bv != w]
+            for k in nblist:
+                (i, j) = k
+                if inblossom[j] == b:
+                    i, j = j, i
+                bj = inblossom[j]
+                if (
+                    bj != b
+                    and label.get(bj) == 1
+                    and ((bj not in bestedgeto) or slack(i, j) < slack(*bestedgeto[bj]))
+                ):
+                    bestedgeto[bj] = k
+            # Forget about least-slack edge of the subblossom.
+            bestedge[bv] = None
+        b.mybestedges = list(bestedgeto.values())
+        # Select bestedge[b].
+        mybestedge = None
+        bestedge[b] = None
+        for k in b.mybestedges:
+            kslack = slack(*k)
+            if mybestedge is None or kslack < mybestslack:
+                mybestedge = k
+                mybestslack = kslack
+        bestedge[b] = mybestedge
+
+    # Expand the given top-level blossom.
+    def expandBlossom(b, endstage):
+        # This is an obnoxiously complicated recursive function for the sake of
+        # a stack-transformation.  So, we hack around the complexity by using
+        # a trampoline pattern.  By yielding the arguments to each recursive
+        # call, we keep the actual callstack flat.
+
+        def _recurse(b, endstage):
+            # Convert sub-blossoms into top-level blossoms.
+            for s in b.childs:
+                blossomparent[s] = None
+                if isinstance(s, Blossom):
+                    if endstage and blossomdual[s] == 0:
+                        # Recursively expand this sub-blossom.
+                        yield s
+                    else:
+                        for v in s.leaves():
+                            inblossom[v] = s
+                else:
+                    inblossom[s] = s
+            # If we expand a T-blossom during a stage, its sub-blossoms must be
+            # relabeled.
+            if (not endstage) and label.get(b) == 2:
+                # Start at the sub-blossom through which the expanding
+                # blossom obtained its label, and relabel sub-blossoms untili
+                # we reach the base.
+                # Figure out through which sub-blossom the expanding blossom
+                # obtained its label initially.
+                entrychild = inblossom[labeledge[b][1]]
+                # Decide in which direction we will go round the blossom.
+                j = b.childs.index(entrychild)
+                if j & 1:
+                    # Start index is odd; go forward and wrap.
+                    j -= len(b.childs)
+                    jstep = 1
+                else:
+                    # Start index is even; go backward.
+                    jstep = -1
+                # Move along the blossom until we get to the base.
+                v, w = labeledge[b]
+                while j != 0:
+                    # Relabel the T-sub-blossom.
+                    if jstep == 1:
+                        p, q = b.edges[j]
+                    else:
+                        q, p = b.edges[j - 1]
+                    label[w] = None
+                    label[q] = None
+                    assignLabel(w, 2, v)
+                    # Step to the next S-sub-blossom and note its forward edge.
+                    allowedge[(p, q)] = allowedge[(q, p)] = True
+                    j += jstep
+                    if jstep == 1:
+                        v, w = b.edges[j]
+                    else:
+                        w, v = b.edges[j - 1]
+                    # Step to the next T-sub-blossom.
+                    allowedge[(v, w)] = allowedge[(w, v)] = True
+                    j += jstep
+                # Relabel the base T-sub-blossom WITHOUT stepping through to
+                # its mate (so don't call assignLabel).
+                bw = b.childs[j]
+                label[w] = label[bw] = 2
+                labeledge[w] = labeledge[bw] = (v, w)
+                bestedge[bw] = None
+                # Continue along the blossom until we get back to entrychild.
+                j += jstep
+                while b.childs[j] != entrychild:
+                    # Examine the vertices of the sub-blossom to see whether
+                    # it is reachable from a neighboring S-vertex outside the
+                    # expanding blossom.
+                    bv = b.childs[j]
+                    if label.get(bv) == 1:
+                        # This sub-blossom just got label S through one of its
+                        # neighbors; leave it be.
+                        j += jstep
+                        continue
+                    if isinstance(bv, Blossom):
+                        for v in bv.leaves():
+                            if label.get(v):
+                                break
+                    else:
+                        v = bv
+                    # If the sub-blossom contains a reachable vertex, assign
+                    # label T to the sub-blossom.
+                    if label.get(v):
+                        assert label[v] == 2
+                        assert inblossom[v] == bv
+                        label[v] = None
+                        label[mate[blossombase[bv]]] = None
+                        assignLabel(v, 2, labeledge[v][0])
+                    j += jstep
+            # Remove the expanded blossom entirely.
+            label.pop(b, None)
+            labeledge.pop(b, None)
+            bestedge.pop(b, None)
+            del blossomparent[b]
+            del blossombase[b]
+            del blossomdual[b]
+
+        # Now, we apply the trampoline pattern.  We simulate a recursive
+        # callstack by maintaining a stack of generators, each yielding a
+        # sequence of function arguments.  We grow the stack by appending a call
+        # to _recurse on each argument tuple, and shrink the stack whenever a
+        # generator is exhausted.
+        stack = [_recurse(b, endstage)]
+        while stack:
+            top = stack[-1]
+            for s in top:
+                stack.append(_recurse(s, endstage))
+                break
+            else:
+                stack.pop()
+
+    # Swap matched/unmatched edges over an alternating path through blossom b
+    # between vertex v and the base vertex. Keep blossom bookkeeping
+    # consistent.
+    def augmentBlossom(b, v):
+        # This is an obnoxiously complicated recursive function for the sake of
+        # a stack-transformation.  So, we hack around the complexity by using
+        # a trampoline pattern.  By yielding the arguments to each recursive
+        # call, we keep the actual callstack flat.
+
+        def _recurse(b, v):
+            # Bubble up through the blossom tree from vertex v to an immediate
+            # sub-blossom of b.
+            t = v
+            while blossomparent[t] != b:
+                t = blossomparent[t]
+            # Recursively deal with the first sub-blossom.
+            if isinstance(t, Blossom):
+                yield (t, v)
+            # Decide in which direction we will go round the blossom.
+            i = j = b.childs.index(t)
+            if i & 1:
+                # Start index is odd; go forward and wrap.
+                j -= len(b.childs)
+                jstep = 1
+            else:
+                # Start index is even; go backward.
+                jstep = -1
+            # Move along the blossom until we get to the base.
+            while j != 0:
+                # Step to the next sub-blossom and augment it recursively.
+                j += jstep
+                t = b.childs[j]
+                if jstep == 1:
+                    w, x = b.edges[j]
+                else:
+                    x, w = b.edges[j - 1]
+                if isinstance(t, Blossom):
+                    yield (t, w)
+                # Step to the next sub-blossom and augment it recursively.
+                j += jstep
+                t = b.childs[j]
+                if isinstance(t, Blossom):
+                    yield (t, x)
+                # Match the edge connecting those sub-blossoms.
+                mate[w] = x
+                mate[x] = w
+            # Rotate the list of sub-blossoms to put the new base at the front.
+            b.childs = b.childs[i:] + b.childs[:i]
+            b.edges = b.edges[i:] + b.edges[:i]
+            blossombase[b] = blossombase[b.childs[0]]
+            assert blossombase[b] == v
+
+        # Now, we apply the trampoline pattern.  We simulate a recursive
+        # callstack by maintaining a stack of generators, each yielding a
+        # sequence of function arguments.  We grow the stack by appending a call
+        # to _recurse on each argument tuple, and shrink the stack whenever a
+        # generator is exhausted.
+        stack = [_recurse(b, v)]
+        while stack:
+            top = stack[-1]
+            for args in top:
+                stack.append(_recurse(*args))
+                break
+            else:
+                stack.pop()
+
+    # Swap matched/unmatched edges over an alternating path between two
+    # single vertices. The augmenting path runs through S-vertices v and w.
+    def augmentMatching(v, w):
+        for s, j in ((v, w), (w, v)):
+            # Match vertex s to vertex j. Then trace back from s
+            # until we find a single vertex, swapping matched and unmatched
+            # edges as we go.
+            while 1:
+                bs = inblossom[s]
+                assert label[bs] == 1
+                assert (labeledge[bs] is None and blossombase[bs] not in mate) or (
+                    labeledge[bs][0] == mate[blossombase[bs]]
+                )
+                # Augment through the S-blossom from s to base.
+                if isinstance(bs, Blossom):
+                    augmentBlossom(bs, s)
+                # Update mate[s]
+                mate[s] = j
+                # Trace one step back.
+                if labeledge[bs] is None:
+                    # Reached single vertex; stop.
+                    break
+                t = labeledge[bs][0]
+                bt = inblossom[t]
+                assert label[bt] == 2
+                # Trace one more step back.
+                s, j = labeledge[bt]
+                # Augment through the T-blossom from j to base.
+                assert blossombase[bt] == t
+                if isinstance(bt, Blossom):
+                    augmentBlossom(bt, j)
+                # Update mate[j]
+                mate[j] = s
+
+    # Verify that the optimum solution has been reached.
+    def verifyOptimum():
+        if maxcardinality:
+            # Vertices may have negative dual;
+            # find a constant non-negative number to add to all vertex duals.
+            vdualoffset = max(0, -min(dualvar.values()))
+        else:
+            vdualoffset = 0
+        # 0. all dual variables are non-negative
+        assert min(dualvar.values()) + vdualoffset >= 0
+        assert len(blossomdual) == 0 or min(blossomdual.values()) >= 0
+        # 0. all edges have non-negative slack and
+        # 1. all matched edges have zero slack;
+        for i, j, d in G.edges(data=True):
+            wt = d.get(weight, 1)
+            if i == j:
+                continue  # ignore self-loops
+            s = dualvar[i] + dualvar[j] - 2 * wt
+            iblossoms = [i]
+            jblossoms = [j]
+            while blossomparent[iblossoms[-1]] is not None:
+                iblossoms.append(blossomparent[iblossoms[-1]])
+            while blossomparent[jblossoms[-1]] is not None:
+                jblossoms.append(blossomparent[jblossoms[-1]])
+            iblossoms.reverse()
+            jblossoms.reverse()
+            for bi, bj in zip(iblossoms, jblossoms):
+                if bi != bj:
+                    break
+                s += 2 * blossomdual[bi]
+            assert s >= 0
+            if mate.get(i) == j or mate.get(j) == i:
+                assert mate[i] == j and mate[j] == i
+                assert s == 0
+        # 2. all single vertices have zero dual value;
+        for v in gnodes:
+            assert (v in mate) or dualvar[v] + vdualoffset == 0
+        # 3. all blossoms with positive dual value are full.
+        for b in blossomdual:
+            if blossomdual[b] > 0:
+                assert len(b.edges) % 2 == 1
+                for i, j in b.edges[1::2]:
+                    assert mate[i] == j and mate[j] == i
+        # Ok.
+
+    # Main loop: continue until no further improvement is possible.
+    while 1:
+        # Each iteration of this loop is a "stage".
+        # A stage finds an augmenting path and uses that to improve
+        # the matching.
+
+        # Remove labels from top-level blossoms/vertices.
+        label.clear()
+        labeledge.clear()
+
+        # Forget all about least-slack edges.
+        bestedge.clear()
+        for b in blossomdual:
+            b.mybestedges = None
+
+        # Loss of labeling means that we can not be sure that currently
+        # allowable edges remain allowable throughout this stage.
+        allowedge.clear()
+
+        # Make queue empty.
+        queue[:] = []
+
+        # Label single blossoms/vertices with S and put them in the queue.
+        for v in gnodes:
+            if (v not in mate) and label.get(inblossom[v]) is None:
+                assignLabel(v, 1, None)
+
+        # Loop until we succeed in augmenting the matching.
+        augmented = 0
+        while 1:
+            # Each iteration of this loop is a "substage".
+            # A substage tries to find an augmenting path;
+            # if found, the path is used to improve the matching and
+            # the stage ends. If there is no augmenting path, the
+            # primal-dual method is used to pump some slack out of
+            # the dual variables.
+
+            # Continue labeling until all vertices which are reachable
+            # through an alternating path have got a label.
+            while queue and not augmented:
+                # Take an S vertex from the queue.
+                v = queue.pop()
+                assert label[inblossom[v]] == 1
+
+                # Scan its neighbors:
+                for w in G.neighbors(v):
+                    if w == v:
+                        continue  # ignore self-loops
+                    # w is a neighbor to v
+                    bv = inblossom[v]
+                    bw = inblossom[w]
+                    if bv == bw:
+                        # this edge is internal to a blossom; ignore it
+                        continue
+                    if (v, w) not in allowedge:
+                        kslack = slack(v, w)
+                        if kslack <= 0:
+                            # edge k has zero slack => it is allowable
+                            allowedge[(v, w)] = allowedge[(w, v)] = True
+                    if (v, w) in allowedge:
+                        if label.get(bw) is None:
+                            # (C1) w is a free vertex;
+                            # label w with T and label its mate with S (R12).
+                            assignLabel(w, 2, v)
+                        elif label.get(bw) == 1:
+                            # (C2) w is an S-vertex (not in the same blossom);
+                            # follow back-links to discover either an
+                            # augmenting path or a new blossom.
+                            base = scanBlossom(v, w)
+                            if base is not NoNode:
+                                # Found a new blossom; add it to the blossom
+                                # bookkeeping and turn it into an S-blossom.
+                                addBlossom(base, v, w)
+                            else:
+                                # Found an augmenting path; augment the
+                                # matching and end this stage.
+                                augmentMatching(v, w)
+                                augmented = 1
+                                break
+                        elif label.get(w) is None:
+                            # w is inside a T-blossom, but w itself has not
+                            # yet been reached from outside the blossom;
+                            # mark it as reached (we need this to relabel
+                            # during T-blossom expansion).
+                            assert label[bw] == 2
+                            label[w] = 2
+                            labeledge[w] = (v, w)
+                    elif label.get(bw) == 1:
+                        # keep track of the least-slack non-allowable edge to
+                        # a different S-blossom.
+                        if bestedge.get(bv) is None or kslack < slack(*bestedge[bv]):
+                            bestedge[bv] = (v, w)
+                    elif label.get(w) is None:
+                        # w is a free vertex (or an unreached vertex inside
+                        # a T-blossom) but we can not reach it yet;
+                        # keep track of the least-slack edge that reaches w.
+                        if bestedge.get(w) is None or kslack < slack(*bestedge[w]):
+                            bestedge[w] = (v, w)
+
+            if augmented:
+                break
+
+            # There is no augmenting path under these constraints;
+            # compute delta and reduce slack in the optimization problem.
+            # (Note that our vertex dual variables, edge slacks and delta's
+            # are pre-multiplied by two.)
+            deltatype = -1
+            delta = deltaedge = deltablossom = None
+
+            # Compute delta1: the minimum value of any vertex dual.
+            if not maxcardinality:
+                deltatype = 1
+                delta = min(dualvar.values())
+
+            # Compute delta2: the minimum slack on any edge between
+            # an S-vertex and a free vertex.
+            for v in G.nodes():
+                if label.get(inblossom[v]) is None and bestedge.get(v) is not None:
+                    d = slack(*bestedge[v])
+                    if deltatype == -1 or d < delta:
+                        delta = d
+                        deltatype = 2
+                        deltaedge = bestedge[v]
+
+            # Compute delta3: half the minimum slack on any edge between
+            # a pair of S-blossoms.
+            for b in blossomparent:
+                if (
+                    blossomparent[b] is None
+                    and label.get(b) == 1
+                    and bestedge.get(b) is not None
+                ):
+                    kslack = slack(*bestedge[b])
+                    if allinteger:
+                        assert (kslack % 2) == 0
+                        d = kslack // 2
+                    else:
+                        d = kslack / 2.0
+                    if deltatype == -1 or d < delta:
+                        delta = d
+                        deltatype = 3
+                        deltaedge = bestedge[b]
+
+            # Compute delta4: minimum z variable of any T-blossom.
+            for b in blossomdual:
+                if (
+                    blossomparent[b] is None
+                    and label.get(b) == 2
+                    and (deltatype == -1 or blossomdual[b] < delta)
+                ):
+                    delta = blossomdual[b]
+                    deltatype = 4
+                    deltablossom = b
+
+            if deltatype == -1:
+                # No further improvement possible; max-cardinality optimum
+                # reached. Do a final delta update to make the optimum
+                # verifiable.
+                assert maxcardinality
+                deltatype = 1
+                delta = max(0, min(dualvar.values()))
+
+            # Update dual variables according to delta.
+            for v in gnodes:
+                if label.get(inblossom[v]) == 1:
+                    # S-vertex: 2*u = 2*u - 2*delta
+                    dualvar[v] -= delta
+                elif label.get(inblossom[v]) == 2:
+                    # T-vertex: 2*u = 2*u + 2*delta
+                    dualvar[v] += delta
+            for b in blossomdual:
+                if blossomparent[b] is None:
+                    if label.get(b) == 1:
+                        # top-level S-blossom: z = z + 2*delta
+                        blossomdual[b] += delta
+                    elif label.get(b) == 2:
+                        # top-level T-blossom: z = z - 2*delta
+                        blossomdual[b] -= delta
+
+            # Take action at the point where minimum delta occurred.
+            if deltatype == 1:
+                # No further improvement possible; optimum reached.
+                break
+            elif deltatype == 2:
+                # Use the least-slack edge to continue the search.
+                (v, w) = deltaedge
+                assert label[inblossom[v]] == 1
+                allowedge[(v, w)] = allowedge[(w, v)] = True
+                queue.append(v)
+            elif deltatype == 3:
+                # Use the least-slack edge to continue the search.
+                (v, w) = deltaedge
+                allowedge[(v, w)] = allowedge[(w, v)] = True
+                assert label[inblossom[v]] == 1
+                queue.append(v)
+            elif deltatype == 4:
+                # Expand the least-z blossom.
+                expandBlossom(deltablossom, False)
+
+            # End of this substage.
+
+        # Paranoia check that the matching is symmetric.
+        for v in mate:
+            assert mate[mate[v]] == v
+
+        # Stop when no more augmenting path can be found.
+        if not augmented:
+            break
+
+        # End of a stage; expand all S-blossoms which have zero dual.
+        for b in list(blossomdual.keys()):
+            if b not in blossomdual:
+                continue  # already expanded
+            if blossomparent[b] is None and label.get(b) == 1 and blossomdual[b] == 0:
+                expandBlossom(b, True)
+
+    # Verify that we reached the optimum solution (only for integer weights).
+    if allinteger:
+        verifyOptimum()
+
+    return matching_dict_to_set(mate)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/__init__.py
new file mode 100644
index 00000000..cf15ddb5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/__init__.py
@@ -0,0 +1,27 @@
+"""
+Subpackages related to graph-minor problems.
+
+In graph theory, an undirected graph H is called a minor of the graph G if H
+can be formed from G by deleting edges and vertices and by contracting edges
+[1]_.
+
+References
+----------
+.. [1] https://en.wikipedia.org/wiki/Graph_minor
+"""
+
+from networkx.algorithms.minors.contraction import (
+    contracted_edge,
+    contracted_nodes,
+    equivalence_classes,
+    identified_nodes,
+    quotient_graph,
+)
+
+__all__ = [
+    "contracted_edge",
+    "contracted_nodes",
+    "equivalence_classes",
+    "identified_nodes",
+    "quotient_graph",
+]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/contraction.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/contraction.py
new file mode 100644
index 00000000..e85b5778
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/contraction.py
@@ -0,0 +1,634 @@
+"""Provides functions for computing minors of a graph."""
+
+from itertools import chain, combinations, permutations, product
+
+import networkx as nx
+from networkx import density
+from networkx.exception import NetworkXException
+from networkx.utils import arbitrary_element
+
+__all__ = [
+    "contracted_edge",
+    "contracted_nodes",
+    "equivalence_classes",
+    "identified_nodes",
+    "quotient_graph",
+]
+
+chaini = chain.from_iterable
+
+
+def equivalence_classes(iterable, relation):
+    """Returns equivalence classes of `relation` when applied to `iterable`.
+
+    The equivalence classes, or blocks, consist of objects from `iterable`
+    which are all equivalent. They are defined to be equivalent if the
+    `relation` function returns `True` when passed any two objects from that
+    class, and `False` otherwise. To define an equivalence relation the
+    function must be reflexive, symmetric and transitive.
+
+    Parameters
+    ----------
+    iterable : list, tuple, or set
+        An iterable of elements/nodes.
+
+    relation : function
+        A Boolean-valued function that implements an equivalence relation
+        (reflexive, symmetric, transitive binary relation) on the elements
+        of `iterable` - it must take two elements and return `True` if
+        they are related, or `False` if not.
+
+    Returns
+    -------
+    set of frozensets
+        A set of frozensets representing the partition induced by the equivalence
+        relation function `relation` on the elements of `iterable`. Each
+        member set in the return set represents an equivalence class, or
+        block, of the partition.
+
+        Duplicate elements will be ignored so it makes the most sense for
+        `iterable` to be a :class:`set`.
+
+    Notes
+    -----
+    This function does not check that `relation` represents an equivalence
+    relation. You can check that your equivalence classes provide a partition
+    using `is_partition`.
+
+    Examples
+    --------
+    Let `X` be the set of integers from `0` to `9`, and consider an equivalence
+    relation `R` on `X` of congruence modulo `3`: this means that two integers
+    `x` and `y` in `X` are equivalent under `R` if they leave the same
+    remainder when divided by `3`, i.e. `(x - y) mod 3 = 0`.
+
+    The equivalence classes of this relation are `{0, 3, 6, 9}`, `{1, 4, 7}`,
+    `{2, 5, 8}`: `0`, `3`, `6`, `9` are all divisible by `3` and leave zero
+    remainder; `1`, `4`, `7` leave remainder `1`; while `2`, `5` and `8` leave
+    remainder `2`. We can see this by calling `equivalence_classes` with
+    `X` and a function implementation of `R`.
+
+    >>> X = set(range(10))
+    >>> def mod3(x, y):
+    ...     return (x - y) % 3 == 0
+    >>> equivalence_classes(X, mod3)  # doctest: +SKIP
+    {frozenset({1, 4, 7}), frozenset({8, 2, 5}), frozenset({0, 9, 3, 6})}
+    """
+    # For simplicity of implementation, we initialize the return value as a
+    # list of lists, then convert it to a set of sets at the end of the
+    # function.
+    blocks = []
+    # Determine the equivalence class for each element of the iterable.
+    for y in iterable:
+        # Each element y must be in *exactly one* equivalence class.
+        #
+        # Each block is guaranteed to be non-empty
+        for block in blocks:
+            x = arbitrary_element(block)
+            if relation(x, y):
+                block.append(y)
+                break
+        else:
+            # If the element y is not part of any known equivalence class, it
+            # must be in its own, so we create a new singleton equivalence
+            # class for it.
+            blocks.append([y])
+    return {frozenset(block) for block in blocks}
+
+
+@nx._dispatchable(edge_attrs="weight", returns_graph=True)
+def quotient_graph(
+    G,
+    partition,
+    edge_relation=None,
+    node_data=None,
+    edge_data=None,
+    weight="weight",
+    relabel=False,
+    create_using=None,
+):
+    """Returns the quotient graph of `G` under the specified equivalence
+    relation on nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The graph for which to return the quotient graph with the
+        specified node relation.
+
+    partition : function, or dict or list of lists, tuples or sets
+        If a function, this function must represent an equivalence
+        relation on the nodes of `G`. It must take two arguments *u*
+        and *v* and return True exactly when *u* and *v* are in the
+        same equivalence class. The equivalence classes form the nodes
+        in the returned graph.
+
+        If a dict of lists/tuples/sets, the keys can be any meaningful
+        block labels, but the values must be the block lists/tuples/sets
+        (one list/tuple/set per block), and the blocks must form a valid
+        partition of the nodes of the graph. That is, each node must be
+        in exactly one block of the partition.
+
+        If a list of sets, the list must form a valid partition of
+        the nodes of the graph. That is, each node must be in exactly
+        one block of the partition.
+
+    edge_relation : Boolean function with two arguments
+        This function must represent an edge relation on the *blocks* of
+        the `partition` of `G`. It must take two arguments, *B* and *C*,
+        each one a set of nodes, and return True exactly when there should be
+        an edge joining block *B* to block *C* in the returned graph.
+
+        If `edge_relation` is not specified, it is assumed to be the
+        following relation. Block *B* is related to block *C* if and
+        only if some node in *B* is adjacent to some node in *C*,
+        according to the edge set of `G`.
+
+    node_data : function
+        This function takes one argument, *B*, a set of nodes in `G`,
+        and must return a dictionary representing the node data
+        attributes to set on the node representing *B* in the quotient graph.
+        If None, the following node attributes will be set:
+
+        * 'graph', the subgraph of the graph `G` that this block
+          represents,
+        * 'nnodes', the number of nodes in this block,
+        * 'nedges', the number of edges within this block,
+        * 'density', the density of the subgraph of `G` that this
+          block represents.
+
+    edge_data : function
+        This function takes two arguments, *B* and *C*, each one a set
+        of nodes, and must return a dictionary representing the edge
+        data attributes to set on the edge joining *B* and *C*, should
+        there be an edge joining *B* and *C* in the quotient graph (if
+        no such edge occurs in the quotient graph as determined by
+        `edge_relation`, then the output of this function is ignored).
+
+        If the quotient graph would be a multigraph, this function is
+        not applied, since the edge data from each edge in the graph
+        `G` appears in the edges of the quotient graph.
+
+    weight : string or None, optional (default="weight")
+        The name of an edge attribute that holds the numerical value
+        used as a weight. If None then each edge has weight 1.
+
+    relabel : bool
+        If True, relabel the nodes of the quotient graph to be
+        nonnegative integers. Otherwise, the nodes are identified with
+        :class:`frozenset` instances representing the blocks given in
+        `partition`.
+
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+       Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    NetworkX graph
+        The quotient graph of `G` under the equivalence relation
+        specified by `partition`. If the partition were given as a
+        list of :class:`set` instances and `relabel` is False,
+        each node will be a :class:`frozenset` corresponding to the same
+        :class:`set`.
+
+    Raises
+    ------
+    NetworkXException
+        If the given partition is not a valid partition of the nodes of
+        `G`.
+
+    Examples
+    --------
+    The quotient graph of the complete bipartite graph under the "same
+    neighbors" equivalence relation is `K_2`. Under this relation, two nodes
+    are equivalent if they are not adjacent but have the same neighbor set.
+
+    >>> G = nx.complete_bipartite_graph(2, 3)
+    >>> same_neighbors = lambda u, v: (u not in G[v] and v not in G[u] and G[u] == G[v])
+    >>> Q = nx.quotient_graph(G, same_neighbors)
+    >>> K2 = nx.complete_graph(2)
+    >>> nx.is_isomorphic(Q, K2)
+    True
+
+    The quotient graph of a directed graph under the "same strongly connected
+    component" equivalence relation is the condensation of the graph (see
+    :func:`condensation`). This example comes from the Wikipedia article
+    *`Strongly connected component`_*.
+
+    >>> G = nx.DiGraph()
+    >>> edges = [
+    ...     "ab",
+    ...     "be",
+    ...     "bf",
+    ...     "bc",
+    ...     "cg",
+    ...     "cd",
+    ...     "dc",
+    ...     "dh",
+    ...     "ea",
+    ...     "ef",
+    ...     "fg",
+    ...     "gf",
+    ...     "hd",
+    ...     "hf",
+    ... ]
+    >>> G.add_edges_from(tuple(x) for x in edges)
+    >>> components = list(nx.strongly_connected_components(G))
+    >>> sorted(sorted(component) for component in components)
+    [['a', 'b', 'e'], ['c', 'd', 'h'], ['f', 'g']]
+    >>>
+    >>> C = nx.condensation(G, components)
+    >>> component_of = C.graph["mapping"]
+    >>> same_component = lambda u, v: component_of[u] == component_of[v]
+    >>> Q = nx.quotient_graph(G, same_component)
+    >>> nx.is_isomorphic(C, Q)
+    True
+
+    Node identification can be represented as the quotient of a graph under the
+    equivalence relation that places the two nodes in one block and each other
+    node in its own singleton block.
+
+    >>> K24 = nx.complete_bipartite_graph(2, 4)
+    >>> K34 = nx.complete_bipartite_graph(3, 4)
+    >>> C = nx.contracted_nodes(K34, 1, 2)
+    >>> nodes = {1, 2}
+    >>> is_contracted = lambda u, v: u in nodes and v in nodes
+    >>> Q = nx.quotient_graph(K34, is_contracted)
+    >>> nx.is_isomorphic(Q, C)
+    True
+    >>> nx.is_isomorphic(Q, K24)
+    True
+
+    The blockmodeling technique described in [1]_ can be implemented as a
+    quotient graph.
+
+    >>> G = nx.path_graph(6)
+    >>> partition = [{0, 1}, {2, 3}, {4, 5}]
+    >>> M = nx.quotient_graph(G, partition, relabel=True)
+    >>> list(M.edges())
+    [(0, 1), (1, 2)]
+
+    Here is the same example but using partition as a dict of block sets.
+
+    >>> G = nx.path_graph(6)
+    >>> partition = {0: {0, 1}, 2: {2, 3}, 4: {4, 5}}
+    >>> M = nx.quotient_graph(G, partition, relabel=True)
+    >>> list(M.edges())
+    [(0, 1), (1, 2)]
+
+    Partitions can be represented in various ways:
+
+    0. a list/tuple/set of block lists/tuples/sets
+    1. a dict with block labels as keys and blocks lists/tuples/sets as values
+    2. a dict with block lists/tuples/sets as keys and block labels as values
+    3. a function from nodes in the original iterable to block labels
+    4. an equivalence relation function on the target iterable
+
+    As `quotient_graph` is designed to accept partitions represented as (0), (1) or
+    (4) only, the `equivalence_classes` function can be used to get the partitions
+    in the right form, in order to call `quotient_graph`.
+
+    .. _Strongly connected component: https://en.wikipedia.org/wiki/Strongly_connected_component
+
+    References
+    ----------
+    .. [1] Patrick Doreian, Vladimir Batagelj, and Anuska Ferligoj.
+           *Generalized Blockmodeling*.
+           Cambridge University Press, 2004.
+
+    """
+    # If the user provided an equivalence relation as a function to compute
+    # the blocks of the partition on the nodes of G induced by the
+    # equivalence relation.
+    if callable(partition):
+        # equivalence_classes always returns a partition of the whole of G.
+        partition = equivalence_classes(G, partition)
+        if not nx.community.is_partition(G, partition):
+            raise nx.NetworkXException(
+                "Input `partition` is not an equivalence relation for nodes of G"
+            )
+        return _quotient_graph(
+            G,
+            partition,
+            edge_relation,
+            node_data,
+            edge_data,
+            weight,
+            relabel,
+            create_using,
+        )
+
+    # If the partition is a dict, it is assumed to be one where the keys are
+    # user-defined block labels, and values are block lists, tuples or sets.
+    if isinstance(partition, dict):
+        partition = list(partition.values())
+
+    # If the user provided partition as a collection of sets. Then we
+    # need to check if partition covers all of G nodes. If the answer
+    # is 'No' then we need to prepare suitable subgraph view.
+    partition_nodes = set().union(*partition)
+    if len(partition_nodes) != len(G):
+        G = G.subgraph(partition_nodes)
+    # Each node in the graph/subgraph must be in exactly one block.
+    if not nx.community.is_partition(G, partition):
+        raise NetworkXException("each node must be in exactly one part of `partition`")
+    return _quotient_graph(
+        G,
+        partition,
+        edge_relation,
+        node_data,
+        edge_data,
+        weight,
+        relabel,
+        create_using,
+    )
+
+
+def _quotient_graph(
+    G, partition, edge_relation, node_data, edge_data, weight, relabel, create_using
+):
+    """Construct the quotient graph assuming input has been checked"""
+    if create_using is None:
+        H = G.__class__()
+    else:
+        H = nx.empty_graph(0, create_using)
+    # By default set some basic information about the subgraph that each block
+    # represents on the nodes in the quotient graph.
+    if node_data is None:
+
+        def node_data(b):
+            S = G.subgraph(b)
+            return {
+                "graph": S,
+                "nnodes": len(S),
+                "nedges": S.number_of_edges(),
+                "density": density(S),
+            }
+
+    # Each block of the partition becomes a node in the quotient graph.
+    partition = [frozenset(b) for b in partition]
+    H.add_nodes_from((b, node_data(b)) for b in partition)
+    # By default, the edge relation is the relation defined as follows. B is
+    # adjacent to C if a node in B is adjacent to a node in C, according to the
+    # edge set of G.
+    #
+    # This is not a particularly efficient implementation of this relation:
+    # there are O(n^2) pairs to check and each check may require O(log n) time
+    # (to check set membership). This can certainly be parallelized.
+    if edge_relation is None:
+
+        def edge_relation(b, c):
+            return any(v in G[u] for u, v in product(b, c))
+
+    # By default, sum the weights of the edges joining pairs of nodes across
+    # blocks to get the weight of the edge joining those two blocks.
+    if edge_data is None:
+
+        def edge_data(b, c):
+            edgedata = (
+                d
+                for u, v, d in G.edges(b | c, data=True)
+                if (u in b and v in c) or (u in c and v in b)
+            )
+            return {"weight": sum(d.get(weight, 1) for d in edgedata)}
+
+    block_pairs = permutations(H, 2) if H.is_directed() else combinations(H, 2)
+    # In a multigraph, add one edge in the quotient graph for each edge
+    # in the original graph.
+    if H.is_multigraph():
+        edges = chaini(
+            (
+                (b, c, G.get_edge_data(u, v, default={}))
+                for u, v in product(b, c)
+                if v in G[u]
+            )
+            for b, c in block_pairs
+            if edge_relation(b, c)
+        )
+    # In a simple graph, apply the edge data function to each pair of
+    # blocks to determine the edge data attributes to apply to each edge
+    # in the quotient graph.
+    else:
+        edges = (
+            (b, c, edge_data(b, c)) for (b, c) in block_pairs if edge_relation(b, c)
+        )
+    H.add_edges_from(edges)
+    # If requested by the user, relabel the nodes to be integers,
+    # numbered in increasing order from zero in the same order as the
+    # iteration order of `partition`.
+    if relabel:
+        # Can't use nx.convert_node_labels_to_integers() here since we
+        # want the order of iteration to be the same for backward
+        # compatibility with the nx.blockmodel() function.
+        labels = {b: i for i, b in enumerate(partition)}
+        H = nx.relabel_nodes(H, labels)
+    return H
+
+
+@nx._dispatchable(
+    preserve_all_attrs=True, mutates_input={"not copy": 4}, returns_graph=True
+)
+def contracted_nodes(G, u, v, self_loops=True, copy=True):
+    """Returns the graph that results from contracting `u` and `v`.
+
+    Node contraction identifies the two nodes as a single node incident to any
+    edge that was incident to the original two nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The graph whose nodes will be contracted.
+
+    u, v : nodes
+        Must be nodes in `G`.
+
+    self_loops : Boolean
+        If this is True, any edges joining `u` and `v` in `G` become
+        self-loops on the new node in the returned graph.
+
+    copy : Boolean
+        If this is True (default True), make a copy of
+        `G` and return that instead of directly changing `G`.
+
+
+    Returns
+    -------
+    Networkx graph
+        If copy is True,
+        A new graph object of the same type as `G` (leaving `G` unmodified)
+        with `u` and `v` identified in a single node. The right node `v`
+        will be merged into the node `u`, so only `u` will appear in the
+        returned graph.
+        If copy is False,
+        Modifies `G` with `u` and `v` identified in a single node.
+        The right node `v` will be merged into the node `u`, so
+        only `u` will appear in the returned graph.
+
+    Notes
+    -----
+    For multigraphs, the edge keys for the realigned edges may
+    not be the same as the edge keys for the old edges. This is
+    natural because edge keys are unique only within each pair of nodes.
+
+    For non-multigraphs where `u` and `v` are adjacent to a third node
+    `w`, the edge (`v`, `w`) will be contracted into the edge (`u`,
+    `w`) with its attributes stored into a "contraction" attribute.
+
+    This function is also available as `identified_nodes`.
+
+    Examples
+    --------
+    Contracting two nonadjacent nodes of the cycle graph on four nodes `C_4`
+    yields the path graph (ignoring parallel edges):
+
+    >>> G = nx.cycle_graph(4)
+    >>> M = nx.contracted_nodes(G, 1, 3)
+    >>> P3 = nx.path_graph(3)
+    >>> nx.is_isomorphic(M, P3)
+    True
+
+    >>> G = nx.MultiGraph(P3)
+    >>> M = nx.contracted_nodes(G, 0, 2)
+    >>> M.edges
+    MultiEdgeView([(0, 1, 0), (0, 1, 1)])
+
+    >>> G = nx.Graph([(1, 2), (2, 2)])
+    >>> H = nx.contracted_nodes(G, 1, 2, self_loops=False)
+    >>> list(H.nodes())
+    [1]
+    >>> list(H.edges())
+    [(1, 1)]
+
+    In a ``MultiDiGraph`` with a self loop, the in and out edges will
+    be treated separately as edges, so while contracting a node which
+    has a self loop the contraction will add multiple edges:
+
+    >>> G = nx.MultiDiGraph([(1, 2), (2, 2)])
+    >>> H = nx.contracted_nodes(G, 1, 2)
+    >>> list(H.edges())  # edge 1->2, 2->2, 2<-2 from the original Graph G
+    [(1, 1), (1, 1), (1, 1)]
+    >>> H = nx.contracted_nodes(G, 1, 2, self_loops=False)
+    >>> list(H.edges())  # edge 2->2, 2<-2 from the original Graph G
+    [(1, 1), (1, 1)]
+
+    See Also
+    --------
+    contracted_edge
+    quotient_graph
+
+    """
+    # Copying has significant overhead and can be disabled if needed
+    if copy:
+        H = G.copy()
+    else:
+        H = G
+
+    # edge code uses G.edges(v) instead of G.adj[v] to handle multiedges
+    if H.is_directed():
+        edges_to_remap = chain(G.in_edges(v, data=True), G.out_edges(v, data=True))
+    else:
+        edges_to_remap = G.edges(v, data=True)
+
+    # If the H=G, the generators change as H changes
+    # This makes the edges_to_remap independent of H
+    if not copy:
+        edges_to_remap = list(edges_to_remap)
+
+    v_data = H.nodes[v]
+    H.remove_node(v)
+
+    for prev_w, prev_x, d in edges_to_remap:
+        w = prev_w if prev_w != v else u
+        x = prev_x if prev_x != v else u
+
+        if ({prev_w, prev_x} == {u, v}) and not self_loops:
+            continue
+
+        if not H.has_edge(w, x) or G.is_multigraph():
+            H.add_edge(w, x, **d)
+        else:
+            if "contraction" in H.edges[(w, x)]:
+                H.edges[(w, x)]["contraction"][(prev_w, prev_x)] = d
+            else:
+                H.edges[(w, x)]["contraction"] = {(prev_w, prev_x): d}
+
+    if "contraction" in H.nodes[u]:
+        H.nodes[u]["contraction"][v] = v_data
+    else:
+        H.nodes[u]["contraction"] = {v: v_data}
+    return H
+
+
+identified_nodes = contracted_nodes
+
+
+@nx._dispatchable(
+    preserve_edge_attrs=True, mutates_input={"not copy": 3}, returns_graph=True
+)
+def contracted_edge(G, edge, self_loops=True, copy=True):
+    """Returns the graph that results from contracting the specified edge.
+
+    Edge contraction identifies the two endpoints of the edge as a single node
+    incident to any edge that was incident to the original two nodes. A graph
+    that results from edge contraction is called a *minor* of the original
+    graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+       The graph whose edge will be contracted.
+
+    edge : tuple
+       Must be a pair of nodes in `G`.
+
+    self_loops : Boolean
+       If this is True, any edges (including `edge`) joining the
+       endpoints of `edge` in `G` become self-loops on the new node in the
+       returned graph.
+
+    copy : Boolean (default True)
+        If this is True, the contraction will be performed on a copy of `G`,
+        otherwise the contraction will happen in place.
+
+    Returns
+    -------
+    Networkx graph
+       A new graph object of the same type as `G` (leaving `G` unmodified)
+       with endpoints of `edge` identified in a single node. The right node
+       of `edge` will be merged into the left one, so only the left one will
+       appear in the returned graph.
+
+    Raises
+    ------
+    ValueError
+       If `edge` is not an edge in `G`.
+
+    Examples
+    --------
+    Attempting to contract two nonadjacent nodes yields an error:
+
+    >>> G = nx.cycle_graph(4)
+    >>> nx.contracted_edge(G, (1, 3))
+    Traceback (most recent call last):
+      ...
+    ValueError: Edge (1, 3) does not exist in graph G; cannot contract it
+
+    Contracting two adjacent nodes in the cycle graph on *n* nodes yields the
+    cycle graph on *n - 1* nodes:
+
+    >>> C5 = nx.cycle_graph(5)
+    >>> C4 = nx.cycle_graph(4)
+    >>> M = nx.contracted_edge(C5, (0, 1), self_loops=False)
+    >>> nx.is_isomorphic(M, C4)
+    True
+
+    See also
+    --------
+    contracted_nodes
+    quotient_graph
+
+    """
+    u, v = edge[:2]
+    if not G.has_edge(u, v):
+        raise ValueError(f"Edge {edge} does not exist in graph G; cannot contract it")
+    return contracted_nodes(G, u, v, self_loops=self_loops, copy=copy)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/tests/test_contraction.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/tests/test_contraction.py
new file mode 100644
index 00000000..22468867
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/tests/test_contraction.py
@@ -0,0 +1,446 @@
+"""Unit tests for the :mod:`networkx.algorithms.minors.contraction` module."""
+
+import pytest
+
+import networkx as nx
+from networkx.utils import arbitrary_element, edges_equal, nodes_equal
+
+
def test_quotient_graph_complete_multipartite():
    """The "same neighbors" quotient of the complete 3-partite graph
    ``K_{2,3,4}`` is isomorphic to the complete graph ``K_3``.
    """
    graph = nx.complete_multipartite_graph(2, 3, 4)

    def same_neighbors(u, v):
        # Nodes are equivalent iff they are nonadjacent and share the same
        # neighborhood, i.e. they lie in the same partite set.
        return u not in graph[v] and v not in graph[u] and graph[u] == graph[v]

    quotient = nx.quotient_graph(graph, same_neighbors)
    # The graphs are tiny, so an isomorphism check is cheap.
    assert nx.is_isomorphic(nx.complete_graph(3), quotient)
+
+
def test_quotient_graph_complete_bipartite():
    """The "same neighbors" quotient of the complete bipartite graph
    ``K_{2,3}`` is isomorphic to ``K_2``.
    """
    graph = nx.complete_bipartite_graph(2, 3)

    def same_neighbors(u, v):
        # Nodes are equivalent iff they are nonadjacent and share the same
        # neighborhood, i.e. they are on the same side of the bipartition.
        return u not in graph[v] and v not in graph[u] and graph[u] == graph[v]

    quotient = nx.quotient_graph(graph, same_neighbors)
    # The graphs are tiny, so an isomorphism check is cheap.
    assert nx.is_isomorphic(nx.complete_graph(2), quotient)
+
+
def test_quotient_graph_edge_relation():
    """A user-supplied edge relation controls which quotient blocks are
    joined by an edge.
    """
    graph = nx.path_graph(5)

    def identity(u, v):
        return u == v

    def same_parity(b, c):
        # Blocks are singletons here, so compare the parity of their
        # sole members.
        return arbitrary_element(b) % 2 == arbitrary_element(c) % 2

    quotient = nx.quotient_graph(graph, identity, same_parity)
    expected = nx.Graph([(0, 2), (0, 4), (2, 4), (1, 3)])
    assert nx.is_isomorphic(quotient, expected)
+
+
def test_condensation_as_quotient():
    """The condensation of a digraph equals its quotient under the
    "same strongly connected component" equivalence relation.
    """
    # This example graph comes from the file `test_strongly_connected.py`.
    edges = [
        (1, 2), (2, 3), (2, 11), (2, 12), (3, 4), (4, 3), (4, 5),
        (5, 6), (6, 5), (6, 7), (7, 8), (7, 9), (7, 10), (8, 9),
        (9, 7), (10, 6), (11, 2), (11, 4), (11, 6), (12, 6), (12, 11),
    ]
    graph = nx.DiGraph(edges)
    sccs = list(nx.strongly_connected_components(graph))
    condensed = nx.condensation(graph, sccs)
    component_of = condensed.graph["mapping"]

    def same_component(u, v):
        # Equivalent iff both nodes were condensed into the same SCC.
        return component_of[u] == component_of[v]

    quotient = nx.quotient_graph(graph, same_component)
    assert nx.is_isomorphic(condensed, quotient)
+
+
def test_path():
    """Contracting consecutive pairs of ``P_6`` yields ``P_3``."""
    quotient = nx.quotient_graph(nx.path_graph(6), [{0, 1}, {2, 3}, {4, 5}], relabel=True)
    assert nodes_equal(quotient, [0, 1, 2])
    assert edges_equal(quotient.edges(), [(0, 1), (1, 2)])
    # Each block is two nodes joined by one internal edge, hence density 1.
    assert all(quotient.nodes[n]["nedges"] == 1 for n in quotient)
    assert all(quotient.nodes[n]["nnodes"] == 2 for n in quotient)
    assert all(quotient.nodes[n]["density"] == 1 for n in quotient)
+
+
def test_path__partition_provided_as_dict_of_lists():
    """The partition may be given as a dict whose values are lists."""
    blocks = {0: [0, 1], 2: [2, 3], 4: [4, 5]}
    quotient = nx.quotient_graph(nx.path_graph(6), blocks, relabel=True)
    assert nodes_equal(quotient, [0, 1, 2])
    assert edges_equal(quotient.edges(), [(0, 1), (1, 2)])
    assert all(quotient.nodes[n]["nedges"] == 1 for n in quotient)
    assert all(quotient.nodes[n]["nnodes"] == 2 for n in quotient)
    assert all(quotient.nodes[n]["density"] == 1 for n in quotient)
+
+
def test_path__partition_provided_as_dict_of_tuples():
    """The partition may be given as a dict whose values are tuples."""
    blocks = {0: (0, 1), 2: (2, 3), 4: (4, 5)}
    quotient = nx.quotient_graph(nx.path_graph(6), blocks, relabel=True)
    assert nodes_equal(quotient, [0, 1, 2])
    assert edges_equal(quotient.edges(), [(0, 1), (1, 2)])
    assert all(quotient.nodes[n]["nedges"] == 1 for n in quotient)
    assert all(quotient.nodes[n]["nnodes"] == 2 for n in quotient)
    assert all(quotient.nodes[n]["density"] == 1 for n in quotient)
+
+
def test_path__partition_provided_as_dict_of_sets():
    """The partition may be given as a dict whose values are sets."""
    blocks = {0: {0, 1}, 2: {2, 3}, 4: {4, 5}}
    quotient = nx.quotient_graph(nx.path_graph(6), blocks, relabel=True)
    assert nodes_equal(quotient, [0, 1, 2])
    assert edges_equal(quotient.edges(), [(0, 1), (1, 2)])
    assert all(quotient.nodes[n]["nedges"] == 1 for n in quotient)
    assert all(quotient.nodes[n]["nnodes"] == 2 for n in quotient)
    assert all(quotient.nodes[n]["density"] == 1 for n in quotient)
+
+
def test_multigraph_path():
    """The path quotient also works on a MultiGraph input."""
    graph = nx.MultiGraph(nx.path_graph(6))
    quotient = nx.quotient_graph(graph, [{0, 1}, {2, 3}, {4, 5}], relabel=True)
    assert nodes_equal(quotient, [0, 1, 2])
    assert edges_equal(quotient.edges(), [(0, 1), (1, 2)])
    assert all(quotient.nodes[n]["nedges"] == 1 for n in quotient)
    assert all(quotient.nodes[n]["nnodes"] == 2 for n in quotient)
    assert all(quotient.nodes[n]["density"] == 1 for n in quotient)
+
+
def test_directed_path():
    """The path quotient on a DiGraph; directed density is halved."""
    graph = nx.DiGraph()
    nx.add_path(graph, range(6))
    quotient = nx.quotient_graph(graph, [{0, 1}, {2, 3}, {4, 5}], relabel=True)
    assert nodes_equal(quotient, [0, 1, 2])
    assert edges_equal(quotient.edges(), [(0, 1), (1, 2)])
    # One directed edge out of the two possible within each 2-node block.
    assert all(quotient.nodes[n]["nedges"] == 1 for n in quotient)
    assert all(quotient.nodes[n]["nnodes"] == 2 for n in quotient)
    assert all(quotient.nodes[n]["density"] == 0.5 for n in quotient)
+
+
def test_directed_multigraph_path():
    """The path quotient on a MultiDiGraph; directed density is halved."""
    graph = nx.MultiDiGraph()
    nx.add_path(graph, range(6))
    quotient = nx.quotient_graph(graph, [{0, 1}, {2, 3}, {4, 5}], relabel=True)
    assert nodes_equal(quotient, [0, 1, 2])
    assert edges_equal(quotient.edges(), [(0, 1), (1, 2)])
    assert all(quotient.nodes[n]["nedges"] == 1 for n in quotient)
    assert all(quotient.nodes[n]["nnodes"] == 2 for n in quotient)
    assert all(quotient.nodes[n]["density"] == 0.5 for n in quotient)
+
+
def test_overlapping_blocks():
    """A partition whose blocks share a node is rejected."""
    graph = nx.path_graph(6)
    with pytest.raises(nx.NetworkXException):
        # Node 2 appears in two blocks, so this is not a valid partition.
        nx.quotient_graph(graph, [{0, 1, 2}, {2, 3}, {4, 5}])
+
+
def test_weighted_path():
    """Inter-block edge weights are summed under the given attribute."""
    graph = nx.path_graph(6)
    for i in range(5):
        graph.edges[i, i + 1]["w"] = i + 1
    quotient = nx.quotient_graph(graph, [{0, 1}, {2, 3}, {4, 5}], weight="w", relabel=True)
    assert nodes_equal(quotient, [0, 1, 2])
    assert edges_equal(quotient.edges(), [(0, 1), (1, 2)])
    # The sole edge between blocks {0,1} and {2,3} is (1, 2) with w=2;
    # between {2,3} and {4,5} it is (3, 4) with w=4.
    assert quotient[0][1]["weight"] == 2
    assert quotient[1][2]["weight"] == 4
    assert all(quotient.nodes[n]["nedges"] == 1 for n in quotient)
    assert all(quotient.nodes[n]["nnodes"] == 2 for n in quotient)
    assert all(quotient.nodes[n]["density"] == 1 for n in quotient)
+
+
def test_barbell():
    """Contracting each bell of a barbell graph leaves a single edge."""
    quotient = nx.quotient_graph(nx.barbell_graph(3, 0), [{0, 1, 2}, {3, 4, 5}], relabel=True)
    assert nodes_equal(quotient, [0, 1])
    assert edges_equal(quotient.edges(), [(0, 1)])
    # Each bell is a triangle: 3 nodes, 3 internal edges, density 1.
    assert all(quotient.nodes[n]["nedges"] == 3 for n in quotient)
    assert all(quotient.nodes[n]["nnodes"] == 3 for n in quotient)
    assert all(quotient.nodes[n]["density"] == 1 for n in quotient)
+
+
def test_barbell_plus():
    """An extra edge between the bells doubles the quotient edge weight."""
    graph = nx.barbell_graph(3, 0)
    graph.add_edge(0, 5)  # second edge joining the two bells
    quotient = nx.quotient_graph(graph, [{0, 1, 2}, {3, 4, 5}], relabel=True)
    assert nodes_equal(quotient, [0, 1])
    assert edges_equal(quotient.edges(), [(0, 1)])
    # Two parallel inter-bell edges collapse to weight 2.
    assert quotient[0][1]["weight"] == 2
    assert all(quotient.nodes[n]["nedges"] == 3 for n in quotient)
    assert all(quotient.nodes[n]["nnodes"] == 3 for n in quotient)
    assert all(quotient.nodes[n]["density"] == 1 for n in quotient)
+
+
def test_blockmodel():
    """Blockmodel-style usage: the partition given as a list of lists."""
    quotient = nx.quotient_graph(nx.path_graph(6), [[0, 1], [2, 3], [4, 5]], relabel=True)
    assert nodes_equal(quotient.nodes(), [0, 1, 2])
    assert edges_equal(quotient.edges(), [(0, 1), (1, 2)])
    assert all(quotient.nodes[n]["nedges"] == 1 for n in quotient.nodes())
    assert all(quotient.nodes[n]["nnodes"] == 2 for n in quotient.nodes())
    assert all(quotient.nodes[n]["density"] == 1.0 for n in quotient.nodes())
+
+
def test_multigraph_blockmodel():
    """Blockmodel-style usage with a MultiGraph via ``create_using``."""
    graph = nx.MultiGraph(nx.path_graph(6))
    quotient = nx.quotient_graph(
        graph, [[0, 1], [2, 3], [4, 5]], create_using=nx.MultiGraph(), relabel=True
    )
    assert nodes_equal(quotient.nodes(), [0, 1, 2])
    assert edges_equal(quotient.edges(), [(0, 1), (1, 2)])
    assert all(quotient.nodes[n]["nedges"] == 1 for n in quotient.nodes())
    assert all(quotient.nodes[n]["nnodes"] == 2 for n in quotient.nodes())
    assert all(quotient.nodes[n]["density"] == 1.0 for n in quotient.nodes())
+
+
def test_quotient_graph_incomplete_partition():
    """Nodes omitted from the partition simply do not appear in the quotient."""
    graph = nx.path_graph(6)

    # An empty partition produces an empty quotient.
    empty = nx.quotient_graph(graph, [], relabel=True)
    assert nodes_equal(empty.nodes(), [])
    assert edges_equal(empty.edges(), [])

    # Node 4 is omitted, so block [5] is isolated in the quotient.
    partial = nx.quotient_graph(graph, [[0, 1], [2, 3], [5]], relabel=True)
    assert nodes_equal(partial.nodes(), [0, 1, 2])
    assert edges_equal(partial.edges(), [(0, 1)])
+
+
def test_undirected_node_contraction():
    """Contracting adjacent nodes of C4 gives C3 plus a self-loop."""
    contracted = nx.contracted_nodes(nx.cycle_graph(4), 0, 1)
    expected = nx.cycle_graph(3)
    expected.add_edge(0, 0)
    assert nx.is_isomorphic(contracted, expected)
+
+
def test_directed_node_contraction():
    """Contracting adjacent nodes of a directed C4 keeps the self-loop."""
    contracted = nx.contracted_nodes(nx.DiGraph(nx.cycle_graph(4)), 0, 1)
    expected = nx.DiGraph(nx.cycle_graph(3))
    # Adding the same directed self-loop twice is idempotent in a DiGraph.
    expected.add_edge(0, 0)
    expected.add_edge(0, 0)
    assert nx.is_isomorphic(contracted, expected)
+
+
def test_undirected_node_contraction_no_copy():
    """With ``copy=False`` the contraction mutates the input graph itself."""
    graph = nx.cycle_graph(4)
    returned = nx.contracted_nodes(graph, 0, 1, copy=False)
    expected = nx.cycle_graph(3)
    expected.add_edge(0, 0)
    # The returned graph and the (mutated) input are the same structure.
    assert nx.is_isomorphic(returned, graph)
    assert nx.is_isomorphic(returned, expected)
+
+
def test_directed_node_contraction_no_copy():
    """With ``copy=False`` the directed contraction mutates the input graph."""
    graph = nx.DiGraph(nx.cycle_graph(4))
    returned = nx.contracted_nodes(graph, 0, 1, copy=False)
    expected = nx.DiGraph(nx.cycle_graph(3))
    expected.add_edge(0, 0)
    expected.add_edge(0, 0)
    # The returned graph and the (mutated) input are the same structure.
    assert nx.is_isomorphic(returned, graph)
    assert nx.is_isomorphic(returned, expected)
+
+
def test_create_multigraph():
    """Contraction in a MultiGraph keeps parallel and self-loop edges."""
    graph = nx.path_graph(3, create_using=nx.MultiGraph())
    graph.add_edges_from([(0, 1), (0, 0), (0, 2)])
    contracted = nx.contracted_nodes(graph, 0, 2)
    expected = nx.MultiGraph()
    # Three parallel (0, 1) edges and two self-loops on 0 survive.
    expected.add_edges_from([(0, 1), (0, 1), (0, 1), (0, 0), (0, 0)])
    assert edges_equal(contracted.edges, expected.edges)
+
+
def test_multigraph_keys():
    """Multiedge keys are regenerated (not preserved) in the new graph."""
    graph = nx.path_graph(3, create_using=nx.MultiGraph())
    graph.add_edge(0, 1, 5)
    graph.add_edge(0, 0, 0)
    graph.add_edge(0, 2, 5)
    contracted = nx.contracted_nodes(graph, 0, 2)
    expected = nx.MultiGraph()
    expected.add_edges_from(
        [
            (0, 1, 0, {}),
            (0, 1, 5, {}),
            (0, 1, 2, {}),  # keyed as 2 b/c 2 edges already in G
            (0, 0, 0, {}),
            (0, 0, 1, {}),  # this comes from (0, 2, 5)
        ]
    )
    assert edges_equal(contracted.edges, expected.edges)
+
+
def test_node_attributes():
    """Node data survives contraction; the absorbed node's data is filed
    under the ``"contraction"`` key of the surviving node.
    """
    graph = nx.cycle_graph(4)
    # Attach data to both endpoints of the contraction.
    graph.nodes[0]["foo"] = "bar"
    graph.nodes[1]["baz"] = "xyzzy"
    contracted = nx.contracted_nodes(graph, 0, 1)
    # Contracting 0 and 1 in C4 yields K3 on nodes {0, 2, 3} with a
    # self-loop on 0.
    expected = nx.relabel_nodes(nx.complete_graph(3), {1: 2, 2: 3})
    expected.add_edge(0, 0)
    expected.nodes[0].update({"foo": "bar", "contraction": {1: {"baz": "xyzzy"}}})
    assert nx.is_isomorphic(contracted, expected)
    assert contracted.nodes == expected.nodes
+
+
def test_edge_attributes():
    """Edge data survives contraction; a clobbered edge's data is filed
    under the ``"contraction"`` key of the surviving edge.
    """
    # Shape: src1 --> dest <-- src2
    digraph = nx.DiGraph([("src1", "dest"), ("src2", "dest")])
    digraph["src1"]["dest"]["value"] = "src1-->dest"
    digraph["src2"]["dest"]["value"] = "src2-->dest"
    multigraph = nx.MultiDiGraph(digraph)

    # New shape: src1 --> dest
    contracted = nx.contracted_nodes(digraph, "src1", "src2")
    assert contracted.edges[("src1", "dest")]["value"] == "src1-->dest"
    stored = contracted.edges[("src1", "dest")]["contraction"]
    assert stored[("src2", "dest")]["value"] == "src2-->dest"

    # New shape: src1 -(x2)-> dest; both parallel edges survive instead.
    multi_contracted = nx.contracted_nodes(multigraph, "src1", "src2")
    assert len(multi_contracted.edges(("src1", "dest"))) == 2
+
+
def test_without_self_loops():
    """With ``self_loops=False`` the contracted edge is dropped entirely."""
    contracted = nx.contracted_nodes(nx.cycle_graph(4), 0, 1, self_loops=False)
    assert nx.is_isomorphic(contracted, nx.complete_graph(3))
+
+
def test_contract_loop_graph():
    """Pre-existing self-loops migrate onto the surviving node, whichever
    of the two endpoints that is.
    """
    graph = nx.cycle_graph(4)
    graph.add_edge(0, 0)

    forward = nx.contracted_nodes(graph, 0, 1)
    expected = nx.complete_graph([0, 2, 3])
    expected.add_edges_from([(0, 0), (0, 0)])
    assert edges_equal(forward.edges, expected.edges)

    backward = nx.contracted_nodes(graph, 1, 0)
    expected = nx.complete_graph([1, 2, 3])
    expected.add_edges_from([(1, 1), (1, 1)])
    assert edges_equal(backward.edges, expected.edges)
+
+
def test_undirected_edge_contraction():
    """Contracting an edge of C4 gives K3 plus a self-loop."""
    contracted = nx.contracted_edge(nx.cycle_graph(4), (0, 1))
    expected = nx.complete_graph(3)
    expected.add_edge(0, 0)
    assert nx.is_isomorphic(contracted, expected)
+
+
def test_multigraph_edge_contraction():
    """A keyed ``(u, v, key)`` triple is accepted as the edge to contract."""
    contracted = nx.contracted_edge(nx.cycle_graph(4), (0, 1, 0))
    expected = nx.complete_graph(3)
    expected.add_edge(0, 0)
    assert nx.is_isomorphic(contracted, expected)
+
+
def test_nonexistent_edge():
    """Contracting an edge that is absent from the graph raises ValueError."""
    graph = nx.cycle_graph(4)
    with pytest.raises(ValueError):
        # (0, 2) is a chord of C4, not one of its edges.
        nx.contracted_edge(graph, (0, 2))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/mis.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/mis.py
new file mode 100644
index 00000000..0652ac4a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/mis.py
@@ -0,0 +1,78 @@
+"""
+Algorithm to find a maximal (not maximum) independent set.
+
+"""
+
+import networkx as nx
+from networkx.utils import not_implemented_for, py_random_state
+
+__all__ = ["maximal_independent_set"]
+
+
@not_implemented_for("directed")
@py_random_state(2)
@nx._dispatchable
def maximal_independent_set(G, nodes=None, seed=None):
    """Returns a random maximal independent set guaranteed to contain
    a given set of nodes.

    An independent set is a set of nodes such that the subgraph
    of G induced by these nodes contains no edges. A maximal
    independent set is an independent set such that it is not possible
    to add a new node and still get an independent set.

    Parameters
    ----------
    G : NetworkX graph

    nodes : list or iterable
       Nodes that must be part of the independent set. This set of nodes
       must be independent.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    indep_nodes : list
       List of nodes that are part of a maximal independent set.

    Raises
    ------
    NetworkXUnfeasible
       If the nodes in the provided list are not part of the graph or
       do not form an independent set, an exception is raised.

    NetworkXNotImplemented
        If `G` is directed.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> nx.maximal_independent_set(G)  # doctest: +SKIP
    [4, 0, 2]
    >>> nx.maximal_independent_set(G, [1])  # doctest: +SKIP
    [1, 3]

    Notes
    -----
    This algorithm does not solve the maximum independent set problem.

    """
    # Seed the independent set: either the caller's nodes or one random node.
    chosen = set(nodes) if nodes else {seed.choice(list(G))}
    if not chosen.issubset(G):
        raise nx.NetworkXUnfeasible(f"{chosen} is not a subset of the nodes of G")
    # Collect every neighbor of a chosen node; the chosen nodes must not
    # appear among them, or they are not independent.
    blocked = set()
    for v in chosen:
        blocked.update(G.adj[v])
    if blocked & chosen:
        raise nx.NetworkXUnfeasible(f"{chosen} is not an independent set of G")
    indep_nodes = list(chosen)
    # Nodes still eligible: neither chosen nor adjacent to a chosen node.
    candidates = set(G) - blocked - chosen
    # Greedily grow the set: pick a random eligible node, then rule out
    # it and its neighbors.
    while candidates:
        v = seed.choice(list(candidates))
        indep_nodes.append(v)
        candidates.difference_update(G.adj[v])
        candidates.discard(v)
    return indep_nodes
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/moral.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/moral.py
new file mode 100644
index 00000000..e2acf80f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/moral.py
@@ -0,0 +1,59 @@
+r"""Function for computing the moral graph of a directed graph."""
+
+import itertools
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["moral_graph"]
+
+
@not_implemented_for("undirected")
@nx._dispatchable(returns_graph=True)
def moral_graph(G):
    r"""Return the Moral Graph

    Returns the moralized graph of a given directed graph: the undirected
    graph obtained by "marrying" the parents of every node (joining every
    pair of nodes that share a child) and then dropping edge directions.

    Parameters
    ----------
    G : NetworkX graph
        Directed graph

    Returns
    -------
    H : NetworkX graph
        The undirected moralized graph of G

    Raises
    ------
    NetworkXNotImplemented
        If `G` is undirected.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (2, 5), (3, 4), (4, 3)])
    >>> G_moral = nx.moral_graph(G)
    >>> G_moral.edges()
    EdgeView([(1, 2), (2, 3), (2, 5), (2, 4), (3, 4)])

    Notes
    -----
    https://en.wikipedia.org/wiki/Moral_graph

    References
    ----------
    .. [1] Wray L. Buntine. 1995. Chain graphs for learning.
           In Proceedings of the Eleventh conference on Uncertainty
           in artificial intelligence (UAI'95)
    """
    # Start from the undirected skeleton of G.
    H = G.to_undirected()
    # "Marry" the parents: connect every pair of predecessors of each node.
    for node in G:
        H.add_edges_from(itertools.combinations(G.pred[node], 2))
    return H
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/node_classification.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/node_classification.py
new file mode 100644
index 00000000..b69a6c97
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/node_classification.py
@@ -0,0 +1,219 @@
+"""This module provides the functions for node classification problem.
+
+The functions in this module are not imported
+into the top level `networkx` namespace.
+You can access these functions by importing
+the `networkx.algorithms.node_classification` modules,
+then accessing the functions as attributes of `node_classification`.
+For example:
+
+  >>> from networkx.algorithms import node_classification
+  >>> G = nx.path_graph(4)
+  >>> G.edges()
+  EdgeView([(0, 1), (1, 2), (2, 3)])
+  >>> G.nodes[0]["label"] = "A"
+  >>> G.nodes[3]["label"] = "B"
+  >>> node_classification.harmonic_function(G)
+  ['A', 'A', 'B', 'B']
+
+References
+----------
+Zhu, X., Ghahramani, Z., & Lafferty, J. (2003, August).
+Semi-supervised learning using gaussian fields and harmonic functions.
+In ICML (Vol. 3, pp. 912-919).
+"""
+
+import networkx as nx
+
+__all__ = ["harmonic_function", "local_and_global_consistency"]
+
+
@nx.utils.not_implemented_for("directed")
@nx._dispatchable(node_attrs="label_name")
def harmonic_function(G, max_iter=30, label_name="label"):
    """Node classification by Harmonic function

    Function for computing Harmonic function algorithm by Zhu et al.

    Parameters
    ----------
    G : NetworkX Graph
    max_iter : int
        maximum number of iterations allowed
    label_name : string
        name of target labels to predict

    Returns
    -------
    predicted : list
        List of length ``len(G)`` with the predicted labels for each node.

    Raises
    ------
    NetworkXError
        If no nodes in `G` have attribute `label_name`.

    Examples
    --------
    >>> from networkx.algorithms import node_classification
    >>> G = nx.path_graph(4)
    >>> G.nodes[0]["label"] = "A"
    >>> G.nodes[3]["label"] = "B"
    >>> G.nodes(data=True)
    NodeDataView({0: {'label': 'A'}, 1: {}, 2: {}, 3: {'label': 'B'}})
    >>> G.edges()
    EdgeView([(0, 1), (1, 2), (2, 3)])
    >>> predicted = node_classification.harmonic_function(G)
    >>> predicted
    ['A', 'A', 'B', 'B']

    References
    ----------
    Zhu, X., Ghahramani, Z., & Lafferty, J. (2003, August).
    Semi-supervised learning using gaussian fields and harmonic functions.
    In ICML (Vol. 3, pp. 912-919).
    """
    # numpy/scipy are imported lazily so the module loads without them.
    import numpy as np
    import scipy as sp

    X = nx.to_scipy_sparse_array(G)  # adjacency matrix
    labels, label_dict = _get_label_info(G, label_name)

    if labels.shape[0] == 0:
        raise nx.NetworkXError(
            f"No node on the input graph is labeled by '{label_name}'."
        )

    n_samples = X.shape[0]
    n_classes = label_dict.shape[0]
    # F[i, j] is the current score of node i for class j.
    F = np.zeros((n_samples, n_classes))

    # Build propagation matrix
    degrees = X.sum(axis=0)
    degrees[degrees == 0] = 1  # Avoid division by 0
    # TODO: csr_array
    # P = D^{-1} X is the row-normalized (random-walk) adjacency matrix.
    D = sp.sparse.csr_array(sp.sparse.diags((1.0 / degrees), offsets=0))
    P = (D @ X).tolil()
    P[labels[:, 0]] = 0  # labels[:, 0] indicates IDs of labeled nodes
    # Build base matrix
    # B is one-hot on labeled nodes; together with the zeroed rows of P
    # above, this clamps labeled nodes to their observed labels.
    B = np.zeros((n_samples, n_classes))
    B[labels[:, 0], labels[:, 1]] = 1

    # Fixed-point iteration F <- P F + B; max_iter bounds the work rather
    # than testing for convergence explicitly.
    for _ in range(max_iter):
        F = (P @ F) + B

    # Predict the highest-scoring class per node, mapped back to labels.
    return label_dict[np.argmax(F, axis=1)].tolist()
+
+
@nx.utils.not_implemented_for("directed")
@nx._dispatchable(node_attrs="label_name")
def local_and_global_consistency(G, alpha=0.99, max_iter=30, label_name="label"):
    """Node classification by Local and Global Consistency

    Function for computing Local and global consistency algorithm by Zhou et al.

    Parameters
    ----------
    G : NetworkX Graph
    alpha : float
        Clamping factor
    max_iter : int
        Maximum number of iterations allowed
    label_name : string
        Name of target labels to predict

    Returns
    -------
    predicted : list
        List of length ``len(G)`` with the predicted labels for each node.

    Raises
    ------
    NetworkXError
        If no nodes in `G` have attribute `label_name`.

    Examples
    --------
    >>> from networkx.algorithms import node_classification
    >>> G = nx.path_graph(4)
    >>> G.nodes[0]["label"] = "A"
    >>> G.nodes[3]["label"] = "B"
    >>> G.nodes(data=True)
    NodeDataView({0: {'label': 'A'}, 1: {}, 2: {}, 3: {'label': 'B'}})
    >>> G.edges()
    EdgeView([(0, 1), (1, 2), (2, 3)])
    >>> predicted = node_classification.local_and_global_consistency(G)
    >>> predicted
    ['A', 'A', 'B', 'B']

    References
    ----------
    Zhou, D., Bousquet, O., Lal, T. N., Weston, J., & Schölkopf, B. (2004).
    Learning with local and global consistency.
    Advances in neural information processing systems, 16(16), 321-328.
    """
    # numpy/scipy are imported lazily so the module loads without them.
    import numpy as np
    import scipy as sp

    X = nx.to_scipy_sparse_array(G)  # adjacency matrix
    labels, label_dict = _get_label_info(G, label_name)

    if labels.shape[0] == 0:
        raise nx.NetworkXError(
            f"No node on the input graph is labeled by '{label_name}'."
        )

    n_samples = X.shape[0]
    n_classes = label_dict.shape[0]
    # F[i, j] is the current score of node i for class j.
    F = np.zeros((n_samples, n_classes))

    # Build propagation matrix
    degrees = X.sum(axis=0)
    degrees[degrees == 0] = 1  # Avoid division by 0
    # TODO: csr_array
    # P = alpha * D^{-1/2} X D^{-1/2}: the symmetrically normalized
    # adjacency matrix scaled by the clamping factor.
    D2 = np.sqrt(sp.sparse.csr_array(sp.sparse.diags((1.0 / degrees), offsets=0)))
    P = alpha * ((D2 @ X) @ D2)
    # Build base matrix
    # B carries the observed labels with weight (1 - alpha), pulling
    # labeled nodes back toward their known classes each iteration.
    B = np.zeros((n_samples, n_classes))
    B[labels[:, 0], labels[:, 1]] = 1 - alpha

    # Fixed-point iteration F <- P F + B; max_iter bounds the work rather
    # than testing for convergence explicitly.
    for _ in range(max_iter):
        F = (P @ F) + B

    # Predict the highest-scoring class per node, mapped back to labels.
    return label_dict[np.argmax(F, axis=1)].tolist()
+
+
+def _get_label_info(G, label_name):
+    """Get and return information of labels from the input graph
+
+    Parameters
+    ----------
+    G : Network X graph
+    label_name : string
+        Name of the target label
+
+    Returns
+    -------
+    labels : numpy array, shape = [n_labeled_samples, 2]
+        Array of pairs of labeled node ID and label ID
+    label_dict : numpy array, shape = [n_classes]
+        Array of labels
+        i-th element contains the label corresponding label ID `i`
+    """
+    import numpy as np
+
+    labels = []
+    label_to_id = {}
+    lid = 0
+    for i, n in enumerate(G.nodes(data=True)):
+        if label_name in n[1]:
+            label = n[1][label_name]
+            if label not in label_to_id:
+                label_to_id[label] = lid
+                lid += 1
+            labels.append([i, label_to_id[label]])
+    labels = np.array(labels)
+    label_dict = np.array(
+        [label for label, _ in sorted(label_to_id.items(), key=lambda x: x[1])]
+    )
+    return (labels, label_dict)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/non_randomness.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/non_randomness.py
new file mode 100644
index 00000000..13799115
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/non_randomness.py
@@ -0,0 +1,98 @@
+r"""Computation of graph non-randomness"""
+
+import math
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["non_randomness"]
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def non_randomness(G, k=None, weight="weight"):
    """Compute the non-randomness of graph G.

    The first returned value is the sum of the non-randomness values of
    all edges in the graph (an edge's non-randomness tends to be small
    when its endpoints belong to different communities). The second is a
    relative measure of how far G is from an Erdos-Renyi random graph:
    values near 0 indicate G looks random.

    Parameters
    ----------
    G : NetworkX graph
        Graph must be symmetric, connected, and without self-loops.

    k : int
        The number of communities in G.
        If k is not set, the function will use a default community
        detection algorithm to set it.

    weight : string or None, optional (default=None)
        The name of an edge attribute that holds the numerical value used
        as a weight. If None, then each edge has weight 1, i.e., the graph is
        binary.

    Returns
    -------
    non-randomness : (float, float) tuple
        Non-randomness, Relative non-randomness w.r.t.
        Erdos Renyi random graphs.

    Raises
    ------
    NetworkXException
        if the input graph is not connected.
    NetworkXError
        if the input graph contains self-loops or if graph has no edges.

    Examples
    --------
    >>> G = nx.karate_club_graph()
    >>> nr, nr_rd = nx.non_randomness(G, 2)
    >>> nr, nr_rd = nx.non_randomness(G, 2, "weight")

    Notes
    -----
    This computes Eq. (4.4) and (4.5) in Ref. [1]_.

    If a weight field is passed, this algorithm will use the eigenvalues
    of the weighted adjacency matrix to compute Eq. (4.4) and (4.5).

    References
    ----------
    .. [1] Xiaowei Ying and Xintao Wu,
           On Randomness Measures for Social Networks,
           SIAM International Conference on Data Mining. 2009
    """
    import numpy as np

    # The measure is undefined for empty or disconnected graphs and for
    # graphs containing self-loops.
    if nx.is_empty(G):
        raise nx.NetworkXError("non_randomness not applicable to empty graphs")
    if not nx.is_connected(G):
        raise nx.NetworkXException("Non connected graph.")
    if nx.number_of_selfloops(G) > 0:
        raise nx.NetworkXError("Graph must not contain self-loops")

    # Default the community count to a label-propagation estimate.
    if k is None:
        k = len(tuple(nx.community.label_propagation_communities(G)))

    # Eq. (4.4): sum the first k eigenvalues of the (possibly weighted)
    # adjacency matrix.
    eigenvalues = np.linalg.eigvals(nx.to_numpy_array(G, weight=weight))
    nr = float(np.real(eigenvalues[:k].sum()))

    # Eq. (4.5): normalize against the expectation for an Erdos-Renyi
    # graph with edge probability p.
    n = G.number_of_nodes()
    m = G.number_of_edges()
    p = (2 * k * m) / (n * (n - k))
    nr_rd = (nr - ((n - 2 * k) * p + k)) / math.sqrt(2 * k * p * (1 - p))

    return nr, nr_rd
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__init__.py
new file mode 100644
index 00000000..0ebc6ab9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__init__.py
@@ -0,0 +1,4 @@
+from networkx.algorithms.operators.all import *
+from networkx.algorithms.operators.binary import *
+from networkx.algorithms.operators.product import *
+from networkx.algorithms.operators.unary import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/all.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/all.py
new file mode 100644
index 00000000..549d335d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/all.py
@@ -0,0 +1,321 @@
+"""Operations on many graphs."""
+
+from itertools import chain, repeat
+
+import networkx as nx
+
+__all__ = ["union_all", "compose_all", "disjoint_union_all", "intersection_all"]
+
+
@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True)
def union_all(graphs, rename=()):
    """Return the union of all graphs in an iterable.

    The node sets of the supplied graphs must be pairwise disjoint,
    otherwise an exception is raised.

    Parameters
    ----------
    graphs : iterable
       Iterable of NetworkX graphs.

    rename : iterable, optional
       Prefixes used to relabel the nodes of each graph.  For example
       rename=('G-', 'H-') turns node "u" of the first graph into "G-u"
       and node "v" of the second into "H-v".  Infinite generators
       (like itertools.count) are also supported.

    Returns
    -------
    U : a graph with the same type as the first graph in the iterable

    Raises
    ------
    ValueError
       If `graphs` is an empty list.

    NetworkXError
        In case of mixed type graphs, like MultiGraph and Graph, or
        directed and undirected graphs, or when node sets overlap.

    Notes
    -----
    For operating on mixed type graphs, they should be converted to the
    same type first.

    Graph, edge, and node attributes are propagated to the union graph.
    If a graph attribute is present in multiple graphs, then the value
    from the last graph in the list with that attribute is used.

    To force a disjoint union with automatic integer relabeling, use
    `disjoint_union_all`.

    Examples
    --------
    >>> G1 = nx.Graph([(1, 2), (2, 3)])
    >>> G2 = nx.Graph([(4, 5), (5, 6)])
    >>> result_graph = nx.union_all([G1, G2])
    >>> result_graph.nodes()
    NodeView((1, 2, 3, 4, 5, 6))
    >>> result_graph.edges()
    EdgeView([(1, 2), (2, 3), (4, 5), (5, 6)])

    See Also
    --------
    union
    disjoint_union_all
    """

    def _relabel(graph, prefix):
        # A prefix of None leaves the graph's node labels untouched.
        if prefix is None:
            return graph
        return nx.relabel_nodes(graph, lambda node: f"{prefix}{node}")

    # Pad `rename` with None so graphs beyond its length keep their labels.
    prefixes = chain(rename, repeat(None))
    relabeled = (_relabel(g, p) for g, p in zip(graphs, prefixes))

    result = None
    used_nodes = set()
    for index, graph in enumerate(relabeled):
        current_nodes = set(graph.nodes)
        if index == 0:
            # The union adopts the type of the first graph.
            result = graph.__class__()
        elif graph.is_directed() != result.is_directed():
            raise nx.NetworkXError("All graphs must be directed or undirected.")
        elif graph.is_multigraph() != result.is_multigraph():
            raise nx.NetworkXError("All graphs must be graphs or multigraphs.")
        elif not used_nodes.isdisjoint(current_nodes):
            raise nx.NetworkXError(
                "The node sets of the graphs are not disjoint.\n"
                "Use `rename` to specify prefixes for the graphs or use\n"
                "disjoint_union(G1, G2, ..., GN)."
            )

        used_nodes.update(current_nodes)
        result.graph.update(graph.graph)
        result.add_nodes_from(graph.nodes(data=True))
        if graph.is_multigraph():
            result.add_edges_from(graph.edges(keys=True, data=True))
        else:
            result.add_edges_from(graph.edges(data=True))

    if result is None:
        raise ValueError("cannot apply union_all to an empty list")

    return result
+
+
@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True)
def disjoint_union_all(graphs):
    """Return the disjoint union of all graphs, relabeling nodes to integers.

    Nodes are relabeled to consecutive integers: the first graph keeps
    labels ``0 .. len(G0)-1``, the next graph continues from there, and
    so on.

    Parameters
    ----------
    graphs : iterable
       Iterable of NetworkX graphs.

    Returns
    -------
    U : a graph with the same type as the first graph in the iterable

    Raises
    ------
    ValueError
       If `graphs` is an empty list.

    NetworkXError
        In case of mixed type graphs, like MultiGraph and Graph, or
        directed and undirected graphs.

    Examples
    --------
    >>> G1 = nx.Graph([(1, 2), (2, 3)])
    >>> G2 = nx.Graph([(4, 5), (5, 6)])
    >>> U = nx.disjoint_union_all([G1, G2])
    >>> list(U.nodes())
    [0, 1, 2, 3, 4, 5]
    >>> list(U.edges())
    [(0, 1), (1, 2), (3, 4), (4, 5)]

    Notes
    -----
    For operating on mixed type graphs, they should be converted to the
    same type first.

    Graph, edge, and node attributes are propagated to the union graph.
    If a graph attribute is present in multiple graphs, then the value
    from the last graph in the list with that attribute is used.
    """

    def _consecutively_relabeled(graph_iterable):
        # Relabel each graph so its integer labels start where the
        # previous graph's labels ended.
        offset = 0
        for graph in graph_iterable:
            yield nx.convert_node_labels_to_integers(graph, first_label=offset)
            offset += len(graph)

    return union_all(_consecutively_relabeled(graphs))
+
+
@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True)
def compose_all(graphs):
    """Return the composition of all graphs.

    Composition is the simple union of the node sets and edge sets.
    The node sets of the supplied graphs need not be disjoint.

    Parameters
    ----------
    graphs : iterable
       Iterable of NetworkX graphs.

    Returns
    -------
    C : a graph with the same type as the first graph in the iterable

    Raises
    ------
    ValueError
       If `graphs` is an empty list.

    NetworkXError
        In case of mixed type graphs, like MultiGraph and Graph, or
        directed and undirected graphs.

    Examples
    --------
    >>> G1 = nx.Graph([(1, 2), (2, 3)])
    >>> G2 = nx.Graph([(3, 4), (5, 6)])
    >>> C = nx.compose_all([G1, G2])
    >>> list(C.nodes())
    [1, 2, 3, 4, 5, 6]
    >>> list(C.edges())
    [(1, 2), (2, 3), (3, 4), (5, 6)]

    Notes
    -----
    For operating on mixed type graphs, they should be converted to the
    same type first.

    Graph, edge, and node attributes are propagated to the result.
    Attributes appearing in several graphs take the value from the last
    graph in the list that carries them.
    """
    result = None

    for index, graph in enumerate(graphs):
        if index == 0:
            # The result adopts the type of the first graph.
            result = graph.__class__()
        elif graph.is_directed() != result.is_directed():
            raise nx.NetworkXError("All graphs must be directed or undirected.")
        elif graph.is_multigraph() != result.is_multigraph():
            raise nx.NetworkXError("All graphs must be graphs or multigraphs.")

        # Later graphs overwrite earlier graph/node/edge attributes.
        result.graph.update(graph.graph)
        result.add_nodes_from(graph.nodes(data=True))
        if graph.is_multigraph():
            result.add_edges_from(graph.edges(keys=True, data=True))
        else:
            result.add_edges_from(graph.edges(data=True))

    if result is None:
        raise ValueError("cannot apply compose_all to an empty list")

    return result
+
+
@nx._dispatchable(graphs="[graphs]", returns_graph=True)
def intersection_all(graphs):
    """Return a new graph containing only nodes and edges present in all graphs.

    Parameters
    ----------
    graphs : iterable
       Iterable of NetworkX graphs.

    Returns
    -------
    R : a new graph with the same type as the first graph in the iterable

    Raises
    ------
    ValueError
       If `graphs` is an empty list.

    NetworkXError
        In case of mixed type graphs, like MultiGraph and Graph, or
        directed and undirected graphs.

    Notes
    -----
    For operating on mixed type graphs, they should be converted to the
    same type first.

    Attributes from the graph, nodes, and edges are not copied to the
    new graph.  The resulting graph can be updated with attributes if
    desired, e.g. by computing the minimum of a node attribute across
    all input graphs and applying it with ``nx.set_node_attributes``.

    Examples
    --------
    >>> G1 = nx.Graph([(1, 2), (2, 3)])
    >>> G2 = nx.Graph([(2, 3), (3, 4)])
    >>> R = nx.intersection_all([G1, G2])
    >>> list(R.nodes())
    [2, 3]
    >>> list(R.edges())
    [(2, 3)]
    """
    result = None

    for index, graph in enumerate(graphs):
        nodes = set(graph.nodes)
        edges = set(graph.edges)
        if not graph.is_directed():
            # Record both orientations so undirected edges compare equal
            # no matter which endpoint order each graph reports.
            if graph.is_multigraph():
                edges |= {(v, u, k) for u, v, k in edges}
            else:
                edges |= {(v, u) for u, v in edges}
        if index == 0:
            # The intersection adopts the type of the first graph.
            result = graph.__class__()
            common_nodes = nodes
            common_edges = edges
        elif graph.is_directed() != result.is_directed():
            raise nx.NetworkXError("All graphs must be directed or undirected.")
        elif graph.is_multigraph() != result.is_multigraph():
            raise nx.NetworkXError("All graphs must be graphs or multigraphs.")
        else:
            common_nodes &= nodes
            common_edges &= edges

    if result is None:
        raise ValueError("cannot apply intersection_all to an empty list")

    result.add_nodes_from(common_nodes)
    result.add_edges_from(common_edges)

    return result
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/binary.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/binary.py
new file mode 100644
index 00000000..08907bf6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/binary.py
@@ -0,0 +1,450 @@
+"""
+Operations on graphs including union, intersection, difference.
+"""
+
+import networkx as nx
+
+__all__ = [
+    "union",
+    "compose",
+    "disjoint_union",
+    "intersection",
+    "difference",
+    "symmetric_difference",
+    "full_join",
+]
+_G_H = {"G": 0, "H": 1}
+
+
@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True)
def union(G, H, rename=()):
    """Combine graphs G and H. The names of nodes must be unique.

    A name collision between the graphs will raise an exception.
    A renaming facility is provided to avoid name collisions.

    Parameters
    ----------
    G, H : graph
       A NetworkX graph

    rename : iterable, optional
       Node names of G and H can be changed by specifying the tuple
       rename=('G-','H-') (for example).  Node "u" in G is then renamed
       "G-u" and "v" in H is renamed "H-v".

    Returns
    -------
    U : A union graph with the same type as G.

    See Also
    --------
    compose
    :func:`~networkx.Graph.update`
    disjoint_union
    union_all

    Notes
    -----
    To combine graphs that have common nodes, consider compose(G, H)
    or the method, Graph.update().

    disjoint_union() is similar to union() except that it avoids name
    clashes by relabeling the nodes with sequential integers.

    Edge and node attributes are propagated from G and H to the union
    graph.  Graph attributes are also propagated, but if they are present
    in both G and H, then the value from H is used.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2)])
    >>> H = nx.Graph([(0, 1), (0, 3), (1, 3), (1, 2)])
    >>> U = nx.union(G, H, rename=("G", "H"))
    >>> U.nodes
    NodeView(('G0', 'G1', 'G2', 'H0', 'H1', 'H3', 'H2'))
    >>> U.edges
    EdgeView([('G0', 'G1'), ('G0', 'G2'), ('G1', 'G2'), ('H0', 'H1'), ('H0', 'H3'), ('H1', 'H3'), ('H1', 'H2')])
    """
    # Delegate to the n-ary variant with exactly two graphs.
    graph_pair = [G, H]
    return nx.union_all(graph_pair, rename)
+
+
@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True)
def disjoint_union(G, H):
    """Combine graphs G and H. The nodes are assumed to be unique (disjoint).

    This algorithm automatically relabels nodes to avoid name collisions.

    Parameters
    ----------
    G,H : graph
       A NetworkX graph

    Returns
    -------
    U : A union graph with the same type as G.

    See Also
    --------
    union
    compose
    :func:`~networkx.Graph.update`

    Notes
    -----
    A new graph is created, of the same class as G.  It is recommended
    that G and H be either both directed or both undirected.

    The nodes of G are relabeled 0 to len(G)-1, and the nodes of H are
    relabeled len(G) to len(G)+len(H)-1.

    Renumbering forces G and H to be disjoint, so no exception is ever
    raised for a name collision.  To preserve the check for common
    nodes, use union().

    Edge and node attributes are propagated from G and H to the union
    graph.  Graph attributes are also propagated, but if they are
    present in both G and H, then the value from H is used.

    To combine graphs that have common nodes, consider compose(G, H)
    or the method, Graph.update().

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2)])
    >>> H = nx.Graph([(0, 3), (1, 2), (2, 3)])
    >>> G.nodes[0]["key1"] = 5
    >>> H.nodes[0]["key2"] = 10
    >>> U = nx.disjoint_union(G, H)
    >>> U.nodes(data=True)
    NodeDataView({0: {'key1': 5}, 1: {}, 2: {}, 3: {'key2': 10}, 4: {}, 5: {}, 6: {}})
    >>> U.edges
    EdgeView([(0, 1), (0, 2), (1, 2), (3, 4), (4, 6), (5, 6)])
    """
    # The n-ary variant already performs the sequential relabeling.
    graph_pair = [G, H]
    return nx.disjoint_union_all(graph_pair)
+
+
@nx._dispatchable(graphs=_G_H, returns_graph=True)
def intersection(G, H):
    """Return a new graph with only the nodes and edges that exist in both G and H.

    Parameters
    ----------
    G,H : graph
       A NetworkX graph.  G and H can have different node sets but must
       be both graphs or both multigraphs.

    Raises
    ------
    NetworkXError
        If one is a MultiGraph and the other one is a graph.

    Returns
    -------
    GH : A new graph with the same type as G.

    Notes
    -----
    Attributes from the graph, nodes, and edges are not copied to the
    new graph.  If you want a new graph of the intersection of G and H
    with the attributes (including edge data) from G use
    remove_nodes_from() as follows

    >>> G = nx.path_graph(3)
    >>> H = nx.path_graph(5)
    >>> R = G.copy()
    >>> R.remove_nodes_from(n for n in G if n not in H)
    >>> R.remove_edges_from(e for e in G.edges if e not in H.edges)

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2)])
    >>> H = nx.Graph([(0, 3), (1, 2), (2, 3)])
    >>> R = nx.intersection(G, H)
    >>> R.nodes
    NodeView((0, 1, 2))
    >>> R.edges
    EdgeView([(1, 2)])
    """
    # Delegate to the n-ary variant with exactly two graphs.
    graph_pair = [G, H]
    return nx.intersection_all(graph_pair)
+
+
@nx._dispatchable(graphs=_G_H, returns_graph=True)
def difference(G, H):
    """Return a new graph that contains the edges that exist in G but not in H.

    The node sets of H and G must be the same.

    Parameters
    ----------
    G,H : graph
       A NetworkX graph.  G and H must have the same node sets.

    Returns
    -------
    D : A new graph with the same type as G.

    Raises
    ------
    NetworkXError
        If one graph is a multigraph and the other is not, or if the
        node sets differ.

    Notes
    -----
    Attributes from the graph, nodes, and edges are not copied to the
    new graph.  If you want a new graph of the difference of G and H
    with the attributes (including edge data) from G use
    remove_nodes_from() as follows:

    >>> G = nx.path_graph(3)
    >>> H = nx.path_graph(5)
    >>> R = G.copy()
    >>> R.remove_nodes_from(n for n in G if n in H)

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3)])
    >>> H = nx.Graph([(0, 1), (1, 2), (0, 3)])
    >>> R = nx.difference(G, H)
    >>> R.nodes
    NodeView((0, 1, 2, 3))
    >>> R.edges
    EdgeView([(0, 2), (1, 3)])
    """
    if G.is_multigraph() != H.is_multigraph():
        raise nx.NetworkXError("G and H must both be graphs or multigraphs.")
    # Start from all of G's nodes with no edges or data.
    R = nx.create_empty_copy(G, with_data=False)

    if set(G) != set(H):
        raise nx.NetworkXError("Node sets of graphs not equal")

    # Multigraph edges must be compared including their keys.
    edge_iter = G.edges(keys=True) if G.is_multigraph() else G.edges()
    R.add_edges_from(edge for edge in edge_iter if not H.has_edge(*edge))
    return R
+
+
@nx._dispatchable(graphs=_G_H, returns_graph=True)
def symmetric_difference(G, H):
    """Returns new graph with edges that exist in either G or H but not both.

    The node sets of H and G must be the same.

    Parameters
    ----------
    G,H : graph
       A NetworkX graph.  G and H must have the same node sets.

    Returns
    -------
    D : A new graph with the same type as G.

    Raises
    ------
    NetworkXError
        If one graph is a multigraph and the other is not, or if the
        node sets of G and H differ.

    Notes
    -----
    Attributes from the graph, nodes, and edges are not copied to the new
    graph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3)])
    >>> H = nx.Graph([(0, 1), (1, 2), (0, 3)])
    >>> R = nx.symmetric_difference(G, H)
    >>> R.nodes
    NodeView((0, 1, 2, 3))
    >>> R.edges
    EdgeView([(0, 2), (0, 3), (1, 3)])
    """
    if not G.is_multigraph() == H.is_multigraph():
        raise nx.NetworkXError("G and H must both be graphs or multigraphs.")
    # R starts with all of G's nodes (and hence all of H's, since the node
    # sets are verified equal below) and no edges.
    R = nx.create_empty_copy(G, with_data=False)

    if set(G) != set(H):
        raise nx.NetworkXError("Node sets of graphs not equal")

    # Since the node sets are equal at this point, their symmetric
    # difference is empty — only the edges need to be examined.
    # (An earlier version also computed the node symmetric difference,
    # which was always the empty set; that dead code has been removed.)

    # Edges in G but not in H.  Multigraph edges are identified by
    # (u, v, key); plain graph edges by (u, v).
    if G.is_multigraph():
        edges = G.edges(keys=True)
    else:
        edges = G.edges()
    # We could copy the data here but then this function wouldn't match
    # intersection() and difference().
    for e in edges:
        if not H.has_edge(*e):
            R.add_edge(*e)

    # Plus edges in H but not in G.
    if H.is_multigraph():
        edges = H.edges(keys=True)
    else:
        edges = H.edges()
    for e in edges:
        if not G.has_edge(*e):
            R.add_edge(*e)
    return R
+
+
@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True)
def compose(G, H):
    """Compose graph G with H by combining nodes and edges into a single graph.

    The node sets and edges sets do not need to be disjoint.

    Composing preserves the attributes of nodes and edges.
    Attribute values from H take precedent over attribute values from G.

    Parameters
    ----------
    G, H : graph
       A NetworkX graph

    Returns
    -------
    C : A new graph with the same type as G

    See Also
    --------
    :func:`~networkx.Graph.update`
    union
    disjoint_union

    Notes
    -----
    It is recommended that G and H be either both directed or both
    undirected.

    For MultiGraphs, the edges are identified by incident nodes AND
    edge-key.  This can cause surprises (i.e., edge `(1, 2)` may or may
    not be the same in two graphs) if you use MultiGraph without keeping
    track of edge keys.

    If combining the attributes of common nodes is not desired, consider
    union(), which raises an exception for name collisions.

    If you prefer another way of combining attributes of common nodes or
    edges, update them after the compose operation with
    ``nx.set_node_attributes`` / ``nx.set_edge_attributes``.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2)])
    >>> H = nx.Graph([(0, 1), (1, 2)])
    >>> R = nx.compose(G, H)
    >>> R.nodes
    NodeView((0, 1, 2))
    >>> R.edges
    EdgeView([(0, 1), (0, 2), (1, 2)])
    """
    # Delegate to the n-ary variant with exactly two graphs.
    graph_pair = [G, H]
    return nx.compose_all(graph_pair)
+
+
@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True)
def full_join(G, H, rename=(None, None)):
    """Return the full join of graphs G and H.

    Full join is the union of G and H in which all edges between
    G and H are added.  The node sets of G and H must be disjoint,
    otherwise an exception is raised.

    Parameters
    ----------
    G, H : graph
       A NetworkX graph

    rename : tuple , default=(None, None)
       Node names of G and H can be changed by specifying the tuple
       rename=('G-','H-') (for example).  Node "u" in G is then renamed
       "G-u" and "v" in H is renamed "H-v".

    Returns
    -------
    U : The full join graph with the same type as G.

    Notes
    -----
    It is recommended that G and H be either both directed or both
    undirected.

    If G is directed, then edges from G to H are added as well as from
    H to G.

    Note that full_join() does not produce parallel edges for
    MultiGraphs.

    The full join operation of graphs G and H is the same as getting
    their complement, performing a disjoint union, and finally getting
    the complement of the resulting graph.

    Graph, edge, and node attributes are propagated from G and H
    to the union graph.  If a graph attribute is present in both
    G and H the value from H is used.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2)])
    >>> H = nx.Graph([(3, 4)])
    >>> R = nx.full_join(G, H, rename=("G", "H"))
    >>> R.nodes
    NodeView(('G0', 'G1', 'G2', 'H3', 'H4'))
    >>> R.edges
    EdgeView([('G0', 'G1'), ('G0', 'G2'), ('G0', 'H3'), ('G0', 'H4'), ('G1', 'H3'), ('G1', 'H4'), ('G2', 'H3'), ('G2', 'H4'), ('H3', 'H4')])

    See Also
    --------
    union
    disjoint_union
    """
    # Start from the (disjointness-checked) union of the two graphs.
    R = union(G, H, rename)

    def _maybe_relabel(graph, prefix):
        # Mirror the relabeling used by union() so we can enumerate the
        # renamed node sets of each side.
        if prefix is None:
            return graph
        return nx.relabel_nodes(graph, lambda node: f"{prefix}{node}")

    left = _maybe_relabel(G, rename[0])
    right = _maybe_relabel(H, rename[1])

    # Connect every node of G to every node of H ...
    for u in left:
        for v in right:
            R.add_edge(u, v)
    # ... and, for directed graphs, also in the opposite direction.
    if R.is_directed():
        for u in right:
            for v in left:
                R.add_edge(u, v)

    return R
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/product.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/product.py
new file mode 100644
index 00000000..28ca78bf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/product.py
@@ -0,0 +1,633 @@
+"""
+Graph products.
+"""
+
+from itertools import product
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = [
+    "tensor_product",
+    "cartesian_product",
+    "lexicographic_product",
+    "strong_product",
+    "power",
+    "rooted_product",
+    "corona_product",
+    "modular_product",
+]
+_G_H = {"G": 0, "H": 1}
+
+
+def _dict_product(d1, d2):
+    return {k: (d1.get(k), d2.get(k)) for k in set(d1) | set(d2)}
+
+
+# Generators for producing graph products
def _node_product(G, H):
    """Yield ((u, v), merged-attrs) for every node pair of G x H."""
    for g_node, h_node in product(G, H):
        merged = _dict_product(G.nodes[g_node], H.nodes[h_node])
        yield (g_node, h_node), merged
+
+
def _directed_edges_cross_edges(G, H):
    """Yield product edges ((u, x), (v, y)) for every pair of edges
    u->v in G and x->y in H, with merged edge data.

    Multigraph edges carry a key: the key of whichever input is a
    multigraph, or the pair of keys when both are.
    """
    G_is_multi = G.is_multigraph()
    H_is_multi = H.is_multigraph()
    if not G_is_multi and not H_is_multi:
        for u, v, g_data in G.edges(data=True):
            for x, y, h_data in H.edges(data=True):
                yield (u, x), (v, y), _dict_product(g_data, h_data)
    elif not G_is_multi:  # H alone is a multigraph
        for u, v, g_data in G.edges(data=True):
            for x, y, h_key, h_data in H.edges(data=True, keys=True):
                yield (u, x), (v, y), h_key, _dict_product(g_data, h_data)
    elif not H_is_multi:  # G alone is a multigraph
        for u, v, g_key, g_data in G.edges(data=True, keys=True):
            for x, y, h_data in H.edges(data=True):
                yield (u, x), (v, y), g_key, _dict_product(g_data, h_data)
    else:  # both are multigraphs
        for u, v, g_key, g_data in G.edges(data=True, keys=True):
            for x, y, h_key, h_data in H.edges(data=True, keys=True):
                yield (u, x), (v, y), (g_key, h_key), _dict_product(g_data, h_data)
+
+
def _undirected_edges_cross_edges(G, H):
    """Yield the reversed-orientation product edges ((v, x), (u, y)) for
    every pair of edges u-v in G and x-y in H, with merged edge data.

    Used together with `_directed_edges_cross_edges` to cover both
    orientations in undirected products.
    """
    G_is_multi = G.is_multigraph()
    H_is_multi = H.is_multigraph()
    if not G_is_multi and not H_is_multi:
        for u, v, g_data in G.edges(data=True):
            for x, y, h_data in H.edges(data=True):
                yield (v, x), (u, y), _dict_product(g_data, h_data)
    elif not G_is_multi:  # H alone is a multigraph
        for u, v, g_data in G.edges(data=True):
            for x, y, h_key, h_data in H.edges(data=True, keys=True):
                yield (v, x), (u, y), h_key, _dict_product(g_data, h_data)
    elif not H_is_multi:  # G alone is a multigraph
        for u, v, g_key, g_data in G.edges(data=True, keys=True):
            for x, y, h_data in H.edges(data=True):
                yield (v, x), (u, y), g_key, _dict_product(g_data, h_data)
    else:  # both are multigraphs
        for u, v, g_key, g_data in G.edges(data=True, keys=True):
            for x, y, h_key, h_data in H.edges(data=True, keys=True):
                yield (v, x), (u, y), (g_key, h_key), _dict_product(g_data, h_data)
+
+
+def _edges_cross_nodes(G, H):
+    if G.is_multigraph():
+        for u, v, k, d in G.edges(data=True, keys=True):
+            for x in H:
+                yield (u, x), (v, x), k, d
+    else:
+        for u, v, d in G.edges(data=True):
+            for x in H:
+                if H.is_multigraph():
+                    yield (u, x), (v, x), None, d
+                else:
+                    yield (u, x), (v, x), d
+
+
+def _nodes_cross_edges(G, H):
+    if H.is_multigraph():
+        for x in G:
+            for u, v, k, d in H.edges(data=True, keys=True):
+                yield (x, u), (x, v), k, d
+    else:
+        for x in G:
+            for u, v, d in H.edges(data=True):
+                if G.is_multigraph():
+                    yield (x, u), (x, v), None, d
+                else:
+                    yield (x, u), (x, v), d
+
+
+def _edges_cross_nodes_and_nodes(G, H):
+    if G.is_multigraph():
+        for u, v, k, d in G.edges(data=True, keys=True):
+            for x in H:
+                for y in H:
+                    yield (u, x), (v, y), k, d
+    else:
+        for u, v, d in G.edges(data=True):
+            for x in H:
+                for y in H:
+                    if H.is_multigraph():
+                        yield (u, x), (v, y), None, d
+                    else:
+                        yield (u, x), (v, y), d
+
+
def _init_product_graph(G, H):
    """Create the empty graph of the right class for a product of G and H:
    a multigraph if either input is one, directed if both inputs are."""
    if G.is_directed() != H.is_directed():
        msg = "G and H must be both directed or both undirected"
        raise nx.NetworkXError(msg)
    product_graph = nx.MultiGraph() if G.is_multigraph() or H.is_multigraph() else nx.Graph()
    if G.is_directed():
        product_graph = product_graph.to_directed()
    return product_graph
+
+
@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True)
def tensor_product(G, H):
    r"""Return the tensor product of G and H.

    The tensor product $P$ of the graphs $G$ and $H$ has node set
    $V(P)=V(G) \times V(H)$, and an edge $((u,v), (x,y))$ if and only if
    $(u,x)$ is an edge in $G$ and $(v,y)$ is an edge in $H$.

    Tensor product is sometimes also referred to as the categorical
    product, direct product, cardinal product or conjunction.

    Parameters
    ----------
    G, H: graphs
     Networkx graphs.

    Returns
    -------
    P: NetworkX graph
     The tensor product of G and H. P will be a multi-graph if either G
     or H is a multi-graph, will be a directed if G and H are directed,
     and undirected if G and H are undirected.

    Raises
    ------
    NetworkXError
     If G and H are not both directed or both undirected.

    Notes
    -----
    Node attributes in P are two-tuple of the G and H node attributes.
    Missing attributes are assigned None.

    Edge attributes and edge keys (for multigraphs) are also copied to
    the new product graph.

    Examples
    --------
    >>> G = nx.Graph()
    >>> H = nx.Graph()
    >>> G.add_node(0, a1=True)
    >>> H.add_node("a", a2="Spam")
    >>> P = nx.tensor_product(G, H)
    >>> list(P)
    [(0, 'a')]
    """
    P = _init_product_graph(G, H)
    P.add_nodes_from(_node_product(G, H))
    # One orientation of each edge pair; for undirected products, the
    # reversed orientation is added as well.
    P.add_edges_from(_directed_edges_cross_edges(G, H))
    if not P.is_directed():
        P.add_edges_from(_undirected_edges_cross_edges(G, H))
    return P
+
+
@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True)
def cartesian_product(G, H):
    r"""Return the Cartesian product of G and H.

    The Cartesian product $P$ of the graphs $G$ and $H$ has node set
    $V(P)=V(G) \times V(H)$, and an edge $((u,v),(x,y))$ if and only if
    either $u=x$ and $(v,y)$ is an edge in $H$, or $v=y$ and $(u,x)$ is
    an edge in $G$.

    Parameters
    ----------
    G, H: graphs
     Networkx graphs.

    Returns
    -------
    P: NetworkX graph
     The Cartesian product of G and H. P will be a multi-graph if either G
     or H is a multi-graph. Will be a directed if G and H are directed,
     and undirected if G and H are undirected.

    Raises
    ------
    NetworkXError
     If G and H are not both directed or both undirected.

    Notes
    -----
    Node attributes in P are two-tuple of the G and H node attributes.
    Missing attributes are assigned None.

    Edge attributes and edge keys (for multigraphs) are also copied to
    the new product graph.

    Examples
    --------
    >>> G = nx.Graph()
    >>> H = nx.Graph()
    >>> G.add_node(0, a1=True)
    >>> H.add_node("a", a2="Spam")
    >>> P = nx.cartesian_product(G, H)
    >>> list(P)
    [(0, 'a')]
    """
    P = _init_product_graph(G, H)
    P.add_nodes_from(_node_product(G, H))
    # Edges moving along G while the H coordinate stays fixed ...
    P.add_edges_from(_edges_cross_nodes(G, H))
    # ... and edges moving along H while the G coordinate stays fixed.
    P.add_edges_from(_nodes_cross_edges(G, H))
    return P
+
+
@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True)
def lexicographic_product(G, H):
    r"""Return the lexicographic product of G and H.

    The lexicographical product $P$ of the graphs $G$ and $H$ has node
    set $V(P)=V(G) \times V(H)$, and an edge $((u,v), (x,y))$ if and
    only if $(u,x)$ is an edge in $G$, or $u=x$ and $(v,y)$ is an edge
    in $H$.

    Parameters
    ----------
    G, H: graphs
     Networkx graphs.

    Returns
    -------
    P: NetworkX graph
     The lexicographic product of G and H. P will be a multi-graph if
     either G or H is a multi-graph. Will be a directed if G and H are
     directed, and undirected if G and H are undirected.

    Raises
    ------
    NetworkXError
     If G and H are not both directed or both undirected.

    Notes
    -----
    Node attributes in P are two-tuple of the G and H node attributes.
    Missing attributes are assigned None.

    Edge attributes and edge keys (for multigraphs) are also copied to
    the new product graph.

    Examples
    --------
    >>> G = nx.Graph()
    >>> H = nx.Graph()
    >>> G.add_node(0, a1=True)
    >>> H.add_node("a", a2="Spam")
    >>> P = nx.lexicographic_product(G, H)
    >>> list(P)
    [(0, 'a')]
    """
    P = _init_product_graph(G, H)
    P.add_nodes_from(_node_product(G, H))
    # Every G edge connects all H-pairs, regardless of the H coordinates.
    P.add_edges_from(_edges_cross_nodes_and_nodes(G, H))
    # Within a fixed G coordinate, connect along the edges of H.
    P.add_edges_from(_nodes_cross_edges(G, H))
    return P
+
+
@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True)
def strong_product(G, H):
    r"""Returns the strong product of G and H.

    The strong product $P$ of the graphs $G$ and $H$ has a node set that
    is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$.
    $P$ has an edge $((u,x), (v,y))$ if any of the following conditions
    are met:

    - $u=v$ and $(x,y)$ is an edge in $H$
    - $x=y$ and $(u,v)$ is an edge in $G$
    - $(u,v)$ is an edge in $G$ and $(x,y)$ is an edge in $H$

    Parameters
    ----------
    G, H: graphs
     Networkx graphs.

    Returns
    -------
    P: NetworkX graph
     The strong product of G and H. P will be a multigraph if either G
     or H is a multigraph, directed if G and H are directed, and
     undirected if G and H are undirected.

    Raises
    ------
    NetworkXError
     If G and H are not both directed or both undirected.

    Notes
    -----
    Node attributes in P are two-tuples of the G and H node attributes.
    Missing attributes are assigned None.

    Examples
    --------
    >>> G = nx.Graph()
    >>> H = nx.Graph()
    >>> G.add_node(0, a1=True)
    >>> H.add_node("a", a2="Spam")
    >>> P = nx.strong_product(G, H)
    >>> list(P)
    [(0, 'a')]

    Edge attributes and edge keys (for multigraphs) are also copied to the
    new product graph
    """
    GH = _init_product_graph(G, H)
    GH.add_nodes_from(_node_product(G, H))
    # Cartesian-product edges: one coordinate fixed, the other adjacent.
    GH.add_edges_from(_nodes_cross_edges(G, H))
    GH.add_edges_from(_edges_cross_nodes(G, H))
    # Tensor-product edges: both coordinates adjacent simultaneously.
    GH.add_edges_from(_directed_edges_cross_edges(G, H))
    if not GH.is_directed():
        # For undirected graphs the cross edges must be added in both
        # relative orientations.
        GH.add_edges_from(_undirected_edges_cross_edges(G, H))
    return GH
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(returns_graph=True)
def power(G, k):
    """Returns the specified power of a graph.

    The $k$th power of a simple graph $G$, denoted $G^k$, is a
    graph on the same set of nodes in which two distinct nodes $u$ and
    $v$ are adjacent in $G^k$ if and only if the shortest path
    distance between $u$ and $v$ in $G$ is at most $k$.

    Parameters
    ----------
    G : graph
        A NetworkX simple graph object.

    k : positive integer
        The power to which to raise the graph `G`.

    Returns
    -------
    NetworkX simple graph
        `G` to the power `k`.

    Raises
    ------
    ValueError
        If the exponent `k` is not positive.

    NetworkXNotImplemented
        If `G` is not a simple graph.

    Examples
    --------
    The number of edges will never decrease when taking successive
    powers:

    >>> G = nx.path_graph(4)
    >>> list(nx.power(G, 2).edges)
    [(0, 1), (0, 2), (1, 2), (1, 3), (2, 3)]
    >>> list(nx.power(G, 3).edges)
    [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]

    The `k` th power of a cycle graph on *n* nodes is the complete graph
    on *n* nodes, if `k` is at least ``n // 2``:

    >>> G = nx.cycle_graph(5)
    >>> H = nx.complete_graph(5)
    >>> nx.is_isomorphic(nx.power(G, 2), H)
    True
    >>> G = nx.cycle_graph(8)
    >>> H = nx.complete_graph(8)
    >>> nx.is_isomorphic(nx.power(G, 4), H)
    True

    References
    ----------
    .. [1] J. A. Bondy, U. S. R. Murty, *Graph Theory*. Springer, 2008.

    Notes
    -----
    This definition of "power graph" comes from Exercise 3.1.6 of
    *Graph Theory* by Bondy and Murty [1]_.

    """
    if k <= 0:
        raise ValueError("k must be a positive integer")
    H = nx.Graph()
    H.add_nodes_from(G)
    # For every node, run a breadth-first search truncated at depth `k`;
    # every other node first reached within `k` hops becomes a neighbor of
    # `source` in the power graph.  Self loops are ignored because `source`
    # is pre-seeded into `dist` and therefore never re-added.
    for source in G:
        dist = {source: 0}  # hop count at which each node was first reached
        frontier = [source]
        depth = 0
        while frontier and depth < k:
            depth += 1
            next_frontier = []
            for u in frontier:
                for w in G[u]:
                    if w not in dist:
                        dist[w] = depth
                        next_frontier.append(w)
            frontier = next_frontier
        H.add_edges_from((source, v) for v in dist if v != source)
    return H
+
+
@not_implemented_for("multigraph")
@nx._dispatchable(graphs=_G_H, returns_graph=True)
def rooted_product(G, H, root):
    """Return the rooted product of graphs G and H rooted at root in H.

    The rooted product attaches one copy of `H` to every node of `G`: the
    copy belonging to a node of `G` has its `root` identified with that
    node.  Product nodes are labelled ``(g, h)`` for ``g`` in `G` and ``h``
    in `H`, so the result is a subgraph of the Cartesian product.

    Parameters
    ----------
    G,H : graph
       A NetworkX graph
    root : node
       A node in H

    Returns
    -------
    R : The rooted product of G and H with a specified root in H

    Notes
    -----
    The nodes of R are the Cartesian Product of the nodes of G and H.
    The nodes of G and H are not relabeled.
    """
    if root not in H:
        raise nx.NodeNotFound("root must be a vertex in H")

    R = nx.Graph()
    R.add_nodes_from(product(G, H))

    # The copy of G lives on the root-labelled node of every H copy.
    R.add_edges_from(((u, root), (v, root)) for u, v in G.edges())
    # One full copy of H hangs off each node of G.
    R.add_edges_from(((g_node, a), (g_node, b)) for g_node in G for a, b in H.edges())

    return R
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(graphs=_G_H, returns_graph=True)
def corona_product(G, H):
    r"""Returns the Corona product of G and H.

    The corona product of $G$ and $H$ is the graph $C = G \circ H$ obtained by
    taking one copy of $G$, called the center graph, $|V(G)|$ copies of $H$,
    called the outer graph, and making the $i$-th vertex of $G$ adjacent to
    every vertex of the $i$-th copy of $H$, where $1 ≤ i ≤ |V(G)|$.

    Parameters
    ----------
    G, H: NetworkX graphs
        The graphs to take the corona product of.
        `G` is the center graph and `H` is the outer graph

    Returns
    -------
    C: NetworkX graph
        The Corona product of G and H.

    Raises
    ------
    NetworkXError
        If G and H are not both directed or both undirected.

    Examples
    --------
    >>> G = nx.cycle_graph(4)
    >>> H = nx.path_graph(2)
    >>> C = nx.corona_product(G, H)
    >>> list(C)
    [0, 1, 2, 3, (0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1), (3, 0), (3, 1)]
    >>> print(C)
    Graph with 12 nodes and 16 edges

    References
    ----------
    [1] M. Tavakoli, F. Rahbarnia, and A. R. Ashrafi,
        "Studying the corona product of graphs under some graph invariants,"
        Transactions on Combinatorics, vol. 3, no. 3, pp. 43–49, Sep. 2014,
        doi: 10.22108/toc.2014.5542.
    [2] A. Faraji, "Corona Product in Graph Theory," Ali Faraji, May 11, 2021.
        https://blog.alifaraji.ir/math/graph-theory/corona-product.html (accessed Dec. 07, 2021).
    """
    C = _init_product_graph(G, H)
    # Start with a verbatim copy of the center graph G.
    C.add_nodes_from(G)
    C.add_edges_from(G.edges)

    for center in G:
        # The copy of H belonging to `center`, with nodes labelled (center, h).
        C.add_nodes_from((center, outer) for outer in H)

        # Reproduce H's edges (with their attributes) inside this copy.
        C.add_edges_from(
            ((center, u), (center, v), data) for u, v, data in H.edges.data()
        )

        # Join the center node to every node of its copy of H.
        C.add_edges_from((center, (center, outer)) for outer in H)

    return C
+
+
@nx._dispatchable(
    graphs=_G_H, preserve_edge_attrs=True, preserve_node_attrs=True, returns_graph=True
)
def modular_product(G, H):
    r"""Returns the Modular product of G and H.

    The modular product of `G` and `H` is the graph $M = G \nabla H$ on the
    node set $V(M) = V(G) \times V(H)$ (the Cartesian product of the node
    sets).  M contains an edge ((u, v), (x, y)):

    - if u is adjacent to x in `G` and v is adjacent to y in `H`, or
    - if u is not adjacent to x in `G` and v is not adjacent to y in `H`.

    More formally::

        E(M) = {((u, v), (x, y)) | ((u, x) in E(G) and (v, y) in E(H)) or
                                   ((u, x) not in E(G) and (v, y) not in E(H))}

    Parameters
    ----------
    G, H: NetworkX graphs
        The graphs to take the modular product of.

    Returns
    -------
    M: NetworkX graph
        The Modular product of `G` and `H`.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not a simple graph.

    Examples
    --------
    >>> G = nx.cycle_graph(4)
    >>> H = nx.path_graph(2)
    >>> M = nx.modular_product(G, H)
    >>> list(M)
    [(0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1), (3, 0), (3, 1)]
    >>> print(M)
    Graph with 8 nodes and 8 edges

    Notes
    -----
    The *modular product* is defined in [1]_ and was first
    introduced as the *weak modular product*.

    The modular product reduces the problem of counting isomorphic subgraphs
    in `G` and `H` to the problem of counting cliques in M. The subgraphs of
    `G` and `H` that are induced by the nodes of a clique in M are
    isomorphic [2]_ [3]_.

    References
    ----------
    .. [1] R. Hammack, W. Imrich, and S. Klavžar,
        "Handbook of Product Graphs", CRC Press, 2011.

    .. [2] H. G. Barrow and R. M. Burstall,
        "Subgraph isomorphism, matching relational structures and maximal
        cliques", Information Processing Letters, vol. 4, issue 4, pp. 83-84,
        1976, https://doi.org/10.1016/0020-0190(76)90049-1.

    .. [3] V. G. Vizing, "Reduction of the problem of isomorphism and isomorphic
        entrance to the task of finding the nondensity of a graph." Proc. Third
        All-Union Conference on Problems of Theoretical Cybernetics. 1974.
    """
    if G.is_directed() or H.is_directed():
        raise nx.NetworkXNotImplemented(
            "Modular product not implemented for directed graphs"
        )
    if G.is_multigraph() or H.is_multigraph():
        raise nx.NetworkXNotImplemented(
            "Modular product not implemented for multigraphs"
        )

    M = _init_product_graph(G, H)
    M.add_nodes_from(_node_product(G, H))

    def _link_edge_pairs(A, B):
        # Connect (a1, b1)-(a2, b2) for every pair of an A-edge and a B-edge,
        # in both relative orientations, merging the two attribute dicts.
        for a1, a2, a_data in A.edges(data=True):
            for b1, b2, b_data in B.edges(data=True):
                attrs = _dict_product(a_data, b_data)
                M.add_edge((a1, b1), (a2, b2), **attrs)
                M.add_edge((a2, b1), (a1, b2), **attrs)

    # First condition: edges present in both factors.
    _link_edge_pairs(G, H)
    # Second condition: non-edges in both factors, via the complements.
    _link_edge_pairs(nx.complement(G), nx.complement(H))

    return M
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_all.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_all.py
new file mode 100644
index 00000000..8ec29c15
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_all.py
@@ -0,0 +1,328 @@
+import pytest
+
+import networkx as nx
+from networkx.utils import edges_equal
+
+
def test_union_all_attributes():
    """Node attrs follow renamed nodes; later graphs' graph attrs win in union_all."""
    g = nx.Graph()
    g.add_node(0, x=4)
    g.add_node(1, x=5)
    g.add_edge(0, 1, size=5)
    g.graph["name"] = "g"

    h = g.copy()
    h.graph["name"] = "h"
    h.graph["attr"] = "attr"
    h.nodes[0]["x"] = 7

    j = g.copy()
    j.graph["name"] = "j"
    j.graph["attr"] = "attr"
    j.nodes[0]["x"] = 7

    ghj = nx.union_all([g, h, j], rename=("g", "h", "j"))
    assert set(ghj.nodes()) == {"h0", "h1", "g0", "g1", "j0", "j1"}
    for n in ghj:
        # A renamed node like "g0" splits into its prefix and original label.
        graph, node = n
        assert ghj.nodes[n] == eval(graph).nodes[int(node)]

    assert ghj.graph["attr"] == "attr"
    assert ghj.graph["name"] == "j"  # j graph attributes take precedence
+
+
def test_intersection_all():
    """intersection_all keeps only common edges; graph attributes are dropped."""
    G = nx.Graph()
    H = nx.Graph()
    R = nx.Graph(awesome=True)
    G.add_nodes_from([1, 2, 3, 4])
    G.add_edge(1, 2)
    G.add_edge(2, 3)
    H.add_nodes_from([1, 2, 3, 4])
    H.add_edge(2, 3)
    H.add_edge(3, 4)
    R.add_nodes_from([1, 2, 3, 4])
    R.add_edge(2, 3)
    R.add_edge(4, 1)
    I = nx.intersection_all([G, H, R])
    assert set(I.nodes()) == {1, 2, 3, 4}
    assert sorted(I.edges()) == [(2, 3)]
    assert I.graph == {}
+
+
def test_intersection_all_different_node_sets():
    """intersection_all restricts the result to nodes common to all graphs."""
    G = nx.Graph()
    H = nx.Graph()
    R = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 6, 7])
    G.add_edge(1, 2)
    G.add_edge(2, 3)
    G.add_edge(6, 7)
    H.add_nodes_from([1, 2, 3, 4])
    H.add_edge(2, 3)
    H.add_edge(3, 4)
    R.add_nodes_from([1, 2, 3, 4, 8, 9])
    R.add_edge(2, 3)
    R.add_edge(4, 1)
    R.add_edge(8, 9)
    I = nx.intersection_all([G, H, R])
    assert set(I.nodes()) == {1, 2, 3, 4}
    assert sorted(I.edges()) == [(2, 3)]
+
+
def test_intersection_all_attributes():
    """Graphs differing only in attributes intersect to the full node/edge sets."""
    g = nx.Graph()
    g.add_node(0, x=4)
    g.add_node(1, x=5)
    g.add_edge(0, 1, size=5)
    g.graph["name"] = "g"

    h = g.copy()
    h.graph["name"] = "h"
    h.graph["attr"] = "attr"
    h.nodes[0]["x"] = 7

    gh = nx.intersection_all([g, h])
    assert set(gh.nodes()) == set(g.nodes())
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == sorted(g.edges())
+
+
def test_intersection_all_attributes_different_node_sets():
    """A node present in only one graph is excluded from the intersection."""
    g = nx.Graph()
    g.add_node(0, x=4)
    g.add_node(1, x=5)
    g.add_edge(0, 1, size=5)
    g.graph["name"] = "g"

    h = g.copy()
    g.add_node(2)  # node 2 is in g only, so it must not appear in the result
    h.graph["name"] = "h"
    h.graph["attr"] = "attr"
    h.nodes[0]["x"] = 7

    gh = nx.intersection_all([g, h])
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == sorted(g.edges())
+
+
def test_intersection_all_multigraph_attributes():
    """Multigraph intersection matches parallel edges by key."""
    g = nx.MultiGraph()
    g.add_edge(0, 1, key=0)
    g.add_edge(0, 1, key=1)
    g.add_edge(0, 1, key=2)
    h = nx.MultiGraph()
    h.add_edge(0, 1, key=0)
    h.add_edge(0, 1, key=3)
    gh = nx.intersection_all([g, h])
    assert set(gh.nodes()) == set(g.nodes())
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == [(0, 1)]
    assert sorted(gh.edges(keys=True)) == [(0, 1, 0)]  # only key 0 is shared
+
+
def test_intersection_all_multigraph_attributes_different_node_sets():
    """Key-wise multigraph intersection restricted to the common node set."""
    g = nx.MultiGraph()
    g.add_edge(0, 1, key=0)
    g.add_edge(0, 1, key=1)
    g.add_edge(0, 1, key=2)
    g.add_edge(1, 2, key=1)
    g.add_edge(1, 2, key=2)
    h = nx.MultiGraph()
    h.add_edge(0, 1, key=0)
    h.add_edge(0, 1, key=2)
    h.add_edge(0, 1, key=3)
    gh = nx.intersection_all([g, h])
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == [(0, 1), (0, 1)]
    assert sorted(gh.edges(keys=True)) == [(0, 1, 0), (0, 1, 2)]
+
+
def test_intersection_all_digraph():
    """Directed intersection is orientation-sensitive: (1, 2) != (2, 1)."""
    g = nx.DiGraph()
    g.add_edges_from([(1, 2), (2, 3)])
    h = nx.DiGraph()
    h.add_edges_from([(2, 1), (2, 3)])
    gh = nx.intersection_all([g, h])
    assert sorted(gh.edges()) == [(2, 3)]
+
+
def test_union_all_and_compose_all():
    """union_all/compose_all/disjoint_union_all on disjoint graphs: same edges,
    renaming via string prefixes, and integer relabeling for disjoint union."""
    K3 = nx.complete_graph(3)
    P3 = nx.path_graph(3)

    G1 = nx.DiGraph()
    G1.add_edge("A", "B")
    G1.add_edge("A", "C")
    G1.add_edge("A", "D")
    G2 = nx.DiGraph()
    G2.add_edge("1", "2")
    G2.add_edge("1", "3")
    G2.add_edge("1", "4")

    # On node-disjoint graphs union and compose agree.
    G = nx.union_all([G1, G2])
    H = nx.compose_all([G1, G2])
    assert edges_equal(G.edges(), H.edges())
    assert not G.has_edge("A", "1")
    pytest.raises(nx.NetworkXError, nx.union, K3, P3)
    H1 = nx.union_all([H, G1], rename=("H", "G1"))
    assert sorted(H1.nodes()) == [
        "G1A",
        "G1B",
        "G1C",
        "G1D",
        "H1",
        "H2",
        "H3",
        "H4",
        "HA",
        "HB",
        "HC",
        "HD",
    ]

    H2 = nx.union_all([H, G2], rename=("H", ""))
    assert sorted(H2.nodes()) == [
        "1",
        "2",
        "3",
        "4",
        "H1",
        "H2",
        "H3",
        "H4",
        "HA",
        "HB",
        "HC",
        "HD",
    ]

    assert not H1.has_edge("NB", "NA")

    # Composing a graph with itself is a no-op.
    G = nx.compose_all([G, G])
    assert edges_equal(G.edges(), H.edges())

    G2 = nx.union_all([G2, G2], rename=("", "copy"))
    assert sorted(G2.nodes()) == [
        "1",
        "2",
        "3",
        "4",
        "copy1",
        "copy2",
        "copy3",
        "copy4",
    ]

    assert sorted(G2.neighbors("copy4")) == []
    assert sorted(G2.neighbors("copy1")) == ["copy2", "copy3", "copy4"]
    assert len(G) == 8
    assert nx.number_of_edges(G) == 6

    E = nx.disjoint_union_all([G, G])
    assert len(E) == 16
    assert nx.number_of_edges(E) == 12

    # disjoint_union_all relabels nodes to consecutive integers.
    E = nx.disjoint_union_all([G1, G2])
    assert sorted(E.nodes()) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]

    G1 = nx.DiGraph()
    G1.add_edge("A", "B")
    G2 = nx.DiGraph()
    G2.add_edge(1, 2)
    G3 = nx.DiGraph()
    G3.add_edge(11, 22)
    G4 = nx.union_all([G1, G2, G3], rename=("G1", "G2", "G3"))
    assert sorted(G4.nodes()) == ["G1A", "G1B", "G21", "G22", "G311", "G322"]
+
+
def test_union_all_multigraph():
    """union_all of multigraphs preserves parallel edges and their keys."""
    G = nx.MultiGraph()
    G.add_edge(1, 2, key=0)
    G.add_edge(1, 2, key=1)
    H = nx.MultiGraph()
    H.add_edge(3, 4, key=0)
    H.add_edge(3, 4, key=1)
    GH = nx.union_all([G, H])
    assert set(GH) == set(G) | set(H)
    assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True))
+
+
def test_input_output():
    """The *_all operators must not consume or mutate the input list."""
    l = [nx.Graph([(1, 2)]), nx.Graph([(3, 4)], awesome=True)]
    U = nx.disjoint_union_all(l)
    assert len(l) == 2
    assert U.graph["awesome"]
    C = nx.compose_all(l)
    assert len(l) == 2
    l = [nx.Graph([(1, 2)]), nx.Graph([(1, 2)])]
    R = nx.intersection_all(l)
    assert len(l) == 2
+
+
def test_mixed_type_union():
    """union_all rejects mixing graph types (multi/plain, directed/undirected)."""
    with pytest.raises(nx.NetworkXError):
        G = nx.Graph()
        H = nx.MultiGraph()
        I = nx.Graph()
        U = nx.union_all([G, H, I])
    with pytest.raises(nx.NetworkXError):
        X = nx.Graph()
        Y = nx.DiGraph()
        XY = nx.union_all([X, Y])
+
+
def test_mixed_type_disjoint_union():
    """disjoint_union_all rejects mixed graph types."""
    with pytest.raises(nx.NetworkXError):
        G = nx.Graph()
        H = nx.MultiGraph()
        I = nx.Graph()
        U = nx.disjoint_union_all([G, H, I])
    with pytest.raises(nx.NetworkXError):
        X = nx.Graph()
        Y = nx.DiGraph()
        XY = nx.disjoint_union_all([X, Y])
+
+
def test_mixed_type_intersection():
    """intersection_all rejects mixed graph types."""
    with pytest.raises(nx.NetworkXError):
        G = nx.Graph()
        H = nx.MultiGraph()
        I = nx.Graph()
        U = nx.intersection_all([G, H, I])
    with pytest.raises(nx.NetworkXError):
        X = nx.Graph()
        Y = nx.DiGraph()
        XY = nx.intersection_all([X, Y])
+
+
def test_mixed_type_compose():
    """compose_all rejects mixed graph types."""
    with pytest.raises(nx.NetworkXError):
        G = nx.Graph()
        H = nx.MultiGraph()
        I = nx.Graph()
        U = nx.compose_all([G, H, I])
    with pytest.raises(nx.NetworkXError):
        X = nx.Graph()
        Y = nx.DiGraph()
        XY = nx.compose_all([X, Y])
+
+
def test_empty_union():
    """An empty graph list is a ValueError, not an empty result."""
    with pytest.raises(ValueError):
        nx.union_all([])
+
+
def test_empty_disjoint_union():
    """An empty graph list is a ValueError for disjoint_union_all."""
    with pytest.raises(ValueError):
        nx.disjoint_union_all([])
+
+
def test_empty_compose_all():
    """An empty graph list is a ValueError for compose_all."""
    with pytest.raises(ValueError):
        nx.compose_all([])
+
+
def test_empty_intersection_all():
    """An empty graph list is a ValueError for intersection_all."""
    with pytest.raises(ValueError):
        nx.intersection_all([])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_binary.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_binary.py
new file mode 100644
index 00000000..c907cd6f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_binary.py
@@ -0,0 +1,453 @@
+import os
+
+import pytest
+
+import networkx as nx
+from networkx.utils import edges_equal
+
+
def test_union_attributes():
    """Node attrs follow renamed nodes; the second graph's graph attrs win."""
    g = nx.Graph()
    g.add_node(0, x=4)
    g.add_node(1, x=5)
    g.add_edge(0, 1, size=5)
    g.graph["name"] = "g"

    h = g.copy()
    h.graph["name"] = "h"
    h.graph["attr"] = "attr"
    h.nodes[0]["x"] = 7

    gh = nx.union(g, h, rename=("g", "h"))
    assert set(gh.nodes()) == {"h0", "h1", "g0", "g1"}
    for n in gh:
        # A renamed node like "g0" splits into its prefix and original label.
        graph, node = n
        assert gh.nodes[n] == eval(graph).nodes[int(node)]

    assert gh.graph["attr"] == "attr"
    assert gh.graph["name"] == "h"  # h graph attributes take precedence
+
+
def test_intersection():
    """intersection keeps all shared nodes and only the shared edges."""
    G = nx.Graph()
    H = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4])
    G.add_edge(1, 2)
    G.add_edge(2, 3)
    H.add_nodes_from([1, 2, 3, 4])
    H.add_edge(2, 3)
    H.add_edge(3, 4)
    I = nx.intersection(G, H)
    assert set(I.nodes()) == {1, 2, 3, 4}
    assert sorted(I.edges()) == [(2, 3)]
+
+
def test_intersection_node_sets_different():
    """intersection restricts to the common node set (drops 5, 6, 7)."""
    G = nx.Graph()
    H = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4, 7])
    G.add_edge(1, 2)
    G.add_edge(2, 3)
    H.add_nodes_from([1, 2, 3, 4, 5, 6])
    H.add_edge(2, 3)
    H.add_edge(3, 4)
    H.add_edge(5, 6)
    I = nx.intersection(G, H)
    assert set(I.nodes()) == {1, 2, 3, 4}
    assert sorted(I.edges()) == [(2, 3)]
+
+
def test_intersection_attributes():
    """Graphs differing only in attributes intersect to the full node/edge sets."""
    g = nx.Graph()
    g.add_node(0, x=4)
    g.add_node(1, x=5)
    g.add_edge(0, 1, size=5)
    g.graph["name"] = "g"

    h = g.copy()
    h.graph["name"] = "h"
    h.graph["attr"] = "attr"
    h.nodes[0]["x"] = 7
    gh = nx.intersection(g, h)

    assert set(gh.nodes()) == set(g.nodes())
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == sorted(g.edges())
+
+
def test_intersection_attributes_node_sets_different():
    """A node removed from one graph disappears from the intersection."""
    g = nx.Graph()
    g.add_node(0, x=4)
    g.add_node(1, x=5)
    g.add_node(2, x=3)
    g.add_edge(0, 1, size=5)
    g.graph["name"] = "g"

    h = g.copy()
    h.graph["name"] = "h"
    h.graph["attr"] = "attr"
    h.nodes[0]["x"] = 7
    h.remove_node(2)  # node 2 now exists in g only

    gh = nx.intersection(g, h)
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == sorted(g.edges())
+
+
def test_intersection_multigraph_attributes():
    """Multigraph intersection matches parallel edges by key."""
    g = nx.MultiGraph()
    g.add_edge(0, 1, key=0)
    g.add_edge(0, 1, key=1)
    g.add_edge(0, 1, key=2)
    h = nx.MultiGraph()
    h.add_edge(0, 1, key=0)
    h.add_edge(0, 1, key=3)
    gh = nx.intersection(g, h)
    assert set(gh.nodes()) == set(g.nodes())
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == [(0, 1)]
    assert sorted(gh.edges(keys=True)) == [(0, 1, 0)]  # only key 0 is shared
+
+
def test_intersection_multigraph_attributes_node_set_different():
    """Key-wise multigraph intersection restricted to the common node set."""
    g = nx.MultiGraph()
    g.add_edge(0, 1, key=0)
    g.add_edge(0, 1, key=1)
    g.add_edge(0, 1, key=2)
    g.add_edge(0, 2, key=2)
    g.add_edge(0, 2, key=1)
    h = nx.MultiGraph()
    h.add_edge(0, 1, key=0)
    h.add_edge(0, 1, key=3)
    gh = nx.intersection(g, h)
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == [(0, 1)]
    assert sorted(gh.edges(keys=True)) == [(0, 1, 0)]
+
+
def test_difference():
    """difference keeps edges of the first graph absent from the second;
    symmetric_difference keeps edges in exactly one of the two."""
    G = nx.Graph()
    H = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4])
    G.add_edge(1, 2)
    G.add_edge(2, 3)
    H.add_nodes_from([1, 2, 3, 4])
    H.add_edge(2, 3)
    H.add_edge(3, 4)
    D = nx.difference(G, H)
    assert set(D.nodes()) == {1, 2, 3, 4}
    assert sorted(D.edges()) == [(1, 2)]
    D = nx.difference(H, G)
    assert set(D.nodes()) == {1, 2, 3, 4}
    assert sorted(D.edges()) == [(3, 4)]
    D = nx.symmetric_difference(G, H)
    assert set(D.nodes()) == {1, 2, 3, 4}
    assert sorted(D.edges()) == [(1, 2), (3, 4)]
+
+
def test_difference2():
    """difference is order-sensitive and can yield an empty edge set."""
    G = nx.Graph()
    H = nx.Graph()
    G.add_nodes_from([1, 2, 3, 4])
    H.add_nodes_from([1, 2, 3, 4])
    G.add_edge(1, 2)
    H.add_edge(1, 2)
    G.add_edge(2, 3)
    D = nx.difference(G, H)
    assert set(D.nodes()) == {1, 2, 3, 4}
    assert sorted(D.edges()) == [(2, 3)]
    D = nx.difference(H, G)
    assert set(D.nodes()) == {1, 2, 3, 4}
    assert sorted(D.edges()) == []
    H.add_edge(3, 4)
    D = nx.difference(H, G)
    assert set(D.nodes()) == {1, 2, 3, 4}
    assert sorted(D.edges()) == [(3, 4)]
+
+
def test_difference_attributes():
    """difference keeps the node set but copies no node or graph attributes."""
    g = nx.Graph()
    g.add_node(0, x=4)
    g.add_node(1, x=5)
    g.add_edge(0, 1, size=5)
    g.graph["name"] = "g"

    h = g.copy()
    h.graph["name"] = "h"
    h.graph["attr"] = "attr"
    h.nodes[0]["x"] = 7

    gh = nx.difference(g, h)
    assert set(gh.nodes()) == set(g.nodes())
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == []
    # node and graph data should not be copied over
    assert gh.nodes.data() != g.nodes.data()
    assert gh.graph != g.graph
+
+
def test_difference_multigraph_attributes():
    """Multigraph difference operates key-wise on parallel edges."""
    g = nx.MultiGraph()
    g.add_edge(0, 1, key=0)
    g.add_edge(0, 1, key=1)
    g.add_edge(0, 1, key=2)
    h = nx.MultiGraph()
    h.add_edge(0, 1, key=0)
    h.add_edge(0, 1, key=3)
    gh = nx.difference(g, h)
    assert set(gh.nodes()) == set(g.nodes())
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == [(0, 1), (0, 1)]
    assert sorted(gh.edges(keys=True)) == [(0, 1, 1), (0, 1, 2)]
+
+
def test_difference_raise():
    """Differences require identical node sets; mismatches raise NetworkXError."""
    G = nx.path_graph(4)
    H = nx.path_graph(3)
    pytest.raises(nx.NetworkXError, nx.difference, G, H)
    pytest.raises(nx.NetworkXError, nx.symmetric_difference, G, H)
+
+
def test_symmetric_difference_multigraph():
    """Multigraph symmetric difference keeps keys present in exactly one graph."""
    g = nx.MultiGraph()
    g.add_edge(0, 1, key=0)
    g.add_edge(0, 1, key=1)
    g.add_edge(0, 1, key=2)
    h = nx.MultiGraph()
    h.add_edge(0, 1, key=0)
    h.add_edge(0, 1, key=3)
    gh = nx.symmetric_difference(g, h)
    assert set(gh.nodes()) == set(g.nodes())
    assert set(gh.nodes()) == set(h.nodes())
    assert sorted(gh.edges()) == 3 * [(0, 1)]
    assert sorted(sorted(e) for e in gh.edges(keys=True)) == [
        [0, 1, 1],
        [0, 1, 2],
        [0, 1, 3],
    ]
+
+
def test_union_and_compose():
    """union/compose/disjoint_union on disjoint graphs: same edges, prefix
    renaming, integer relabeling, and node-attribute merging by compose."""
    K3 = nx.complete_graph(3)
    P3 = nx.path_graph(3)

    G1 = nx.DiGraph()
    G1.add_edge("A", "B")
    G1.add_edge("A", "C")
    G1.add_edge("A", "D")
    G2 = nx.DiGraph()
    G2.add_edge("1", "2")
    G2.add_edge("1", "3")
    G2.add_edge("1", "4")

    # On node-disjoint graphs union and compose agree.
    G = nx.union(G1, G2)
    H = nx.compose(G1, G2)
    assert edges_equal(G.edges(), H.edges())
    assert not G.has_edge("A", 1)
    pytest.raises(nx.NetworkXError, nx.union, K3, P3)
    H1 = nx.union(H, G1, rename=("H", "G1"))
    assert sorted(H1.nodes()) == [
        "G1A",
        "G1B",
        "G1C",
        "G1D",
        "H1",
        "H2",
        "H3",
        "H4",
        "HA",
        "HB",
        "HC",
        "HD",
    ]

    H2 = nx.union(H, G2, rename=("H", ""))
    assert sorted(H2.nodes()) == [
        "1",
        "2",
        "3",
        "4",
        "H1",
        "H2",
        "H3",
        "H4",
        "HA",
        "HB",
        "HC",
        "HD",
    ]

    assert not H1.has_edge("NB", "NA")

    # Composing a graph with itself is a no-op.
    G = nx.compose(G, G)
    assert edges_equal(G.edges(), H.edges())

    G2 = nx.union(G2, G2, rename=("", "copy"))
    assert sorted(G2.nodes()) == [
        "1",
        "2",
        "3",
        "4",
        "copy1",
        "copy2",
        "copy3",
        "copy4",
    ]

    assert sorted(G2.neighbors("copy4")) == []
    assert sorted(G2.neighbors("copy1")) == ["copy2", "copy3", "copy4"]
    assert len(G) == 8
    assert nx.number_of_edges(G) == 6

    E = nx.disjoint_union(G, G)
    assert len(E) == 16
    assert nx.number_of_edges(E) == 12

    # disjoint_union relabels nodes to consecutive integers.
    E = nx.disjoint_union(G1, G2)
    assert sorted(E.nodes()) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]

    # compose merges node attribute dicts for shared nodes.
    G = nx.Graph()
    H = nx.Graph()
    G.add_nodes_from([(1, {"a1": 1})])
    H.add_nodes_from([(1, {"b1": 1})])
    R = nx.compose(G, H)
    assert R.nodes == {1: {"a1": 1, "b1": 1}}
+
+
def test_union_multigraph():
    """union of multigraphs preserves parallel edges and their keys."""
    G = nx.MultiGraph()
    G.add_edge(1, 2, key=0)
    G.add_edge(1, 2, key=1)
    H = nx.MultiGraph()
    H.add_edge(3, 4, key=0)
    H.add_edge(3, 4, key=1)
    GH = nx.union(G, H)
    assert set(GH) == set(G) | set(H)
    assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True))
+
+
def test_disjoint_union_multigraph():
    """disjoint_union of multigraphs preserves parallel edges and their keys."""
    G = nx.MultiGraph()
    G.add_edge(0, 1, key=0)
    G.add_edge(0, 1, key=1)
    H = nx.MultiGraph()
    H.add_edge(2, 3, key=0)
    H.add_edge(2, 3, key=1)
    GH = nx.disjoint_union(G, H)
    assert set(GH) == set(G) | set(H)
    assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True))
+
+
def test_compose_multigraph():
    """compose of multigraphs merges keyed edge sets, including overlaps."""
    G = nx.MultiGraph()
    G.add_edge(1, 2, key=0)
    G.add_edge(1, 2, key=1)
    H = nx.MultiGraph()
    H.add_edge(3, 4, key=0)
    H.add_edge(3, 4, key=1)
    GH = nx.compose(G, H)
    assert set(GH) == set(G) | set(H)
    assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True))
    H.add_edge(1, 2, key=2)  # overlapping node pair, new key
    GH = nx.compose(G, H)
    assert set(GH) == set(G) | set(H)
    assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True))
+
+
def test_full_join_graph():
    """full_join adds all cross edges between G and H (doubled for digraphs)."""
    # Simple Graphs
    G = nx.Graph()
    G.add_node(0)
    G.add_edge(1, 2)
    H = nx.Graph()
    H.add_edge(3, 4)

    U = nx.full_join(G, H)
    assert set(U) == set(G) | set(H)
    assert len(U) == len(G) + len(H)
    assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H)

    # Rename
    U = nx.full_join(G, H, rename=("g", "h"))
    assert set(U) == {"g0", "g1", "g2", "h3", "h4"}
    assert len(U) == len(G) + len(H)
    assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H)

    # Rename graphs with string-like nodes
    G = nx.Graph()
    G.add_node("a")
    G.add_edge("b", "c")
    H = nx.Graph()
    H.add_edge("d", "e")

    U = nx.full_join(G, H, rename=("g", "h"))
    assert set(U) == {"ga", "gb", "gc", "hd", "he"}
    assert len(U) == len(G) + len(H)
    assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H)

    # DiGraphs: cross edges are added in both directions
    G = nx.DiGraph()
    G.add_node(0)
    G.add_edge(1, 2)
    H = nx.DiGraph()
    H.add_edge(3, 4)

    U = nx.full_join(G, H)
    assert set(U) == set(G) | set(H)
    assert len(U) == len(G) + len(H)
    assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2

    # DiGraphs Rename
    U = nx.full_join(G, H, rename=("g", "h"))
    assert set(U) == {"g0", "g1", "g2", "h3", "h4"}
    assert len(U) == len(G) + len(H)
    assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2
+
+
def test_full_join_multigraph():
    """full_join on multigraphs mirrors the simple-graph edge counts."""
    # MultiGraphs
    G = nx.MultiGraph()
    G.add_node(0)
    G.add_edge(1, 2)
    H = nx.MultiGraph()
    H.add_edge(3, 4)

    U = nx.full_join(G, H)
    assert set(U) == set(G) | set(H)
    assert len(U) == len(G) + len(H)
    assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H)

    # MultiGraphs rename
    U = nx.full_join(G, H, rename=("g", "h"))
    assert set(U) == {"g0", "g1", "g2", "h3", "h4"}
    assert len(U) == len(G) + len(H)
    assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H)

    # MultiDiGraphs: cross edges are added in both directions
    G = nx.MultiDiGraph()
    G.add_node(0)
    G.add_edge(1, 2)
    H = nx.MultiDiGraph()
    H.add_edge(3, 4)

    U = nx.full_join(G, H)
    assert set(U) == set(G) | set(H)
    assert len(U) == len(G) + len(H)
    assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2

    # MultiDiGraphs rename
    U = nx.full_join(G, H, rename=("g", "h"))
    assert set(U) == {"g0", "g1", "g2", "h3", "h4"}
    assert len(U) == len(G) + len(H)
    assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2
+
+
def test_mixed_type_union():
    """All binary operators reject mixing Graph with MultiGraph."""
    G = nx.Graph()
    H = nx.MultiGraph()
    pytest.raises(nx.NetworkXError, nx.union, G, H)
    pytest.raises(nx.NetworkXError, nx.disjoint_union, G, H)
    pytest.raises(nx.NetworkXError, nx.intersection, G, H)
    pytest.raises(nx.NetworkXError, nx.difference, G, H)
    pytest.raises(nx.NetworkXError, nx.symmetric_difference, G, H)
    pytest.raises(nx.NetworkXError, nx.compose, G, H)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_product.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_product.py
new file mode 100644
index 00000000..8ee54b93
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_product.py
@@ -0,0 +1,491 @@
+import pytest
+
+import networkx as nx
+from networkx.utils import edges_equal
+
+
+def test_tensor_product_raises():
+    with pytest.raises(nx.NetworkXError):
+        P = nx.tensor_product(nx.DiGraph(), nx.Graph())
+
+
+def test_tensor_product_null():
+    null = nx.null_graph()
+    empty10 = nx.empty_graph(10)
+    K3 = nx.complete_graph(3)
+    K10 = nx.complete_graph(10)
+    P3 = nx.path_graph(3)
+    P10 = nx.path_graph(10)
+    # null graph
+    G = nx.tensor_product(null, null)
+    assert nx.is_isomorphic(G, null)
+    # null_graph X anything = null_graph and v.v.
+    G = nx.tensor_product(null, empty10)
+    assert nx.is_isomorphic(G, null)
+    G = nx.tensor_product(null, K3)
+    assert nx.is_isomorphic(G, null)
+    G = nx.tensor_product(null, K10)
+    assert nx.is_isomorphic(G, null)
+    G = nx.tensor_product(null, P3)
+    assert nx.is_isomorphic(G, null)
+    G = nx.tensor_product(null, P10)
+    assert nx.is_isomorphic(G, null)
+    G = nx.tensor_product(empty10, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.tensor_product(K3, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.tensor_product(K10, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.tensor_product(P3, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.tensor_product(P10, null)
+    assert nx.is_isomorphic(G, null)
+
+
+def test_tensor_product_size():
+    P5 = nx.path_graph(5)
+    K3 = nx.complete_graph(3)
+    K5 = nx.complete_graph(5)
+
+    G = nx.tensor_product(P5, K3)
+    assert nx.number_of_nodes(G) == 5 * 3
+    G = nx.tensor_product(K3, K5)
+    assert nx.number_of_nodes(G) == 3 * 5
+
+
+def test_tensor_product_combinations():
+    # basic smoke test, more realistic tests would be useful
+    P5 = nx.path_graph(5)
+    K3 = nx.complete_graph(3)
+    G = nx.tensor_product(P5, K3)
+    assert nx.number_of_nodes(G) == 5 * 3
+    G = nx.tensor_product(P5, nx.MultiGraph(K3))
+    assert nx.number_of_nodes(G) == 5 * 3
+    G = nx.tensor_product(nx.MultiGraph(P5), K3)
+    assert nx.number_of_nodes(G) == 5 * 3
+    G = nx.tensor_product(nx.MultiGraph(P5), nx.MultiGraph(K3))
+    assert nx.number_of_nodes(G) == 5 * 3
+
+    G = nx.tensor_product(nx.DiGraph(P5), nx.DiGraph(K3))
+    assert nx.number_of_nodes(G) == 5 * 3
+
+
+def test_tensor_product_classic_result():
+    K2 = nx.complete_graph(2)
+    G = nx.petersen_graph()
+    G = nx.tensor_product(G, K2)
+    assert nx.is_isomorphic(G, nx.desargues_graph())
+
+    G = nx.cycle_graph(5)
+    G = nx.tensor_product(G, K2)
+    assert nx.is_isomorphic(G, nx.cycle_graph(10))
+
+    G = nx.tetrahedral_graph()
+    G = nx.tensor_product(G, K2)
+    assert nx.is_isomorphic(G, nx.cubical_graph())
+
+
+def test_tensor_product_random():
+    G = nx.erdos_renyi_graph(10, 2 / 10.0)
+    H = nx.erdos_renyi_graph(10, 2 / 10.0)
+    GH = nx.tensor_product(G, H)
+
+    for u_G, u_H in GH.nodes():
+        for v_G, v_H in GH.nodes():
+            if H.has_edge(u_H, v_H) and G.has_edge(u_G, v_G):
+                assert GH.has_edge((u_G, u_H), (v_G, v_H))
+            else:
+                assert not GH.has_edge((u_G, u_H), (v_G, v_H))
+
+
+def test_cartesian_product_multigraph():
+    G = nx.MultiGraph()
+    G.add_edge(1, 2, key=0)
+    G.add_edge(1, 2, key=1)
+    H = nx.MultiGraph()
+    H.add_edge(3, 4, key=0)
+    H.add_edge(3, 4, key=1)
+    GH = nx.cartesian_product(G, H)
+    assert set(GH) == {(1, 3), (2, 3), (2, 4), (1, 4)}
+    assert {(frozenset([u, v]), k) for u, v, k in GH.edges(keys=True)} == {
+        (frozenset([u, v]), k)
+        for u, v, k in [
+            ((1, 3), (2, 3), 0),
+            ((1, 3), (2, 3), 1),
+            ((1, 3), (1, 4), 0),
+            ((1, 3), (1, 4), 1),
+            ((2, 3), (2, 4), 0),
+            ((2, 3), (2, 4), 1),
+            ((2, 4), (1, 4), 0),
+            ((2, 4), (1, 4), 1),
+        ]
+    }
+
+
+def test_cartesian_product_raises():
+    with pytest.raises(nx.NetworkXError):
+        P = nx.cartesian_product(nx.DiGraph(), nx.Graph())
+
+
+def test_cartesian_product_null():
+    null = nx.null_graph()
+    empty10 = nx.empty_graph(10)
+    K3 = nx.complete_graph(3)
+    K10 = nx.complete_graph(10)
+    P3 = nx.path_graph(3)
+    P10 = nx.path_graph(10)
+    # null graph
+    G = nx.cartesian_product(null, null)
+    assert nx.is_isomorphic(G, null)
+    # null_graph X anything = null_graph and v.v.
+    G = nx.cartesian_product(null, empty10)
+    assert nx.is_isomorphic(G, null)
+    G = nx.cartesian_product(null, K3)
+    assert nx.is_isomorphic(G, null)
+    G = nx.cartesian_product(null, K10)
+    assert nx.is_isomorphic(G, null)
+    G = nx.cartesian_product(null, P3)
+    assert nx.is_isomorphic(G, null)
+    G = nx.cartesian_product(null, P10)
+    assert nx.is_isomorphic(G, null)
+    G = nx.cartesian_product(empty10, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.cartesian_product(K3, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.cartesian_product(K10, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.cartesian_product(P3, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.cartesian_product(P10, null)
+    assert nx.is_isomorphic(G, null)
+
+
+def test_cartesian_product_size():
+    # order(GXH)=order(G)*order(H)
+    K5 = nx.complete_graph(5)
+    P5 = nx.path_graph(5)
+    K3 = nx.complete_graph(3)
+    G = nx.cartesian_product(P5, K3)
+    assert nx.number_of_nodes(G) == 5 * 3
+    assert nx.number_of_edges(G) == nx.number_of_edges(P5) * nx.number_of_nodes(
+        K3
+    ) + nx.number_of_edges(K3) * nx.number_of_nodes(P5)
+    G = nx.cartesian_product(K3, K5)
+    assert nx.number_of_nodes(G) == 3 * 5
+    assert nx.number_of_edges(G) == nx.number_of_edges(K5) * nx.number_of_nodes(
+        K3
+    ) + nx.number_of_edges(K3) * nx.number_of_nodes(K5)
+
+
+def test_cartesian_product_classic():
+    # test some classic product graphs
+    P2 = nx.path_graph(2)
+    P3 = nx.path_graph(3)
+    # cube = 2-path X 2-path
+    G = nx.cartesian_product(P2, P2)
+    G = nx.cartesian_product(P2, G)
+    assert nx.is_isomorphic(G, nx.cubical_graph())
+
+    # 3x3 grid
+    G = nx.cartesian_product(P3, P3)
+    assert nx.is_isomorphic(G, nx.grid_2d_graph(3, 3))
+
+
+def test_cartesian_product_random():
+    G = nx.erdos_renyi_graph(10, 2 / 10.0)
+    H = nx.erdos_renyi_graph(10, 2 / 10.0)
+    GH = nx.cartesian_product(G, H)
+
+    for u_G, u_H in GH.nodes():
+        for v_G, v_H in GH.nodes():
+            if (u_G == v_G and H.has_edge(u_H, v_H)) or (
+                u_H == v_H and G.has_edge(u_G, v_G)
+            ):
+                assert GH.has_edge((u_G, u_H), (v_G, v_H))
+            else:
+                assert not GH.has_edge((u_G, u_H), (v_G, v_H))
+
+
+def test_lexicographic_product_raises():
+    with pytest.raises(nx.NetworkXError):
+        P = nx.lexicographic_product(nx.DiGraph(), nx.Graph())
+
+
+def test_lexicographic_product_null():
+    null = nx.null_graph()
+    empty10 = nx.empty_graph(10)
+    K3 = nx.complete_graph(3)
+    K10 = nx.complete_graph(10)
+    P3 = nx.path_graph(3)
+    P10 = nx.path_graph(10)
+    # null graph
+    G = nx.lexicographic_product(null, null)
+    assert nx.is_isomorphic(G, null)
+    # null_graph X anything = null_graph and v.v.
+    G = nx.lexicographic_product(null, empty10)
+    assert nx.is_isomorphic(G, null)
+    G = nx.lexicographic_product(null, K3)
+    assert nx.is_isomorphic(G, null)
+    G = nx.lexicographic_product(null, K10)
+    assert nx.is_isomorphic(G, null)
+    G = nx.lexicographic_product(null, P3)
+    assert nx.is_isomorphic(G, null)
+    G = nx.lexicographic_product(null, P10)
+    assert nx.is_isomorphic(G, null)
+    G = nx.lexicographic_product(empty10, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.lexicographic_product(K3, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.lexicographic_product(K10, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.lexicographic_product(P3, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.lexicographic_product(P10, null)
+    assert nx.is_isomorphic(G, null)
+
+
+def test_lexicographic_product_size():
+    K5 = nx.complete_graph(5)
+    P5 = nx.path_graph(5)
+    K3 = nx.complete_graph(3)
+    G = nx.lexicographic_product(P5, K3)
+    assert nx.number_of_nodes(G) == 5 * 3
+    G = nx.lexicographic_product(K3, K5)
+    assert nx.number_of_nodes(G) == 3 * 5
+
+
+def test_lexicographic_product_combinations():
+    P5 = nx.path_graph(5)
+    K3 = nx.complete_graph(3)
+    G = nx.lexicographic_product(P5, K3)
+    assert nx.number_of_nodes(G) == 5 * 3
+    G = nx.lexicographic_product(nx.MultiGraph(P5), K3)
+    assert nx.number_of_nodes(G) == 5 * 3
+    G = nx.lexicographic_product(P5, nx.MultiGraph(K3))
+    assert nx.number_of_nodes(G) == 5 * 3
+    G = nx.lexicographic_product(nx.MultiGraph(P5), nx.MultiGraph(K3))
+    assert nx.number_of_nodes(G) == 5 * 3
+
+    # No easily found classic results for lexicographic product
+
+
+def test_lexicographic_product_random():
+    G = nx.erdos_renyi_graph(10, 2 / 10.0)
+    H = nx.erdos_renyi_graph(10, 2 / 10.0)
+    GH = nx.lexicographic_product(G, H)
+
+    for u_G, u_H in GH.nodes():
+        for v_G, v_H in GH.nodes():
+            if G.has_edge(u_G, v_G) or (u_G == v_G and H.has_edge(u_H, v_H)):
+                assert GH.has_edge((u_G, u_H), (v_G, v_H))
+            else:
+                assert not GH.has_edge((u_G, u_H), (v_G, v_H))
+
+
+def test_strong_product_raises():
+    with pytest.raises(nx.NetworkXError):
+        P = nx.strong_product(nx.DiGraph(), nx.Graph())
+
+
+def test_strong_product_null():
+    null = nx.null_graph()
+    empty10 = nx.empty_graph(10)
+    K3 = nx.complete_graph(3)
+    K10 = nx.complete_graph(10)
+    P3 = nx.path_graph(3)
+    P10 = nx.path_graph(10)
+    # null graph
+    G = nx.strong_product(null, null)
+    assert nx.is_isomorphic(G, null)
+    # null_graph X anything = null_graph and v.v.
+    G = nx.strong_product(null, empty10)
+    assert nx.is_isomorphic(G, null)
+    G = nx.strong_product(null, K3)
+    assert nx.is_isomorphic(G, null)
+    G = nx.strong_product(null, K10)
+    assert nx.is_isomorphic(G, null)
+    G = nx.strong_product(null, P3)
+    assert nx.is_isomorphic(G, null)
+    G = nx.strong_product(null, P10)
+    assert nx.is_isomorphic(G, null)
+    G = nx.strong_product(empty10, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.strong_product(K3, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.strong_product(K10, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.strong_product(P3, null)
+    assert nx.is_isomorphic(G, null)
+    G = nx.strong_product(P10, null)
+    assert nx.is_isomorphic(G, null)
+
+
+def test_strong_product_size():
+    K5 = nx.complete_graph(5)
+    P5 = nx.path_graph(5)
+    K3 = nx.complete_graph(3)
+    G = nx.strong_product(P5, K3)
+    assert nx.number_of_nodes(G) == 5 * 3
+    G = nx.strong_product(K3, K5)
+    assert nx.number_of_nodes(G) == 3 * 5
+
+
+def test_strong_product_combinations():
+    P5 = nx.path_graph(5)
+    K3 = nx.complete_graph(3)
+    G = nx.strong_product(P5, K3)
+    assert nx.number_of_nodes(G) == 5 * 3
+    G = nx.strong_product(nx.MultiGraph(P5), K3)
+    assert nx.number_of_nodes(G) == 5 * 3
+    G = nx.strong_product(P5, nx.MultiGraph(K3))
+    assert nx.number_of_nodes(G) == 5 * 3
+    G = nx.strong_product(nx.MultiGraph(P5), nx.MultiGraph(K3))
+    assert nx.number_of_nodes(G) == 5 * 3
+
+    # No easily found classic results for strong product
+
+
+def test_strong_product_random():
+    G = nx.erdos_renyi_graph(10, 2 / 10.0)
+    H = nx.erdos_renyi_graph(10, 2 / 10.0)
+    GH = nx.strong_product(G, H)
+
+    for u_G, u_H in GH.nodes():
+        for v_G, v_H in GH.nodes():
+            if (
+                (u_G == v_G and H.has_edge(u_H, v_H))
+                or (u_H == v_H and G.has_edge(u_G, v_G))
+                or (G.has_edge(u_G, v_G) and H.has_edge(u_H, v_H))
+            ):
+                assert GH.has_edge((u_G, u_H), (v_G, v_H))
+            else:
+                assert not GH.has_edge((u_G, u_H), (v_G, v_H))
+
+
+def test_graph_power_raises():
+    with pytest.raises(nx.NetworkXNotImplemented):
+        nx.power(nx.MultiDiGraph(), 2)
+
+
+def test_graph_power():
+    # wikipedia example for graph power
+    G = nx.cycle_graph(7)
+    G.add_edge(6, 7)
+    G.add_edge(7, 8)
+    G.add_edge(8, 9)
+    G.add_edge(9, 2)
+    H = nx.power(G, 2)
+
+    assert edges_equal(
+        list(H.edges()),
+        [
+            (0, 1),
+            (0, 2),
+            (0, 5),
+            (0, 6),
+            (0, 7),
+            (1, 9),
+            (1, 2),
+            (1, 3),
+            (1, 6),
+            (2, 3),
+            (2, 4),
+            (2, 8),
+            (2, 9),
+            (3, 4),
+            (3, 5),
+            (3, 9),
+            (4, 5),
+            (4, 6),
+            (5, 6),
+            (5, 7),
+            (6, 7),
+            (6, 8),
+            (7, 8),
+            (7, 9),
+            (8, 9),
+        ],
+    )
+
+
+def test_graph_power_negative():
+    with pytest.raises(ValueError):
+        nx.power(nx.Graph(), -1)
+
+
+def test_rooted_product_raises():
+    with pytest.raises(nx.NodeNotFound):
+        nx.rooted_product(nx.Graph(), nx.path_graph(2), 10)
+
+
+def test_rooted_product():
+    G = nx.cycle_graph(5)
+    H = nx.Graph()
+    H.add_edges_from([("a", "b"), ("b", "c"), ("b", "d")])
+    R = nx.rooted_product(G, H, "a")
+    assert len(R) == len(G) * len(H)
+    assert R.size() == G.size() + len(G) * H.size()
+
+
+def test_corona_product():
+    G = nx.cycle_graph(3)
+    H = nx.path_graph(2)
+    C = nx.corona_product(G, H)
+    assert len(C) == (len(G) * len(H)) + len(G)
+    assert C.size() == G.size() + len(G) * H.size() + len(G) * len(H)
+
+
+def test_modular_product():
+    G = nx.path_graph(3)
+    H = nx.path_graph(4)
+    M = nx.modular_product(G, H)
+    assert len(M) == len(G) * len(H)
+
+    assert edges_equal(
+        list(M.edges()),
+        [
+            ((0, 0), (1, 1)),
+            ((0, 0), (2, 2)),
+            ((0, 0), (2, 3)),
+            ((0, 1), (1, 0)),
+            ((0, 1), (1, 2)),
+            ((0, 1), (2, 3)),
+            ((0, 2), (1, 1)),
+            ((0, 2), (1, 3)),
+            ((0, 2), (2, 0)),
+            ((0, 3), (1, 2)),
+            ((0, 3), (2, 0)),
+            ((0, 3), (2, 1)),
+            ((1, 0), (2, 1)),
+            ((1, 1), (2, 0)),
+            ((1, 1), (2, 2)),
+            ((1, 2), (2, 1)),
+            ((1, 2), (2, 3)),
+            ((1, 3), (2, 2)),
+        ],
+    )
+
+
+def test_modular_product_raises():
+    G = nx.Graph([(0, 1), (1, 2), (2, 0)])
+    H = nx.Graph([(0, 1), (1, 2), (2, 0)])
+    DG = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
+    DH = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
+    with pytest.raises(nx.NetworkXNotImplemented):
+        nx.modular_product(G, DH)
+    with pytest.raises(nx.NetworkXNotImplemented):
+        nx.modular_product(DG, H)
+    with pytest.raises(nx.NetworkXNotImplemented):
+        nx.modular_product(DG, DH)
+
+    MG = nx.MultiGraph([(0, 1), (1, 2), (2, 0), (0, 1)])
+    MH = nx.MultiGraph([(0, 1), (1, 2), (2, 0), (0, 1)])
+    with pytest.raises(nx.NetworkXNotImplemented):
+        nx.modular_product(G, MH)
+    with pytest.raises(nx.NetworkXNotImplemented):
+        nx.modular_product(MG, H)
+    with pytest.raises(nx.NetworkXNotImplemented):
+        nx.modular_product(MG, MH)
+    with pytest.raises(nx.NetworkXNotImplemented):
+        # check multigraph with no multiedges
+        nx.modular_product(nx.MultiGraph(G), H)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_unary.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_unary.py
new file mode 100644
index 00000000..d68e55cd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_unary.py
@@ -0,0 +1,55 @@
+import pytest
+
+import networkx as nx
+
+
+def test_complement():
+    null = nx.null_graph()
+    empty1 = nx.empty_graph(1)
+    empty10 = nx.empty_graph(10)
+    K3 = nx.complete_graph(3)
+    K5 = nx.complete_graph(5)
+    K10 = nx.complete_graph(10)
+    P2 = nx.path_graph(2)
+    P3 = nx.path_graph(3)
+    P5 = nx.path_graph(5)
+    P10 = nx.path_graph(10)
+    # complement of the complete graph is empty
+
+    G = nx.complement(K3)
+    assert nx.is_isomorphic(G, nx.empty_graph(3))
+    G = nx.complement(K5)
+    assert nx.is_isomorphic(G, nx.empty_graph(5))
+    # for any G, G=complement(complement(G))
+    P3cc = nx.complement(nx.complement(P3))
+    assert nx.is_isomorphic(P3, P3cc)
+    nullcc = nx.complement(nx.complement(null))
+    assert nx.is_isomorphic(null, nullcc)
+    b = nx.bull_graph()
+    bcc = nx.complement(nx.complement(b))
+    assert nx.is_isomorphic(b, bcc)
+
+
+def test_complement_2():
+    G1 = nx.DiGraph()
+    G1.add_edge("A", "B")
+    G1.add_edge("A", "C")
+    G1.add_edge("A", "D")
+    G1C = nx.complement(G1)
+    assert sorted(G1C.edges()) == [
+        ("B", "A"),
+        ("B", "C"),
+        ("B", "D"),
+        ("C", "A"),
+        ("C", "B"),
+        ("C", "D"),
+        ("D", "A"),
+        ("D", "B"),
+        ("D", "C"),
+    ]
+
+
+def test_reverse1():
+    # Other tests for reverse are done by the DiGraph and MultiDigraph.
+    G1 = nx.Graph()
+    pytest.raises(nx.NetworkXError, nx.reverse, G1)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/unary.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/unary.py
new file mode 100644
index 00000000..79e44d1c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/unary.py
@@ -0,0 +1,77 @@
+"""Unary operations on graphs"""
+
+import networkx as nx
+
+__all__ = ["complement", "reverse"]
+
+
+@nx._dispatchable(returns_graph=True)
+def complement(G):
+    """Returns the graph complement of G.
+
+    Parameters
+    ----------
+    G : graph
+       A NetworkX graph
+
+    Returns
+    -------
+    GC : A new graph.
+
+    Notes
+    -----
+    Note that `complement` does not create self-loops and also
+    does not produce parallel edges for MultiGraphs.
+
+    Graph, node, and edge data are not propagated to the new graph.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (3, 5)])
+    >>> G_complement = nx.complement(G)
+    >>> G_complement.edges()  # This shows the edges of the complemented graph
+    EdgeView([(1, 4), (1, 5), (2, 4), (2, 5), (4, 5)])
+
+    """
+    R = G.__class__()
+    R.add_nodes_from(G)
+    R.add_edges_from(
+        ((n, n2) for n, nbrs in G.adjacency() for n2 in G if n2 not in nbrs if n != n2)
+    )
+    return R
+
+
+@nx._dispatchable(returns_graph=True)
+def reverse(G, copy=True):
+    """Returns the reverse directed graph of G.
+
+    Parameters
+    ----------
+    G : directed graph
+        A NetworkX directed graph
+    copy : bool
+        If True, then a new graph is returned. If False, then the graph is
+        reversed in place.
+
+    Returns
+    -------
+    H : directed graph
+        The reversed G.
+
+    Raises
+    ------
+    NetworkXError
+        If graph is undirected.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (3, 5)])
+    >>> G_reversed = nx.reverse(G)
+    >>> G_reversed.edges()
+    OutEdgeView([(2, 1), (3, 1), (3, 2), (4, 3), (5, 3)])
+
+    """
+    if not G.is_directed():
+        raise nx.NetworkXError("Cannot reverse an undirected graph.")
+    else:
+        return G.reverse(copy=copy)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/planar_drawing.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/planar_drawing.py
new file mode 100644
index 00000000..ea25809b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/planar_drawing.py
@@ -0,0 +1,464 @@
+from collections import defaultdict
+
+import networkx as nx
+
+__all__ = ["combinatorial_embedding_to_pos"]
+
+
+def combinatorial_embedding_to_pos(embedding, fully_triangulate=False):
+    """Assigns every node a (x, y) position based on the given embedding
+
+    The algorithm iteratively inserts nodes of the input graph in a certain
+    order and rearranges previously inserted nodes so that the planar drawing
+    stays valid. This is done efficiently by only maintaining relative
+    positions during the node placements and calculating the absolute positions
+    at the end. For more information see [1]_.
+
+    Parameters
+    ----------
+    embedding : nx.PlanarEmbedding
+        This defines the order of the edges
+
+    fully_triangulate : bool
+        If set to True the algorithm adds edges to a copy of the input
+        embedding and makes it chordal.
+
+    Returns
+    -------
+    pos : dict
+        Maps each node to a tuple that defines the (x, y) position
+
+    References
+    ----------
+    .. [1] M. Chrobak and T.H. Payne:
+        A Linear-time Algorithm for Drawing a Planar Graph on a Grid 1989
+        http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.51.6677
+
+    """
+    if len(embedding.nodes()) < 4:
+        # Position the nodes in any triangle
+        default_positions = [(0, 0), (2, 0), (1, 1)]
+        pos = {}
+        for i, v in enumerate(embedding.nodes()):
+            pos[v] = default_positions[i]
+        return pos
+
+    embedding, outer_face = triangulate_embedding(embedding, fully_triangulate)
+
+    # The following dicts map a node to another node
+    # If a node is not in the key set it means that the node is not yet in G_k
+    # If a node maps to None then the corresponding subtree does not exist
+    left_t_child = {}
+    right_t_child = {}
+
+    # The following dicts map a node to an integer
+    delta_x = {}
+    y_coordinate = {}
+
+    node_list = get_canonical_ordering(embedding, outer_face)
+
+    # 1. Phase: Compute relative positions
+
+    # Initialization
+    v1, v2, v3 = node_list[0][0], node_list[1][0], node_list[2][0]
+
+    delta_x[v1] = 0
+    y_coordinate[v1] = 0
+    right_t_child[v1] = v3
+    left_t_child[v1] = None
+
+    delta_x[v2] = 1
+    y_coordinate[v2] = 0
+    right_t_child[v2] = None
+    left_t_child[v2] = None
+
+    delta_x[v3] = 1
+    y_coordinate[v3] = 1
+    right_t_child[v3] = v2
+    left_t_child[v3] = None
+
+    for k in range(3, len(node_list)):
+        vk, contour_nbrs = node_list[k]
+        wp = contour_nbrs[0]
+        wp1 = contour_nbrs[1]
+        wq = contour_nbrs[-1]
+        wq1 = contour_nbrs[-2]
+        adds_mult_tri = len(contour_nbrs) > 2
+
+        # Stretch gaps:
+        delta_x[wp1] += 1
+        delta_x[wq] += 1
+
+        delta_x_wp_wq = sum(delta_x[x] for x in contour_nbrs[1:])
+
+        # Adjust offsets
+        delta_x[vk] = (-y_coordinate[wp] + delta_x_wp_wq + y_coordinate[wq]) // 2
+        y_coordinate[vk] = (y_coordinate[wp] + delta_x_wp_wq + y_coordinate[wq]) // 2
+        delta_x[wq] = delta_x_wp_wq - delta_x[vk]
+        if adds_mult_tri:
+            delta_x[wp1] -= delta_x[vk]
+
+        # Install v_k:
+        right_t_child[wp] = vk
+        right_t_child[vk] = wq
+        if adds_mult_tri:
+            left_t_child[vk] = wp1
+            right_t_child[wq1] = None
+        else:
+            left_t_child[vk] = None
+
+    # 2. Phase: Set absolute positions
+    pos = {}
+    pos[v1] = (0, y_coordinate[v1])
+    remaining_nodes = [v1]
+    while remaining_nodes:
+        parent_node = remaining_nodes.pop()
+
+        # Calculate position for left child
+        set_position(
+            parent_node, left_t_child, remaining_nodes, delta_x, y_coordinate, pos
+        )
+        # Calculate position for right child
+        set_position(
+            parent_node, right_t_child, remaining_nodes, delta_x, y_coordinate, pos
+        )
+    return pos
+
+
+def set_position(parent, tree, remaining_nodes, delta_x, y_coordinate, pos):
+    """Helper method to calculate the absolute position of nodes."""
+    child = tree[parent]
+    parent_node_x = pos[parent][0]
+    if child is not None:
+        # Calculate pos of child
+        child_x = parent_node_x + delta_x[child]
+        pos[child] = (child_x, y_coordinate[child])
+        # Remember to calculate pos of its children
+        remaining_nodes.append(child)
+
+
+def get_canonical_ordering(embedding, outer_face):
+    """Returns a canonical ordering of the nodes
+
+    The canonical ordering of nodes (v1, ..., vn) must fulfill the following
+    conditions:
+    (See Lemma 1 in [2]_)
+
+    - For the subgraph G_k of the input graph induced by v1, ..., vk it holds:
+        - 2-connected
+        - internally triangulated
+        - the edge (v1, v2) is part of the outer face
+    - For a node v(k+1) the following holds:
+        - The node v(k+1) is part of the outer face of G_k
+        - It has at least two neighbors in G_k
+        - All neighbors of v(k+1) in G_k lie consecutively on the outer face of
+          G_k (excluding the edge (v1, v2)).
+
+    The algorithm used here starts with G_n (containing all nodes). It first
+    selects the nodes v1 and v2. And then tries to find the order of the other
+    nodes by checking which node can be removed in order to fulfill the
+    conditions mentioned above. This is done by calculating the number of
+    chords of nodes on the outer face. For more information see [1]_.
+
+    Parameters
+    ----------
+    embedding : nx.PlanarEmbedding
+        The embedding must be triangulated
+    outer_face : list
+        The nodes on the outer face of the graph
+
+    Returns
+    -------
+    ordering : list
+        A list of tuples `(vk, wp_wq)`. Here `vk` is the node at this position
+        in the canonical ordering. The element `wp_wq` is a list of nodes that
+        make up the outer face of G_k.
+
+    References
+    ----------
+    .. [1] Steven Chaplick.
+        Canonical Orders of Planar Graphs and (some of) Their Applications 2015
+        https://wuecampus2.uni-wuerzburg.de/moodle/pluginfile.php/545727/mod_resource/content/0/vg-ss15-vl03-canonical-orders-druckversion.pdf
+    .. [2] M. Chrobak and T.H. Payne:
+        A Linear-time Algorithm for Drawing a Planar Graph on a Grid 1989
+        http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.51.6677
+
+    """
+    v1 = outer_face[0]
+    v2 = outer_face[1]
+    chords = defaultdict(int)  # Maps nodes to the number of their chords
+    marked_nodes = set()
+    ready_to_pick = set(outer_face)
+
+    # Initialize outer_face_ccw_nbr (do not include v1 -> v2)
+    outer_face_ccw_nbr = {}
+    prev_nbr = v2
+    for idx in range(2, len(outer_face)):
+        outer_face_ccw_nbr[prev_nbr] = outer_face[idx]
+        prev_nbr = outer_face[idx]
+    outer_face_ccw_nbr[prev_nbr] = v1
+
+    # Initialize outer_face_cw_nbr (do not include v2 -> v1)
+    outer_face_cw_nbr = {}
+    prev_nbr = v1
+    for idx in range(len(outer_face) - 1, 0, -1):
+        outer_face_cw_nbr[prev_nbr] = outer_face[idx]
+        prev_nbr = outer_face[idx]
+
+    def is_outer_face_nbr(x, y):
+        if x not in outer_face_ccw_nbr:
+            return outer_face_cw_nbr[x] == y
+        if x not in outer_face_cw_nbr:
+            return outer_face_ccw_nbr[x] == y
+        return outer_face_ccw_nbr[x] == y or outer_face_cw_nbr[x] == y
+
+    def is_on_outer_face(x):
+        return x not in marked_nodes and (x in outer_face_ccw_nbr or x == v1)
+
+    # Initialize number of chords
+    for v in outer_face:
+        for nbr in embedding.neighbors_cw_order(v):
+            if is_on_outer_face(nbr) and not is_outer_face_nbr(v, nbr):
+                chords[v] += 1
+                ready_to_pick.discard(v)
+
+    # Initialize canonical_ordering
+    canonical_ordering = [None] * len(embedding.nodes())
+    canonical_ordering[0] = (v1, [])
+    canonical_ordering[1] = (v2, [])
+    ready_to_pick.discard(v1)
+    ready_to_pick.discard(v2)
+
+    for k in range(len(embedding.nodes()) - 1, 1, -1):
+        # 1. Pick v from ready_to_pick
+        v = ready_to_pick.pop()
+        marked_nodes.add(v)
+
+        # v has exactly two neighbors on the outer face (wp and wq)
+        wp = None
+        wq = None
+        # Iterate over neighbors of v to find wp and wq
+        nbr_iterator = iter(embedding.neighbors_cw_order(v))
+        while True:
+            nbr = next(nbr_iterator)
+            if nbr in marked_nodes:
+                # Only consider nodes that are not yet removed
+                continue
+            if is_on_outer_face(nbr):
+                # nbr is either wp or wq
+                if nbr == v1:
+                    wp = v1
+                elif nbr == v2:
+                    wq = v2
+                else:
+                    if outer_face_cw_nbr[nbr] == v:
+                        # nbr is wp
+                        wp = nbr
+                    else:
+                        # nbr is wq
+                        wq = nbr
+            if wp is not None and wq is not None:
+                # We don't need to iterate any further
+                break
+
+        # Obtain new nodes on outer face (neighbors of v from wp to wq)
+        wp_wq = [wp]
+        nbr = wp
+        while nbr != wq:
+            # Get next neighbor (clockwise on the outer face)
+            next_nbr = embedding[v][nbr]["ccw"]
+            wp_wq.append(next_nbr)
+            # Update outer face
+            outer_face_cw_nbr[nbr] = next_nbr
+            outer_face_ccw_nbr[next_nbr] = nbr
+            # Move to next neighbor of v
+            nbr = next_nbr
+
+        if len(wp_wq) == 2:
+            # There was a chord between wp and wq, decrease number of chords
+            chords[wp] -= 1
+            if chords[wp] == 0:
+                ready_to_pick.add(wp)
+            chords[wq] -= 1
+            if chords[wq] == 0:
+                ready_to_pick.add(wq)
+        else:
+            # Update all chords involving w_(p+1) to w_(q-1)
+            new_face_nodes = set(wp_wq[1:-1])
+            for w in new_face_nodes:
+                # If we do not find a chord for w later we can pick it next
+                ready_to_pick.add(w)
+                for nbr in embedding.neighbors_cw_order(w):
+                    if is_on_outer_face(nbr) and not is_outer_face_nbr(w, nbr):
+                        # There is a chord involving w
+                        chords[w] += 1
+                        ready_to_pick.discard(w)
+                        if nbr not in new_face_nodes:
+                            # Also increase chord for the neighbor
+                            # We only iterate over new_face_nodes
+                            chords[nbr] += 1
+                            ready_to_pick.discard(nbr)
+        # Set the canonical ordering node and the list of contour neighbors
+        canonical_ordering[k] = (v, wp_wq)
+
+    return canonical_ordering
+
+
+def triangulate_face(embedding, v1, v2):
+    """Triangulates the face given by half-edge (v1, v2)
+
+    Parameters
+    ----------
+    embedding : nx.PlanarEmbedding
+    v1 : node
+        The half-edge (v1, v2) belongs to the face that gets triangulated
+    v2 : node
+    """
+    _, v3 = embedding.next_face_half_edge(v1, v2)
+    _, v4 = embedding.next_face_half_edge(v2, v3)
+    if v1 in (v2, v3):
+        # The component has less than 3 nodes
+        return
+    while v1 != v4:
+        # Add edge if not already present on other side
+        if embedding.has_edge(v1, v3):
+            # Cannot triangulate at this position
+            v1, v2, v3 = v2, v3, v4
+        else:
+            # Add edge for triangulation
+            embedding.add_half_edge(v1, v3, ccw=v2)
+            embedding.add_half_edge(v3, v1, cw=v2)
+            v1, v2, v3 = v1, v3, v4
+        # Get next node
+        _, v4 = embedding.next_face_half_edge(v2, v3)
+
+
def triangulate_embedding(embedding, fully_triangulate=True):
    """Triangulates the embedding.

    Traverses faces of the embedding and adds edges to a copy of the
    embedding to triangulate it.
    The method also ensures that the resulting graph is 2-connected by adding
    edges if the same vertex is contained twice on a path around a face.

    Parameters
    ----------
    embedding : nx.PlanarEmbedding
        The input graph must contain at least 3 nodes.

    fully_triangulate : bool
        If set to False the face with the most nodes is chosen as the outer
        face. This outer face does not get triangulated.

    Returns
    -------
    (embedding, outer_face) : (nx.PlanarEmbedding, list) tuple
        The element `embedding` is a new embedding containing all edges from
        the input embedding and the additional edges to triangulate the graph.
        The element `outer_face` is a list of nodes that lie on the outer face.
        If the graph is fully triangulated these are three arbitrary connected
        nodes.

    """
    # Trivial graphs (0 or 1 node) need no triangulation.
    if len(embedding.nodes) <= 1:
        return embedding, list(embedding.nodes)
    # Work on a copy so the caller's embedding is left untouched.
    embedding = nx.PlanarEmbedding(embedding)

    # Get a list with a node for each connected component
    component_nodes = [next(iter(x)) for x in nx.connected_components(embedding)]

    # 1. Make graph a single component (add edge between components)
    for i in range(len(component_nodes) - 1):
        v1 = component_nodes[i]
        v2 = component_nodes[i + 1]
        embedding.connect_components(v1, v2)

    # 2. Calculate faces, ensure 2-connectedness and determine outer face
    outer_face = []  # A face with the most number of nodes
    face_list = []
    edges_visited = set()  # Used to keep track of already visited faces
    for v in embedding.nodes():
        for w in embedding.neighbors_cw_order(v):
            new_face = make_bi_connected(embedding, v, w, edges_visited)
            if new_face:
                # Found a new face
                face_list.append(new_face)
                if len(new_face) > len(outer_face):
                    # The face is a candidate to be the outer face
                    outer_face = new_face

    # 3. Triangulate (internal) faces
    for face in face_list:
        if face is not outer_face or fully_triangulate:
            # Triangulate this face
            triangulate_face(embedding, face[0], face[1])

    if fully_triangulate:
        # Every face is a triangle now; report an arbitrary triangle as the
        # outer face (v3 closes the triangle of the half-edge (v1, v2)).
        v1 = outer_face[0]
        v2 = outer_face[1]
        v3 = embedding[v2][v1]["ccw"]
        outer_face = [v1, v2, v3]

    return embedding, outer_face
+
+
def make_bi_connected(embedding, starting_node, outgoing_node, edges_counted):
    """Triangulate a face and make it 2-connected

    This method also adds all edges on the face to `edges_counted`.

    Parameters
    ----------
    embedding: nx.PlanarEmbedding
        The embedding that defines the faces
    starting_node : node
        A node on the face
    outgoing_node : node
        A node such that the half edge (starting_node, outgoing_node) belongs
        to the face
    edges_counted: set
        Set of all half-edges that belong to a face that have been visited

    Returns
    -------
    face_nodes: list
        A list of all nodes at the border of this face
    """

    # Check if the face has already been calculated
    if (starting_node, outgoing_node) in edges_counted:
        # This face was already counted
        return []
    edges_counted.add((starting_node, outgoing_node))

    # Add all edges to edges_counted which have this face to their left
    v1 = starting_node
    v2 = outgoing_node
    face_list = [starting_node]  # List of nodes around the face
    face_set = set(face_list)  # Set for faster queries
    _, v3 = embedding.next_face_half_edge(v1, v2)

    # Move the nodes v1, v2, v3 around the face:
    while v2 != starting_node or v3 != outgoing_node:
        if v1 == v2:
            # A self-referencing half-edge means the embedding is corrupt.
            raise nx.NetworkXException("Invalid half-edge")
        # cycle is not completed yet
        if v2 in face_set:
            # v2 encountered twice: Add edge to ensure 2-connectedness
            embedding.add_half_edge(v1, v3, ccw=v2)
            embedding.add_half_edge(v3, v1, cw=v2)
            edges_counted.add((v2, v3))
            edges_counted.add((v3, v1))
            # Restart the walk from v1 along the newly inserted edge.
            v2 = v1
        else:
            face_set.add(v2)
            face_list.append(v2)

        # set next edge
        v1 = v2
        v2, v3 = embedding.next_face_half_edge(v2, v3)

        # remember that this edge has been counted
        edges_counted.add((v1, v2))

    return face_list
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/planarity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/planarity.py
new file mode 100644
index 00000000..17d0bec5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/planarity.py
@@ -0,0 +1,1402 @@
+from collections import defaultdict
+
+import networkx as nx
+
+__all__ = ["check_planarity", "is_planar", "PlanarEmbedding"]
+
+
@nx._dispatchable
def is_planar(G):
    """Returns True if and only if `G` is planar.

    A graph is *planar* iff it can be drawn in a plane without
    any edge intersections.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    bool
       Whether the graph is planar.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2)])
    >>> nx.is_planar(G)
    True
    >>> nx.is_planar(nx.complete_graph(5))
    False

    See Also
    --------
    check_planarity :
        Check if graph is planar *and* return a `PlanarEmbedding` instance if True.
    """
    # Only the boolean verdict matters here; skip counterexample generation.
    planar, _ = check_planarity(G, counterexample=False)
    return planar
+
+
@nx._dispatchable(returns_graph=True)
def check_planarity(G, counterexample=False):
    """Check if a graph is planar and return a counterexample or an embedding.

    A graph is planar iff it can be drawn in a plane without
    any edge intersections.

    Parameters
    ----------
    G : NetworkX graph
    counterexample : bool
        A Kuratowski subgraph (to prove non-planarity) is only returned if
        set to true.

    Returns
    -------
    (is_planar, certificate) : (bool, NetworkX graph) tuple
        is_planar is true if the graph is planar.
        If the graph is planar `certificate` is a PlanarEmbedding
        otherwise it is a Kuratowski subgraph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2)])
    >>> is_planar, P = nx.check_planarity(G)
    >>> print(is_planar)
    True

    When `G` is planar, a `PlanarEmbedding` instance is returned:

    >>> P.get_data()
    {0: [1, 2], 1: [0], 2: [0]}

    Notes
    -----
    A (combinatorial) embedding consists of cyclic orderings of the incident
    edges at each vertex. Given such an embedding there are multiple approaches
    discussed in literature to drawing the graph (subject to various
    constraints, e.g. integer coordinates), see e.g. [2].

    The planarity check algorithm and extraction of the combinatorial embedding
    is based on the Left-Right Planarity Test [1].

    A counterexample is only generated if the corresponding parameter is set,
    because the complexity of the counterexample generation is higher.

    See also
    --------
    is_planar :
        Check for planarity without creating a `PlanarEmbedding` or counterexample.

    References
    ----------
    .. [1] Ulrik Brandes:
        The Left-Right Planarity Test
        2009
        http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.217.9208
    .. [2] Takao Nishizeki, Md Saidur Rahman:
        Planar graph drawing
        Lecture Notes Series on Computing: Volume 12
        2004
    """
    state = LRPlanarity(G)
    embedding = state.lr_planarity()
    if embedding is not None:
        # Graph is planar; the embedding serves as the certificate.
        return True, embedding
    # Graph is not planar; a Kuratowski subgraph is computed only on request
    # because its extraction is considerably more expensive.
    if counterexample:
        return False, get_counterexample(G)
    return False, None
+
+
@nx._dispatchable(returns_graph=True)
def check_planarity_recursive(G, counterexample=False):
    """Recursive version of :meth:`check_planarity`."""
    state = LRPlanarity(G)
    embedding = state.lr_planarity_recursive()
    if embedding is not None:
        # Graph is planar; return the embedding as the certificate.
        return True, embedding
    # Not planar; compute a Kuratowski subgraph only if requested.
    if counterexample:
        return False, get_counterexample_recursive(G)
    return False, None
+
+
@nx._dispatchable(returns_graph=True)
def get_counterexample(G):
    """Obtains a Kuratowski subgraph.

    Raises nx.NetworkXException if G is planar.

    The function removes edges such that the graph is still not planar.
    At some point the removal of any edge would make the graph planar.
    This subgraph must be a Kuratowski subgraph.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    subgraph : NetworkX graph
        A Kuratowski subgraph that proves that G is not planar.

    """
    # Work on a copy so the input graph is not modified.
    H = nx.Graph(G)

    if check_planarity(H)[0]:
        raise nx.NetworkXException("G is planar - no counter example.")

    # Greedily delete edges; an edge whose removal restores planarity is
    # essential and therefore belongs to the Kuratowski subgraph.
    subgraph = nx.Graph()
    for u in H:
        for v in list(H[u]):
            H.remove_edge(u, v)
            planar, _ = check_planarity(H)
            if planar:
                H.add_edge(u, v)
                subgraph.add_edge(u, v)

    return subgraph
+
+
@nx._dispatchable(returns_graph=True)
def get_counterexample_recursive(G):
    """Recursive version of :meth:`get_counterexample`."""

    # Work on a copy so the input graph is not modified.
    H = nx.Graph(G)

    if check_planarity_recursive(H)[0]:
        raise nx.NetworkXException("G is planar - no counter example.")

    # Greedily delete edges; an edge whose removal restores planarity is
    # essential and therefore belongs to the Kuratowski subgraph.
    subgraph = nx.Graph()
    for u in H:
        for v in list(H[u]):
            H.remove_edge(u, v)
            planar, _ = check_planarity_recursive(H)
            if planar:
                H.add_edge(u, v)
                subgraph.add_edge(u, v)

    return subgraph
+
+
class Interval:
    """Represents a set of return edges.

    All return edges in an interval induce a same constraint on the contained
    edges, which means that all edges must either have a left orientation or
    all edges must have a right orientation.
    """

    def __init__(self, low=None, high=None):
        self.low = low
        self.high = high

    def empty(self):
        """Check if the interval is empty"""
        return (self.low, self.high) == (None, None)

    def copy(self):
        """Returns a copy of this interval"""
        return Interval(self.low, self.high)

    def conflicting(self, b, planarity_state):
        """Returns True if interval I conflicts with edge b"""
        if self.empty():
            return False
        return planarity_state.lowpt[self.high] > planarity_state.lowpt[b]
+
+
class ConflictPair:
    """Represents a different constraint between two intervals.

    The edges in the left interval must have a different orientation than
    the one in the right interval.
    """

    def __init__(self, left=None, right=None):
        # The previous signature used mutable defaults (``left=Interval()``),
        # which are evaluated once at function definition time: every
        # ConflictPair created without arguments then shares the *same* two
        # Interval instances. Use None sentinels and build fresh Intervals
        # per instance instead.
        self.left = left if left is not None else Interval()
        self.right = right if right is not None else Interval()

    def swap(self):
        """Swap left and right intervals"""
        self.left, self.right = self.right, self.left

    def lowest(self, planarity_state):
        """Returns the lowest lowpoint of a conflict pair

        Parameters
        ----------
        planarity_state : LRPlanarity
            Provides the ``lowpt`` mapping from edges to heights.
        """
        if self.left.empty():
            return planarity_state.lowpt[self.right.low]
        if self.right.empty():
            return planarity_state.lowpt[self.left.low]
        return min(
            planarity_state.lowpt[self.left.low], planarity_state.lowpt[self.right.low]
        )
+
+
def top_of_stack(l):
    """Returns the element on top of the stack, or None if the stack is empty."""
    return l[-1] if l else None
+
+
class LRPlanarity:
    """A class to maintain the state during planarity check.

    Implements the Left-Right Planarity Test (Brandes 2009) in three phases:
    DFS orientation with lowpoint computation, LR-partition testing via a
    stack of conflict pairs, and extraction of a combinatorial embedding.
    Both iterative and recursive variants of each phase are provided.
    """

    __slots__ = [
        "G",
        "roots",
        "height",
        "lowpt",
        "lowpt2",
        "nesting_depth",
        "parent_edge",
        "DG",
        "adjs",
        "ordered_adjs",
        "ref",
        "side",
        "S",
        "stack_bottom",
        "lowpt_edge",
        "left_ref",
        "right_ref",
        "embedding",
    ]

    def __init__(self, G):
        # copy G without adding self-loops
        self.G = nx.Graph()
        self.G.add_nodes_from(G.nodes)
        for e in G.edges:
            if e[0] != e[1]:
                self.G.add_edge(e[0], e[1])

        # one DFS root per connected component
        self.roots = []

        # distance from tree root
        self.height = defaultdict(lambda: None)

        self.lowpt = {}  # height of lowest return point of an edge
        self.lowpt2 = {}  # height of second lowest return point
        self.nesting_depth = {}  # for nesting order

        # None -> missing edge
        self.parent_edge = defaultdict(lambda: None)

        # oriented DFS graph
        self.DG = nx.DiGraph()
        self.DG.add_nodes_from(G.nodes)

        self.adjs = {}
        self.ordered_adjs = {}

        self.ref = defaultdict(lambda: None)
        self.side = defaultdict(lambda: 1)

        # stack of conflict pairs
        self.S = []
        self.stack_bottom = {}
        self.lowpt_edge = {}

        self.left_ref = {}
        self.right_ref = {}

        self.embedding = PlanarEmbedding()

    def lr_planarity(self):
        """Execute the LR planarity test.

        Returns
        -------
        embedding : PlanarEmbedding or None
            If the graph is planar an embedding is returned. Otherwise None.
        """
        # Euler bound: a simple planar graph has at most 3n - 6 edges.
        if self.G.order() > 2 and self.G.size() > 3 * self.G.order() - 6:
            # graph is not planar
            return None

        # make adjacency lists for dfs
        for v in self.G:
            self.adjs[v] = list(self.G[v])

        # orientation of the graph by depth first search traversal
        for v in self.G:
            if self.height[v] is None:
                self.height[v] = 0
                self.roots.append(v)
                self.dfs_orientation(v)

        # Free no longer used variables
        self.G = None
        self.lowpt2 = None
        self.adjs = None

        # testing
        for v in self.DG:  # sort the adjacency lists by nesting depth
            # note: this sorting leads to non linear time
            self.ordered_adjs[v] = sorted(
                self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
            )
        for v in self.roots:
            if not self.dfs_testing(v):
                return None

        # Free no longer used variables
        self.height = None
        self.lowpt = None
        self.S = None
        self.stack_bottom = None
        self.lowpt_edge = None

        # resolve relative edge sides into absolute left (-1) / right (+1)
        for e in self.DG.edges:
            self.nesting_depth[e] = self.sign(e) * self.nesting_depth[e]

        self.embedding.add_nodes_from(self.DG.nodes)
        for v in self.DG:
            # sort the adjacency lists again
            self.ordered_adjs[v] = sorted(
                self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
            )
            # initialize the embedding
            previous_node = None
            for w in self.ordered_adjs[v]:
                self.embedding.add_half_edge(v, w, ccw=previous_node)
                previous_node = w

        # Free no longer used variables
        self.DG = None
        self.nesting_depth = None
        self.ref = None

        # compute the complete embedding
        for v in self.roots:
            self.dfs_embedding(v)

        # Free no longer used variables
        self.roots = None
        self.parent_edge = None
        self.ordered_adjs = None
        self.left_ref = None
        self.right_ref = None
        self.side = None

        return self.embedding

    def lr_planarity_recursive(self):
        """Recursive version of :meth:`lr_planarity`."""
        # Euler bound: a simple planar graph has at most 3n - 6 edges.
        if self.G.order() > 2 and self.G.size() > 3 * self.G.order() - 6:
            # graph is not planar
            return None

        # orientation of the graph by depth first search traversal
        for v in self.G:
            if self.height[v] is None:
                self.height[v] = 0
                self.roots.append(v)
                self.dfs_orientation_recursive(v)

        # Free no longer used variable
        self.G = None

        # testing
        for v in self.DG:  # sort the adjacency lists by nesting depth
            # note: this sorting leads to non linear time
            self.ordered_adjs[v] = sorted(
                self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
            )
        for v in self.roots:
            if not self.dfs_testing_recursive(v):
                return None

        # resolve relative edge sides into absolute left (-1) / right (+1)
        for e in self.DG.edges:
            self.nesting_depth[e] = self.sign_recursive(e) * self.nesting_depth[e]

        self.embedding.add_nodes_from(self.DG.nodes)
        for v in self.DG:
            # sort the adjacency lists again
            self.ordered_adjs[v] = sorted(
                self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
            )
            # initialize the embedding
            previous_node = None
            for w in self.ordered_adjs[v]:
                self.embedding.add_half_edge(v, w, ccw=previous_node)
                previous_node = w

        # compute the complete embedding
        for v in self.roots:
            self.dfs_embedding_recursive(v)

        return self.embedding

    def dfs_orientation(self, v):
        """Orient the graph by DFS, compute lowpoints and nesting order.

        Iterative emulation of :meth:`dfs_orientation_recursive` using an
        explicit stack; ``ind`` and ``skip_init`` record per-node/per-edge
        progress so a node can be resumed after its child finishes.
        """
        # the recursion stack
        dfs_stack = [v]
        # index of next edge to handle in adjacency list of each node
        ind = defaultdict(lambda: 0)
        # boolean to indicate whether to skip the initial work for an edge
        skip_init = defaultdict(lambda: False)

        while dfs_stack:
            v = dfs_stack.pop()
            e = self.parent_edge[v]

            for w in self.adjs[v][ind[v] :]:
                vw = (v, w)

                if not skip_init[vw]:
                    if (v, w) in self.DG.edges or (w, v) in self.DG.edges:
                        ind[v] += 1
                        continue  # the edge was already oriented

                    self.DG.add_edge(v, w)  # orient the edge

                    self.lowpt[vw] = self.height[v]
                    self.lowpt2[vw] = self.height[v]
                    if self.height[w] is None:  # (v, w) is a tree edge
                        self.parent_edge[w] = vw
                        self.height[w] = self.height[v] + 1

                        dfs_stack.append(v)  # revisit v after finishing w
                        dfs_stack.append(w)  # visit w next
                        skip_init[vw] = True  # don't redo this block
                        break  # handle next node in dfs_stack (i.e. w)
                    else:  # (v, w) is a back edge
                        self.lowpt[vw] = self.height[w]

                # determine nesting graph
                self.nesting_depth[vw] = 2 * self.lowpt[vw]
                if self.lowpt2[vw] < self.height[v]:  # chordal
                    self.nesting_depth[vw] += 1

                # update lowpoints of parent edge e
                if e is not None:
                    if self.lowpt[vw] < self.lowpt[e]:
                        self.lowpt2[e] = min(self.lowpt[e], self.lowpt2[vw])
                        self.lowpt[e] = self.lowpt[vw]
                    elif self.lowpt[vw] > self.lowpt[e]:
                        self.lowpt2[e] = min(self.lowpt2[e], self.lowpt[vw])
                    else:
                        self.lowpt2[e] = min(self.lowpt2[e], self.lowpt2[vw])

                ind[v] += 1

    def dfs_orientation_recursive(self, v):
        """Recursive version of :meth:`dfs_orientation`."""
        e = self.parent_edge[v]
        for w in self.G[v]:
            if (v, w) in self.DG.edges or (w, v) in self.DG.edges:
                continue  # the edge was already oriented
            vw = (v, w)
            self.DG.add_edge(v, w)  # orient the edge

            self.lowpt[vw] = self.height[v]
            self.lowpt2[vw] = self.height[v]
            if self.height[w] is None:  # (v, w) is a tree edge
                self.parent_edge[w] = vw
                self.height[w] = self.height[v] + 1
                self.dfs_orientation_recursive(w)
            else:  # (v, w) is a back edge
                self.lowpt[vw] = self.height[w]

            # determine nesting graph
            self.nesting_depth[vw] = 2 * self.lowpt[vw]
            if self.lowpt2[vw] < self.height[v]:  # chordal
                self.nesting_depth[vw] += 1

            # update lowpoints of parent edge e
            if e is not None:
                if self.lowpt[vw] < self.lowpt[e]:
                    self.lowpt2[e] = min(self.lowpt[e], self.lowpt2[vw])
                    self.lowpt[e] = self.lowpt[vw]
                elif self.lowpt[vw] > self.lowpt[e]:
                    self.lowpt2[e] = min(self.lowpt2[e], self.lowpt[vw])
                else:
                    self.lowpt2[e] = min(self.lowpt2[e], self.lowpt2[vw])

    def dfs_testing(self, v):
        """Test for LR partition.

        Iterative emulation of :meth:`dfs_testing_recursive`; returns False
        as soon as a constraint conflict proves the graph non-planar.
        """
        # the recursion stack
        dfs_stack = [v]
        # index of next edge to handle in adjacency list of each node
        ind = defaultdict(lambda: 0)
        # boolean to indicate whether to skip the initial work for an edge
        skip_init = defaultdict(lambda: False)

        while dfs_stack:
            v = dfs_stack.pop()
            e = self.parent_edge[v]
            # to indicate whether to skip the final block after the for loop
            skip_final = False

            for w in self.ordered_adjs[v][ind[v] :]:
                ei = (v, w)

                if not skip_init[ei]:
                    self.stack_bottom[ei] = top_of_stack(self.S)

                    if ei == self.parent_edge[w]:  # tree edge
                        dfs_stack.append(v)  # revisit v after finishing w
                        dfs_stack.append(w)  # visit w next
                        skip_init[ei] = True  # don't redo this block
                        skip_final = True  # skip final work after breaking
                        break  # handle next node in dfs_stack (i.e. w)
                    else:  # back edge
                        self.lowpt_edge[ei] = ei
                        self.S.append(ConflictPair(right=Interval(ei, ei)))

                # integrate new return edges
                if self.lowpt[ei] < self.height[v]:
                    if w == self.ordered_adjs[v][0]:  # e_i has return edge
                        self.lowpt_edge[e] = self.lowpt_edge[ei]
                    else:  # add constraints of e_i
                        if not self.add_constraints(ei, e):
                            # graph is not planar
                            return False

                ind[v] += 1

            if not skip_final:
                # remove back edges returning to parent
                if e is not None:  # v isn't root
                    self.remove_back_edges(e)

        return True

    def dfs_testing_recursive(self, v):
        """Recursive version of :meth:`dfs_testing`."""
        e = self.parent_edge[v]
        for w in self.ordered_adjs[v]:
            ei = (v, w)
            self.stack_bottom[ei] = top_of_stack(self.S)
            if ei == self.parent_edge[w]:  # tree edge
                if not self.dfs_testing_recursive(w):
                    return False
            else:  # back edge
                self.lowpt_edge[ei] = ei
                self.S.append(ConflictPair(right=Interval(ei, ei)))

            # integrate new return edges
            if self.lowpt[ei] < self.height[v]:
                if w == self.ordered_adjs[v][0]:  # e_i has return edge
                    self.lowpt_edge[e] = self.lowpt_edge[ei]
                else:  # add constraints of e_i
                    if not self.add_constraints(ei, e):
                        # graph is not planar
                        return False

        # remove back edges returning to parent
        if e is not None:  # v isn't root
            self.remove_back_edges(e)
        return True

    def add_constraints(self, ei, e):
        """Merge the return edges of ``ei`` into the conflict-pair stack.

        Returns False when the constraints are unsatisfiable, i.e. the graph
        is not planar.
        """
        P = ConflictPair()
        # merge return edges of e_i into P.right
        while True:
            Q = self.S.pop()
            if not Q.left.empty():
                Q.swap()
            if not Q.left.empty():  # not planar
                return False
            if self.lowpt[Q.right.low] > self.lowpt[e]:
                # merge intervals
                if P.right.empty():  # topmost interval
                    P.right = Q.right.copy()
                else:
                    self.ref[P.right.low] = Q.right.high
                P.right.low = Q.right.low
            else:  # align
                self.ref[Q.right.low] = self.lowpt_edge[e]
            if top_of_stack(self.S) == self.stack_bottom[ei]:
                break
        # merge conflicting return edges of e_1,...,e_i-1 into P.L
        while top_of_stack(self.S).left.conflicting(ei, self) or top_of_stack(
            self.S
        ).right.conflicting(ei, self):
            Q = self.S.pop()
            if Q.right.conflicting(ei, self):
                Q.swap()
            if Q.right.conflicting(ei, self):  # not planar
                return False
            # merge interval below lowpt(e_i) into P.R
            self.ref[P.right.low] = Q.right.high
            if Q.right.low is not None:
                P.right.low = Q.right.low

            if P.left.empty():  # topmost interval
                P.left = Q.left.copy()
            else:
                self.ref[P.left.low] = Q.left.high
            P.left.low = Q.left.low

        if not (P.left.empty() and P.right.empty()):
            self.S.append(P)
        return True

    def remove_back_edges(self, e):
        """Trim conflict pairs whose back edges return to the parent of ``e``."""
        u = e[0]
        # trim back edges ending at parent u
        # drop entire conflict pairs
        while self.S and top_of_stack(self.S).lowest(self) == self.height[u]:
            P = self.S.pop()
            if P.left.low is not None:
                self.side[P.left.low] = -1

        if self.S:  # one more conflict pair to consider
            P = self.S.pop()
            # trim left interval
            while P.left.high is not None and P.left.high[1] == u:
                P.left.high = self.ref[P.left.high]
            if P.left.high is None and P.left.low is not None:
                # just emptied
                self.ref[P.left.low] = P.right.low
                self.side[P.left.low] = -1
                P.left.low = None
            # trim right interval
            while P.right.high is not None and P.right.high[1] == u:
                P.right.high = self.ref[P.right.high]
            if P.right.high is None and P.right.low is not None:
                # just emptied
                self.ref[P.right.low] = P.left.low
                self.side[P.right.low] = -1
                P.right.low = None
            self.S.append(P)

        # side of e is side of a highest return edge
        if self.lowpt[e] < self.height[u]:  # e has return edge
            hl = top_of_stack(self.S).left.high
            hr = top_of_stack(self.S).right.high

            if hl is not None and (hr is None or self.lowpt[hl] > self.lowpt[hr]):
                self.ref[e] = hl
            else:
                self.ref[e] = hr

    def dfs_embedding(self, v):
        """Completes the embedding.

        Iterative emulation of :meth:`dfs_embedding_recursive` using an
        explicit stack.
        """
        # the recursion stack
        dfs_stack = [v]
        # index of next edge to handle in adjacency list of each node
        ind = defaultdict(lambda: 0)

        while dfs_stack:
            v = dfs_stack.pop()

            for w in self.ordered_adjs[v][ind[v] :]:
                ind[v] += 1
                ei = (v, w)

                if ei == self.parent_edge[w]:  # tree edge
                    self.embedding.add_half_edge_first(w, v)
                    self.left_ref[v] = w
                    self.right_ref[v] = w

                    dfs_stack.append(v)  # revisit v after finishing w
                    dfs_stack.append(w)  # visit w next
                    break  # handle next node in dfs_stack (i.e. w)
                else:  # back edge
                    if self.side[ei] == 1:
                        # place v directly after right_ref[w] in embed. list of w
                        self.embedding.add_half_edge(w, v, ccw=self.right_ref[w])
                    else:
                        # place v directly before left_ref[w] in embed. list of w
                        self.embedding.add_half_edge(w, v, cw=self.left_ref[w])
                        self.left_ref[w] = v

    def dfs_embedding_recursive(self, v):
        """Recursive version of :meth:`dfs_embedding`."""
        for w in self.ordered_adjs[v]:
            ei = (v, w)
            if ei == self.parent_edge[w]:  # tree edge
                self.embedding.add_half_edge_first(w, v)
                self.left_ref[v] = w
                self.right_ref[v] = w
                self.dfs_embedding_recursive(w)
            else:  # back edge
                if self.side[ei] == 1:
                    # place v directly after right_ref[w] in embed. list of w
                    self.embedding.add_half_edge(w, v, ccw=self.right_ref[w])
                else:
                    # place v directly before left_ref[w] in embed. list of w
                    self.embedding.add_half_edge(w, v, cw=self.left_ref[w])
                    self.left_ref[w] = v

    def sign(self, e):
        """Resolve the relative side of an edge to the absolute side.

        Iterative emulation of :meth:`sign_recursive`; follows the ``ref``
        chain of an edge and multiplies the sides along it.
        """
        # the recursion stack
        dfs_stack = [e]
        # dict to remember reference edges
        old_ref = defaultdict(lambda: None)

        while dfs_stack:
            e = dfs_stack.pop()

            if self.ref[e] is not None:
                dfs_stack.append(e)  # revisit e after finishing self.ref[e]
                dfs_stack.append(self.ref[e])  # visit self.ref[e] next
                old_ref[e] = self.ref[e]  # remember value of self.ref[e]
                self.ref[e] = None
            else:
                self.side[e] *= self.side[old_ref[e]]

        return self.side[e]

    def sign_recursive(self, e):
        """Recursive version of :meth:`sign`."""
        if self.ref[e] is not None:
            self.side[e] = self.side[e] * self.sign_recursive(self.ref[e])
            self.ref[e] = None
        return self.side[e]
+
+
+class PlanarEmbedding(nx.DiGraph):
+    """Represents a planar graph with its planar embedding.
+
+    The planar embedding is given by a `combinatorial embedding
+    <https://en.wikipedia.org/wiki/Graph_embedding#Combinatorial_embedding>`_.
+
+    .. note:: `check_planarity` is the preferred way to check if a graph is planar.
+
+    **Neighbor ordering:**
+
+    In comparison to a usual graph structure, the embedding also stores the
+    order of all neighbors for every vertex.
+    The order of the neighbors can be given in clockwise (cw) direction or
+    counterclockwise (ccw) direction. This order is stored as edge attributes
+    in the underlying directed graph. For the edge (u, v) the edge attribute
+    'cw' is set to the neighbor of u that follows immediately after v in
+    clockwise direction.
+
+    In order for a PlanarEmbedding to be valid it must fulfill multiple
+    conditions. It is possible to check if these conditions are fulfilled with
+    the method :meth:`check_structure`.
+    The conditions are:
+
+    * Edges must go in both directions (because the edge attributes differ)
+    * Every edge must have a 'cw' and 'ccw' attribute which corresponds to a
+      correct planar embedding.
+
+    As long as a PlanarEmbedding is invalid only the following methods should
+    be called:
+
+    * :meth:`add_half_edge`
+    * :meth:`connect_components`
+
+    Even though the graph is a subclass of nx.DiGraph, it can still be used
+    for algorithms that require undirected graphs, because the method
+    :meth:`is_directed` is overridden. This is possible, because a valid
+    PlanarGraph must have edges in both directions.
+
+    **Half edges:**
+
+    In methods like `add_half_edge` the term "half-edge" is used, which is
+    a term that is used in `doubly connected edge lists
+    <https://en.wikipedia.org/wiki/Doubly_connected_edge_list>`_. It is used
+    to emphasize that the edge is only in one direction and there exists
+    another half-edge in the opposite direction.
+    While conventional edges always have two faces (including outer face) next
+    to them, it is possible to assign each half-edge *exactly one* face.
+    For a half-edge (u, v) that is oriented such that u is below v then the
+    face that belongs to (u, v) is to the right of this half-edge.
+
+    See Also
+    --------
+    is_planar :
+        Preferred way to check if an existing graph is planar.
+
+    check_planarity :
+        A convenient way to create a `PlanarEmbedding`. If not planar,
+        it returns a subgraph that shows this.
+
+    Examples
+    --------
+
+    Create an embedding of a star graph (compare `nx.star_graph(3)`):
+
+    >>> G = nx.PlanarEmbedding()
+    >>> G.add_half_edge(0, 1)
+    >>> G.add_half_edge(0, 2, ccw=1)
+    >>> G.add_half_edge(0, 3, ccw=2)
+    >>> G.add_half_edge(1, 0)
+    >>> G.add_half_edge(2, 0)
+    >>> G.add_half_edge(3, 0)
+
+    Alternatively the same embedding can also be defined in counterclockwise
+    orientation. The following results in exactly the same PlanarEmbedding:
+
+    >>> G = nx.PlanarEmbedding()
+    >>> G.add_half_edge(0, 1)
+    >>> G.add_half_edge(0, 3, cw=1)
+    >>> G.add_half_edge(0, 2, cw=3)
+    >>> G.add_half_edge(1, 0)
+    >>> G.add_half_edge(2, 0)
+    >>> G.add_half_edge(3, 0)
+
+    After creating a graph, it is possible to validate that the PlanarEmbedding
+    object is correct:
+
+    >>> G.check_structure()
+
+    """
+
    def __init__(self, incoming_graph_data=None, **attr):
        super().__init__(incoming_graph_data=incoming_graph_data, **attr)
        # Shadow the inherited DiGraph edge-insertion methods on the
        # instance so edges can only be added through `add_half_edge`,
        # which keeps the cw/ccw neighbor ordering consistent.
        self.add_edge = self.__forbidden
        self.add_edges_from = self.__forbidden
        self.add_weighted_edges_from = self.__forbidden
+
    def __forbidden(self, *args, **kwargs):
        """Forbidden operation

        Any edge additions to a PlanarEmbedding should be done using
        method `add_half_edge`.

        Raises
        ------
        NotImplementedError
            Always; this placeholder replaces the inherited edge-adding
            methods on every instance.
        """
        raise NotImplementedError(
            "Use `add_half_edge` method to add edges to a PlanarEmbedding."
        )
+
+    def get_data(self):
+        """Converts the adjacency structure into a better readable structure.
+
+        Returns
+        -------
+        embedding : dict
+            A dict mapping all nodes to a list of neighbors sorted in
+            clockwise order.
+
+        See Also
+        --------
+        set_data
+
+        """
+        embedding = {}
+        for v in self:
+            embedding[v] = list(self.neighbors_cw_order(v))
+        return embedding
+
+    def set_data(self, data):
+        """Inserts edges according to given sorted neighbor list.
+
+        The input format is the same as the output format of get_data().
+
+        Parameters
+        ----------
+        data : dict
+            A dict mapping all nodes to a list of neighbors sorted in
+            clockwise order.
+
+        See Also
+        --------
+        get_data
+
+        """
+        for v in data:
+            ref = None
+            for w in reversed(data[v]):
+                self.add_half_edge(v, w, cw=ref)
+                ref = w
+
    def remove_node(self, n):
        """Remove node n.

        Removes the node n and all adjacent edges, updating the
        PlanarEmbedding to account for any resulting edge removal.
        Attempting to remove a non-existent node will raise an exception.

        Parameters
        ----------
        n : node
           A node in the graph

        Raises
        ------
        NetworkXError
           If n is not in the graph.

        See Also
        --------
        remove_nodes_from

        """
        try:
            # For every in-neighbor u, splice n out of u's circular cw/ccw
            # neighbor ordering before deleting the half-edge records.
            for u in self._pred[n]:
                succs_u = self._succ[u]
                un_cw = succs_u[n]["cw"]
                un_ccw = succs_u[n]["ccw"]
                del succs_u[n]
                del self._pred[u][n]
                if n != un_cw:
                    # n was not u's only neighbor: reconnect its former
                    # cw/ccw neighbors to each other
                    succs_u[un_cw]["ccw"] = un_ccw
                    succs_u[un_ccw]["cw"] = un_cw
            del self._node[n]
            del self._succ[n]
            del self._pred[n]
        except KeyError as err:  # NetworkXError if n not in self
            raise nx.NetworkXError(
                f"The node {n} is not in the planar embedding."
            ) from err
        nx._clear_cache(self)
+
+    def remove_nodes_from(self, nodes):
+        """Remove multiple nodes.
+
+        Parameters
+        ----------
+        nodes : iterable container
+            A container of nodes (list, dict, set, etc.).  If a node
+            in the container is not in the graph it is silently ignored.
+
+        See Also
+        --------
+        remove_node
+
+        Notes
+        -----
+        When removing nodes from an iterator over the graph you are changing,
+        a `RuntimeError` will be raised with message:
+        `RuntimeError: dictionary changed size during iteration`. This
+        happens when the graph's underlying dictionary is modified during
+        iteration. To avoid this error, evaluate the iterator into a separate
+        object, e.g. by using `list(iterator_of_nodes)`, and pass this
+        object to `G.remove_nodes_from`.
+
+        """
+        for n in nodes:
+            if n in self._node:
+                self.remove_node(n)
+            # silently skip non-existing nodes
+
    def neighbors_cw_order(self, v):
        """Generator for the neighbors of v in clockwise order.

        Parameters
        ----------
        v : node

        Yields
        ------
        node

        """
        succs = self._succ[v]
        if not succs:
            # v has no neighbors
            return
        # The leftmost neighbor is kept as the last key of the succ dict.
        start_node = next(reversed(succs))
        yield start_node
        # Follow the circular 'cw' references until we return to the start.
        current_node = succs[start_node]["cw"]
        while start_node != current_node:
            yield current_node
            current_node = succs[current_node]["cw"]
+
    def add_half_edge(self, start_node, end_node, *, cw=None, ccw=None):
        """Adds a half-edge from `start_node` to `end_node`.

        If the half-edge is not the first one out of `start_node`, a reference
        node must be provided either in the clockwise (parameter `cw`) or in
        the counterclockwise (parameter `ccw`) direction. Only one of `cw`/`ccw`
        can be specified (or neither in the case of the first edge).
        Note that specifying a reference in the clockwise (`cw`) direction means
        inserting the new edge in the first counterclockwise position with
        respect to the reference (and vice-versa).

        Parameters
        ----------
        start_node : node
            Start node of inserted edge.
        end_node : node
            End node of inserted edge.
        cw, ccw: node
            End node of reference edge.
            Omit or pass `None` if adding the first out-half-edge of `start_node`.


        Raises
        ------
        NetworkXException
            If the `cw` or `ccw` node is not a successor of `start_node`.
            If `start_node` has successors, but neither `cw` or `ccw` is provided.
            If both `cw` and `ccw` are specified.

        See Also
        --------
        connect_components
        """

        succs = self._succ.get(start_node)
        if succs:
            # there is already some edge out of start_node
            leftmost_nbr = next(reversed(self._succ[start_node]))
            if cw is not None:
                if cw not in succs:
                    raise nx.NetworkXError("Invalid clockwise reference node.")
                if ccw is not None:
                    raise nx.NetworkXError("Only one of cw/ccw can be specified.")
                # insert end_node between cw and cw's former ccw neighbor
                ref_ccw = succs[cw]["ccw"]
                super().add_edge(start_node, end_node, cw=cw, ccw=ref_ccw)
                succs[ref_ccw]["cw"] = end_node
                succs[cw]["ccw"] = end_node
                # when (cw == leftmost_nbr), the newly added neighbor is
                # already at the end of dict self._succ[start_node] and
                # takes the place of the former leftmost_nbr
                move_leftmost_nbr_to_end = cw != leftmost_nbr
            elif ccw is not None:
                if ccw not in succs:
                    raise nx.NetworkXError("Invalid counterclockwise reference node.")
                # insert end_node between ccw's former cw neighbor and ccw
                ref_cw = succs[ccw]["cw"]
                super().add_edge(start_node, end_node, cw=ref_cw, ccw=ccw)
                succs[ref_cw]["ccw"] = end_node
                succs[ccw]["cw"] = end_node
                move_leftmost_nbr_to_end = True
            else:
                raise nx.NetworkXError(
                    "Node already has out-half-edge(s), either cw or ccw reference node required."
                )
            if move_leftmost_nbr_to_end:
                # LRPlanarity (via self.add_half_edge_first()) requires that
                # we keep track of the leftmost neighbor, which we accomplish
                # by keeping it as the last key in dict self._succ[start_node]
                succs[leftmost_nbr] = succs.pop(leftmost_nbr)

        else:
            if cw is not None or ccw is not None:
                raise nx.NetworkXError("Invalid reference node.")
            # adding the first edge out of start_node; the new neighbor is
            # its own cw and ccw reference (a cycle of length one)
            super().add_edge(start_node, end_node, ccw=end_node, cw=end_node)
+
    def check_structure(self):
        """Runs without exceptions if this object is valid.

        Checks that the following properties are fulfilled:

        * Edges go in both directions (because the edge attributes differ).
        * Every edge has a 'cw' and 'ccw' attribute which corresponds to a
          correct planar embedding.

        Running this method verifies that the underlying Graph must be planar.

        Raises
        ------
        NetworkXException
            This exception is raised with a short explanation if the
            PlanarEmbedding is invalid.
        """
        # Check fundamental structure
        for v in self:
            try:
                sorted_nbrs = set(self.neighbors_cw_order(v))
            except KeyError as err:
                msg = f"Bad embedding. Missing orientation for a neighbor of {v}"
                raise nx.NetworkXException(msg) from err

            unsorted_nbrs = set(self[v])
            if sorted_nbrs != unsorted_nbrs:
                msg = "Bad embedding. Edge orientations not set correctly."
                raise nx.NetworkXException(msg)
            for w in self[v]:
                # Check if opposite half-edge exists
                if not self.has_edge(w, v):
                    msg = "Bad embedding. Opposite half-edge is missing."
                    raise nx.NetworkXException(msg)

        # Check planarity: Euler's formula (nodes - edges + faces == 2)
        # must hold for every connected component.
        counted_half_edges = set()
        for component in nx.connected_components(self):
            if len(component) == 1:
                # Don't need to check single node component
                continue
            num_nodes = len(component)
            num_half_edges = 0
            num_faces = 0
            for v in component:
                for w in self.neighbors_cw_order(v):
                    num_half_edges += 1
                    if (v, w) not in counted_half_edges:
                        # We encountered a new face
                        num_faces += 1
                        # Mark all half-edges belonging to this face
                        self.traverse_face(v, w, counted_half_edges)
            num_edges = num_half_edges // 2  # num_half_edges is even
            if num_nodes - num_edges + num_faces != 2:
                # The result does not match Euler's formula
                msg = "Bad embedding. The graph does not match Euler's formula"
                raise nx.NetworkXException(msg)
+
+    def add_half_edge_ccw(self, start_node, end_node, reference_neighbor):
+        """Adds a half-edge from start_node to end_node.
+
+        The half-edge is added counter clockwise next to the existing half-edge
+        (start_node, reference_neighbor).
+
+        Parameters
+        ----------
+        start_node : node
+            Start node of inserted edge.
+        end_node : node
+            End node of inserted edge.
+        reference_neighbor: node
+            End node of reference edge.
+
+        Raises
+        ------
+        NetworkXException
+            If the reference_neighbor does not exist.
+
+        See Also
+        --------
+        add_half_edge
+        add_half_edge_cw
+        connect_components
+
+        """
+        self.add_half_edge(start_node, end_node, cw=reference_neighbor)
+
+    def add_half_edge_cw(self, start_node, end_node, reference_neighbor):
+        """Adds a half-edge from start_node to end_node.
+
+        The half-edge is added clockwise next to the existing half-edge
+        (start_node, reference_neighbor).
+
+        Parameters
+        ----------
+        start_node : node
+            Start node of inserted edge.
+        end_node : node
+            End node of inserted edge.
+        reference_neighbor: node
+            End node of reference edge.
+
+        Raises
+        ------
+        NetworkXException
+            If the reference_neighbor does not exist.
+
+        See Also
+        --------
+        add_half_edge
+        add_half_edge_ccw
+        connect_components
+        """
+        self.add_half_edge(start_node, end_node, ccw=reference_neighbor)
+
    def remove_edge(self, u, v):
        """Remove the edge between u and v.

        Removes the half-edges (u, v) and (v, u) and updates the cw/ccw
        edge ordering around both endpoints of the removed edge.

        Parameters
        ----------
        u, v : nodes
            Endpoints of the edge to remove.

        Raises
        ------
        NetworkXError
            If there is not an edge between u and v.

        See Also
        --------
        remove_edges_from : remove a collection of edges
        """
        try:
            succs_u = self._succ[u]
            succs_v = self._succ[v]
            # remember the cw/ccw neighbors of both half-edges before deleting
            uv_cw = succs_u[v]["cw"]
            uv_ccw = succs_u[v]["ccw"]
            vu_cw = succs_v[u]["cw"]
            vu_ccw = succs_v[u]["ccw"]
            del succs_u[v]
            del self._pred[v][u]
            del succs_v[u]
            del self._pred[u][v]
            if v != uv_cw:
                # v was not u's only neighbor: reconnect its former
                # cw/ccw neighbors to each other
                succs_u[uv_cw]["ccw"] = uv_ccw
                succs_u[uv_ccw]["cw"] = uv_cw
            if u != vu_cw:
                # same splice on v's side
                succs_v[vu_cw]["ccw"] = vu_ccw
                succs_v[vu_ccw]["cw"] = vu_cw
        except KeyError as err:
            raise nx.NetworkXError(
                f"The edge {u}-{v} is not in the planar embedding."
            ) from err
        nx._clear_cache(self)
+
+    def remove_edges_from(self, ebunch):
+        """Remove all edges specified in ebunch.
+
+        Parameters
+        ----------
+        ebunch: list or container of edge tuples
+            Each pair of half-edges between the nodes given in the tuples
+            will be removed from the graph. The nodes can be passed as:
+
+                - 2-tuples (u, v) half-edges (u, v) and (v, u).
+                - 3-tuples (u, v, k) where k is ignored.
+
+        See Also
+        --------
+        remove_edge : remove a single edge
+
+        Notes
+        -----
+        Will fail silently if an edge in ebunch is not in the graph.
+
+        Examples
+        --------
+        >>> G = nx.path_graph(4)  # or DiGraph, MultiGraph, MultiDiGraph, etc
+        >>> ebunch = [(1, 2), (2, 3)]
+        >>> G.remove_edges_from(ebunch)
+        """
+        for e in ebunch:
+            u, v = e[:2]  # ignore edge data
+            # assuming that the PlanarEmbedding is valid, if the half_edge
+            # (u, v) is in the graph, then so is half_edge (v, u)
+            if u in self._succ and v in self._succ[u]:
+                self.remove_edge(u, v)
+
+    def connect_components(self, v, w):
+        """Adds half-edges for (v, w) and (w, v) at some position.
+
+        This method should only be called if v and w are in different
+        components, or it might break the embedding.
+        This especially means that if `connect_components(v, w)`
+        is called it is not allowed to call `connect_components(w, v)`
+        afterwards. The neighbor orientations in both directions are
+        all set correctly after the first call.
+
+        Parameters
+        ----------
+        v : node
+        w : node
+
+        See Also
+        --------
+        add_half_edge
+        """
+        if v in self._succ and self._succ[v]:
+            ref = next(reversed(self._succ[v]))
+        else:
+            ref = None
+        self.add_half_edge(v, w, cw=ref)
+        if w in self._succ and self._succ[w]:
+            ref = next(reversed(self._succ[w]))
+        else:
+            ref = None
+        self.add_half_edge(w, v, cw=ref)
+
+    def add_half_edge_first(self, start_node, end_node):
+        """Add a half-edge and set end_node as start_node's leftmost neighbor.
+
+        The new edge is inserted counterclockwise with respect to the current
+        leftmost neighbor, if there is one.
+
+        Parameters
+        ----------
+        start_node : node
+        end_node : node
+
+        See Also
+        --------
+        add_half_edge
+        connect_components
+        """
+        succs = self._succ.get(start_node)
+        # the leftmost neighbor is the last entry in the
+        # self._succ[start_node] dict
+        leftmost_nbr = next(reversed(succs)) if succs else None
+        self.add_half_edge(start_node, end_node, cw=leftmost_nbr)
+
+    def next_face_half_edge(self, v, w):
+        """Returns the following half-edge left of a face.
+
+        Parameters
+        ----------
+        v : node
+        w : node
+
+        Returns
+        -------
+        half-edge : tuple
+        """
+        new_node = self[w][v]["ccw"]
+        return w, new_node
+
    def traverse_face(self, v, w, mark_half_edges=None):
        """Returns nodes on the face that belong to the half-edge (v, w).

        The face that is traversed lies to the right of the half-edge (in an
        orientation where v is below w).

        Optionally it is possible to pass a set to which all encountered half
        edges are added. Before calling this method, this set must not include
        any half-edges that belong to the face.

        Parameters
        ----------
        v : node
            Start node of half-edge.
        w : node
            End node of half-edge.
        mark_half_edges: set, optional
            Set to which all encountered half-edges are added.

        Returns
        -------
        face : list
            A list of nodes that lie on this face.
        """
        if mark_half_edges is None:
            mark_half_edges = set()

        face_nodes = [v]
        mark_half_edges.add((v, w))
        prev_node = v
        cur_node = w
        # Last half-edge is (incoming_node, v)
        incoming_node = self[v][w]["cw"]

        # Walk the face until we are back at the starting half-edge.
        while cur_node != v or prev_node != incoming_node:
            face_nodes.append(cur_node)
            prev_node, cur_node = self.next_face_half_edge(prev_node, cur_node)
            if (prev_node, cur_node) in mark_half_edges:
                # Revisiting a half-edge of this face means the embedding
                # is inconsistent.
                raise nx.NetworkXException("Bad planar embedding. Impossible face.")
            mark_half_edges.add((prev_node, cur_node))

        return face_nodes
+
    def is_directed(self):
        """A valid PlanarEmbedding is undirected.

        All reverse edges are contained, i.e. for every existing
        half-edge (v, w) the half-edge in the opposite direction (w, v) is also
        contained.
        """
        # Overriding this lets algorithms written for undirected graphs
        # accept this nx.DiGraph subclass.
        return False
+
    def copy(self, as_view=False):
        """Return a copy of the embedding, or a graph view of it.

        Parameters
        ----------
        as_view : bool, optional (default=False)
            If True, return a view of this embedding instead of a copy.

        Returns
        -------
        PlanarEmbedding
        """
        if as_view is True:
            return nx.graphviews.generic_graph_view(self)
        G = self.__class__()
        G.graph.update(self.graph)
        G.add_nodes_from((n, d.copy()) for n, d in self._node.items())
        # Bypass the forbidden instance-level add_edges_from so the cw/ccw
        # edge attributes are copied verbatim.
        super(self.__class__, G).add_edges_from(
            (u, v, datadict.copy())
            for u, nbrs in self._adj.items()
            for v, datadict in nbrs.items()
        )
        return G
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/polynomials.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/polynomials.py
new file mode 100644
index 00000000..7ebc7554
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/polynomials.py
@@ -0,0 +1,306 @@
+"""Provides algorithms supporting the computation of graph polynomials.
+
+Graph polynomials are polynomial-valued graph invariants that encode a wide
+variety of structural information. Examples include the Tutte polynomial,
+chromatic polynomial, characteristic polynomial, and matching polynomial. An
+extensive treatment is provided in [1]_.
+
+For a simple example, the `~sympy.matrices.matrices.MatrixDeterminant.charpoly`
+method can be used to compute the characteristic polynomial from the adjacency
+matrix of a graph. Consider the complete graph ``K_4``:
+
+>>> import sympy
+>>> x = sympy.Symbol("x")
+>>> G = nx.complete_graph(4)
+>>> A = nx.to_numpy_array(G, dtype=int)
+>>> M = sympy.SparseMatrix(A)
+>>> M.charpoly(x).as_expr()
+x**4 - 6*x**2 - 8*x - 3
+
+
+.. [1] Y. Shi, M. Dehmer, X. Li, I. Gutman,
+   "Graph Polynomials"
+"""
+
+from collections import deque
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["tutte_polynomial", "chromatic_polynomial"]
+
+
@not_implemented_for("directed")
@nx._dispatchable
def tutte_polynomial(G):
    r"""Returns the Tutte polynomial of `G`

    The Tutte polynomial `T_G(x, y)` is a two-variable graph polynomial
    whose specializations count many edge-connectivity-related structures
    (spanning trees, spanning forests, acyclic orientations, ...). This
    function evaluates it with an iterative deletion-contraction scheme
    based on the recurrence

    .. math::
        T_G(x, y) = \begin{cases}
           x^{k(G)} y^{l(G)}, & \text{if all edges are cut-edges or self-loops} \\
           T_{G-e}(x, y) + T_{G/e}(x, y), & \text{otherwise, for an edge $e$ that is neither}
        \end{cases}

    where `k(G)` is the number of cut-edges and `l(G)` the number of
    self-loops of `G`, `G-e` is `G` with edge `e` deleted, and `G/e` is
    `G` with edge `e` contracted.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    instance of `sympy.core.add.Add`
        A Sympy expression representing the Tutte polynomial for `G`.

    Examples
    --------
    >>> C = nx.cycle_graph(5)
    >>> nx.tutte_polynomial(C)
    x**4 + x**3 + x**2 + x + y

    >>> D = nx.diamond_graph()
    >>> nx.tutte_polynomial(D)
    x**3 + 2*x**2 + 2*x*y + x + y**2 + y

    References
    ----------
    .. [1] M. Brandt,
       "The Tutte Polynomial."
       Talking About Combinatorial Objects Seminar, 2015
       https://math.berkeley.edu/~brandtm/talks/tutte.pdf
    .. [2] J. A. Ellis-Monaghan, C. Merino,
       "Graph polynomials and their applications I: The Tutte polynomial"
       Structural Analysis of Complex Networks, 2011
       https://arxiv.org/pdf/0803.3079.pdf
    """
    import sympy

    x, y = sympy.symbols("x y")

    # Stack of (multi)graphs still awaiting deletion-contraction.
    pending = deque([nx.MultiGraph(G)])
    polynomial = 0
    while pending:
        H = pending.pop()
        bridges = set(nx.bridges(H))

        # Find an edge that is neither a bridge nor a self-loop.
        chosen = None
        for u, v in H.edges():
            if (u, v) not in bridges and u != v:
                chosen = (u, v)
                break

        if chosen is None:
            # Base case: only cut-edges and self-loops remain.
            loops = list(nx.selfloop_edges(H, keys=True))
            polynomial += x ** len(bridges) * y ** len(loops)
        else:
            contracted = nx.contracted_edge(H, chosen, self_loops=True)
            # contracted_edge keeps the chosen edge as a self-loop; drop it.
            contracted.remove_edge(chosen[0], chosen[0])
            H.remove_edge(*chosen)
            pending.append(H)  # contributes T_{G-e}
            pending.append(contracted)  # contributes T_{G/e}
    return sympy.simplify(polynomial)
+
+
@not_implemented_for("directed")
@nx._dispatchable
def chromatic_polynomial(G):
    r"""Returns the chromatic polynomial of `G`

    The chromatic polynomial `X_G(x)` is the one-variable graph polynomial
    whose value at a natural number `k` counts the proper k-colorings of
    `G`. This function evaluates it with an iterative deletion-contraction
    scheme based on the chromatic recurrence (the Fundamental Reduction
    Theorem)

    .. math::
        X_G(x) = \begin{cases}
           x^{n(G)}, & \text{if $e(G)=0$} \\
           X_{G-e}(x) - X_{G/e}(x), & \text{otherwise, for an arbitrary edge $e$}
        \end{cases}

    where `n(G)` is the number of vertices of `G`, `e(G)` the number of
    edges, `G-e` is `G` with edge `e` deleted, and `G/e` is `G` with edge
    `e` contracted.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    instance of `sympy.core.add.Add`
        A Sympy expression representing the chromatic polynomial for `G`.

    Examples
    --------
    >>> C = nx.cycle_graph(5)
    >>> nx.chromatic_polynomial(C)
    x**5 - 5*x**4 + 10*x**3 - 10*x**2 + 4*x

    >>> G = nx.complete_graph(4)
    >>> nx.chromatic_polynomial(G)
    x**4 - 6*x**3 + 11*x**2 - 6*x

    Notes
    -----
    The chromatic polynomial is a specialization of the Tutte polynomial:
    ``X_G(x) = T_G(x, 0)``.

    References
    ----------
    .. [1] D. B. West,
       "Introduction to Graph Theory," p. 221
    .. [2] J. Zhang, J. Goodall,
       "An Introduction to Chromatic Polynomials"
       https://math.mit.edu/~apost/courses/18.204_2018/Julie_Zhang_paper.pdf
    """
    import sympy

    x = sympy.Symbol("x")
    # Each pending graph records how many contractions produced it; that
    # count determines the sign of its contribution.
    pending = deque([nx.MultiGraph(G, contraction_idx=0)])
    polynomial = 0
    while pending:
        H = pending.pop()
        remaining = list(H.edges)
        if not remaining:
            # Edgeless base case: contributes (-1)^contractions * x**n.
            polynomial += (-1) ** H.graph["contraction_idx"] * x ** len(H)
            continue
        edge = remaining[0]
        contracted = nx.contracted_edge(H, edge, self_loops=True)
        contracted.graph["contraction_idx"] = H.graph["contraction_idx"] + 1
        # contracted_edge keeps the chosen edge as a self-loop; drop it.
        contracted.remove_edge(edge[0], edge[0])
        H.remove_edge(*edge)
        pending.append(H)
        pending.append(contracted)
    return polynomial
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/reciprocity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/reciprocity.py
new file mode 100644
index 00000000..5ea7ed2c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/reciprocity.py
@@ -0,0 +1,98 @@
+"""Algorithms to calculate reciprocity in a directed graph."""
+
+import networkx as nx
+from networkx import NetworkXError
+
+from ..utils import not_implemented_for
+
+__all__ = ["reciprocity", "overall_reciprocity"]
+
+
@not_implemented_for("undirected", "multigraph")
@nx._dispatchable
def reciprocity(G, nodes=None):
    r"""Compute the reciprocity in a directed graph.

    The reciprocity of a directed graph is the ratio of the number of
    edges pointing in both directions to the total number of edges:
    $r = |{(u,v) \in G|(v,u) \in G}| / |{(u,v) \in G}|$.

    The reciprocity of a single node u is defined analogously as the
    ratio of the number of reciprocated edges to the total number of
    edges attached to u.

    Parameters
    ----------
    G : graph
       A networkx directed graph
    nodes : container of nodes, optional (default=whole graph)
       Compute reciprocity for nodes in this container.

    Returns
    -------
    out : dictionary
       Reciprocity keyed by node label.

    Raises
    ------
    NetworkXError
        If a single node is requested and that node is isolated.

    Notes
    -----
    The reciprocity is not defined for isolated nodes.
    In such cases this function will return None.
    """
    # No nodes given: reciprocity of the whole graph.
    if nodes is None:
        return overall_reciprocity(G)

    # A single node: return its reciprocity as a scalar.
    if nodes in G:
        _, value = next(_reciprocity_iter(G, nodes))
        if value is None:
            raise NetworkXError("Not defined for isolated nodes.")
        return value

    # An iterable of nodes: map each node to its reciprocity.
    return dict(_reciprocity_iter(G, nodes))
+
+
+def _reciprocity_iter(G, nodes):
+    """Return an iterator of (node, reciprocity)."""
+    n = G.nbunch_iter(nodes)
+    for node in n:
+        pred = set(G.predecessors(node))
+        succ = set(G.successors(node))
+        overlap = pred & succ
+        n_total = len(pred) + len(succ)
+
+        # Reciprocity is not defined for isolated nodes.
+        # Return None.
+        if n_total == 0:
+            yield (node, None)
+        else:
+            reciprocity = 2 * len(overlap) / n_total
+            yield (node, reciprocity)
+
+
@not_implemented_for("undirected", "multigraph")
@nx._dispatchable
def overall_reciprocity(G):
    """Compute the reciprocity for the whole graph.

    See the doc of reciprocity for the definition.

    Parameters
    ----------
    G : graph
       A networkx graph

    Returns
    -------
    reciprocity : float
       The reciprocity of the graph.

    Raises
    ------
    NetworkXError
        If the graph has no edges.
    """
    n_all_edge = G.number_of_edges()
    # Reject the degenerate case first so we do not build an undirected
    # copy of an empty graph only to discard it.
    if n_all_edge == 0:
        raise NetworkXError("Not defined for empty graphs")

    # Each reciprocated pair (u, v)/(v, u) collapses to one edge in the
    # undirected view, so the difference counts reciprocated pairs.
    n_overlap_edge = (n_all_edge - G.to_undirected().number_of_edges()) * 2

    return n_overlap_edge / n_all_edge
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/regular.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/regular.py
new file mode 100644
index 00000000..f483ef32
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/regular.py
@@ -0,0 +1,215 @@
+"""Functions for computing and verifying regular graphs."""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["is_regular", "is_k_regular", "k_factor"]
+
+
@nx._dispatchable
def is_regular(G):
    """Determines whether the graph ``G`` is a regular graph.

    A regular graph is a graph where each vertex has the same degree. A
    regular digraph is a graph where the indegree and outdegree of each
    vertex are equal.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    bool
        Whether the given graph or digraph is regular.

    Raises
    ------
    NetworkXPointlessConcept
        If the graph has no nodes.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 1)])
    >>> nx.is_regular(G)
    True

    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    # Compare every node's degree against an arbitrary reference node.
    probe = nx.utils.arbitrary_element(G)
    if G.is_directed():
        # A digraph is regular when all in-degrees agree and all
        # out-degrees agree (in and out need not equal each other).
        ref_in = G.in_degree(probe)
        ref_out = G.out_degree(probe)
        return all(d == ref_in for _, d in G.in_degree) and all(
            d == ref_out for _, d in G.out_degree
        )
    ref = G.degree(probe)
    return all(d == ref for _, d in G.degree)
+
+
@not_implemented_for("directed")
@nx._dispatchable
def is_k_regular(G, k):
    """Determines whether the graph ``G`` is a k-regular graph.

    A k-regular graph is a graph where each vertex has degree k.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.
    k : int
        The degree that every vertex must have.

    Returns
    -------
    bool
        Whether the given graph is k-regular.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (2, 3), (3, 4), (4, 1)])
    >>> nx.is_k_regular(G, k=3)
    False

    """
    # Vacuously True for the empty graph: `all` of an empty iterable.
    return all(d == k for _, d in G.degree)
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(preserve_edge_attrs=True, returns_graph=True)
def k_factor(G, k, matching_weight="weight"):
    """Compute a k-factor of G

    A k-factor of a graph is a spanning k-regular subgraph.
    A spanning k-regular subgraph of G is a subgraph that contains
    each vertex of G and a subset of the edges of G such that each
    vertex has degree k.

    Parameters
    ----------
    G : NetworkX graph
      Undirected graph

    k : int
      The degree of every vertex in the returned spanning subgraph.

    matching_weight: string, optional (default='weight')
       Edge data key corresponding to the edge weight.
       Used for finding the max-weighted perfect matching.
       If key not found, uses 1 as weight.

    Returns
    -------
    G2 : NetworkX graph
        A k-factor of G

    Raises
    ------
    NetworkXUnfeasible
        If some vertex has degree less than k, or if no k-factor exists.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (2, 3), (3, 4), (4, 1)])
    >>> G2 = nx.k_factor(G, k=1)
    >>> G2.edges()
    EdgeView([(1, 2), (3, 4)])

    References
    ----------
    .. [1] "An algorithm for computing simple k-factors.",
       Meijer, Henk, Yurai Núñez-Rodríguez, and David Rappaport,
       Information processing letters, 2009.
    """

    from networkx.algorithms.matching import is_perfect_matching, max_weight_matching

    class LargeKGadget:
        # Gadget used when k >= degree / 2: the node is replaced by
        # `degree` outer vertices plus `degree - k` core vertices.
        def __init__(self, k, degree, node, g):
            self.original = node
            self.g = g
            self.k = k
            self.degree = degree

            self.outer_vertices = [(node, x) for x in range(degree)]
            self.core_vertices = [(node, x + degree) for x in range(degree - k)]

        def replace_node(self):
            # Reattach each incident edge to a distinct outer vertex,
            # then fully connect the core to the outer vertices.
            adj_view = self.g[self.original]
            neighbors = list(adj_view.keys())
            # NOTE: renamed from `edge_attrs`, which shadowed the loop
            # variable of the same name below.
            all_edge_attrs = list(adj_view.values())
            for outer, neighbor, edge_attrs in zip(
                self.outer_vertices, neighbors, all_edge_attrs
            ):
                self.g.add_edge(outer, neighbor, **edge_attrs)
            for core in self.core_vertices:
                for outer in self.outer_vertices:
                    self.g.add_edge(core, outer)
            self.g.remove_node(self.original)

        def restore_node(self):
            # Collapse the gadget back into the original node, keeping
            # only matched edges that leave the gadget.
            self.g.add_node(self.original)
            for outer in self.outer_vertices:
                adj_view = self.g[outer]
                for neighbor, edge_attrs in list(adj_view.items()):
                    if neighbor not in self.core_vertices:
                        self.g.add_edge(self.original, neighbor, **edge_attrs)
                        break
            # Fixed: operate on this gadget's graph (`self.g`) instead of
            # the enclosing function's `g` variable.
            self.g.remove_nodes_from(self.outer_vertices)
            self.g.remove_nodes_from(self.core_vertices)

    class SmallKGadget:
        # Gadget used when k < degree / 2: the node is replaced by
        # `degree` outer vertices, `degree` inner vertices, and `k`
        # core vertices.
        def __init__(self, k, degree, node, g):
            self.original = node
            self.k = k
            self.degree = degree
            self.g = g

            self.outer_vertices = [(node, x) for x in range(degree)]
            self.inner_vertices = [(node, x + degree) for x in range(degree)]
            self.core_vertices = [(node, x + 2 * degree) for x in range(k)]

        def replace_node(self):
            # Pair each outer vertex with an inner vertex and one of the
            # original edges; connect the core to all inner vertices.
            adj_view = self.g[self.original]
            for outer, inner, (neighbor, edge_attrs) in zip(
                self.outer_vertices, self.inner_vertices, list(adj_view.items())
            ):
                self.g.add_edge(outer, inner)
                self.g.add_edge(outer, neighbor, **edge_attrs)
            for core in self.core_vertices:
                for inner in self.inner_vertices:
                    self.g.add_edge(core, inner)
            self.g.remove_node(self.original)

        def restore_node(self):
            # Collapse the gadget back into the original node, keeping
            # only matched edges that leave the gadget.
            self.g.add_node(self.original)
            for outer in self.outer_vertices:
                adj_view = self.g[outer]
                for neighbor, edge_attrs in adj_view.items():
                    if neighbor not in self.core_vertices:
                        self.g.add_edge(self.original, neighbor, **edge_attrs)
                        break
            self.g.remove_nodes_from(self.outer_vertices)
            self.g.remove_nodes_from(self.inner_vertices)
            self.g.remove_nodes_from(self.core_vertices)

    # Step 1: a k-factor requires every vertex to have degree >= k.
    if any(d < k for _, d in G.degree):
        raise nx.NetworkXUnfeasible("Graph contains a vertex with degree less than k")
    g = G.copy()

    # Step 2: replace every node by the gadget appropriate to its degree.
    gadgets = []
    for node, degree in list(g.degree):
        if k < degree / 2.0:
            gadget = SmallKGadget(k, degree, node, g)
        else:
            gadget = LargeKGadget(k, degree, node, g)
        gadget.replace_node()
        gadgets.append(gadget)

    # Step 3: a perfect matching in the gadget graph induces the k-factor.
    matching = max_weight_matching(g, maxcardinality=True, weight=matching_weight)

    # Step 4: keep only matched edges, then undo the gadget replacements.
    if not is_perfect_matching(g, matching):
        raise nx.NetworkXUnfeasible(
            "Cannot find k-factor because no perfect matching exists"
        )

    # Materialize the edge list first: removing edges while iterating the
    # live edge view would mutate the dicts being iterated.
    for edge in list(g.edges()):
        if edge not in matching and (edge[1], edge[0]) not in matching:
            g.remove_edge(edge[0], edge[1])

    for gadget in gadgets:
        gadget.restore_node()

    return g
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/richclub.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/richclub.py
new file mode 100644
index 00000000..445b27d1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/richclub.py
@@ -0,0 +1,138 @@
+"""Functions for computing rich-club coefficients."""
+
+from itertools import accumulate
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["rich_club_coefficient"]
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def rich_club_coefficient(G, normalized=True, Q=100, seed=None):
    r"""Returns the rich-club coefficient of the graph `G`.

    For each degree *k*, the *rich-club coefficient* is the ratio of the
    number of actual to the number of potential edges for nodes with
    degree greater than *k*:

    .. math::

        \phi(k) = \frac{2 E_k}{N_k (N_k - 1)}

    where `N_k` is the number of nodes with degree larger than *k*, and
    `E_k` is the number of edges among those nodes.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph with neither parallel edges nor self-loops.
    normalized : bool (optional)
        Normalize using randomized network as in [1]_
    Q : float (optional, default=100)
        If `normalized` is True, perform `Q * m` double-edge
        swaps, where `m` is the number of edges in `G`, to use as a
        null-model for normalization.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    rc : dictionary
       A dictionary, keyed by degree, with rich-club coefficient values.

    Raises
    ------
    NetworkXError
        If `G` has self-loops, or if `G` has fewer than four nodes and
        ``normalized=True`` (a randomly sampled graph for normalization
        cannot be generated in the latter case).

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
    >>> rc = nx.rich_club_coefficient(G, normalized=False, seed=42)
    >>> rc[0]
    0.4

    Notes
    -----
    The rich club definition and algorithm are found in [1]_.  This
    algorithm ignores any edge weights and is not defined for directed
    graphs or graphs with parallel edges or self loops.

    Normalization is done by computing the rich club coefficient for a randomly
    sampled graph with the same degree distribution as `G` by
    repeatedly swapping the endpoints of existing edges. For graphs with fewer than 4
    nodes, it is not possible to generate a random graph with a prescribed
    degree distribution, as the degree distribution fully determines the graph
    (hence making the coefficients trivially normalized to 1).
    This function raises an exception in this case.

    Estimates for appropriate values of `Q` are found in [2]_.

    References
    ----------
    .. [1] Julian J. McAuley, Luciano da Fontoura Costa,
       and Tibério S. Caetano,
       "The rich-club phenomenon across complex network hierarchies",
       Applied Physics Letters Vol 91 Issue 8, August 2007.
       https://arxiv.org/abs/physics/0701290
    .. [2] R. Milo, N. Kashtan, S. Itzkovitz, M. E. J. Newman, U. Alon,
       "Uniform generation of random graphs with arbitrary degree
       sequences", 2006. https://arxiv.org/abs/cond-mat/0312028
    """
    if nx.number_of_selfloops(G) > 0:
        # Raise the library's own exception type instead of a bare
        # Exception; NetworkXError subclasses Exception, so existing
        # callers that caught Exception continue to work.
        raise nx.NetworkXError(
            "rich_club_coefficient is not implemented for graphs with self loops."
        )
    rc = _compute_rc(G)
    if normalized:
        # make R a copy of G, randomize with Q*|E| double edge swaps
        # and use rich_club coefficient of R to normalize
        R = G.copy()
        E = R.number_of_edges()
        nx.double_edge_swap(R, Q * E, max_tries=Q * E * 10, seed=seed)
        rcran = _compute_rc(R)
        rc = {k: v / rcran[k] for k, v in rc.items()}
    return rc
+
+
def _compute_rc(G):
    """Returns the rich-club coefficient for each degree in the graph
    `G`.

    `G` is an undirected graph without multiedges.

    Returns a dictionary mapping degree to rich-club coefficient for
    that degree.

    """
    deghist = nx.degree_histogram(G)
    total = sum(deghist)
    # Compute the number of nodes with degree greater than `k`, for each
    # degree `k` (omitting the last entry, which is zero).  Degrees for
    # which at most one node remains are skipped because the coefficient's
    # denominator ``nk * (nk - 1)`` would be zero there.
    nks = (total - cs for cs in accumulate(deghist) if total - cs > 1)
    # Create a sorted list of pairs of edge endpoint degrees.
    #
    # The list is sorted in reverse order so that we can pop from the
    # right side of the list later, instead of popping from the left
    # side of the list, which would have a linear time cost.
    edge_degrees = sorted((sorted(map(G.degree, e)) for e in G.edges()), reverse=True)
    ek = G.number_of_edges()
    if ek == 0:
        # No edges at all: no coefficients to report.
        return {}

    # (k1, k2) is the pair of endpoint degrees of the current smallest
    # edge, with k1 <= k2 by construction above.
    k1, k2 = edge_degrees.pop()
    rc = {}
    for d, nk in enumerate(nks):
        # Discard edges having an endpoint of degree <= d, so that `ek`
        # counts only edges whose endpoints both have degree > d.
        while k1 <= d:
            if len(edge_degrees) == 0:
                ek = 0
                break
            k1, k2 = edge_degrees.pop()
            ek -= 1
        rc[d] = 2 * ek / (nk * (nk - 1))
    return rc
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__init__.py
new file mode 100644
index 00000000..eb0d91ce
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__init__.py
@@ -0,0 +1,5 @@
+from networkx.algorithms.shortest_paths.generic import *
+from networkx.algorithms.shortest_paths.unweighted import *
+from networkx.algorithms.shortest_paths.weighted import *
+from networkx.algorithms.shortest_paths.astar import *
+from networkx.algorithms.shortest_paths.dense import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/astar.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/astar.py
new file mode 100644
index 00000000..8d988477
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/astar.py
@@ -0,0 +1,241 @@
+"""Shortest paths and path lengths using the A* ("A star") algorithm."""
+
+from heapq import heappop, heappush
+from itertools import count
+
+import networkx as nx
+from networkx.algorithms.shortest_paths.weighted import _weight_function
+
+__all__ = ["astar_path", "astar_path_length"]
+
+
@nx._dispatchable(edge_attrs="weight", preserve_node_attrs="heuristic")
def astar_path(G, source, target, heuristic=None, weight="weight", *, cutoff=None):
    """Returns a list of nodes in a shortest path between source and target
    using the A* ("A-star") algorithm.

    There may be more than one shortest path.  This returns only one.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Starting node for path

    target : node
       Ending node for path

    heuristic : function
       A function to evaluate the estimate of the distance
       from a node to the target.  The function takes
       two nodes arguments and must return a number.
       If the heuristic is inadmissible (if it might
       overestimate the cost of reaching the goal from a node),
       the result may not be a shortest path.
       The algorithm does not support updating heuristic
       values for the same node due to caching the first
       heuristic calculation per node.

    weight : string or function
       If this is a string, then edge weights will be accessed via the
       edge attribute with this key (that is, the weight of the edge
       joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
       such edge attribute exists, the weight of the edge is assumed to
       be one.
       If this is a function, the weight of an edge is the value
       returned by the function. The function must accept exactly three
       positional arguments: the two endpoints of an edge and the
       dictionary of edge attributes for that edge. The function must
       return a number or None to indicate a hidden edge.

    cutoff : float, optional
       If this is provided, the search will be bounded to this value. I.e. if
       the evaluation function surpasses this value for a node n, the node will not
       be expanded further and will be ignored. More formally, let h'(n) be the
       heuristic function, and g(n) be the cost of reaching n from the source node. Then,
       if g(n) + h'(n) > cutoff, the node will not be explored further.
       Note that if the heuristic is inadmissible, it is possible that paths
       are ignored even though they satisfy the cutoff.

    Raises
    ------
    NetworkXNoPath
        If no path exists between source and target.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> print(nx.astar_path(G, 0, 4))
    [0, 1, 2, 3, 4]
    >>> G = nx.grid_graph(dim=[3, 3])  # nodes are two-tuples (x,y)
    >>> nx.set_edge_attributes(G, {e: e[1][0] * 2 for e in G.edges()}, "cost")
    >>> def dist(a, b):
    ...     (x1, y1) = a
    ...     (x2, y2) = b
    ...     return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5
    >>> print(nx.astar_path(G, (0, 0), (2, 2), heuristic=dist, weight="cost"))
    [(0, 0), (0, 1), (0, 2), (1, 2), (2, 2)]

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    The weight function can be used to hide edges by returning None.
    So ``weight = lambda u, v, d: 1 if d['color']=="red" else None``
    will find the shortest red path.

    See Also
    --------
    shortest_path, dijkstra_path

    """
    if source not in G:
        raise nx.NodeNotFound(f"Source {source} is not in G")

    if target not in G:
        raise nx.NodeNotFound(f"Target {target} is not in G")

    if heuristic is None:
        # The default heuristic is h=0 - same as Dijkstra's algorithm
        def heuristic(u, v):
            return 0

    push = heappush
    pop = heappop
    weight = _weight_function(G, weight)

    G_succ = G._adj  # For speed-up (and works for both directed and undirected graphs)

    # The queue stores priority, node, cost to reach, and parent.
    # Uses Python heapq to keep in priority order.
    # Add a counter to the queue to prevent the underlying heap from
    # attempting to compare the nodes themselves. The hash breaks ties in the
    # priority and is guaranteed unique for all nodes in the graph.
    c = count()
    queue = [(0, next(c), source, 0, None)]

    # Maps enqueued nodes to distance of discovered paths and the
    # computed heuristics to target. We avoid computing the heuristics
    # more than once and inserting the node into the queue too many times.
    enqueued = {}
    # Maps explored nodes to parent closest to the source.
    explored = {}

    while queue:
        # Pop the smallest item from queue.
        _, __, curnode, dist, parent = pop(queue)

        if curnode == target:
            path = [curnode]
            node = parent
            while node is not None:
                path.append(node)
                node = explored[node]
            path.reverse()
            return path

        if curnode in explored:
            # Do not override the parent of starting node
            if explored[curnode] is None:
                continue

            # Skip bad paths that were enqueued before finding a better one
            qcost, h = enqueued[curnode]
            if qcost < dist:
                continue

        explored[curnode] = parent

        for neighbor, w in G_succ[curnode].items():
            cost = weight(curnode, neighbor, w)
            if cost is None:
                continue
            ncost = dist + cost
            if neighbor in enqueued:
                qcost, h = enqueued[neighbor]
                # if qcost <= ncost, a less costly path from the
                # neighbor to the source was already determined.
                # Therefore, we won't attempt to push this neighbor
                # to the queue
                if qcost <= ncost:
                    continue
            else:
                h = heuristic(neighbor, target)

            # Explicit None check: the previous truthiness test silently
            # disabled a cutoff of 0 (or 0.0).
            if cutoff is not None and ncost + h > cutoff:
                continue

            enqueued[neighbor] = ncost, h
            push(queue, (ncost + h, next(c), neighbor, ncost, curnode))

    raise nx.NetworkXNoPath(f"Node {target} not reachable from {source}")
+
+
@nx._dispatchable(edge_attrs="weight", preserve_node_attrs="heuristic")
def astar_path_length(
    G, source, target, heuristic=None, weight="weight", *, cutoff=None
):
    """Returns the length of the shortest path between source and target using
    the A* ("A-star") algorithm.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Starting node for path

    target : node
       Ending node for path

    heuristic : function
       A function to evaluate the estimate of the distance
       from a node to the target.  The function takes
       two nodes arguments and must return a number.
       If the heuristic is inadmissible (if it might
       overestimate the cost of reaching the goal from a node),
       the result may not be a shortest path.
       The algorithm does not support updating heuristic
       values for the same node due to caching the first
       heuristic calculation per node.

    weight : string or function
       If this is a string, then edge weights will be accessed via the
       edge attribute with this key (that is, the weight of the edge
       joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
       such edge attribute exists, the weight of the edge is assumed to
       be one.
       If this is a function, the weight of an edge is the value
       returned by the function. The function must accept exactly three
       positional arguments: the two endpoints of an edge and the
       dictionary of edge attributes for that edge. The function must
       return a number or None to indicate a hidden edge.

    cutoff : float, optional
       If this is provided, the search will be bounded to this value. I.e. if
       the evaluation function surpasses this value for a node n, the node will not
       be expanded further and will be ignored. More formally, let h'(n) be the
       heuristic function, and g(n) be the cost of reaching n from the source node. Then,
       if g(n) + h'(n) > cutoff, the node will not be explored further.
       Note that if the heuristic is inadmissible, it is possible that paths
       are ignored even though they satisfy the cutoff.

    Raises
    ------
    NetworkXNoPath
        If no path exists between source and target.

    See Also
    --------
    astar_path

    """
    if source not in G or target not in G:
        raise nx.NodeNotFound(f"Either source {source} or target {target} is not in G")

    # Find the path, then re-walk it summing the edge weights.
    weight_fn = _weight_function(G, weight)
    path = astar_path(G, source, target, heuristic, weight_fn, cutoff=cutoff)
    total = 0
    for u, v in zip(path, path[1:]):
        total += weight_fn(u, v, G[u][v])
    return total
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/dense.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/dense.py
new file mode 100644
index 00000000..107b9208
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/dense.py
@@ -0,0 +1,260 @@
+"""Floyd-Warshall algorithm for shortest paths."""
+
+import networkx as nx
+
+__all__ = [
+    "floyd_warshall",
+    "floyd_warshall_predecessor_and_distance",
+    "reconstruct_path",
+    "floyd_warshall_numpy",
+]
+
+
@nx._dispatchable(edge_attrs="weight")
def floyd_warshall_numpy(G, nodelist=None, weight="weight"):
    """Find all-pairs shortest path lengths using Floyd's algorithm.

    This algorithm for finding shortest paths takes advantage of
    matrix representations of a graph and works well for dense
    graphs where all-pairs shortest path lengths are desired.
    The results are returned as a NumPy array, distance[i, j],
    where i and j are the indexes of two nodes in nodelist.
    The entry distance[i, j] is the distance along a shortest
    path from i to j. If no path exists the distance is Inf.

    Parameters
    ----------
    G : NetworkX graph

    nodelist : list, optional (default=G.nodes)
       The rows and columns are ordered by the nodes in nodelist.
       If nodelist is None then the ordering is produced by G.nodes.
       Nodelist should include all nodes in G.

    weight: string, optional (default='weight')
       Edge data key corresponding to the edge weight.

    Returns
    -------
    distance : 2D numpy.ndarray
        A numpy array of shortest path distances between nodes.
        If there is no path between two nodes the value is Inf.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> G.add_weighted_edges_from(
    ...     [(0, 1, 5), (1, 2, 2), (2, 3, -3), (1, 3, 10), (3, 2, 8)]
    ... )
    >>> nx.floyd_warshall_numpy(G)
    array([[ 0.,  5.,  7.,  4.],
           [inf,  0.,  2., -1.],
           [inf, inf,  0., -3.],
           [inf, inf,  8.,  0.]])

    Notes
    -----
    Floyd's algorithm is appropriate for finding shortest paths in
    dense graphs or graphs with negative weights when Dijkstra's
    algorithm fails. This algorithm can still fail if there are negative
    cycles. It has running time $O(n^3)$ with running space of $O(n^2)$.

    Raises
    ------
    NetworkXError
        If nodelist is not a list of the nodes in G.
    """
    import numpy as np

    if nodelist is not None:
        if not (len(nodelist) == len(G) == len(set(nodelist))):
            raise nx.NetworkXError(
                "nodelist must contain every node in G with no repeats."
                "If you wanted a subgraph of G use G.subgraph(nodelist)"
            )

    # To handle cases when an edge has weight=0, we must make sure that
    # nonedges are not given the value 0 as well.
    A = nx.to_numpy_array(
        G, nodelist, multigraph_weight=min, weight=weight, nonedge=np.inf
    )
    # A is square by construction; the previous `n, m = A.shape` left an
    # unused local `m`.
    n = A.shape[0]
    np.fill_diagonal(A, 0)  # diagonal elements should be zero
    for i in range(n):
        # The second term has the same shape as A due to broadcasting
        A = np.minimum(A, A[i, :][np.newaxis, :] + A[:, i][:, np.newaxis])
    return A
+
+
@nx._dispatchable(edge_attrs="weight")
def floyd_warshall_predecessor_and_distance(G, weight="weight"):
    """Find all-pairs shortest path lengths using Floyd's algorithm.

    Parameters
    ----------
    G : NetworkX graph

    weight: string, optional (default= 'weight')
       Edge data key corresponding to the edge weight.

    Returns
    -------
    predecessor,distance : dictionaries
       Dictionaries, keyed by source and target, of predecessors and distances
       in the shortest path.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> G.add_weighted_edges_from(
    ...     [
    ...         ("s", "u", 10),
    ...         ("s", "x", 5),
    ...         ("u", "v", 1),
    ...         ("u", "x", 2),
    ...         ("v", "y", 1),
    ...         ("x", "u", 3),
    ...         ("x", "v", 5),
    ...         ("x", "y", 2),
    ...         ("y", "s", 7),
    ...         ("y", "v", 6),
    ...     ]
    ... )
    >>> predecessors, _ = nx.floyd_warshall_predecessor_and_distance(G)
    >>> print(nx.reconstruct_path("s", "v", predecessors))
    ['s', 'x', 'u', 'v']

    Notes
    -----
    Floyd's algorithm is appropriate for finding shortest paths
    in dense graphs or graphs with negative weights when Dijkstra's algorithm
    fails.  This algorithm can still fail if there are negative cycles.
    It has running time $O(n^3)$ with running space of $O(n^2)$.

    See Also
    --------
    floyd_warshall
    floyd_warshall_numpy
    all_pairs_shortest_path
    all_pairs_shortest_path_length
    """
    from collections import defaultdict

    # dictionary-of-dictionaries representation for dist and pred
    # use some defaultdict magick here
    # for dist the default is the floating point inf value
    dist = defaultdict(lambda: defaultdict(lambda: float("inf")))
    # Zero diagonal: distance from each node to itself.
    for u in G:
        dist[u][u] = 0
    pred = defaultdict(dict)
    # initialize path distance dictionary to be the adjacency matrix
    # also set the distance to self to 0 (zero diagonal)
    undirected = not G.is_directed()
    for u, v, d in G.edges(data=True):
        # Missing weight attribute defaults to 1.0; `min` keeps the
        # lightest edge when the input has several u->v entries.
        e_weight = d.get(weight, 1.0)
        dist[u][v] = min(e_weight, dist[u][v])
        pred[u][v] = u
        if undirected:
            # Mirror the edge for undirected graphs.
            dist[v][u] = min(e_weight, dist[v][u])
            pred[v][u] = v
    # Standard Floyd-Warshall relaxation: allow each node `w` in turn
    # as an intermediate hop and relax every (u, v) pair through it.
    for w in G:
        dist_w = dist[w]  # save recomputation
        for u in G:
            dist_u = dist[u]  # save recomputation
            for v in G:
                d = dist_u[w] + dist_w[v]
                if dist_u[v] > d:
                    dist_u[v] = d
                    pred[u][v] = pred[w][v]
    return dict(pred), dict(dist)
+
+
@nx._dispatchable(graphs=None)
def reconstruct_path(source, target, predecessors):
    """Reconstruct a path from source to target using the predecessors
    dict as returned by floyd_warshall_predecessor_and_distance

    Parameters
    ----------
    source : node
       Starting node for path

    target : node
       Ending node for path

    predecessors: dictionary
       Dictionary, keyed by source and target, of predecessors in the
       shortest path, as returned by floyd_warshall_predecessor_and_distance

    Returns
    -------
    path : list
       A list of nodes containing the shortest path from source to target

       If source and target are the same, an empty list is returned

    Notes
    -----
    This function is meant to give more applicability to the
    floyd_warshall_predecessor_and_distance function

    See Also
    --------
    floyd_warshall_predecessor_and_distance
    """
    if source == target:
        return []
    prev = predecessors[source]
    # Walk predecessor links backwards from target until source is
    # reached, then reverse the collected nodes.
    path = [target]
    node = prev[target]
    while node != source:
        path.append(node)
        node = prev[node]
    path.append(source)
    path.reverse()
    return path
+
+
@nx._dispatchable(edge_attrs="weight")
def floyd_warshall(G, weight="weight"):
    """Find all-pairs shortest path lengths using Floyd's algorithm.

    Parameters
    ----------
    G : NetworkX graph

    weight: string, optional (default= 'weight')
       Edge data key corresponding to the edge weight.


    Returns
    -------
    distance : dict
       A dictionary,  keyed by source and target, of shortest paths distances
       between nodes.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> G.add_weighted_edges_from(
    ...     [(0, 1, 5), (1, 2, 2), (2, 3, -3), (1, 3, 10), (3, 2, 8)]
    ... )
    >>> fw = nx.floyd_warshall(G, weight="weight")
    >>> results = {a: dict(b) for a, b in fw.items()}
    >>> print(results)
    {0: {0: 0, 1: 5, 2: 7, 3: 4}, 1: {1: 0, 2: 2, 3: -1, 0: inf}, 2: {2: 0, 3: -3, 0: inf, 1: inf}, 3: {3: 0, 2: 8, 0: inf, 1: inf}}

    Notes
    -----
    Floyd's algorithm is appropriate for finding shortest paths
    in dense graphs or graphs with negative weights when Dijkstra's algorithm
    fails.  This algorithm can still fail if there are negative cycles.
    It has running time $O(n^3)$ with running space of $O(n^2)$.

    See Also
    --------
    floyd_warshall_predecessor_and_distance
    floyd_warshall_numpy
    all_pairs_shortest_path
    all_pairs_shortest_path_length
    """
    # All the work happens in floyd_warshall_predecessor_and_distance;
    # discard the predecessor dict and return only the distances.
    _, distance = floyd_warshall_predecessor_and_distance(G, weight=weight)
    return distance
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/generic.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/generic.py
new file mode 100644
index 00000000..9ac48c90
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/generic.py
@@ -0,0 +1,730 @@
+"""
+Compute the shortest paths and path lengths between nodes in the graph.
+
+These algorithms work with undirected and directed graphs.
+
+"""
+
+import warnings
+
+import networkx as nx
+
+__all__ = [
+    "shortest_path",
+    "all_shortest_paths",
+    "single_source_all_shortest_paths",
+    "all_pairs_all_shortest_paths",
+    "shortest_path_length",
+    "average_shortest_path_length",
+    "has_path",
+]
+
+
@nx._dispatchable
def has_path(G, source, target):
    """Returns *True* if *G* has a path from *source* to *target*.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Starting node for path

    target : node
       Ending node for path
    """
    # A path exists exactly when shortest_path succeeds; its absence is
    # signalled by NetworkXNoPath (NodeNotFound still propagates).
    try:
        nx.shortest_path(G, source, target)
    except nx.NetworkXNoPath:
        return False
    else:
        return True
+
+
@nx._dispatchable(edge_attrs="weight")
def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"):
    """Compute shortest paths in the graph.

    Parameters
    ----------
    G : NetworkX graph

    source : node, optional
        Starting node for path. If not specified, compute shortest
        paths for each possible starting node.

    target : node, optional
        Ending node for path. If not specified, compute shortest
        paths to all possible nodes.

    weight : None, string or function, optional (default = None)
        If None, every edge has weight/distance/cost 1.
        If a string, use this edge attribute as the edge weight.
        Any edge attribute not present defaults to 1.
        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly
        three positional arguments: the two endpoints of an edge and
        the dictionary of edge attributes for that edge.
        The function must return a number.

    method : string, optional (default = 'dijkstra')
        The algorithm to use to compute the path.
        Supported options: 'dijkstra', 'bellman-ford'.
        Other inputs produce a ValueError.
        If `weight` is None, unweighted graph methods are used, and this
        suggestion is ignored.

    Returns
    -------
    path : list or dictionary
        All returned paths include both the source and target in the path.

        If the source and target are both specified, return a single list
        of nodes in a shortest path from the source to the target.

        If only the source is specified, return a dictionary keyed by
        targets with a list of nodes in a shortest path from the source
        to one of the targets.

        If only the target is specified, return a dictionary keyed by
        sources with a list of nodes in a shortest path from one of the
        sources to the target.

        If neither the source nor target are specified return a dictionary
        of dictionaries with path[source][target]=[list of nodes in path].

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    ValueError
        If `method` is not among the supported options.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> print(nx.shortest_path(G, source=0, target=4))
    [0, 1, 2, 3, 4]
    >>> p = nx.shortest_path(G, source=0)  # target not specified
    >>> p[3]  # shortest path from source=0 to target=3
    [0, 1, 2, 3]
    >>> p = nx.shortest_path(G, target=4)  # source not specified
    >>> p[1]  # shortest path from source=1 to target=4
    [1, 2, 3, 4]
    >>> p = dict(nx.shortest_path(G))  # source, target not specified
    >>> p[2][4]  # shortest path from source=2 to target=4
    [2, 3, 4]

    Notes
    -----
    There may be more than one shortest path between a source and target.
    This returns only one of them.

    See Also
    --------
    all_pairs_shortest_path
    all_pairs_dijkstra_path
    all_pairs_bellman_ford_path
    single_source_shortest_path
    single_source_dijkstra_path
    single_source_bellman_ford_path
    """
    # Reject unknown methods up front so every later branch can assume a
    # valid value.
    if method not in ("dijkstra", "bellman-ford"):
        raise ValueError(f"method not supported: {method}")
    # With no weight every edge costs 1, so the unweighted routines apply
    # regardless of the requested method.
    algorithm = "unweighted" if weight is None else method

    if source is not None and target is not None:
        # Single source-target pair: return one path as a list.
        if algorithm == "unweighted":
            return nx.bidirectional_shortest_path(G, source, target)
        if algorithm == "dijkstra":
            _, path = nx.bidirectional_dijkstra(G, source, target, weight)
            return path
        return nx.bellman_ford_path(G, source, target, weight)

    if source is not None:
        # Paths from one source to every reachable node.
        if algorithm == "unweighted":
            return nx.single_source_shortest_path(G, source)
        if algorithm == "dijkstra":
            return nx.single_source_dijkstra_path(G, source, weight=weight)
        return nx.single_source_bellman_ford_path(G, source, weight=weight)

    if target is not None:
        # Paths from every co-accessible node to the target: search the
        # reversed graph from the target, then flip each path around.
        H = G.reverse(copy=False) if G.is_directed() else G
        if algorithm == "unweighted":
            reversed_paths = nx.single_source_shortest_path(H, target)
        elif algorithm == "dijkstra":
            reversed_paths = nx.single_source_dijkstra_path(H, target, weight=weight)
        else:  # bellman-ford
            reversed_paths = nx.single_source_bellman_ford_path(
                H, target, weight=weight
            )
        return {src: p[::-1] for src, p in reversed_paths.items()}

    # Neither endpoint given: all-pairs paths (return type changes in 3.5).
    warnings.warn(
        (
            "\n\nshortest_path will return an iterator that yields\n"
            "(node, path) pairs instead of a dictionary when source\n"
            "and target are unspecified beginning in version 3.5\n\n"
            "To keep the current behavior, use:\n\n"
            "\tdict(nx.shortest_path(G))"
        ),
        FutureWarning,
        stacklevel=3,
    )
    if algorithm == "unweighted":
        return dict(nx.all_pairs_shortest_path(G))
    if algorithm == "dijkstra":
        return dict(nx.all_pairs_dijkstra_path(G, weight=weight))
    return dict(nx.all_pairs_bellman_ford_path(G, weight=weight))
+
+
@nx._dispatchable(edge_attrs="weight")
def shortest_path_length(G, source=None, target=None, weight=None, method="dijkstra"):
    """Compute shortest path lengths in the graph.

    Parameters
    ----------
    G : NetworkX graph

    source : node, optional
        Starting node for path.
        If not specified, compute shortest path lengths using all nodes as
        source nodes.

    target : node, optional
        Ending node for path.
        If not specified, compute shortest path lengths using all nodes as
        target nodes.

    weight : None, string or function, optional (default = None)
        If None, every edge has weight/distance/cost 1.
        If a string, use this edge attribute as the edge weight.
        Any edge attribute not present defaults to 1.
        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly
        three positional arguments: the two endpoints of an edge and
        the dictionary of edge attributes for that edge.
        The function must return a number.

    method : string, optional (default = 'dijkstra')
        The algorithm to use to compute the path length.
        Supported options: 'dijkstra', 'bellman-ford'.
        Other inputs produce a ValueError.
        If `weight` is None, unweighted graph methods are used, and this
        suggestion is ignored.

    Returns
    -------
    length : number or iterator
        If the source and target are both specified, return the length of
        the shortest path from the source to the target.

        If only the source is specified, return a dict keyed by target
        to the shortest path length from the source to that target.

        If only the target is specified, return a dict keyed by source
        to the shortest path length from that source to the target.

        If neither the source nor target are specified, return an iterator
        over (source, dictionary) where dictionary is keyed by target to
        shortest path length from source to that target.

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    NetworkXNoPath
        If no path exists between source and target.

    ValueError
        If `method` is not among the supported options.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> nx.shortest_path_length(G, source=0, target=4)
    4
    >>> p = nx.shortest_path_length(G, source=0)  # target not specified
    >>> p[4]
    4
    >>> p = nx.shortest_path_length(G, target=4)  # source not specified
    >>> p[0]
    4
    >>> p = dict(nx.shortest_path_length(G))  # source,target not specified
    >>> p[0][4]
    4

    Notes
    -----
    The length of the path is always 1 less than the number of nodes involved
    in the path since the length measures the number of edges followed.

    For digraphs this returns the shortest directed path length. To find path
    lengths in the reverse direction use G.reverse(copy=False) first to flip
    the edge orientation.

    See Also
    --------
    all_pairs_shortest_path_length
    all_pairs_dijkstra_path_length
    all_pairs_bellman_ford_path_length
    single_source_shortest_path_length
    single_source_dijkstra_path_length
    single_source_bellman_ford_path_length
    """
    # Validate once so the branches below can assume a supported method.
    if method not in ("dijkstra", "bellman-ford"):
        raise ValueError(f"method not supported: {method}")
    algorithm = "unweighted" if weight is None else method

    if source is not None and target is not None:
        # Single pair: return a plain number.
        if algorithm == "unweighted":
            # Edge count is one less than the node count of the path.
            return len(nx.bidirectional_shortest_path(G, source, target)) - 1
        if algorithm == "dijkstra":
            return nx.dijkstra_path_length(G, source, target, weight)
        return nx.bellman_ford_path_length(G, source, target, weight)

    if source is not None:
        # Lengths from one source to every reachable node.
        if algorithm == "unweighted":
            return nx.single_source_shortest_path_length(G, source)
        if algorithm == "dijkstra":
            return nx.single_source_dijkstra_path_length(G, source, weight=weight)
        return nx.single_source_bellman_ford_path_length(G, source, weight=weight)

    if target is not None:
        # Lengths from all co-accessible nodes: search the reversed graph.
        # Lengths are symmetric under reversal, so no post-processing needed.
        H = G.reverse(copy=False) if G.is_directed() else G
        if algorithm == "unweighted":
            return nx.single_source_shortest_path_length(H, target)
        if algorithm == "dijkstra":
            return nx.single_source_dijkstra_path_length(H, target, weight=weight)
        return nx.single_source_bellman_ford_path_length(H, target, weight=weight)

    # Neither endpoint given: return the lazy (source, dict) iterator as-is.
    if algorithm == "unweighted":
        return nx.all_pairs_shortest_path_length(G)
    if algorithm == "dijkstra":
        return nx.all_pairs_dijkstra_path_length(G, weight=weight)
    return nx.all_pairs_bellman_ford_path_length(G, weight=weight)
+
+
@nx._dispatchable(edge_attrs="weight")
def average_shortest_path_length(G, weight=None, method=None):
    r"""Returns the average shortest path length.

    The average shortest path length is

    .. math::

       a =\sum_{\substack{s,t \in V \\ s\neq t}} \frac{d(s, t)}{n(n-1)}

    where `V` is the set of nodes in `G`,
    `d(s, t)` is the shortest path from `s` to `t`,
    and `n` is the number of nodes in `G`.

    .. versionchanged:: 3.0
       An exception is raised for directed graphs that are not strongly
       connected.

    Parameters
    ----------
    G : NetworkX graph

    weight : None, string or function, optional (default = None)
        If None, every edge has weight/distance/cost 1.
        If a string, use this edge attribute as the edge weight.
        Any edge attribute not present defaults to 1.
        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly
        three positional arguments: the two endpoints of an edge and
        the dictionary of edge attributes for that edge.
        The function must return a number.

    method : string, optional (default = 'unweighted' or 'dijkstra')
        The algorithm to use to compute the path lengths.
        Supported options are 'unweighted', 'dijkstra', 'bellman-ford',
        'floyd-warshall' and 'floyd-warshall-numpy'.
        Other method values produce a ValueError.
        The default method is 'unweighted' if `weight` is None,
        otherwise the default method is 'dijkstra'.

    Raises
    ------
    NetworkXPointlessConcept
        If `G` is the null graph (that is, the graph on zero nodes).

    NetworkXError
        If `G` is not connected (or not strongly connected, in the case
        of a directed graph).

    ValueError
        If `method` is not among the supported options.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> nx.average_shortest_path_length(G)
    2.0

    For disconnected graphs, you can compute the average shortest path
    length for each component

    >>> G = nx.Graph([(1, 2), (3, 4)])
    >>> for C in (G.subgraph(c).copy() for c in nx.connected_components(G)):
    ...     print(nx.average_shortest_path_length(C))
    1.0
    1.0

    """
    # Pick the default method based on whether edges are weighted.
    if method is None:
        method = "dijkstra" if weight is not None else "unweighted"
    single_source_methods = ("unweighted", "dijkstra", "bellman-ford")
    all_pairs_methods = ("floyd-warshall", "floyd-warshall-numpy")
    if method not in single_source_methods + all_pairs_methods:
        raise ValueError(f"method not supported: {method}")

    n = len(G)
    # The null graph has no paths at all, so the average is undefined.
    if n == 0:
        raise nx.NetworkXPointlessConcept(
            "the null graph has no paths, thus there is no average "
            "shortest path length"
        )
    # A single node trivially averages to zero.
    if n == 1:
        return 0
    # The average is undefined unless every ordered pair is connected.
    if G.is_directed():
        if not nx.is_strongly_connected(G):
            raise nx.NetworkXError("Graph is not strongly connected.")
    elif not nx.is_connected(G):
        raise nx.NetworkXError("Graph is not connected.")

    if method == "floyd-warshall":
        distances = nx.floyd_warshall(G, weight=weight)
        total = sum(sum(row.values()) for row in distances.values())
    elif method == "floyd-warshall-numpy":
        total = float(nx.floyd_warshall_numpy(G, weight=weight).sum())
    else:

        def lengths_from(v):
            # Single-source shortest path lengths from ``v`` using the
            # selected single-source method.
            if method == "unweighted":
                return nx.single_source_shortest_path_length(G, v)
            if method == "dijkstra":
                return nx.single_source_dijkstra_path_length(G, v, weight=weight)
            return nx.single_source_bellman_ford_path_length(G, v, weight=weight)

        # Sum distances over every ordered (source, target) pair.
        total = sum(d for u in G for d in lengths_from(u).values())
    return total / (n * (n - 1))
+
+
@nx._dispatchable(edge_attrs="weight")
def all_shortest_paths(G, source, target, weight=None, method="dijkstra"):
    """Compute all shortest simple paths in the graph.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Starting node for path.

    target : node
       Ending node for path.

    weight : None, string or function, optional (default = None)
        If None, every edge has weight/distance/cost 1.
        If a string, use this edge attribute as the edge weight.
        Any edge attribute not present defaults to 1.
        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly
        three positional arguments: the two endpoints of an edge and
        the dictionary of edge attributes for that edge.
        The function must return a number.

    method : string, optional (default = 'dijkstra')
       The algorithm to use to compute the path lengths.
       Supported options: 'dijkstra', 'bellman-ford'.
       Other inputs produce a ValueError.
       If `weight` is None, unweighted graph methods are used, and this
       suggestion is ignored.

    Returns
    -------
    paths : generator of lists
        A generator of all paths between source and target.

    Raises
    ------
    ValueError
        If `method` is not among the supported options.

    NetworkXNoPath
        If `target` cannot be reached from `source`.

    Examples
    --------
    >>> G = nx.Graph()
    >>> nx.add_path(G, [0, 1, 2])
    >>> nx.add_path(G, [0, 10, 2])
    >>> print([p for p in nx.all_shortest_paths(G, source=0, target=2)])
    [[0, 1, 2], [0, 10, 2]]

    Notes
    -----
    There may be many shortest paths between the source and target.  If G
    contains zero-weight cycles, this function will not produce all shortest
    paths because doing so would produce infinitely many paths of unbounded
    length -- instead, we only produce the shortest simple paths.

    See Also
    --------
    shortest_path
    single_source_shortest_path
    all_pairs_shortest_path
    """
    method = "unweighted" if weight is None else method
    if method == "unweighted":
        pred = nx.predecessor(G, source)
    elif method == "dijkstra":
        # Only the predecessor mapping is needed here; the distances
        # returned alongside it are deliberately discarded.
        pred, _ = nx.dijkstra_predecessor_and_distance(G, source, weight=weight)
    elif method == "bellman-ford":
        pred, _ = nx.bellman_ford_predecessor_and_distance(G, source, weight=weight)
    else:
        raise ValueError(f"method not supported: {method}")

    return _build_paths_from_predecessors({source}, target, pred)
+
+
@nx._dispatchable(edge_attrs="weight")
def single_source_all_shortest_paths(G, source, weight=None, method="dijkstra"):
    """Compute all shortest simple paths from the given source in the graph.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Starting node for path.

    weight : None, string or function, optional (default = None)
        If None, every edge has weight/distance/cost 1.
        If a string, use this edge attribute as the edge weight.
        Any edge attribute not present defaults to 1.
        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly
        three positional arguments: the two endpoints of an edge and
        the dictionary of edge attributes for that edge.
        The function must return a number.

    method : string, optional (default = 'dijkstra')
       The algorithm to use to compute the path lengths.
       Supported options: 'dijkstra', 'bellman-ford'.
       Other inputs produce a ValueError.
       If `weight` is None, unweighted graph methods are used, and this
       suggestion is ignored.

    Returns
    -------
    paths : generator of dictionary
        A generator of all paths between source and all nodes in the graph.

    Raises
    ------
    ValueError
        If `method` is not among the supported options.

    Examples
    --------
    >>> G = nx.Graph()
    >>> nx.add_path(G, [0, 1, 2, 3, 0])
    >>> dict(nx.single_source_all_shortest_paths(G, source=0))
    {0: [[0]], 1: [[0, 1]], 2: [[0, 1, 2], [0, 3, 2]], 3: [[0, 3]]}

    Notes
    -----
    There may be many shortest paths between the source and target.  If G
    contains zero-weight cycles, this function will not produce all shortest
    paths because doing so would produce infinitely many paths of unbounded
    length -- instead, we only produce the shortest simple paths.

    See Also
    --------
    shortest_path
    all_shortest_paths
    single_source_shortest_path
    all_pairs_shortest_path
    all_pairs_all_shortest_paths
    """
    method = "unweighted" if weight is None else method
    if method == "unweighted":
        pred = nx.predecessor(G, source)
    elif method == "dijkstra":
        # Only the predecessor mapping is needed; the distances returned
        # alongside it are deliberately discarded.
        pred, _ = nx.dijkstra_predecessor_and_distance(G, source, weight=weight)
    elif method == "bellman-ford":
        pred, _ = nx.bellman_ford_predecessor_and_distance(G, source, weight=weight)
    else:
        raise ValueError(f"method not supported: {method}")
    for n in G:
        try:
            yield n, list(_build_paths_from_predecessors({source}, n, pred))
        except nx.NetworkXNoPath:
            # Unreachable nodes are simply omitted from the output.
            pass
+
+
@nx._dispatchable(edge_attrs="weight")
def all_pairs_all_shortest_paths(G, weight=None, method="dijkstra"):
    """Compute all shortest paths between all nodes.

    Parameters
    ----------
    G : NetworkX graph

    weight : None, string or function, optional (default = None)
        If None, every edge has weight/distance/cost 1.
        If a string, use this edge attribute as the edge weight.
        Any edge attribute not present defaults to 1.
        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly
        three positional arguments: the two endpoints of an edge and
        the dictionary of edge attributes for that edge.
        The function must return a number.

    method : string, optional (default = 'dijkstra')
       The algorithm to use to compute the path lengths.
       Supported options: 'dijkstra', 'bellman-ford'.
       Other inputs produce a ValueError.
       If `weight` is None, unweighted graph methods are used, and this
       suggestion is ignored.

    Returns
    -------
    paths : generator of dictionary
        Dictionary of arrays, keyed by source and target, of all shortest paths.

    Raises
    ------
    ValueError
        If `method` is not among the supported options.

    Examples
    --------
    >>> G = nx.cycle_graph(4)
    >>> dict(nx.all_pairs_all_shortest_paths(G))[0][2]
    [[0, 1, 2], [0, 3, 2]]
    >>> dict(nx.all_pairs_all_shortest_paths(G))[0][3]
    [[0, 3]]

    Notes
    -----
    There may be multiple shortest paths with equal lengths. Unlike
    all_pairs_shortest_path, this method returns all shortest paths.

    See Also
    --------
    all_pairs_shortest_path
    single_source_all_shortest_paths
    """
    # Run the single-source variant from every node in turn.
    for src in G:
        paths_from_src = dict(
            single_source_all_shortest_paths(G, src, weight=weight, method=method)
        )
        yield src, paths_from_src
+
+
+def _build_paths_from_predecessors(sources, target, pred):
+    """Compute all simple paths to target, given the predecessors found in
+    pred, terminating when any source in sources is found.
+
+    Parameters
+    ----------
+    sources : set
+       Starting nodes for path.
+
+    target : node
+       Ending node for path.
+
+    pred : dict
+       A dictionary of predecessor lists, keyed by node
+
+    Returns
+    -------
+    paths : generator of lists
+        A generator of all paths between source and target.
+
+    Raises
+    ------
+    NetworkXNoPath
+        If `target` cannot be reached from `source`.
+
+    Notes
+    -----
+    There may be many paths between the sources and target.  If there are
+    cycles among the predecessors, this function will not produce all
+    possible paths because doing so would produce infinitely many paths
+    of unbounded length -- instead, we only produce simple paths.
+
+    See Also
+    --------
+    shortest_path
+    single_source_shortest_path
+    all_pairs_shortest_path
+    all_shortest_paths
+    bellman_ford_path
+    """
+    if target not in pred:
+        raise nx.NetworkXNoPath(f"Target {target} cannot be reached from given sources")
+
+    seen = {target}
+    stack = [[target, 0]]
+    top = 0
+    while top >= 0:
+        node, i = stack[top]
+        if node in sources:
+            yield [p for p, n in reversed(stack[: top + 1])]
+        if len(pred[node]) > i:
+            stack[top][1] = i + 1
+            next = pred[node][i]
+            if next in seen:
+                continue
+            else:
+                seen.add(next)
+            top += 1
+            if top == len(stack):
+                stack.append([next, 0])
+            else:
+                stack[top][:] = [next, 0]
+        else:
+            seen.discard(node)
+            top -= 1
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_astar.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_astar.py
new file mode 100644
index 00000000..40a7d4e8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_astar.py
@@ -0,0 +1,248 @@
+import pytest
+
+import networkx as nx
+from networkx.utils import pairwise
+
+
+class TestAStar:
+    @classmethod
+    def setup_class(cls):
+        edges = [
+            ("s", "u", 10),
+            ("s", "x", 5),
+            ("u", "v", 1),
+            ("u", "x", 2),
+            ("v", "y", 1),
+            ("x", "u", 3),
+            ("x", "v", 5),
+            ("x", "y", 2),
+            ("y", "s", 7),
+            ("y", "v", 6),
+        ]
+        cls.XG = nx.DiGraph()
+        cls.XG.add_weighted_edges_from(edges)
+
+    def test_multiple_optimal_paths(self):
+        """Tests that A* algorithm finds any of multiple optimal paths"""
+        heuristic_values = {"a": 1.35, "b": 1.18, "c": 0.67, "d": 0}
+
+        def h(u, v):
+            return heuristic_values[u]
+
+        graph = nx.Graph()
+        points = ["a", "b", "c", "d"]
+        edges = [("a", "b", 0.18), ("a", "c", 0.68), ("b", "c", 0.50), ("c", "d", 0.67)]
+
+        graph.add_nodes_from(points)
+        graph.add_weighted_edges_from(edges)
+
+        path1 = ["a", "c", "d"]
+        path2 = ["a", "b", "c", "d"]
+        assert nx.astar_path(graph, "a", "d", h) in (path1, path2)
+
+    def test_astar_directed(self):
+        assert nx.astar_path(self.XG, "s", "v") == ["s", "x", "u", "v"]
+        assert nx.astar_path_length(self.XG, "s", "v") == 9
+
+    def test_astar_directed_weight_function(self):
+        w1 = lambda u, v, d: d["weight"]
+        assert nx.astar_path(self.XG, "x", "u", weight=w1) == ["x", "u"]
+        assert nx.astar_path_length(self.XG, "x", "u", weight=w1) == 3
+        assert nx.astar_path(self.XG, "s", "v", weight=w1) == ["s", "x", "u", "v"]
+        assert nx.astar_path_length(self.XG, "s", "v", weight=w1) == 9
+
+        w2 = lambda u, v, d: None if (u, v) == ("x", "u") else d["weight"]
+        assert nx.astar_path(self.XG, "x", "u", weight=w2) == ["x", "y", "s", "u"]
+        assert nx.astar_path_length(self.XG, "x", "u", weight=w2) == 19
+        assert nx.astar_path(self.XG, "s", "v", weight=w2) == ["s", "x", "v"]
+        assert nx.astar_path_length(self.XG, "s", "v", weight=w2) == 10
+
+        w3 = lambda u, v, d: d["weight"] + 10
+        assert nx.astar_path(self.XG, "x", "u", weight=w3) == ["x", "u"]
+        assert nx.astar_path_length(self.XG, "x", "u", weight=w3) == 13
+        assert nx.astar_path(self.XG, "s", "v", weight=w3) == ["s", "x", "v"]
+        assert nx.astar_path_length(self.XG, "s", "v", weight=w3) == 30
+
+    def test_astar_multigraph(self):
+        G = nx.MultiDiGraph(self.XG)
+        G.add_weighted_edges_from((u, v, 1000) for (u, v) in list(G.edges()))
+        assert nx.astar_path(G, "s", "v") == ["s", "x", "u", "v"]
+        assert nx.astar_path_length(G, "s", "v") == 9
+
+    def test_astar_undirected(self):
+        GG = self.XG.to_undirected()
+        # make sure we get lower weight
+        # to_undirected might choose either edge with weight 2 or weight 3
+        GG["u"]["x"]["weight"] = 2
+        GG["y"]["v"]["weight"] = 2
+        assert nx.astar_path(GG, "s", "v") == ["s", "x", "u", "v"]
+        assert nx.astar_path_length(GG, "s", "v") == 8
+
+    def test_astar_directed2(self):
+        XG2 = nx.DiGraph()
+        edges = [
+            (1, 4, 1),
+            (4, 5, 1),
+            (5, 6, 1),
+            (6, 3, 1),
+            (1, 3, 50),
+            (1, 2, 100),
+            (2, 3, 100),
+        ]
+        XG2.add_weighted_edges_from(edges)
+        assert nx.astar_path(XG2, 1, 3) == [1, 4, 5, 6, 3]
+
+    def test_astar_undirected2(self):
+        XG3 = nx.Graph()
+        edges = [(0, 1, 2), (1, 2, 12), (2, 3, 1), (3, 4, 5), (4, 5, 1), (5, 0, 10)]
+        XG3.add_weighted_edges_from(edges)
+        assert nx.astar_path(XG3, 0, 3) == [0, 1, 2, 3]
+        assert nx.astar_path_length(XG3, 0, 3) == 15
+
+    def test_astar_undirected3(self):
+        XG4 = nx.Graph()
+        edges = [
+            (0, 1, 2),
+            (1, 2, 2),
+            (2, 3, 1),
+            (3, 4, 1),
+            (4, 5, 1),
+            (5, 6, 1),
+            (6, 7, 1),
+            (7, 0, 1),
+        ]
+        XG4.add_weighted_edges_from(edges)
+        assert nx.astar_path(XG4, 0, 2) == [0, 1, 2]
+        assert nx.astar_path_length(XG4, 0, 2) == 4
+
+    """ Tests that A* finds correct path when multiple paths exist
+        and the best one is not expanded first (GH issue #3464)
+    """
+
+    def test_astar_directed3(self):
+        heuristic_values = {"n5": 36, "n2": 4, "n1": 0, "n0": 0}
+
+        def h(u, v):
+            return heuristic_values[u]
+
+        edges = [("n5", "n1", 11), ("n5", "n2", 9), ("n2", "n1", 1), ("n1", "n0", 32)]
+        graph = nx.DiGraph()
+        graph.add_weighted_edges_from(edges)
+        answer = ["n5", "n2", "n1", "n0"]
+        assert nx.astar_path(graph, "n5", "n0", h) == answer
+
+    """ Tests that parent is not wrongly overridden when a node
+        is re-explored multiple times.
+    """
+
+    def test_astar_directed4(self):
+        edges = [
+            ("a", "b", 1),
+            ("a", "c", 1),
+            ("b", "d", 2),
+            ("c", "d", 1),
+            ("d", "e", 1),
+        ]
+        graph = nx.DiGraph()
+        graph.add_weighted_edges_from(edges)
+        assert nx.astar_path(graph, "a", "e") == ["a", "c", "d", "e"]
+
+    # >>> MXG4=NX.MultiGraph(XG4)
+    # >>> MXG4.add_edge(0,1,3)
+    # >>> NX.dijkstra_path(MXG4,0,2)
+    # [0, 1, 2]
+
+    def test_astar_w1(self):
+        G = nx.DiGraph()
+        G.add_edges_from(
+            [
+                ("s", "u"),
+                ("s", "x"),
+                ("u", "v"),
+                ("u", "x"),
+                ("v", "y"),
+                ("x", "u"),
+                ("x", "w"),
+                ("w", "v"),
+                ("x", "y"),
+                ("y", "s"),
+                ("y", "v"),
+            ]
+        )
+        assert nx.astar_path(G, "s", "v") == ["s", "u", "v"]
+        assert nx.astar_path_length(G, "s", "v") == 2
+
+    def test_astar_nopath(self):
+        with pytest.raises(nx.NodeNotFound):
+            nx.astar_path(self.XG, "s", "moon")
+
+    def test_astar_cutoff(self):
+        with pytest.raises(nx.NetworkXNoPath):
+            # optimal path_length in XG is 9
+            nx.astar_path(self.XG, "s", "v", cutoff=8.0)
+        with pytest.raises(nx.NetworkXNoPath):
+            nx.astar_path_length(self.XG, "s", "v", cutoff=8.0)
+
+    def test_astar_admissible_heuristic_with_cutoff(self):
+        heuristic_values = {"s": 36, "y": 4, "x": 0, "u": 0, "v": 0}
+
+        def h(u, v):
+            return heuristic_values[u]
+
+        assert nx.astar_path_length(self.XG, "s", "v") == 9
+        assert nx.astar_path_length(self.XG, "s", "v", heuristic=h) == 9
+        assert nx.astar_path_length(self.XG, "s", "v", heuristic=h, cutoff=12) == 9
+        assert nx.astar_path_length(self.XG, "s", "v", heuristic=h, cutoff=9) == 9
+        with pytest.raises(nx.NetworkXNoPath):
+            nx.astar_path_length(self.XG, "s", "v", heuristic=h, cutoff=8)
+
+    def test_astar_inadmissible_heuristic_with_cutoff(self):
+        heuristic_values = {"s": 36, "y": 14, "x": 10, "u": 10, "v": 0}
+
+        def h(u, v):
+            return heuristic_values[u]
+
+        # optimal path_length in XG is 9. This heuristic gives over-estimate.
+        assert nx.astar_path_length(self.XG, "s", "v", heuristic=h) == 10
+        assert nx.astar_path_length(self.XG, "s", "v", heuristic=h, cutoff=15) == 10
+        with pytest.raises(nx.NetworkXNoPath):
+            nx.astar_path_length(self.XG, "s", "v", heuristic=h, cutoff=9)
+        with pytest.raises(nx.NetworkXNoPath):
+            nx.astar_path_length(self.XG, "s", "v", heuristic=h, cutoff=12)
+
+    def test_astar_cutoff2(self):
+        assert nx.astar_path(self.XG, "s", "v", cutoff=10.0) == ["s", "x", "u", "v"]
+        assert nx.astar_path_length(self.XG, "s", "v") == 9
+
+    def test_cycle(self):
+        C = nx.cycle_graph(7)
+        assert nx.astar_path(C, 0, 3) == [0, 1, 2, 3]
+        assert nx.dijkstra_path(C, 0, 4) == [0, 6, 5, 4]
+
+    def test_unorderable_nodes(self):
+        """Tests that A* accommodates nodes that are not orderable.
+
+        For more information, see issue #554.
+
+        """
+        # Create the cycle graph on four nodes, with nodes represented
+        # as (unorderable) Python objects.
+        nodes = [object() for n in range(4)]
+        G = nx.Graph()
+        G.add_edges_from(pairwise(nodes, cyclic=True))
+        path = nx.astar_path(G, nodes[0], nodes[2])
+        assert len(path) == 3
+
+    def test_astar_NetworkXNoPath(self):
+        """Tests that exception is raised when there exists no
+        path between source and target"""
+        G = nx.gnp_random_graph(10, 0.2, seed=10)
+        with pytest.raises(nx.NetworkXNoPath):
+            nx.astar_path(G, 4, 9)
+
+    def test_astar_NodeNotFound(self):
+        """Tests that exception is raised when either
+        source or target is not in graph"""
+        G = nx.gnp_random_graph(10, 0.2, seed=10)
+        with pytest.raises(nx.NodeNotFound):
+            nx.astar_path_length(G, 11, 9)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense.py
new file mode 100644
index 00000000..6923bfef
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense.py
@@ -0,0 +1,212 @@
+import pytest
+
+import networkx as nx
+
+
+class TestFloyd:
+    @classmethod
+    def setup_class(cls):
+        pass
+
+    def test_floyd_warshall_predecessor_and_distance(self):
+        XG = nx.DiGraph()
+        XG.add_weighted_edges_from(
+            [
+                ("s", "u", 10),
+                ("s", "x", 5),
+                ("u", "v", 1),
+                ("u", "x", 2),
+                ("v", "y", 1),
+                ("x", "u", 3),
+                ("x", "v", 5),
+                ("x", "y", 2),
+                ("y", "s", 7),
+                ("y", "v", 6),
+            ]
+        )
+        path, dist = nx.floyd_warshall_predecessor_and_distance(XG)
+        assert dist["s"]["v"] == 9
+        assert path["s"]["v"] == "u"
+        assert dist == {
+            "y": {"y": 0, "x": 12, "s": 7, "u": 15, "v": 6},
+            "x": {"y": 2, "x": 0, "s": 9, "u": 3, "v": 4},
+            "s": {"y": 7, "x": 5, "s": 0, "u": 8, "v": 9},
+            "u": {"y": 2, "x": 2, "s": 9, "u": 0, "v": 1},
+            "v": {"y": 1, "x": 13, "s": 8, "u": 16, "v": 0},
+        }
+
+        GG = XG.to_undirected()
+        # make sure we get lower weight
+        # to_undirected might choose either edge with weight 2 or weight 3
+        GG["u"]["x"]["weight"] = 2
+        path, dist = nx.floyd_warshall_predecessor_and_distance(GG)
+        assert dist["s"]["v"] == 8
+        # skip this test, could be alternate path s-u-v
+        #        assert_equal(path['s']['v'],'y')
+
+        G = nx.DiGraph()  # no weights
+        G.add_edges_from(
+            [
+                ("s", "u"),
+                ("s", "x"),
+                ("u", "v"),
+                ("u", "x"),
+                ("v", "y"),
+                ("x", "u"),
+                ("x", "v"),
+                ("x", "y"),
+                ("y", "s"),
+                ("y", "v"),
+            ]
+        )
+        path, dist = nx.floyd_warshall_predecessor_and_distance(G)
+        assert dist["s"]["v"] == 2
+        # skip this test, could be alternate path s-u-v
+        # assert_equal(path['s']['v'],'x')
+
+        # alternate interface
+        dist = nx.floyd_warshall(G)
+        assert dist["s"]["v"] == 2
+
+        # floyd_warshall_predecessor_and_distance returns
+        # dicts-of-defaultdicts
+        # make sure we don't get empty dictionary
+        XG = nx.DiGraph()
+        XG.add_weighted_edges_from(
+            [("v", "x", 5.0), ("y", "x", 5.0), ("v", "y", 6.0), ("x", "u", 2.0)]
+        )
+        path, dist = nx.floyd_warshall_predecessor_and_distance(XG)
+        inf = float("inf")
+        assert dist == {
+            "v": {"v": 0, "x": 5.0, "y": 6.0, "u": 7.0},
+            "x": {"x": 0, "u": 2.0, "v": inf, "y": inf},
+            "y": {"y": 0, "x": 5.0, "v": inf, "u": 7.0},
+            "u": {"u": 0, "v": inf, "x": inf, "y": inf},
+        }
+        assert path == {
+            "v": {"x": "v", "y": "v", "u": "x"},
+            "x": {"u": "x"},
+            "y": {"x": "y", "u": "x"},
+        }
+
+    def test_reconstruct_path(self):
+        with pytest.raises(KeyError):
+            XG = nx.DiGraph()
+            XG.add_weighted_edges_from(
+                [
+                    ("s", "u", 10),
+                    ("s", "x", 5),
+                    ("u", "v", 1),
+                    ("u", "x", 2),
+                    ("v", "y", 1),
+                    ("x", "u", 3),
+                    ("x", "v", 5),
+                    ("x", "y", 2),
+                    ("y", "s", 7),
+                    ("y", "v", 6),
+                ]
+            )
+            predecessors, _ = nx.floyd_warshall_predecessor_and_distance(XG)
+
+            path = nx.reconstruct_path("s", "v", predecessors)
+            assert path == ["s", "x", "u", "v"]
+
+            path = nx.reconstruct_path("s", "s", predecessors)
+            assert path == []
+
+            # this part raises the keyError
+            nx.reconstruct_path("1", "2", predecessors)
+
+    def test_cycle(self):
+        path, dist = nx.floyd_warshall_predecessor_and_distance(nx.cycle_graph(7))
+        assert dist[0][3] == 3
+        assert path[0][3] == 2
+        assert dist[0][4] == 3
+
+    def test_weighted(self):
+        XG3 = nx.Graph()
+        XG3.add_weighted_edges_from(
+            [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]]
+        )
+        path, dist = nx.floyd_warshall_predecessor_and_distance(XG3)
+        assert dist[0][3] == 15
+        assert path[0][3] == 2
+
+    def test_weighted2(self):
+        XG4 = nx.Graph()
+        XG4.add_weighted_edges_from(
+            [
+                [0, 1, 2],
+                [1, 2, 2],
+                [2, 3, 1],
+                [3, 4, 1],
+                [4, 5, 1],
+                [5, 6, 1],
+                [6, 7, 1],
+                [7, 0, 1],
+            ]
+        )
+        path, dist = nx.floyd_warshall_predecessor_and_distance(XG4)
+        assert dist[0][2] == 4
+        assert path[0][2] == 1
+
+    def test_weight_parameter(self):
+        XG4 = nx.Graph()
+        XG4.add_edges_from(
+            [
+                (0, 1, {"heavy": 2}),
+                (1, 2, {"heavy": 2}),
+                (2, 3, {"heavy": 1}),
+                (3, 4, {"heavy": 1}),
+                (4, 5, {"heavy": 1}),
+                (5, 6, {"heavy": 1}),
+                (6, 7, {"heavy": 1}),
+                (7, 0, {"heavy": 1}),
+            ]
+        )
+        path, dist = nx.floyd_warshall_predecessor_and_distance(XG4, weight="heavy")
+        assert dist[0][2] == 4
+        assert path[0][2] == 1
+
+    def test_zero_distance(self):
+        XG = nx.DiGraph()
+        XG.add_weighted_edges_from(
+            [
+                ("s", "u", 10),
+                ("s", "x", 5),
+                ("u", "v", 1),
+                ("u", "x", 2),
+                ("v", "y", 1),
+                ("x", "u", 3),
+                ("x", "v", 5),
+                ("x", "y", 2),
+                ("y", "s", 7),
+                ("y", "v", 6),
+            ]
+        )
+        path, dist = nx.floyd_warshall_predecessor_and_distance(XG)
+
+        for u in XG:
+            assert dist[u][u] == 0
+
+        GG = XG.to_undirected()
+        # make sure we get lower weight
+        # to_undirected might choose either edge with weight 2 or weight 3
+        GG["u"]["x"]["weight"] = 2
+        path, dist = nx.floyd_warshall_predecessor_and_distance(GG)
+
+        for u in GG:
+            dist[u][u] = 0
+
+    def test_zero_weight(self):
+        G = nx.DiGraph()
+        edges = [(1, 2, -2), (2, 3, -4), (1, 5, 1), (5, 4, 0), (4, 3, -5), (2, 5, -7)]
+        G.add_weighted_edges_from(edges)
+        dist = nx.floyd_warshall(G)
+        assert dist[1][3] == -14
+
+        G = nx.MultiDiGraph()
+        edges.append((2, 5, -7))
+        G.add_weighted_edges_from(edges)
+        dist = nx.floyd_warshall(G)
+        assert dist[1][3] == -14
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py
new file mode 100644
index 00000000..1316e23e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py
@@ -0,0 +1,89 @@
+import pytest
+
+np = pytest.importorskip("numpy")
+
+
+import networkx as nx
+
+
+def test_cycle_numpy():
+    dist = nx.floyd_warshall_numpy(nx.cycle_graph(7))
+    assert dist[0, 3] == 3
+    assert dist[0, 4] == 3
+
+
+def test_weighted_numpy_three_edges():
+    XG3 = nx.Graph()
+    XG3.add_weighted_edges_from(
+        [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]]
+    )
+    dist = nx.floyd_warshall_numpy(XG3)
+    assert dist[0, 3] == 15
+
+
+def test_weighted_numpy_two_edges():
+    XG4 = nx.Graph()
+    XG4.add_weighted_edges_from(
+        [
+            [0, 1, 2],
+            [1, 2, 2],
+            [2, 3, 1],
+            [3, 4, 1],
+            [4, 5, 1],
+            [5, 6, 1],
+            [6, 7, 1],
+            [7, 0, 1],
+        ]
+    )
+    dist = nx.floyd_warshall_numpy(XG4)
+    assert dist[0, 2] == 4
+
+
+def test_weight_parameter_numpy():
+    XG4 = nx.Graph()
+    XG4.add_edges_from(
+        [
+            (0, 1, {"heavy": 2}),
+            (1, 2, {"heavy": 2}),
+            (2, 3, {"heavy": 1}),
+            (3, 4, {"heavy": 1}),
+            (4, 5, {"heavy": 1}),
+            (5, 6, {"heavy": 1}),
+            (6, 7, {"heavy": 1}),
+            (7, 0, {"heavy": 1}),
+        ]
+    )
+    dist = nx.floyd_warshall_numpy(XG4, weight="heavy")
+    assert dist[0, 2] == 4
+
+
+def test_directed_cycle_numpy():
+    G = nx.DiGraph()
+    nx.add_cycle(G, [0, 1, 2, 3])
+    pred, dist = nx.floyd_warshall_predecessor_and_distance(G)
+    D = nx.utils.dict_to_numpy_array(dist)
+    np.testing.assert_equal(nx.floyd_warshall_numpy(G), D)
+
+
+def test_zero_weight():
+    G = nx.DiGraph()
+    edges = [(1, 2, -2), (2, 3, -4), (1, 5, 1), (5, 4, 0), (4, 3, -5), (2, 5, -7)]
+    G.add_weighted_edges_from(edges)
+    dist = nx.floyd_warshall_numpy(G)
+    assert int(np.min(dist)) == -14
+
+    G = nx.MultiDiGraph()
+    edges.append((2, 5, -7))
+    G.add_weighted_edges_from(edges)
+    dist = nx.floyd_warshall_numpy(G)
+    assert int(np.min(dist)) == -14
+
+
+def test_nodelist():
+    G = nx.path_graph(7)
+    dist = nx.floyd_warshall_numpy(G, nodelist=[3, 5, 4, 6, 2, 1, 0])
+    assert dist[0, 3] == 3
+    assert dist[0, 1] == 2
+    assert dist[6, 2] == 4
+    pytest.raises(nx.NetworkXError, nx.floyd_warshall_numpy, G, [1, 3])
+    pytest.raises(nx.NetworkXError, nx.floyd_warshall_numpy, G, list(range(9)))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_generic.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_generic.py
new file mode 100644
index 00000000..e30de517
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_generic.py
@@ -0,0 +1,450 @@
+import pytest
+
+import networkx as nx
+
+
+def validate_grid_path(r, c, s, t, p):
+    assert isinstance(p, list)
+    assert p[0] == s
+    assert p[-1] == t
+    s = ((s - 1) // c, (s - 1) % c)
+    t = ((t - 1) // c, (t - 1) % c)
+    assert len(p) == abs(t[0] - s[0]) + abs(t[1] - s[1]) + 1
+    p = [((u - 1) // c, (u - 1) % c) for u in p]
+    for u in p:
+        assert 0 <= u[0] < r
+        assert 0 <= u[1] < c
+    for u, v in zip(p[:-1], p[1:]):
+        assert (abs(v[0] - u[0]), abs(v[1] - u[1])) in [(0, 1), (1, 0)]
+
+
+class TestGenericPath:
+    @classmethod
+    def setup_class(cls):
+        from networkx import convert_node_labels_to_integers as cnlti
+
+        cls.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted")
+        cls.cycle = nx.cycle_graph(7)
+        cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph())
+        cls.neg_weights = nx.DiGraph()
+        cls.neg_weights.add_edge(0, 1, weight=1)
+        cls.neg_weights.add_edge(0, 2, weight=3)
+        cls.neg_weights.add_edge(1, 3, weight=1)
+        cls.neg_weights.add_edge(2, 3, weight=-2)
+
+    def test_shortest_path(self):
+        assert nx.shortest_path(self.cycle, 0, 3) == [0, 1, 2, 3]
+        assert nx.shortest_path(self.cycle, 0, 4) == [0, 6, 5, 4]
+        validate_grid_path(4, 4, 1, 12, nx.shortest_path(self.grid, 1, 12))
+        assert nx.shortest_path(self.directed_cycle, 0, 3) == [0, 1, 2, 3]
+        # now with weights
+        assert nx.shortest_path(self.cycle, 0, 3, weight="weight") == [0, 1, 2, 3]
+        assert nx.shortest_path(self.cycle, 0, 4, weight="weight") == [0, 6, 5, 4]
+        validate_grid_path(
+            4, 4, 1, 12, nx.shortest_path(self.grid, 1, 12, weight="weight")
+        )
+        assert nx.shortest_path(self.directed_cycle, 0, 3, weight="weight") == [
+            0,
+            1,
+            2,
+            3,
+        ]
+        # weights and method specified
+        assert nx.shortest_path(
+            self.directed_cycle, 0, 3, weight="weight", method="dijkstra"
+        ) == [0, 1, 2, 3]
+        assert nx.shortest_path(
+            self.directed_cycle, 0, 3, weight="weight", method="bellman-ford"
+        ) == [0, 1, 2, 3]
+        # when Dijkstra's will probably (depending on precise implementation)
+        # incorrectly return [0, 1, 3] instead
+        assert nx.shortest_path(
+            self.neg_weights, 0, 3, weight="weight", method="bellman-ford"
+        ) == [0, 2, 3]
+        # confirm bad method rejection
+        pytest.raises(ValueError, nx.shortest_path, self.cycle, method="SPAM")
+        # confirm absent source rejection
+        pytest.raises(nx.NodeNotFound, nx.shortest_path, self.cycle, 8)
+
+    def test_shortest_path_target(self):
+        answer = {0: [0, 1], 1: [1], 2: [2, 1]}
+        sp = nx.shortest_path(nx.path_graph(3), target=1)
+        assert sp == answer
+        # with weights
+        sp = nx.shortest_path(nx.path_graph(3), target=1, weight="weight")
+        assert sp == answer
+        # weights and method specified
+        sp = nx.shortest_path(
+            nx.path_graph(3), target=1, weight="weight", method="dijkstra"
+        )
+        assert sp == answer
+        sp = nx.shortest_path(
+            nx.path_graph(3), target=1, weight="weight", method="bellman-ford"
+        )
+        assert sp == answer
+
+    def test_shortest_path_length(self):
+        assert nx.shortest_path_length(self.cycle, 0, 3) == 3
+        assert nx.shortest_path_length(self.grid, 1, 12) == 5
+        assert nx.shortest_path_length(self.directed_cycle, 0, 4) == 4
+        # now with weights
+        assert nx.shortest_path_length(self.cycle, 0, 3, weight="weight") == 3
+        assert nx.shortest_path_length(self.grid, 1, 12, weight="weight") == 5
+        assert nx.shortest_path_length(self.directed_cycle, 0, 4, weight="weight") == 4
+        # weights and method specified
+        assert (
+            nx.shortest_path_length(
+                self.cycle, 0, 3, weight="weight", method="dijkstra"
+            )
+            == 3
+        )
+        assert (
+            nx.shortest_path_length(
+                self.cycle, 0, 3, weight="weight", method="bellman-ford"
+            )
+            == 3
+        )
+        # confirm bad method rejection
+        pytest.raises(ValueError, nx.shortest_path_length, self.cycle, method="SPAM")
+        # confirm absent source rejection
+        pytest.raises(nx.NodeNotFound, nx.shortest_path_length, self.cycle, 8)
+
+    def test_shortest_path_length_target(self):
+        answer = {0: 1, 1: 0, 2: 1}
+        sp = dict(nx.shortest_path_length(nx.path_graph(3), target=1))
+        assert sp == answer
+        # with weights
+        sp = nx.shortest_path_length(nx.path_graph(3), target=1, weight="weight")
+        assert sp == answer
+        # weights and method specified
+        sp = nx.shortest_path_length(
+            nx.path_graph(3), target=1, weight="weight", method="dijkstra"
+        )
+        assert sp == answer
+        sp = nx.shortest_path_length(
+            nx.path_graph(3), target=1, weight="weight", method="bellman-ford"
+        )
+        assert sp == answer
+
+    def test_single_source_shortest_path(self):
+        p = nx.shortest_path(self.cycle, 0)
+        assert p[3] == [0, 1, 2, 3]
+        assert p == nx.single_source_shortest_path(self.cycle, 0)
+        p = nx.shortest_path(self.grid, 1)
+        validate_grid_path(4, 4, 1, 12, p[12])
+        # now with weights
+        p = nx.shortest_path(self.cycle, 0, weight="weight")
+        assert p[3] == [0, 1, 2, 3]
+        assert p == nx.single_source_dijkstra_path(self.cycle, 0)
+        p = nx.shortest_path(self.grid, 1, weight="weight")
+        validate_grid_path(4, 4, 1, 12, p[12])
+        # weights and method specified
+        p = nx.shortest_path(self.cycle, 0, method="dijkstra", weight="weight")
+        assert p[3] == [0, 1, 2, 3]
+        assert p == nx.single_source_shortest_path(self.cycle, 0)
+        p = nx.shortest_path(self.cycle, 0, method="bellman-ford", weight="weight")
+        assert p[3] == [0, 1, 2, 3]
+        assert p == nx.single_source_shortest_path(self.cycle, 0)
+
+    def test_single_source_shortest_path_length(self):
+        ans = dict(nx.shortest_path_length(self.cycle, 0))
+        assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert ans == dict(nx.single_source_shortest_path_length(self.cycle, 0))
+        ans = dict(nx.shortest_path_length(self.grid, 1))
+        assert ans[16] == 6
+        # now with weights
+        ans = dict(nx.shortest_path_length(self.cycle, 0, weight="weight"))
+        assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert ans == dict(nx.single_source_dijkstra_path_length(self.cycle, 0))
+        ans = dict(nx.shortest_path_length(self.grid, 1, weight="weight"))
+        assert ans[16] == 6
+        # weights and method specified
+        ans = dict(
+            nx.shortest_path_length(self.cycle, 0, weight="weight", method="dijkstra")
+        )
+        assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert ans == dict(nx.single_source_dijkstra_path_length(self.cycle, 0))
+        ans = dict(
+            nx.shortest_path_length(
+                self.cycle, 0, weight="weight", method="bellman-ford"
+            )
+        )
+        assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert ans == dict(nx.single_source_bellman_ford_path_length(self.cycle, 0))
+
+    def test_single_source_all_shortest_paths(self):
+        cycle_ans = {0: [[0]], 1: [[0, 1]], 2: [[0, 1, 2], [0, 3, 2]], 3: [[0, 3]]}
+        ans = dict(nx.single_source_all_shortest_paths(nx.cycle_graph(4), 0))
+        assert sorted(ans[2]) == cycle_ans[2]
+        ans = dict(nx.single_source_all_shortest_paths(self.grid, 1))
+        grid_ans = [
+            [1, 2, 3, 7, 11],
+            [1, 2, 6, 7, 11],
+            [1, 2, 6, 10, 11],
+            [1, 5, 6, 7, 11],
+            [1, 5, 6, 10, 11],
+            [1, 5, 9, 10, 11],
+        ]
+        assert sorted(ans[11]) == grid_ans
+        ans = dict(
+            nx.single_source_all_shortest_paths(nx.cycle_graph(4), 0, weight="weight")
+        )
+        assert sorted(ans[2]) == cycle_ans[2]
+        ans = dict(
+            nx.single_source_all_shortest_paths(
+                nx.cycle_graph(4), 0, method="bellman-ford", weight="weight"
+            )
+        )
+        assert sorted(ans[2]) == cycle_ans[2]
+        ans = dict(nx.single_source_all_shortest_paths(self.grid, 1, weight="weight"))
+        assert sorted(ans[11]) == grid_ans
+        ans = dict(
+            nx.single_source_all_shortest_paths(
+                self.grid, 1, method="bellman-ford", weight="weight"
+            )
+        )
+        assert sorted(ans[11]) == grid_ans
+        G = nx.cycle_graph(4)
+        G.add_node(4)
+        ans = dict(nx.single_source_all_shortest_paths(G, 0))
+        assert sorted(ans[2]) == [[0, 1, 2], [0, 3, 2]]
+        ans = dict(nx.single_source_all_shortest_paths(G, 4))
+        assert sorted(ans[4]) == [[4]]
+
+    def test_all_pairs_shortest_path(self):
+        # shortest_path w/o source and target will return a generator instead of
+        # a dict beginning in version 3.5. Only the first call needs to be changed here.
+        p = nx.shortest_path(self.cycle)
+        assert p[0][3] == [0, 1, 2, 3]
+        assert p == dict(nx.all_pairs_shortest_path(self.cycle))
+        p = dict(nx.shortest_path(self.grid))
+        validate_grid_path(4, 4, 1, 12, p[1][12])
+        # now with weights
+        p = dict(nx.shortest_path(self.cycle, weight="weight"))
+        assert p[0][3] == [0, 1, 2, 3]
+        assert p == dict(nx.all_pairs_dijkstra_path(self.cycle))
+        p = dict(nx.shortest_path(self.grid, weight="weight"))
+        validate_grid_path(4, 4, 1, 12, p[1][12])
+        # weights and method specified
+        p = dict(nx.shortest_path(self.cycle, weight="weight", method="dijkstra"))
+        assert p[0][3] == [0, 1, 2, 3]
+        assert p == dict(nx.all_pairs_dijkstra_path(self.cycle))
+        p = dict(nx.shortest_path(self.cycle, weight="weight", method="bellman-ford"))
+        assert p[0][3] == [0, 1, 2, 3]
+        assert p == dict(nx.all_pairs_bellman_ford_path(self.cycle))
+
+    def test_all_pairs_shortest_path_length(self):
+        ans = dict(nx.shortest_path_length(self.cycle))
+        assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert ans == dict(nx.all_pairs_shortest_path_length(self.cycle))
+        ans = dict(nx.shortest_path_length(self.grid))
+        assert ans[1][16] == 6
+        # now with weights
+        ans = dict(nx.shortest_path_length(self.cycle, weight="weight"))
+        assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert ans == dict(nx.all_pairs_dijkstra_path_length(self.cycle))
+        ans = dict(nx.shortest_path_length(self.grid, weight="weight"))
+        assert ans[1][16] == 6
+        # weights and method specified
+        ans = dict(
+            nx.shortest_path_length(self.cycle, weight="weight", method="dijkstra")
+        )
+        assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert ans == dict(nx.all_pairs_dijkstra_path_length(self.cycle))
+        ans = dict(
+            nx.shortest_path_length(self.cycle, weight="weight", method="bellman-ford")
+        )
+        assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert ans == dict(nx.all_pairs_bellman_ford_path_length(self.cycle))
+
+    def test_all_pairs_all_shortest_paths(self):
+        ans = dict(nx.all_pairs_all_shortest_paths(nx.cycle_graph(4)))
+        assert sorted(ans[1][3]) == [[1, 0, 3], [1, 2, 3]]
+        ans = dict(nx.all_pairs_all_shortest_paths(nx.cycle_graph(4)), weight="weight")
+        assert sorted(ans[1][3]) == [[1, 0, 3], [1, 2, 3]]
+        ans = dict(
+            nx.all_pairs_all_shortest_paths(nx.cycle_graph(4)),
+            method="bellman-ford",
+            weight="weight",
+        )
+        assert sorted(ans[1][3]) == [[1, 0, 3], [1, 2, 3]]
+        G = nx.cycle_graph(4)
+        G.add_node(4)
+        ans = dict(nx.all_pairs_all_shortest_paths(G))
+        assert sorted(ans[4][4]) == [[4]]
+
+    def test_has_path(self):
+        G = nx.Graph()
+        nx.add_path(G, range(3))
+        nx.add_path(G, range(3, 5))
+        assert nx.has_path(G, 0, 2)
+        assert not nx.has_path(G, 0, 4)
+
+    def test_has_path_singleton(self):
+        G = nx.empty_graph(1)
+        assert nx.has_path(G, 0, 0)
+
+    def test_all_shortest_paths(self):
+        G = nx.Graph()
+        nx.add_path(G, [0, 1, 2, 3])
+        nx.add_path(G, [0, 10, 20, 3])
+        assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted(nx.all_shortest_paths(G, 0, 3))
+        # with weights
+        G = nx.Graph()
+        nx.add_path(G, [0, 1, 2, 3])
+        nx.add_path(G, [0, 10, 20, 3])
+        assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted(
+            nx.all_shortest_paths(G, 0, 3, weight="weight")
+        )
+        # weights and method specified
+        G = nx.Graph()
+        nx.add_path(G, [0, 1, 2, 3])
+        nx.add_path(G, [0, 10, 20, 3])
+        assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted(
+            nx.all_shortest_paths(G, 0, 3, weight="weight", method="dijkstra")
+        )
+        G = nx.Graph()
+        nx.add_path(G, [0, 1, 2, 3])
+        nx.add_path(G, [0, 10, 20, 3])
+        assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted(
+            nx.all_shortest_paths(G, 0, 3, weight="weight", method="bellman-ford")
+        )
+
+    def test_all_shortest_paths_raise(self):
+        with pytest.raises(nx.NetworkXNoPath):
+            G = nx.path_graph(4)
+            G.add_node(4)
+            list(nx.all_shortest_paths(G, 0, 4))
+
+    def test_bad_method(self):
+        with pytest.raises(ValueError):
+            G = nx.path_graph(2)
+            list(nx.all_shortest_paths(G, 0, 1, weight="weight", method="SPAM"))
+
+    def test_single_source_all_shortest_paths_bad_method(self):
+        with pytest.raises(ValueError):
+            G = nx.path_graph(2)
+            dict(
+                nx.single_source_all_shortest_paths(
+                    G, 0, weight="weight", method="SPAM"
+                )
+            )
+
+    def test_all_shortest_paths_zero_weight_edge(self):
+        g = nx.Graph()
+        nx.add_path(g, [0, 1, 3])
+        nx.add_path(g, [0, 1, 2, 3])
+        g.edges[1, 2]["weight"] = 0
+        paths30d = list(
+            nx.all_shortest_paths(g, 3, 0, weight="weight", method="dijkstra")
+        )
+        paths03d = list(
+            nx.all_shortest_paths(g, 0, 3, weight="weight", method="dijkstra")
+        )
+        paths30b = list(
+            nx.all_shortest_paths(g, 3, 0, weight="weight", method="bellman-ford")
+        )
+        paths03b = list(
+            nx.all_shortest_paths(g, 0, 3, weight="weight", method="bellman-ford")
+        )
+        assert sorted(paths03d) == sorted(p[::-1] for p in paths30d)
+        assert sorted(paths03d) == sorted(p[::-1] for p in paths30b)
+        assert sorted(paths03b) == sorted(p[::-1] for p in paths30b)
+
+
+class TestAverageShortestPathLength:
+    def test_cycle_graph(self):
+        ans = nx.average_shortest_path_length(nx.cycle_graph(7))
+        assert ans == pytest.approx(2, abs=1e-7)
+
+    def test_path_graph(self):
+        ans = nx.average_shortest_path_length(nx.path_graph(5))
+        assert ans == pytest.approx(2, abs=1e-7)
+
+    def test_weighted(self):
+        G = nx.Graph()
+        nx.add_cycle(G, range(7), weight=2)
+        ans = nx.average_shortest_path_length(G, weight="weight")
+        assert ans == pytest.approx(4, abs=1e-7)
+        G = nx.Graph()
+        nx.add_path(G, range(5), weight=2)
+        ans = nx.average_shortest_path_length(G, weight="weight")
+        assert ans == pytest.approx(4, abs=1e-7)
+
+    def test_specified_methods(self):
+        G = nx.Graph()
+        nx.add_cycle(G, range(7), weight=2)
+        ans = nx.average_shortest_path_length(G, weight="weight", method="dijkstra")
+        assert ans == pytest.approx(4, abs=1e-7)
+        ans = nx.average_shortest_path_length(G, weight="weight", method="bellman-ford")
+        assert ans == pytest.approx(4, abs=1e-7)
+        ans = nx.average_shortest_path_length(
+            G, weight="weight", method="floyd-warshall"
+        )
+        assert ans == pytest.approx(4, abs=1e-7)
+
+        G = nx.Graph()
+        nx.add_path(G, range(5), weight=2)
+        ans = nx.average_shortest_path_length(G, weight="weight", method="dijkstra")
+        assert ans == pytest.approx(4, abs=1e-7)
+        ans = nx.average_shortest_path_length(G, weight="weight", method="bellman-ford")
+        assert ans == pytest.approx(4, abs=1e-7)
+        ans = nx.average_shortest_path_length(
+            G, weight="weight", method="floyd-warshall"
+        )
+        assert ans == pytest.approx(4, abs=1e-7)
+
+    def test_directed_not_strongly_connected(self):
+        G = nx.DiGraph([(0, 1)])
+        with pytest.raises(nx.NetworkXError, match="Graph is not strongly connected"):
+            nx.average_shortest_path_length(G)
+
+    def test_undirected_not_connected(self):
+        g = nx.Graph()
+        g.add_nodes_from(range(3))
+        g.add_edge(0, 1)
+        pytest.raises(nx.NetworkXError, nx.average_shortest_path_length, g)
+
+    def test_trivial_graph(self):
+        """Tests that the trivial graph has average path length zero,
+        since there is exactly one path of length zero in the trivial
+        graph.
+
+        For more information, see issue #1960.
+
+        """
+        G = nx.trivial_graph()
+        assert nx.average_shortest_path_length(G) == 0
+
+    def test_null_graph(self):
+        with pytest.raises(nx.NetworkXPointlessConcept):
+            nx.average_shortest_path_length(nx.null_graph())
+
+    def test_bad_method(self):
+        with pytest.raises(ValueError):
+            G = nx.path_graph(2)
+            nx.average_shortest_path_length(G, weight="weight", method="SPAM")
+
+
+class TestAverageShortestPathLengthNumpy:
+    @classmethod
+    def setup_class(cls):
+        global np
+        import pytest
+
+        np = pytest.importorskip("numpy")
+
+    def test_specified_methods_numpy(self):
+        G = nx.Graph()
+        nx.add_cycle(G, range(7), weight=2)
+        ans = nx.average_shortest_path_length(
+            G, weight="weight", method="floyd-warshall-numpy"
+        )
+        np.testing.assert_almost_equal(ans, 4)
+
+        G = nx.Graph()
+        nx.add_path(G, range(5), weight=2)
+        ans = nx.average_shortest_path_length(
+            G, weight="weight", method="floyd-warshall-numpy"
+        )
+        np.testing.assert_almost_equal(ans, 4)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_unweighted.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_unweighted.py
new file mode 100644
index 00000000..f09c8b10
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_unweighted.py
@@ -0,0 +1,149 @@
+import pytest
+
+import networkx as nx
+
+
+def validate_grid_path(r, c, s, t, p):
+    assert isinstance(p, list)
+    assert p[0] == s
+    assert p[-1] == t
+    s = ((s - 1) // c, (s - 1) % c)
+    t = ((t - 1) // c, (t - 1) % c)
+    assert len(p) == abs(t[0] - s[0]) + abs(t[1] - s[1]) + 1
+    p = [((u - 1) // c, (u - 1) % c) for u in p]
+    for u in p:
+        assert 0 <= u[0] < r
+        assert 0 <= u[1] < c
+    for u, v in zip(p[:-1], p[1:]):
+        assert (abs(v[0] - u[0]), abs(v[1] - u[1])) in [(0, 1), (1, 0)]
+
+
+class TestUnweightedPath:
+    @classmethod
+    def setup_class(cls):
+        from networkx import convert_node_labels_to_integers as cnlti
+
+        cls.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted")
+        cls.cycle = nx.cycle_graph(7)
+        cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph())
+
+    def test_bidirectional_shortest_path(self):
+        assert nx.bidirectional_shortest_path(self.cycle, 0, 3) == [0, 1, 2, 3]
+        assert nx.bidirectional_shortest_path(self.cycle, 0, 4) == [0, 6, 5, 4]
+        validate_grid_path(
+            4, 4, 1, 12, nx.bidirectional_shortest_path(self.grid, 1, 12)
+        )
+        assert nx.bidirectional_shortest_path(self.directed_cycle, 0, 3) == [0, 1, 2, 3]
+        # test source = target
+        assert nx.bidirectional_shortest_path(self.cycle, 3, 3) == [3]
+
+    @pytest.mark.parametrize(
+        ("src", "tgt"),
+        (
+            (8, 3),  # source not in graph
+            (3, 8),  # target not in graph
+            (8, 10),  # neither source nor target in graph
+            (8, 8),  # src == tgt, neither in graph - tests order of input checks
+        ),
+    )
+    def test_bidirectional_shortest_path_src_tgt_not_in_graph(self, src, tgt):
+        with pytest.raises(
+            nx.NodeNotFound,
+            match=f"(Source {src}|Target {tgt}) is not in G",
+        ):
+            nx.bidirectional_shortest_path(self.cycle, src, tgt)
+
+    def test_shortest_path_length(self):
+        assert nx.shortest_path_length(self.cycle, 0, 3) == 3
+        assert nx.shortest_path_length(self.grid, 1, 12) == 5
+        assert nx.shortest_path_length(self.directed_cycle, 0, 4) == 4
+        # now with weights
+        assert nx.shortest_path_length(self.cycle, 0, 3, weight=True) == 3
+        assert nx.shortest_path_length(self.grid, 1, 12, weight=True) == 5
+        assert nx.shortest_path_length(self.directed_cycle, 0, 4, weight=True) == 4
+
+    def test_single_source_shortest_path(self):
+        p = nx.single_source_shortest_path(self.directed_cycle, 3)
+        assert p[0] == [3, 4, 5, 6, 0]
+        p = nx.single_source_shortest_path(self.cycle, 0)
+        assert p[3] == [0, 1, 2, 3]
+        p = nx.single_source_shortest_path(self.cycle, 0, cutoff=0)
+        assert p == {0: [0]}
+
+    def test_single_source_shortest_path_length(self):
+        pl = nx.single_source_shortest_path_length
+        lengths = {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert dict(pl(self.cycle, 0)) == lengths
+        lengths = {0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6}
+        assert dict(pl(self.directed_cycle, 0)) == lengths
+
+    def test_single_target_shortest_path(self):
+        p = nx.single_target_shortest_path(self.directed_cycle, 0)
+        assert p[3] == [3, 4, 5, 6, 0]
+        p = nx.single_target_shortest_path(self.cycle, 0)
+        assert p[3] == [3, 2, 1, 0]
+        p = nx.single_target_shortest_path(self.cycle, 0, cutoff=0)
+        assert p == {0: [0]}
+        # test missing targets
+        target = 8
+        with pytest.raises(nx.NodeNotFound, match=f"Target {target} not in G"):
+            nx.single_target_shortest_path(self.cycle, target)
+
+    def test_single_target_shortest_path_length(self):
+        pl = nx.single_target_shortest_path_length
+        lengths = {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert dict(pl(self.cycle, 0)) == lengths
+        lengths = {0: 0, 1: 6, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1}
+        assert dict(pl(self.directed_cycle, 0)) == lengths
+        # test missing targets
+        target = 8
+        with pytest.raises(nx.NodeNotFound, match=f"Target {target} is not in G"):
+            nx.single_target_shortest_path_length(self.cycle, target)
+
+    def test_all_pairs_shortest_path(self):
+        p = dict(nx.all_pairs_shortest_path(self.cycle))
+        assert p[0][3] == [0, 1, 2, 3]
+        p = dict(nx.all_pairs_shortest_path(self.grid))
+        validate_grid_path(4, 4, 1, 12, p[1][12])
+
+    def test_all_pairs_shortest_path_length(self):
+        l = dict(nx.all_pairs_shortest_path_length(self.cycle))
+        assert l[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        l = dict(nx.all_pairs_shortest_path_length(self.grid))
+        assert l[1][16] == 6
+
+    def test_predecessor_path(self):
+        G = nx.path_graph(4)
+        assert nx.predecessor(G, 0) == {0: [], 1: [0], 2: [1], 3: [2]}
+        assert nx.predecessor(G, 0, 3) == [2]
+
+    def test_predecessor_cycle(self):
+        G = nx.cycle_graph(4)
+        pred = nx.predecessor(G, 0)
+        assert pred[0] == []
+        assert pred[1] == [0]
+        assert pred[2] in [[1, 3], [3, 1]]
+        assert pred[3] == [0]
+
+    def test_predecessor_cutoff(self):
+        G = nx.path_graph(4)
+        p = nx.predecessor(G, 0, 3)
+        assert 4 not in p
+
+    def test_predecessor_target(self):
+        G = nx.path_graph(4)
+        p = nx.predecessor(G, 0, 3)
+        assert p == [2]
+        p = nx.predecessor(G, 0, 3, cutoff=2)
+        assert p == []
+        p, s = nx.predecessor(G, 0, 3, return_seen=True)
+        assert p == [2]
+        assert s == 3
+        p, s = nx.predecessor(G, 0, 3, cutoff=2, return_seen=True)
+        assert p == []
+        assert s == -1
+
+    def test_predecessor_missing_source(self):
+        source = 8
+        with pytest.raises(nx.NodeNotFound, match=f"Source {source} not in G"):
+            nx.predecessor(self.cycle, source)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_weighted.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_weighted.py
new file mode 100644
index 00000000..dc88572d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_weighted.py
@@ -0,0 +1,972 @@
+import pytest
+
+import networkx as nx
+from networkx.utils import pairwise
+
+
+def validate_path(G, s, t, soln_len, path, weight="weight"):
+    assert path[0] == s
+    assert path[-1] == t
+
+    if callable(weight):
+        weight_f = weight
+    else:
+        if G.is_multigraph():
+
+            def weight_f(u, v, d):
+                return min(e.get(weight, 1) for e in d.values())
+
+        else:
+
+            def weight_f(u, v, d):
+                return d.get(weight, 1)
+
+    computed = sum(weight_f(u, v, G[u][v]) for u, v in pairwise(path))
+    assert soln_len == computed
+
+
+def validate_length_path(G, s, t, soln_len, length, path, weight="weight"):
+    assert soln_len == length
+    validate_path(G, s, t, length, path, weight=weight)
+
+
+class WeightedTestBase:
+    """Base class for test classes that test functions for computing
+    shortest paths in weighted graphs.
+
+    """
+
+    def setup_method(self):
+        """Creates some graphs for use in the unit tests."""
+        cnlti = nx.convert_node_labels_to_integers
+        self.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted")
+        self.cycle = nx.cycle_graph(7)
+        self.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph())
+        self.XG = nx.DiGraph()
+        self.XG.add_weighted_edges_from(
+            [
+                ("s", "u", 10),
+                ("s", "x", 5),
+                ("u", "v", 1),
+                ("u", "x", 2),
+                ("v", "y", 1),
+                ("x", "u", 3),
+                ("x", "v", 5),
+                ("x", "y", 2),
+                ("y", "s", 7),
+                ("y", "v", 6),
+            ]
+        )
+        self.MXG = nx.MultiDiGraph(self.XG)
+        self.MXG.add_edge("s", "u", weight=15)
+        self.XG2 = nx.DiGraph()
+        self.XG2.add_weighted_edges_from(
+            [
+                [1, 4, 1],
+                [4, 5, 1],
+                [5, 6, 1],
+                [6, 3, 1],
+                [1, 3, 50],
+                [1, 2, 100],
+                [2, 3, 100],
+            ]
+        )
+
+        self.XG3 = nx.Graph()
+        self.XG3.add_weighted_edges_from(
+            [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]]
+        )
+
+        self.XG4 = nx.Graph()
+        self.XG4.add_weighted_edges_from(
+            [
+                [0, 1, 2],
+                [1, 2, 2],
+                [2, 3, 1],
+                [3, 4, 1],
+                [4, 5, 1],
+                [5, 6, 1],
+                [6, 7, 1],
+                [7, 0, 1],
+            ]
+        )
+        self.MXG4 = nx.MultiGraph(self.XG4)
+        self.MXG4.add_edge(0, 1, weight=3)
+        self.G = nx.DiGraph()  # no weights
+        self.G.add_edges_from(
+            [
+                ("s", "u"),
+                ("s", "x"),
+                ("u", "v"),
+                ("u", "x"),
+                ("v", "y"),
+                ("x", "u"),
+                ("x", "v"),
+                ("x", "y"),
+                ("y", "s"),
+                ("y", "v"),
+            ]
+        )
+
+
+class TestWeightedPath(WeightedTestBase):
+    def test_dijkstra(self):
+        (D, P) = nx.single_source_dijkstra(self.XG, "s")
+        validate_path(self.XG, "s", "v", 9, P["v"])
+        assert D["v"] == 9
+
+        validate_path(
+            self.XG, "s", "v", 9, nx.single_source_dijkstra_path(self.XG, "s")["v"]
+        )
+        assert dict(nx.single_source_dijkstra_path_length(self.XG, "s"))["v"] == 9
+
+        validate_path(
+            self.XG, "s", "v", 9, nx.single_source_dijkstra(self.XG, "s")[1]["v"]
+        )
+        validate_path(
+            self.MXG, "s", "v", 9, nx.single_source_dijkstra_path(self.MXG, "s")["v"]
+        )
+
+        GG = self.XG.to_undirected()
+        # make sure we get lower weight
+        # to_undirected might choose either edge with weight 2 or weight 3
+        GG["u"]["x"]["weight"] = 2
+        (D, P) = nx.single_source_dijkstra(GG, "s")
+        validate_path(GG, "s", "v", 8, P["v"])
+        assert D["v"] == 8  # uses lower weight of 2 on u<->x edge
+        validate_path(GG, "s", "v", 8, nx.dijkstra_path(GG, "s", "v"))
+        assert nx.dijkstra_path_length(GG, "s", "v") == 8
+
+        validate_path(self.XG2, 1, 3, 4, nx.dijkstra_path(self.XG2, 1, 3))
+        validate_path(self.XG3, 0, 3, 15, nx.dijkstra_path(self.XG3, 0, 3))
+        assert nx.dijkstra_path_length(self.XG3, 0, 3) == 15
+        validate_path(self.XG4, 0, 2, 4, nx.dijkstra_path(self.XG4, 0, 2))
+        assert nx.dijkstra_path_length(self.XG4, 0, 2) == 4
+        validate_path(self.MXG4, 0, 2, 4, nx.dijkstra_path(self.MXG4, 0, 2))
+        validate_path(
+            self.G, "s", "v", 2, nx.single_source_dijkstra(self.G, "s", "v")[1]
+        )
+        validate_path(
+            self.G, "s", "v", 2, nx.single_source_dijkstra(self.G, "s")[1]["v"]
+        )
+
+        validate_path(self.G, "s", "v", 2, nx.dijkstra_path(self.G, "s", "v"))
+        assert nx.dijkstra_path_length(self.G, "s", "v") == 2
+
+        # NetworkXError: node s not reachable from moon
+        pytest.raises(nx.NetworkXNoPath, nx.dijkstra_path, self.G, "s", "moon")
+        pytest.raises(nx.NetworkXNoPath, nx.dijkstra_path_length, self.G, "s", "moon")
+
+        validate_path(self.cycle, 0, 3, 3, nx.dijkstra_path(self.cycle, 0, 3))
+        validate_path(self.cycle, 0, 4, 3, nx.dijkstra_path(self.cycle, 0, 4))
+
+        assert nx.single_source_dijkstra(self.cycle, 0, 0) == (0, [0])
+
+    def test_bidirectional_dijkstra(self):
+        validate_length_path(
+            self.XG, "s", "v", 9, *nx.bidirectional_dijkstra(self.XG, "s", "v")
+        )
+        validate_length_path(
+            self.G, "s", "v", 2, *nx.bidirectional_dijkstra(self.G, "s", "v")
+        )
+        validate_length_path(
+            self.cycle, 0, 3, 3, *nx.bidirectional_dijkstra(self.cycle, 0, 3)
+        )
+        validate_length_path(
+            self.cycle, 0, 4, 3, *nx.bidirectional_dijkstra(self.cycle, 0, 4)
+        )
+        validate_length_path(
+            self.XG3, 0, 3, 15, *nx.bidirectional_dijkstra(self.XG3, 0, 3)
+        )
+        validate_length_path(
+            self.XG4, 0, 2, 4, *nx.bidirectional_dijkstra(self.XG4, 0, 2)
+        )
+
+        # need more tests here
+        P = nx.single_source_dijkstra_path(self.XG, "s")["v"]
+        validate_path(
+            self.XG,
+            "s",
+            "v",
+            sum(self.XG[u][v]["weight"] for u, v in zip(P[:-1], P[1:])),
+            nx.dijkstra_path(self.XG, "s", "v"),
+        )
+
+        # check absent source
+        G = nx.path_graph(2)
+        pytest.raises(nx.NodeNotFound, nx.bidirectional_dijkstra, G, 3, 0)
+
+    def test_weight_functions(self):
+        def heuristic(*z):
+            return sum(val**2 for val in z)
+
+        def getpath(pred, v, s):
+            return [v] if v == s else getpath(pred, pred[v], s) + [v]
+
+        def goldberg_radzik(g, s, t, weight="weight"):
+            pred, dist = nx.goldberg_radzik(g, s, weight=weight)
+            dist = dist[t]
+            return dist, getpath(pred, t, s)
+
+        def astar(g, s, t, weight="weight"):
+            path = nx.astar_path(g, s, t, heuristic, weight=weight)
+            dist = nx.astar_path_length(g, s, t, heuristic, weight=weight)
+            return dist, path
+
+        def vlp(G, s, t, l, F, w):
+            res = F(G, s, t, weight=w)
+            validate_length_path(G, s, t, l, *res, weight=w)
+
+        G = self.cycle
+        s = 6
+        t = 4
+        path = [6] + list(range(t + 1))
+
+        def weight(u, v, _):
+            return 1 + v**2
+
+        length = sum(weight(u, v, None) for u, v in pairwise(path))
+        vlp(G, s, t, length, nx.bidirectional_dijkstra, weight)
+        vlp(G, s, t, length, nx.single_source_dijkstra, weight)
+        vlp(G, s, t, length, nx.single_source_bellman_ford, weight)
+        vlp(G, s, t, length, goldberg_radzik, weight)
+        vlp(G, s, t, length, astar, weight)
+
+        def weight(u, v, _):
+            return 2 ** (u * v)
+
+        length = sum(weight(u, v, None) for u, v in pairwise(path))
+        vlp(G, s, t, length, nx.bidirectional_dijkstra, weight)
+        vlp(G, s, t, length, nx.single_source_dijkstra, weight)
+        vlp(G, s, t, length, nx.single_source_bellman_ford, weight)
+        vlp(G, s, t, length, goldberg_radzik, weight)
+        vlp(G, s, t, length, astar, weight)
+
+    def test_bidirectional_dijkstra_no_path(self):
+        with pytest.raises(nx.NetworkXNoPath):
+            G = nx.Graph()
+            nx.add_path(G, [1, 2, 3])
+            nx.add_path(G, [4, 5, 6])
+            path = nx.bidirectional_dijkstra(G, 1, 6)
+
+    @pytest.mark.parametrize(
+        "fn",
+        (
+            nx.dijkstra_path,
+            nx.dijkstra_path_length,
+            nx.single_source_dijkstra_path,
+            nx.single_source_dijkstra_path_length,
+            nx.single_source_dijkstra,
+            nx.dijkstra_predecessor_and_distance,
+        ),
+    )
+    def test_absent_source(self, fn):
+        G = nx.path_graph(2)
+        with pytest.raises(nx.NodeNotFound):
+            fn(G, 3, 0)
+        # Test when source == target, which is handled specially by some functions
+        with pytest.raises(nx.NodeNotFound):
+            fn(G, 3, 3)
+
+    def test_dijkstra_predecessor1(self):
+        G = nx.path_graph(4)
+        assert nx.dijkstra_predecessor_and_distance(G, 0) == (
+            {0: [], 1: [0], 2: [1], 3: [2]},
+            {0: 0, 1: 1, 2: 2, 3: 3},
+        )
+
+    def test_dijkstra_predecessor2(self):
+        # 4-cycle
+        G = nx.Graph([(0, 1), (1, 2), (2, 3), (3, 0)])
+        pred, dist = nx.dijkstra_predecessor_and_distance(G, (0))
+        assert pred[0] == []
+        assert pred[1] == [0]
+        assert pred[2] in [[1, 3], [3, 1]]
+        assert pred[3] == [0]
+        assert dist == {0: 0, 1: 1, 2: 2, 3: 1}
+
+    def test_dijkstra_predecessor3(self):
+        XG = nx.DiGraph()
+        XG.add_weighted_edges_from(
+            [
+                ("s", "u", 10),
+                ("s", "x", 5),
+                ("u", "v", 1),
+                ("u", "x", 2),
+                ("v", "y", 1),
+                ("x", "u", 3),
+                ("x", "v", 5),
+                ("x", "y", 2),
+                ("y", "s", 7),
+                ("y", "v", 6),
+            ]
+        )
+        (P, D) = nx.dijkstra_predecessor_and_distance(XG, "s")
+        assert P["v"] == ["u"]
+        assert D["v"] == 9
+        (P, D) = nx.dijkstra_predecessor_and_distance(XG, "s", cutoff=8)
+        assert "v" not in D
+
+    def test_single_source_dijkstra_path_length(self):
+        pl = nx.single_source_dijkstra_path_length
+        assert dict(pl(self.MXG4, 0))[2] == 4
+        spl = pl(self.MXG4, 0, cutoff=2)
+        assert 2 not in spl
+
+    def test_bidirectional_dijkstra_multigraph(self):
+        G = nx.MultiGraph()
+        G.add_edge("a", "b", weight=10)
+        G.add_edge("a", "b", weight=100)
+        dp = nx.bidirectional_dijkstra(G, "a", "b")
+        assert dp == (10, ["a", "b"])
+
+    def test_dijkstra_pred_distance_multigraph(self):
+        G = nx.MultiGraph()
+        G.add_edge("a", "b", key="short", foo=5, weight=100)
+        G.add_edge("a", "b", key="long", bar=1, weight=110)
+        p, d = nx.dijkstra_predecessor_and_distance(G, "a")
+        assert p == {"a": [], "b": ["a"]}
+        assert d == {"a": 0, "b": 100}
+
+    def test_negative_edge_cycle(self):
+        G = nx.cycle_graph(5, create_using=nx.DiGraph())
+        assert not nx.negative_edge_cycle(G)
+        G.add_edge(8, 9, weight=-7)
+        G.add_edge(9, 8, weight=3)
+        graph_size = len(G)
+        assert nx.negative_edge_cycle(G)
+        assert graph_size == len(G)
+        pytest.raises(ValueError, nx.single_source_dijkstra_path_length, G, 8)
+        pytest.raises(ValueError, nx.single_source_dijkstra, G, 8)
+        pytest.raises(ValueError, nx.dijkstra_predecessor_and_distance, G, 8)
+        G.add_edge(9, 10)
+        pytest.raises(ValueError, nx.bidirectional_dijkstra, G, 8, 10)
+        G = nx.MultiDiGraph()
+        G.add_edge(2, 2, weight=-1)
+        assert nx.negative_edge_cycle(G)
+
+    def test_negative_edge_cycle_empty(self):
+        G = nx.DiGraph()
+        assert not nx.negative_edge_cycle(G)
+
+    def test_negative_edge_cycle_custom_weight_key(self):
+        d = nx.DiGraph()
+        d.add_edge("a", "b", w=-2)
+        d.add_edge("b", "a", w=-1)
+        assert nx.negative_edge_cycle(d, weight="w")
+
+    def test_weight_function(self):
+        """Tests that a callable weight is interpreted as a weight
+        function instead of an edge attribute.
+
+        """
+        # Create a triangle in which the edge from node 0 to node 2 has
+        # a large weight and the other two edges have a small weight.
+        G = nx.complete_graph(3)
+        G.adj[0][2]["weight"] = 10
+        G.adj[0][1]["weight"] = 1
+        G.adj[1][2]["weight"] = 1
+
+        # The weight function will take the multiplicative inverse of
+        # the weights on the edges. This way, weights that were large
+        # before now become small and vice versa.
+
+        def weight(u, v, d):
+            return 1 / d["weight"]
+
+        # The shortest path from 0 to 2 using the actual weights on the
+        # edges should be [0, 1, 2].
+        distance, path = nx.single_source_dijkstra(G, 0, 2)
+        assert distance == 2
+        assert path == [0, 1, 2]
+        # However, with the above weight function, the shortest path
+        # should be [0, 2], since that has a very small weight.
+        distance, path = nx.single_source_dijkstra(G, 0, 2, weight=weight)
+        assert distance == 1 / 10
+        assert path == [0, 2]
+
+    def test_all_pairs_dijkstra_path(self):
+        cycle = nx.cycle_graph(7)
+        p = dict(nx.all_pairs_dijkstra_path(cycle))
+        assert p[0][3] == [0, 1, 2, 3]
+
+        cycle[1][2]["weight"] = 10
+        p = dict(nx.all_pairs_dijkstra_path(cycle))
+        assert p[0][3] == [0, 6, 5, 4, 3]
+
+    def test_all_pairs_dijkstra_path_length(self):
+        cycle = nx.cycle_graph(7)
+        pl = dict(nx.all_pairs_dijkstra_path_length(cycle))
+        assert pl[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+
+        cycle[1][2]["weight"] = 10
+        pl = dict(nx.all_pairs_dijkstra_path_length(cycle))
+        assert pl[0] == {0: 0, 1: 1, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1}
+
+    def test_all_pairs_dijkstra(self):
+        cycle = nx.cycle_graph(7)
+        out = dict(nx.all_pairs_dijkstra(cycle))
+        assert out[0][0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert out[0][1][3] == [0, 1, 2, 3]
+
+        cycle[1][2]["weight"] = 10
+        out = dict(nx.all_pairs_dijkstra(cycle))
+        assert out[0][0] == {0: 0, 1: 1, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1}
+        assert out[0][1][3] == [0, 6, 5, 4, 3]
+
+
+class TestDijkstraPathLength:
+    """Unit tests for the :func:`networkx.dijkstra_path_length`
+    function.
+
+    """
+
+    def test_weight_function(self):
+        """Tests for computing the length of the shortest path using
+        Dijkstra's algorithm with a user-defined weight function.
+
+        """
+        # Create a triangle in which the edge from node 0 to node 2 has
+        # a large weight and the other two edges have a small weight.
+        G = nx.complete_graph(3)
+        G.adj[0][2]["weight"] = 10
+        G.adj[0][1]["weight"] = 1
+        G.adj[1][2]["weight"] = 1
+
+        # The weight function will take the multiplicative inverse of
+        # the weights on the edges. This way, weights that were large
+        # before now become small and vice versa.
+
+        def weight(u, v, d):
+            return 1 / d["weight"]
+
+        # The shortest path from 0 to 2 using the actual weights on the
+        # edges should be [0, 1, 2]. However, with the above weight
+        # function, the shortest path should be [0, 2], since that has a
+        # very small weight.
+        length = nx.dijkstra_path_length(G, 0, 2, weight=weight)
+        assert length == 1 / 10
+
+
+class TestMultiSourceDijkstra:
+    """Unit tests for the multi-source dialect of Dijkstra's shortest
+    path algorithms.
+
+    """
+
+    def test_no_sources(self):
+        with pytest.raises(ValueError):
+            nx.multi_source_dijkstra(nx.Graph(), {})
+
+    def test_path_no_sources(self):
+        with pytest.raises(ValueError):
+            nx.multi_source_dijkstra_path(nx.Graph(), {})
+
+    def test_path_length_no_sources(self):
+        with pytest.raises(ValueError):
+            nx.multi_source_dijkstra_path_length(nx.Graph(), {})
+
+    @pytest.mark.parametrize(
+        "fn",
+        (
+            nx.multi_source_dijkstra_path,
+            nx.multi_source_dijkstra_path_length,
+            nx.multi_source_dijkstra,
+        ),
+    )
+    def test_absent_source(self, fn):
+        G = nx.path_graph(2)
+        with pytest.raises(nx.NodeNotFound):
+            fn(G, [3], 0)
+        with pytest.raises(nx.NodeNotFound):
+            fn(G, [3], 3)
+
+    def test_two_sources(self):
+        edges = [(0, 1, 1), (1, 2, 1), (2, 3, 10), (3, 4, 1)]
+        G = nx.Graph()
+        G.add_weighted_edges_from(edges)
+        sources = {0, 4}
+        distances, paths = nx.multi_source_dijkstra(G, sources)
+        expected_distances = {0: 0, 1: 1, 2: 2, 3: 1, 4: 0}
+        expected_paths = {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [4, 3], 4: [4]}
+        assert distances == expected_distances
+        assert paths == expected_paths
+
+    def test_simple_paths(self):
+        G = nx.path_graph(4)
+        lengths = nx.multi_source_dijkstra_path_length(G, [0])
+        assert lengths == {n: n for n in G}
+        paths = nx.multi_source_dijkstra_path(G, [0])
+        assert paths == {n: list(range(n + 1)) for n in G}
+
+
+class TestBellmanFordAndGoldbergRadzik(WeightedTestBase):
+    def test_single_node_graph(self):
+        G = nx.DiGraph()
+        G.add_node(0)
+        assert nx.single_source_bellman_ford_path(G, 0) == {0: [0]}
+        assert nx.single_source_bellman_ford_path_length(G, 0) == {0: 0}
+        assert nx.single_source_bellman_ford(G, 0) == ({0: 0}, {0: [0]})
+        assert nx.bellman_ford_predecessor_and_distance(G, 0) == ({0: []}, {0: 0})
+        assert nx.goldberg_radzik(G, 0) == ({0: None}, {0: 0})
+
+    def test_absent_source_bellman_ford(self):
+        # the check is in _bellman_ford; this provides regression testing
+        # against later changes to "client" Bellman-Ford functions
+        G = nx.path_graph(2)
+        for fn in (
+            nx.bellman_ford_predecessor_and_distance,
+            nx.bellman_ford_path,
+            nx.bellman_ford_path_length,
+            nx.single_source_bellman_ford_path,
+            nx.single_source_bellman_ford_path_length,
+            nx.single_source_bellman_ford,
+        ):
+            pytest.raises(nx.NodeNotFound, fn, G, 3, 0)
+            pytest.raises(nx.NodeNotFound, fn, G, 3, 3)
+
+    def test_absent_source_goldberg_radzik(self):
+        with pytest.raises(nx.NodeNotFound):
+            G = nx.path_graph(2)
+            nx.goldberg_radzik(G, 3, 0)
+
+    def test_negative_cycle_heuristic(self):
+        G = nx.DiGraph()
+        G.add_edge(0, 1, weight=-1)
+        G.add_edge(1, 2, weight=-1)
+        G.add_edge(2, 3, weight=-1)
+        G.add_edge(3, 0, weight=3)
+        assert not nx.negative_edge_cycle(G, heuristic=True)
+        G.add_edge(2, 0, weight=1.999)
+        assert nx.negative_edge_cycle(G, heuristic=True)
+        G.edges[2, 0]["weight"] = 2
+        assert not nx.negative_edge_cycle(G, heuristic=True)
+
+    def test_negative_cycle_consistency(self):
+        import random
+
+        unif = random.uniform
+        for random_seed in range(2):  # range(20):
+            random.seed(random_seed)
+            for density in [0.1, 0.9]:  # .3, .7, .9]:
+                for N in [1, 10, 20]:  # range(1, 60 - int(30 * density)):
+                    for max_cost in [1, 90]:  # [1, 10, 40, 90]:
+                        G = nx.binomial_graph(N, density, seed=4, directed=True)
+                        edges = ((u, v, unif(-1, max_cost)) for u, v in G.edges)
+                        G.add_weighted_edges_from(edges)
+
+                        no_heuristic = nx.negative_edge_cycle(G, heuristic=False)
+                        with_heuristic = nx.negative_edge_cycle(G, heuristic=True)
+                        assert no_heuristic == with_heuristic
+
+    def test_negative_cycle(self):
+        G = nx.cycle_graph(5, create_using=nx.DiGraph())
+        G.add_edge(1, 2, weight=-7)
+        for i in range(5):
+            pytest.raises(
+                nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, i
+            )
+            pytest.raises(
+                nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, i
+            )
+            pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, i)
+            pytest.raises(
+                nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, i
+            )
+            pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i)
+        G = nx.cycle_graph(5)  # undirected Graph
+        G.add_edge(1, 2, weight=-3)
+        for i in range(5):
+            pytest.raises(
+                nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, i
+            )
+            pytest.raises(
+                nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, i
+            )
+            pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, i)
+            pytest.raises(
+                nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, i
+            )
+            pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i)
+        G = nx.DiGraph([(1, 1, {"weight": -1})])
+        pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, 1)
+        pytest.raises(
+            nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, 1
+        )
+        pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, 1)
+        pytest.raises(
+            nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, 1
+        )
+        pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, 1)
+        G = nx.MultiDiGraph([(1, 1, {"weight": -1})])
+        pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, 1)
+        pytest.raises(
+            nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, 1
+        )
+        pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, 1)
+        pytest.raises(
+            nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, 1
+        )
+        pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, 1)
+
+    def test_zero_cycle(self):
+        G = nx.cycle_graph(5, create_using=nx.DiGraph())
+        G.add_edge(2, 3, weight=-4)
+        # check that zero cycle doesn't raise
+        nx.goldberg_radzik(G, 1)
+        nx.bellman_ford_predecessor_and_distance(G, 1)
+
+        G.add_edge(2, 3, weight=-4.0001)
+        # check that negative cycle does raise
+        pytest.raises(
+            nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, 1
+        )
+        pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, 1)
+
+    def test_find_negative_cycle_longer_cycle(self):
+        G = nx.cycle_graph(5, create_using=nx.DiGraph())
+        nx.add_cycle(G, [3, 5, 6, 7, 8, 9])
+        G.add_edge(1, 2, weight=-30)
+        assert nx.find_negative_cycle(G, 1) == [0, 1, 2, 3, 4, 0]
+        assert nx.find_negative_cycle(G, 7) == [2, 3, 4, 0, 1, 2]
+
+    def test_find_negative_cycle_no_cycle(self):
+        G = nx.path_graph(5, create_using=nx.DiGraph())
+        pytest.raises(nx.NetworkXError, nx.find_negative_cycle, G, 3)
+
+    def test_find_negative_cycle_single_edge(self):
+        G = nx.Graph()
+        G.add_edge(0, 1, weight=-1)
+        assert nx.find_negative_cycle(G, 1) == [1, 0, 1]
+
+    def test_negative_weight(self):
+        G = nx.cycle_graph(5, create_using=nx.DiGraph())
+        G.add_edge(1, 2, weight=-3)
+        assert nx.single_source_bellman_ford_path(G, 0) == {
+            0: [0],
+            1: [0, 1],
+            2: [0, 1, 2],
+            3: [0, 1, 2, 3],
+            4: [0, 1, 2, 3, 4],
+        }
+        assert nx.single_source_bellman_ford_path_length(G, 0) == {
+            0: 0,
+            1: 1,
+            2: -2,
+            3: -1,
+            4: 0,
+        }
+        assert nx.single_source_bellman_ford(G, 0) == (
+            {0: 0, 1: 1, 2: -2, 3: -1, 4: 0},
+            {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3], 4: [0, 1, 2, 3, 4]},
+        )
+        assert nx.bellman_ford_predecessor_and_distance(G, 0) == (
+            {0: [], 1: [0], 2: [1], 3: [2], 4: [3]},
+            {0: 0, 1: 1, 2: -2, 3: -1, 4: 0},
+        )
+        assert nx.goldberg_radzik(G, 0) == (
+            {0: None, 1: 0, 2: 1, 3: 2, 4: 3},
+            {0: 0, 1: 1, 2: -2, 3: -1, 4: 0},
+        )
+
+    def test_not_connected(self):
+        G = nx.complete_graph(6)
+        G.add_edge(10, 11)
+        G.add_edge(10, 12)
+        assert nx.single_source_bellman_ford_path(G, 0) == {
+            0: [0],
+            1: [0, 1],
+            2: [0, 2],
+            3: [0, 3],
+            4: [0, 4],
+            5: [0, 5],
+        }
+        assert nx.single_source_bellman_ford_path_length(G, 0) == {
+            0: 0,
+            1: 1,
+            2: 1,
+            3: 1,
+            4: 1,
+            5: 1,
+        }
+        assert nx.single_source_bellman_ford(G, 0) == (
+            {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1},
+            {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]},
+        )
+        assert nx.bellman_ford_predecessor_and_distance(G, 0) == (
+            {0: [], 1: [0], 2: [0], 3: [0], 4: [0], 5: [0]},
+            {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1},
+        )
+        assert nx.goldberg_radzik(G, 0) == (
+            {0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
+            {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1},
+        )
+
+        # not connected, with a component not containing the source that
+        # contains a negative cycle.
+        G = nx.complete_graph(6)
+        G.add_edges_from(
+            [
+                ("A", "B", {"load": 3}),
+                ("B", "C", {"load": -10}),
+                ("C", "A", {"load": 2}),
+            ]
+        )
+        assert nx.single_source_bellman_ford_path(G, 0, weight="load") == {
+            0: [0],
+            1: [0, 1],
+            2: [0, 2],
+            3: [0, 3],
+            4: [0, 4],
+            5: [0, 5],
+        }
+        assert nx.single_source_bellman_ford_path_length(G, 0, weight="load") == {
+            0: 0,
+            1: 1,
+            2: 1,
+            3: 1,
+            4: 1,
+            5: 1,
+        }
+        assert nx.single_source_bellman_ford(G, 0, weight="load") == (
+            {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1},
+            {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]},
+        )
+        assert nx.bellman_ford_predecessor_and_distance(G, 0, weight="load") == (
+            {0: [], 1: [0], 2: [0], 3: [0], 4: [0], 5: [0]},
+            {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1},
+        )
+        assert nx.goldberg_radzik(G, 0, weight="load") == (
+            {0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
+            {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1},
+        )
+
+    def test_multigraph(self):
+        assert nx.bellman_ford_path(self.MXG, "s", "v") == ["s", "x", "u", "v"]
+        assert nx.bellman_ford_path_length(self.MXG, "s", "v") == 9
+        assert nx.single_source_bellman_ford_path(self.MXG, "s")["v"] == [
+            "s",
+            "x",
+            "u",
+            "v",
+        ]
+        assert nx.single_source_bellman_ford_path_length(self.MXG, "s")["v"] == 9
+        D, P = nx.single_source_bellman_ford(self.MXG, "s", target="v")
+        assert D == 9
+        assert P == ["s", "x", "u", "v"]
+        P, D = nx.bellman_ford_predecessor_and_distance(self.MXG, "s")
+        assert P["v"] == ["u"]
+        assert D["v"] == 9
+        P, D = nx.goldberg_radzik(self.MXG, "s")
+        assert P["v"] == "u"
+        assert D["v"] == 9
+        assert nx.bellman_ford_path(self.MXG4, 0, 2) == [0, 1, 2]
+        assert nx.bellman_ford_path_length(self.MXG4, 0, 2) == 4
+        assert nx.single_source_bellman_ford_path(self.MXG4, 0)[2] == [0, 1, 2]
+        assert nx.single_source_bellman_ford_path_length(self.MXG4, 0)[2] == 4
+        D, P = nx.single_source_bellman_ford(self.MXG4, 0, target=2)
+        assert D == 4
+        assert P == [0, 1, 2]
+        P, D = nx.bellman_ford_predecessor_and_distance(self.MXG4, 0)
+        assert P[2] == [1]
+        assert D[2] == 4
+        P, D = nx.goldberg_radzik(self.MXG4, 0)
+        assert P[2] == 1
+        assert D[2] == 4
+
+    def test_others(self):
+        assert nx.bellman_ford_path(self.XG, "s", "v") == ["s", "x", "u", "v"]
+        assert nx.bellman_ford_path_length(self.XG, "s", "v") == 9
+        assert nx.single_source_bellman_ford_path(self.XG, "s")["v"] == [
+            "s",
+            "x",
+            "u",
+            "v",
+        ]
+        assert nx.single_source_bellman_ford_path_length(self.XG, "s")["v"] == 9
+        D, P = nx.single_source_bellman_ford(self.XG, "s", target="v")
+        assert D == 9
+        assert P == ["s", "x", "u", "v"]
+        (P, D) = nx.bellman_ford_predecessor_and_distance(self.XG, "s")
+        assert P["v"] == ["u"]
+        assert D["v"] == 9
+        (P, D) = nx.goldberg_radzik(self.XG, "s")
+        assert P["v"] == "u"
+        assert D["v"] == 9
+
+    def test_path_graph(self):
+        G = nx.path_graph(4)
+        assert nx.single_source_bellman_ford_path(G, 0) == {
+            0: [0],
+            1: [0, 1],
+            2: [0, 1, 2],
+            3: [0, 1, 2, 3],
+        }
+        assert nx.single_source_bellman_ford_path_length(G, 0) == {
+            0: 0,
+            1: 1,
+            2: 2,
+            3: 3,
+        }
+        assert nx.single_source_bellman_ford(G, 0) == (
+            {0: 0, 1: 1, 2: 2, 3: 3},
+            {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3]},
+        )
+        assert nx.bellman_ford_predecessor_and_distance(G, 0) == (
+            {0: [], 1: [0], 2: [1], 3: [2]},
+            {0: 0, 1: 1, 2: 2, 3: 3},
+        )
+        assert nx.goldberg_radzik(G, 0) == (
+            {0: None, 1: 0, 2: 1, 3: 2},
+            {0: 0, 1: 1, 2: 2, 3: 3},
+        )
+        assert nx.single_source_bellman_ford_path(G, 3) == {
+            0: [3, 2, 1, 0],
+            1: [3, 2, 1],
+            2: [3, 2],
+            3: [3],
+        }
+        assert nx.single_source_bellman_ford_path_length(G, 3) == {
+            0: 3,
+            1: 2,
+            2: 1,
+            3: 0,
+        }
+        assert nx.single_source_bellman_ford(G, 3) == (
+            {0: 3, 1: 2, 2: 1, 3: 0},
+            {0: [3, 2, 1, 0], 1: [3, 2, 1], 2: [3, 2], 3: [3]},
+        )
+        assert nx.bellman_ford_predecessor_and_distance(G, 3) == (
+            {0: [1], 1: [2], 2: [3], 3: []},
+            {0: 3, 1: 2, 2: 1, 3: 0},
+        )
+        assert nx.goldberg_radzik(G, 3) == (
+            {0: 1, 1: 2, 2: 3, 3: None},
+            {0: 3, 1: 2, 2: 1, 3: 0},
+        )
+
+    def test_4_cycle(self):
+        # 4-cycle
+        G = nx.Graph([(0, 1), (1, 2), (2, 3), (3, 0)])
+        dist, path = nx.single_source_bellman_ford(G, 0)
+        assert dist == {0: 0, 1: 1, 2: 2, 3: 1}
+        assert path[0] == [0]
+        assert path[1] == [0, 1]
+        assert path[2] in [[0, 1, 2], [0, 3, 2]]
+        assert path[3] == [0, 3]
+
+        pred, dist = nx.bellman_ford_predecessor_and_distance(G, 0)
+        assert pred[0] == []
+        assert pred[1] == [0]
+        assert pred[2] in [[1, 3], [3, 1]]
+        assert pred[3] == [0]
+        assert dist == {0: 0, 1: 1, 2: 2, 3: 1}
+
+        pred, dist = nx.goldberg_radzik(G, 0)
+        assert pred[0] is None
+        assert pred[1] == 0
+        assert pred[2] in [1, 3]
+        assert pred[3] == 0
+        assert dist == {0: 0, 1: 1, 2: 2, 3: 1}
+
+    def test_negative_weight_bf_path(self):
+        G = nx.DiGraph()
+        G.add_nodes_from("abcd")
+        G.add_edge("a", "d", weight=0)
+        G.add_edge("a", "b", weight=1)
+        G.add_edge("b", "c", weight=-3)
+        G.add_edge("c", "d", weight=1)
+
+        assert nx.bellman_ford_path(G, "a", "d") == ["a", "b", "c", "d"]
+        assert nx.bellman_ford_path_length(G, "a", "d") == -1
+
+    def test_zero_cycle_smoke(self):
+        D = nx.DiGraph()
+        D.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1), (3, 1, -2)])
+
+        nx.bellman_ford_path(D, 1, 3)
+        nx.dijkstra_path(D, 1, 3)
+        nx.bidirectional_dijkstra(D, 1, 3)
+        # FIXME nx.goldberg_radzik(D, 1)
+
+
+class TestJohnsonAlgorithm(WeightedTestBase):
+    def test_single_node_graph(self):
+        G = nx.DiGraph()
+        G.add_node(0)
+        assert nx.johnson(G) == {0: {0: [0]}}
+
+    def test_negative_cycle(self):
+        G = nx.DiGraph()
+        G.add_weighted_edges_from(
+            [
+                ("0", "3", 3),
+                ("0", "1", -5),
+                ("1", "0", -5),
+                ("0", "2", 2),
+                ("1", "2", 4),
+                ("2", "3", 1),
+            ]
+        )
+        pytest.raises(nx.NetworkXUnbounded, nx.johnson, G)
+        G = nx.Graph()
+        G.add_weighted_edges_from(
+            [
+                ("0", "3", 3),
+                ("0", "1", -5),
+                ("1", "0", -5),
+                ("0", "2", 2),
+                ("1", "2", 4),
+                ("2", "3", 1),
+            ]
+        )
+        pytest.raises(nx.NetworkXUnbounded, nx.johnson, G)
+
+    def test_negative_weights(self):
+        G = nx.DiGraph()
+        G.add_weighted_edges_from(
+            [("0", "3", 3), ("0", "1", -5), ("0", "2", 2), ("1", "2", 4), ("2", "3", 1)]
+        )
+        paths = nx.johnson(G)
+        assert paths == {
+            "1": {"1": ["1"], "3": ["1", "2", "3"], "2": ["1", "2"]},
+            "0": {
+                "1": ["0", "1"],
+                "0": ["0"],
+                "3": ["0", "1", "2", "3"],
+                "2": ["0", "1", "2"],
+            },
+            "3": {"3": ["3"]},
+            "2": {"3": ["2", "3"], "2": ["2"]},
+        }
+
+    def test_unweighted_graph(self):
+        G = nx.Graph()
+        G.add_edges_from([(1, 0), (2, 1)])
+        H = G.copy()
+        nx.set_edge_attributes(H, values=1, name="weight")
+        assert nx.johnson(G) == nx.johnson(H)
+
+    def test_partially_weighted_graph_with_negative_edges(self):
+        G = nx.DiGraph()
+        G.add_edges_from([(0, 1), (1, 2), (2, 0), (1, 0)])
+        G[1][0]["weight"] = -2
+        G[0][1]["weight"] = 3
+        G[1][2]["weight"] = -4
+
+        H = G.copy()
+        H[2][0]["weight"] = 1
+
+        I = G.copy()
+        I[2][0]["weight"] = 8
+
+        assert nx.johnson(G) == nx.johnson(H)
+        assert nx.johnson(G) != nx.johnson(I)
+
+    def test_graphs(self):
+        validate_path(self.XG, "s", "v", 9, nx.johnson(self.XG)["s"]["v"])
+        validate_path(self.MXG, "s", "v", 9, nx.johnson(self.MXG)["s"]["v"])
+        validate_path(self.XG2, 1, 3, 4, nx.johnson(self.XG2)[1][3])
+        validate_path(self.XG3, 0, 3, 15, nx.johnson(self.XG3)[0][3])
+        validate_path(self.XG4, 0, 2, 4, nx.johnson(self.XG4)[0][2])
+        validate_path(self.MXG4, 0, 2, 4, nx.johnson(self.MXG4)[0][2])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/unweighted.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/unweighted.py
new file mode 100644
index 00000000..3aeef854
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/unweighted.py
@@ -0,0 +1,579 @@
+"""
+Shortest path algorithms for unweighted graphs.
+"""
+
+import warnings
+
+import networkx as nx
+
+__all__ = [
+    "bidirectional_shortest_path",
+    "single_source_shortest_path",
+    "single_source_shortest_path_length",
+    "single_target_shortest_path",
+    "single_target_shortest_path_length",
+    "all_pairs_shortest_path",
+    "all_pairs_shortest_path_length",
+    "predecessor",
+]
+
+
+@nx._dispatchable
+def single_source_shortest_path_length(G, source, cutoff=None):
+    """Compute the shortest path lengths from source to all reachable nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node
+       Starting node for path
+
+    cutoff : integer, optional
+        Depth to stop the search. Only paths of length <= cutoff are returned.
+
+    Returns
+    -------
+    lengths : dict
+        Dict keyed by node to shortest path length to source.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> length = nx.single_source_shortest_path_length(G, 0)
+    >>> length[4]
+    4
+    >>> for node in length:
+    ...     print(f"{node}: {length[node]}")
+    0: 0
+    1: 1
+    2: 2
+    3: 3
+    4: 4
+
+    See Also
+    --------
+    shortest_path_length
+    """
+    if source not in G:
+        raise nx.NodeNotFound(f"Source {source} is not in G")
+    if cutoff is None:
+        cutoff = float("inf")
+    nextlevel = [source]
+    return dict(_single_shortest_path_length(G._adj, nextlevel, cutoff))
+
+
+def _single_shortest_path_length(adj, firstlevel, cutoff):
+    """Yields (node, level) in a breadth first search
+
+    Shortest Path Length helper function
+    Parameters
+    ----------
+        adj : dict
+            Adjacency dict or view
+        firstlevel : list
+            starting nodes, e.g. [source] or [target]
+        cutoff : int or float
+            level at which we stop the process
+    """
+    seen = set(firstlevel)
+    nextlevel = firstlevel
+    level = 0
+    n = len(adj)
+    for v in nextlevel:
+        yield (v, level)
+    while nextlevel and cutoff > level:
+        level += 1
+        thislevel = nextlevel
+        nextlevel = []
+        for v in thislevel:
+            for w in adj[v]:
+                if w not in seen:
+                    seen.add(w)
+                    nextlevel.append(w)
+                    yield (w, level)
+            if len(seen) == n:
+                return
+
+
+@nx._dispatchable
+def single_target_shortest_path_length(G, target, cutoff=None):
+    """Compute the shortest path lengths to target from all reachable nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    target : node
+       Target node for path
+
+    cutoff : integer, optional
+        Depth to stop the search. Only paths of length <= cutoff are returned.
+
+    Returns
+    -------
+    lengths : iterator
+        (source, shortest path length) iterator
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5, create_using=nx.DiGraph())
+    >>> length = dict(nx.single_target_shortest_path_length(G, 4))
+    >>> length[0]
+    4
+    >>> for node in range(5):
+    ...     print(f"{node}: {length[node]}")
+    0: 4
+    1: 3
+    2: 2
+    3: 1
+    4: 0
+
+    See Also
+    --------
+    single_source_shortest_path_length, shortest_path_length
+    """
+    if target not in G:
+        raise nx.NodeNotFound(f"Target {target} is not in G")
+
+    warnings.warn(
+        (
+            "\n\nsingle_target_shortest_path_length will return a dict instead of"
+            "\nan iterator in version 3.5"
+        ),
+        FutureWarning,
+        stacklevel=3,
+    )
+
+    if cutoff is None:
+        cutoff = float("inf")
+    # handle either directed or undirected
+    adj = G._pred if G.is_directed() else G._adj
+    nextlevel = [target]
+    # for version 3.5 we will return a dict like this:
+    # return dict(_single_shortest_path_length(adj, nextlevel, cutoff))
+    return _single_shortest_path_length(adj, nextlevel, cutoff)
+
+
+@nx._dispatchable
+def all_pairs_shortest_path_length(G, cutoff=None):
+    """Computes the shortest path lengths between all nodes in `G`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    cutoff : integer, optional
+        Depth at which to stop the search. Only paths of length at most
+        `cutoff` are returned.
+
+    Returns
+    -------
+    lengths : iterator
+        (source, dictionary) iterator with dictionary keyed by target and
+        shortest path length as the key value.
+
+    Notes
+    -----
+    The iterator returned only has reachable node pairs.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> length = dict(nx.all_pairs_shortest_path_length(G))
+    >>> for node in [0, 1, 2, 3, 4]:
+    ...     print(f"1 - {node}: {length[1][node]}")
+    1 - 0: 1
+    1 - 1: 0
+    1 - 2: 1
+    1 - 3: 2
+    1 - 4: 3
+    >>> length[3][2]
+    1
+    >>> length[2][2]
+    0
+
+    """
+    length = single_source_shortest_path_length
+    # TODO This can be trivially parallelized.
+    for n in G:
+        yield (n, length(G, n, cutoff=cutoff))
+
+
+@nx._dispatchable
+def bidirectional_shortest_path(G, source, target):
+    """Returns a list of nodes in a shortest path between source and target.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node label
+       starting node for path
+
+    target : node label
+       ending node for path
+
+    Returns
+    -------
+    path: list
+       List of nodes in a path from source to target.
+
+    Raises
+    ------
+    NetworkXNoPath
+       If no path exists between source and target.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> nx.add_path(G, [0, 1, 2, 3, 0, 4, 5, 6, 7, 4])
+    >>> nx.bidirectional_shortest_path(G, 2, 6)
+    [2, 1, 0, 4, 5, 6]
+
+    See Also
+    --------
+    shortest_path
+
+    Notes
+    -----
+    This algorithm is used by shortest_path(G, source, target).
+    """
+
+    if source not in G:
+        raise nx.NodeNotFound(f"Source {source} is not in G")
+
+    if target not in G:
+        raise nx.NodeNotFound(f"Target {target} is not in G")
+
+    # call helper to do the real work
+    results = _bidirectional_pred_succ(G, source, target)
+    pred, succ, w = results
+
+    # build path from pred+w+succ
+    path = []
+    # from source to w
+    while w is not None:
+        path.append(w)
+        w = pred[w]
+    path.reverse()
+    # from w to target
+    w = succ[path[-1]]
+    while w is not None:
+        path.append(w)
+        w = succ[w]
+
+    return path
+
+
+def _bidirectional_pred_succ(G, source, target):
+    """Bidirectional shortest path helper.
+
+    Returns (pred, succ, w) where
+    pred is a dictionary of predecessors from w to the source, and
+    succ is a dictionary of successors from w to the target.
+    """
+    # does BFS from both source and target and meets in the middle
+    if target == source:
+        return ({target: None}, {source: None}, source)
+
+    # handle either directed or undirected
+    if G.is_directed():
+        Gpred = G.pred
+        Gsucc = G.succ
+    else:
+        Gpred = G.adj
+        Gsucc = G.adj
+
+    # predecessor and successors in search
+    pred = {source: None}
+    succ = {target: None}
+
+    # initialize fringes, start with forward
+    forward_fringe = [source]
+    reverse_fringe = [target]
+
+    while forward_fringe and reverse_fringe:
+        if len(forward_fringe) <= len(reverse_fringe):
+            this_level = forward_fringe
+            forward_fringe = []
+            for v in this_level:
+                for w in Gsucc[v]:
+                    if w not in pred:
+                        forward_fringe.append(w)
+                        pred[w] = v
+                    if w in succ:  # path found
+                        return pred, succ, w
+        else:
+            this_level = reverse_fringe
+            reverse_fringe = []
+            for v in this_level:
+                for w in Gpred[v]:
+                    if w not in succ:
+                        succ[w] = v
+                        reverse_fringe.append(w)
+                    if w in pred:  # found path
+                        return pred, succ, w
+
+    raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
+
+
+@nx._dispatchable
+def single_source_shortest_path(G, source, cutoff=None):
+    """Compute shortest path between source
+    and all other nodes reachable from source.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node label
+       Starting node for path
+
+    cutoff : integer, optional
+        Depth to stop the search. Only paths of length <= cutoff are returned.
+
+    Returns
+    -------
+    paths : dictionary
+        Dictionary, keyed by target, of shortest paths.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> path = nx.single_source_shortest_path(G, 0)
+    >>> path[4]
+    [0, 1, 2, 3, 4]
+
+    Notes
+    -----
+    The shortest path is not necessarily unique. So there can be multiple
+    paths between the source and each target node, all of which have the
+    same 'shortest' length. For each target node, this function returns
+    only one of those paths.
+
+    See Also
+    --------
+    shortest_path
+    """
+    if source not in G:
+        raise nx.NodeNotFound(f"Source {source} not in G")
+
+    def join(p1, p2):
+        return p1 + p2
+
+    if cutoff is None:
+        cutoff = float("inf")
+    nextlevel = {source: 1}  # list of nodes to check at next level
+    paths = {source: [source]}  # paths dictionary  (paths to key from source)
+    return dict(_single_shortest_path(G.adj, nextlevel, paths, cutoff, join))
+
+
+def _single_shortest_path(adj, firstlevel, paths, cutoff, join):
+    """Returns shortest paths
+
+    Shortest Path helper function
+    Parameters
+    ----------
+        adj : dict
+            Adjacency dict or view
+        firstlevel : dict
+            starting nodes, e.g. {source: 1} or {target: 1}
+        paths : dict
+            paths for starting nodes, e.g. {source: [source]}
+        cutoff : int or float
+            level at which we stop the process
+        join : function
+            function to construct a path from two partial paths. Requires two
+            list inputs `p1` and `p2`, and returns a list. Usually returns
+            `p1 + p2` (forward from source) or `p2 + p1` (backward from target)
+    """
+    level = 0  # the current level
+    nextlevel = firstlevel
+    while nextlevel and cutoff > level:
+        thislevel = nextlevel
+        nextlevel = {}
+        for v in thislevel:
+            for w in adj[v]:
+                if w not in paths:
+                    paths[w] = join(paths[v], [w])
+                    nextlevel[w] = 1
+        level += 1
+    return paths
+
+
+@nx._dispatchable
+def single_target_shortest_path(G, target, cutoff=None):
+    """Compute shortest path to target from all nodes that reach target.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    target : node label
+       Target node for path
+
+    cutoff : integer, optional
+        Depth to stop the search. Only paths of length <= cutoff are returned.
+
+    Returns
+    -------
+    paths : dictionary
+        Dictionary, keyed by target, of shortest paths.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5, create_using=nx.DiGraph())
+    >>> path = nx.single_target_shortest_path(G, 4)
+    >>> path[0]
+    [0, 1, 2, 3, 4]
+
+    Notes
+    -----
+    The shortest path is not necessarily unique. So there can be multiple
+    paths between the source and each target node, all of which have the
+    same 'shortest' length. For each target node, this function returns
+    only one of those paths.
+
+    See Also
+    --------
+    shortest_path, single_source_shortest_path
+    """
+    if target not in G:
+        raise nx.NodeNotFound(f"Target {target} not in G")
+
+    def join(p1, p2):
+        return p2 + p1
+
+    # handle undirected graphs
+    adj = G.pred if G.is_directed() else G.adj
+    if cutoff is None:
+        cutoff = float("inf")
+    nextlevel = {target: 1}  # list of nodes to check at next level
+    paths = {target: [target]}  # paths dictionary  (paths to key from source)
+    return dict(_single_shortest_path(adj, nextlevel, paths, cutoff, join))
+
+
+@nx._dispatchable
+def all_pairs_shortest_path(G, cutoff=None):
+    """Compute shortest paths between all nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    cutoff : integer, optional
+        Depth at which to stop the search. Only paths of length at most
+        `cutoff` are returned.
+
+    Returns
+    -------
+    paths : iterator
+        Dictionary, keyed by source and target, of shortest paths.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> path = dict(nx.all_pairs_shortest_path(G))
+    >>> print(path[0][4])
+    [0, 1, 2, 3, 4]
+
+    Notes
+    -----
+    There may be multiple shortest paths with the same length between
+    two nodes. For each pair, this function returns only one of those paths.
+
+    See Also
+    --------
+    floyd_warshall
+    all_pairs_all_shortest_paths
+
+    """
+    # TODO This can be trivially parallelized.
+    for n in G:
+        yield (n, single_source_shortest_path(G, n, cutoff=cutoff))
+
+
+@nx._dispatchable
+def predecessor(G, source, target=None, cutoff=None, return_seen=None):
+    """Returns dict of predecessors for the path from source to all nodes in G.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node label
+       Starting node for path
+
+    target : node label, optional
+       Ending node for path. If provided only predecessors between
+       source and target are returned
+
+    cutoff : integer, optional
+        Depth to stop the search. Only paths of length <= cutoff are returned.
+
+    return_seen : bool, optional (default=None)
+        Whether to return a dictionary, keyed by node, of the level (number of
+        hops) to reach the node (as seen during breadth-first-search).
+
+    Returns
+    -------
+    pred : dictionary
+        Dictionary, keyed by node, of predecessors in the shortest path.
+
+
+    (pred, seen): tuple of dictionaries
+        If `return_seen` argument is set to `True`, then a tuple of dictionaries
+        is returned. The first element is the dictionary, keyed by node, of
+        predecessors in the shortest path. The second element is the dictionary,
+        keyed by node, of the level (number of hops) to reach the node (as seen
+        during breadth-first-search).
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4)
+    >>> list(G)
+    [0, 1, 2, 3]
+    >>> nx.predecessor(G, 0)
+    {0: [], 1: [0], 2: [1], 3: [2]}
+    >>> nx.predecessor(G, 0, return_seen=True)
+    ({0: [], 1: [0], 2: [1], 3: [2]}, {0: 0, 1: 1, 2: 2, 3: 3})
+
+
+    """
+    if source not in G:
+        raise nx.NodeNotFound(f"Source {source} not in G")
+
+    level = 0  # the current level
+    nextlevel = [source]  # list of nodes to check at next level
+    seen = {source: level}  # level (number of hops) when seen in BFS
+    pred = {source: []}  # predecessor dictionary
+    while nextlevel:
+        level = level + 1
+        thislevel = nextlevel
+        nextlevel = []
+        for v in thislevel:
+            for w in G[v]:
+                if w not in seen:
+                    pred[w] = [v]
+                    seen[w] = level
+                    nextlevel.append(w)
+                elif seen[w] == level:  # add v to predecessor list if it
+                    pred[w].append(v)  # is at the correct level
+        if cutoff and cutoff <= level:
+            break
+
+    if target is not None:
+        if return_seen:
+            if target not in pred:
+                return ([], -1)  # No predecessor
+            return (pred[target], seen[target])
+        else:
+            if target not in pred:
+                return []  # No predecessor
+            return pred[target]
+    else:
+        if return_seen:
+            return (pred, seen)
+        else:
+            return pred
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/weighted.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/weighted.py
new file mode 100644
index 00000000..f8421d42
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/weighted.py
@@ -0,0 +1,2520 @@
+"""
+Shortest path algorithms for weighted graphs.
+"""
+
+from collections import deque
+from heapq import heappop, heappush
+from itertools import count
+
+import networkx as nx
+from networkx.algorithms.shortest_paths.generic import _build_paths_from_predecessors
+
# Public API of this module: Dijkstra- and Bellman-Ford-based shortest-path
# routines for weighted graphs, picked up by ``from ... import *``.
__all__ = [
    "dijkstra_path",
    "dijkstra_path_length",
    "bidirectional_dijkstra",
    "single_source_dijkstra",
    "single_source_dijkstra_path",
    "single_source_dijkstra_path_length",
    "multi_source_dijkstra",
    "multi_source_dijkstra_path",
    "multi_source_dijkstra_path_length",
    "all_pairs_dijkstra",
    "all_pairs_dijkstra_path",
    "all_pairs_dijkstra_path_length",
    "dijkstra_predecessor_and_distance",
    "bellman_ford_path",
    "bellman_ford_path_length",
    "single_source_bellman_ford",
    "single_source_bellman_ford_path",
    "single_source_bellman_ford_path_length",
    "all_pairs_bellman_ford_path",
    "all_pairs_bellman_ford_path_length",
    "bellman_ford_predecessor_and_distance",
    "negative_edge_cycle",
    "find_negative_cycle",
    "goldberg_radzik",
    "johnson",
]
+
+
+def _weight_function(G, weight):
+    """Returns a function that returns the weight of an edge.
+
+    The returned function is specifically suitable for input to
+    functions :func:`_dijkstra` and :func:`_bellman_ford_relaxation`.
+
+    Parameters
+    ----------
+    G : NetworkX graph.
+
+    weight : string or function
+        If it is callable, `weight` itself is returned. If it is a string,
+        it is assumed to be the name of the edge attribute that represents
+        the weight of an edge. In that case, a function is returned that
+        gets the edge weight according to the specified edge attribute.
+
+    Returns
+    -------
+    function
+        This function returns a callable that accepts exactly three inputs:
+        a node, an node adjacent to the first one, and the edge attribute
+        dictionary for the eedge joining those nodes. That function returns
+        a number representing the weight of an edge.
+
+    If `G` is a multigraph, and `weight` is not callable, the
+    minimum edge weight over all parallel edges is returned. If any edge
+    does not have an attribute with key `weight`, it is assumed to
+    have weight one.
+
+    """
+    if callable(weight):
+        return weight
+    # If the weight keyword argument is not callable, we assume it is a
+    # string representing the edge attribute containing the weight of
+    # the edge.
+    if G.is_multigraph():
+        return lambda u, v, d: min(attr.get(weight, 1) for attr in d.values())
+    return lambda u, v, data: data.get(weight, 1)
+
+
@nx._dispatchable(edge_attrs="weight")
def dijkstra_path(G, source, target, weight="weight"):
    """Return the shortest weighted path from `source` to `target` in `G`.

    Dijkstra's algorithm is used to find a minimum-weight path between
    the two endpoints.

    Parameters
    ----------
    G : NetworkX graph

    source : node
        Starting node.

    target : node
        Ending node.

    weight : string or function
        If a string, edge weights are read from the edge attribute with
        that key (``G.edges[u, v][weight]``); a missing attribute counts
        as weight one. If a function, it must accept the two endpoints of
        an edge and the edge-attribute dictionary, and return a number or
        None (None hides the edge).

    Returns
    -------
    path : list
        The nodes of a shortest path, in order.

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    NetworkXNoPath
        If no path exists between source and target.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> print(nx.dijkstra_path(G, 0, 4))
    [0, 1, 2, 3, 4]

    Notes
    -----
    Edge weight attributes must be numerical; the length of a path is the
    sum of its edge weights. A weight function may also incorporate node
    weights or hide edges (by returning None), e.g.
    ``weight = lambda u, v, d: 1 if d['color'] == "red" else None`` keeps
    only red edges.

    If you need the path length as well, call
    :func:`single_source_dijkstra`, which computes both at once.

    See Also
    --------
    bidirectional_dijkstra
    bellman_ford_path
    single_source_dijkstra
    """
    # Compute distance and path together, then discard the distance.
    _, path = single_source_dijkstra(G, source, target=target, weight=weight)
    return path
+
+
@nx._dispatchable(edge_attrs="weight")
def dijkstra_path_length(G, source, target, weight="weight"):
    """Return the shortest weighted path length in `G` from `source` to `target`.

    Dijkstra's algorithm is used to compute the minimum total edge weight
    over all paths between the two nodes.

    Parameters
    ----------
    G : NetworkX graph

    source : node label
        Starting node for the path.

    target : node label
        Ending node for the path.

    weight : string or function
        If a string, edge weights are read from the edge attribute with
        that key (``G.edges[u, v][weight]``); a missing attribute counts
        as weight one. If a function, it must accept the two endpoints of
        an edge and the edge-attribute dictionary, and return a number or
        None (None hides the edge).

    Returns
    -------
    length : number
        Shortest path length.

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    NetworkXNoPath
        If no path exists between source and target.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> nx.dijkstra_path_length(G, 0, 4)
    4

    Notes
    -----
    Edge weight attributes must be numerical. If you need the path itself
    as well, use :func:`single_source_dijkstra`, which computes both.

    See Also
    --------
    bidirectional_dijkstra
    bellman_ford_path_length
    single_source_dijkstra

    """
    if source not in G:
        raise nx.NodeNotFound(f"Node {source} not found in graph")
    # A node is at distance zero from itself; skip the search entirely.
    if source == target:
        return 0
    weight = _weight_function(G, weight)
    # Search stops early once `target` has been popped from the heap.
    length = _dijkstra(G, source, weight, target=target)
    try:
        return length[target]
    except KeyError as err:
        raise nx.NetworkXNoPath(f"Node {target} not reachable from {source}") from err
+
+
@nx._dispatchable(edge_attrs="weight")
def single_source_dijkstra_path(G, source, cutoff=None, weight="weight"):
    """Find shortest weighted paths in G from a source node.

    Compute shortest path between source and all other reachable
    nodes for a weighted graph.

    Parameters
    ----------
    G : NetworkX graph

    source : node
        Starting node for path.

    cutoff : integer or float, optional
        Length (sum of edge weights) at which the search is stopped.
        If cutoff is provided, only return paths with summed weight <= cutoff.

    weight : string or function
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number or None to indicate a hidden edge.

    Returns
    -------
    paths : dictionary
        Dictionary of shortest paths keyed by target.

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> path = nx.single_source_dijkstra_path(G, 0)
    >>> path[4]
    [0, 1, 2, 3, 4]

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    The weight function can be used to hide edges by returning None.
    So ``weight = lambda u, v, d: 1 if d['color']=="red" else None``
    will find the shortest red path.

    See Also
    --------
    single_source_dijkstra, single_source_bellman_ford

    """
    # A single-source search is a multi-source search with one source.
    return multi_source_dijkstra_path(G, {source}, cutoff=cutoff, weight=weight)
+
+
@nx._dispatchable(edge_attrs="weight")
def single_source_dijkstra_path_length(G, source, cutoff=None, weight="weight"):
    """Find shortest weighted path lengths in `G` from a source node.

    Computes the shortest weighted path length from `source` to every
    other reachable node of the graph.

    Parameters
    ----------
    G : NetworkX graph

    source : node label
        Starting node for paths.

    cutoff : integer or float, optional
        Length (sum of edge weights) at which the search is stopped.
        If given, only nodes whose summed path weight is <= cutoff are
        returned.

    weight : string or function
        If a string, edge weights are read from the edge attribute with
        that key (``G.edges[u, v][weight]``); a missing attribute counts
        as weight one. If a function, it must accept the two endpoints of
        an edge and the edge-attribute dictionary, and return a number or
        None (None hides the edge).

    Returns
    -------
    length : dict
        Dict keyed by node to shortest path length from source.

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> length = nx.single_source_dijkstra_path_length(G, 0)
    >>> length[4]
    4

    Notes
    -----
    Edge weight attributes must be numerical; distances are sums of edge
    weights. A weight function returning None hides the corresponding
    edge from the search.

    See Also
    --------
    single_source_dijkstra, single_source_bellman_ford_path_length

    """
    # Delegate to the multi-source variant with a singleton source set.
    sources = {source}
    return multi_source_dijkstra_path_length(G, sources, cutoff=cutoff, weight=weight)
+
+
@nx._dispatchable(edge_attrs="weight")
def single_source_dijkstra(G, source, target=None, cutoff=None, weight="weight"):
    """Find shortest weighted paths and lengths from a source node.

    Uses Dijkstra's algorithm to compute shortest paths and path lengths
    from `source` to `target`, or to every reachable node when no target
    is given.

    Parameters
    ----------
    G : NetworkX graph

    source : node label
        Starting node for paths.

    target : node label, optional
        Ending node; when given, the search halts as soon as the target
        is settled.

    cutoff : integer or float, optional
        Length (sum of edge weights) at which the search is stopped.
        If given, only paths with summed weight <= cutoff are returned.

    weight : string or function
        If a string, edge weights are read from the edge attribute with
        that key (``G.edges[u, v][weight]``); a missing attribute counts
        as weight one. If a function, it must accept the two endpoints of
        an edge and the edge-attribute dictionary, and return a number or
        None (None hides the edge).

    Returns
    -------
    distance, path : pair of dictionaries, or numeric and list.
        With ``target=None``, a tuple of two dicts keyed by node: the
        first maps each node to its distance from the source, the second
        to the corresponding path. With a target, a tuple
        ``(distance, path)`` for that single target.

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> length, path = nx.single_source_dijkstra(G, 0)
    >>> length[4]
    4
    >>> path[4]
    [0, 1, 2, 3, 4]
    >>> length, path = nx.single_source_dijkstra(G, 0, 1)
    >>> length
    1
    >>> path
    [0, 1]

    Notes
    -----
    Edge weight attributes must be numerical; distances are sums of edge
    weights. A weight function returning None hides the corresponding
    edge. Based on the Python cookbook recipe (119466) at
    https://code.activestate.com/recipes/119466/

    This algorithm is not guaranteed to work if edge weights
    are negative or are floating point numbers
    (overflows and roundoff errors can cause problems).

    See Also
    --------
    single_source_dijkstra_path
    single_source_dijkstra_path_length
    single_source_bellman_ford
    """
    # Treat the single source as a one-element source set.
    return multi_source_dijkstra(
        G, {source}, cutoff=cutoff, target=target, weight=weight
    )
+
+
@nx._dispatchable(edge_attrs="weight")
def multi_source_dijkstra_path(G, sources, cutoff=None, weight="weight"):
    """Find shortest weighted paths in `G` from a set of source nodes.

    Computes, for every reachable node, a shortest path starting from
    whichever of the given sources is nearest.

    Parameters
    ----------
    G : NetworkX graph

    sources : non-empty set of nodes
        Starting nodes for paths. With a single-element set every path
        starts at that node; with several, each path may begin at any of
        them.

    cutoff : integer or float, optional
        Length (sum of edge weights) at which the search is stopped.
        If given, only paths with summed weight <= cutoff are returned.

    weight : string or function
        If a string, edge weights are read from the edge attribute with
        that key (``G.edges[u, v][weight]``); a missing attribute counts
        as weight one. If a function, it must accept the two endpoints of
        an edge and the edge-attribute dictionary, and return a number or
        None (None hides the edge).

    Returns
    -------
    paths : dictionary
        Dictionary of shortest paths keyed by target.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> path = nx.multi_source_dijkstra_path(G, {0, 4})
    >>> path[1]
    [0, 1]
    >>> path[3]
    [4, 3]

    Notes
    -----
    Edge weight attributes must be numerical; distances are sums of edge
    weights. A weight function returning None hides the corresponding
    edge.

    Raises
    ------
    ValueError
        If `sources` is empty.
    NodeNotFound
        If any of `sources` is not in `G`.

    See Also
    --------
    multi_source_dijkstra, multi_source_bellman_ford

    """
    # Compute both lengths and paths, then return only the paths.
    _, paths = multi_source_dijkstra(G, sources, cutoff=cutoff, weight=weight)
    return paths
+
+
@nx._dispatchable(edge_attrs="weight")
def multi_source_dijkstra_path_length(G, sources, cutoff=None, weight="weight"):
    """Find shortest weighted path lengths in `G` from a set of sources.

    For every reachable node, computes the length of a shortest path from
    the nearest of the given source nodes.

    Parameters
    ----------
    G : NetworkX graph

    sources : non-empty set of nodes
        Starting nodes for paths. With a single-element set every path
        starts at that node; with several, each path may begin at any of
        them.

    cutoff : integer or float, optional
        Length (sum of edge weights) at which the search is stopped.
        If given, only nodes whose summed path weight is <= cutoff are
        returned.

    weight : string or function
        If a string, edge weights are read from the edge attribute with
        that key (``G.edges[u, v][weight]``); a missing attribute counts
        as weight one. If a function, it must accept the two endpoints of
        an edge and the edge-attribute dictionary, and return a number or
        None (None hides the edge).

    Returns
    -------
    length : dict
        Dict keyed by node to shortest path length to nearest source.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> length = nx.multi_source_dijkstra_path_length(G, {0, 4})
    >>> length[2]
    2

    Notes
    -----
    Edge weight attributes must be numerical; distances are sums of edge
    weights. A weight function returning None hides the corresponding
    edge.

    Raises
    ------
    ValueError
        If `sources` is empty.
    NodeNotFound
        If any of `sources` is not in `G`.

    See Also
    --------
    multi_source_dijkstra

    """
    # Validate inputs up front so errors surface before the search starts.
    if not sources:
        raise ValueError("sources must not be empty")
    for s in sources:
        if s not in G:
            raise nx.NodeNotFound(f"Node {s} not found in graph")
    weight_fn = _weight_function(G, weight)
    return _dijkstra_multisource(G, sources, weight_fn, cutoff=cutoff)
+
+
@nx._dispatchable(edge_attrs="weight")
def multi_source_dijkstra(G, sources, target=None, cutoff=None, weight="weight"):
    """Find shortest weighted paths and lengths from a set of source nodes.

    Uses Dijkstra's algorithm to compute shortest paths and lengths from
    any of the `sources` to the given `target`, or to every reachable
    node when no target is specified.

    Parameters
    ----------
    G : NetworkX graph

    sources : non-empty set of nodes
        Starting nodes for paths. With a single-element set every path
        starts at that node; with several, each path may begin at any of
        them.

    target : node label, optional
        Ending node; when given, the search halts as soon as the target
        is settled.

    cutoff : integer or float, optional
        Length (sum of edge weights) at which the search is stopped.
        If given, only paths with summed weight <= cutoff are returned.

    weight : string or function
        If a string, edge weights are read from the edge attribute with
        that key (``G.edges[u, v][weight]``); a missing attribute counts
        as weight one. If a function, it must accept the two endpoints of
        an edge and the edge-attribute dictionary, and return a number or
        None (None hides the edge).

    Returns
    -------
    distance, path : pair of dictionaries, or numeric and list
        With ``target=None``, a tuple of two dicts keyed by node: the
        first maps each node to its distance from the nearest source, the
        second to the corresponding path. With a target, a tuple
        ``(distance, path)`` for that single target.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> length, path = nx.multi_source_dijkstra(G, {0, 4})
    >>> length[2]
    2
    >>> path[1]
    [0, 1]
    >>> length, path = nx.multi_source_dijkstra(G, {0, 4}, 1)
    >>> length
    1
    >>> path
    [0, 1]

    Notes
    -----
    Edge weight attributes must be numerical; distances are sums of edge
    weights. A weight function returning None hides the corresponding
    edge. Based on the Python cookbook recipe (119466) at
    https://code.activestate.com/recipes/119466/

    This algorithm is not guaranteed to work if edge weights
    are negative or are floating point numbers
    (overflows and roundoff errors can cause problems).

    Raises
    ------
    ValueError
        If `sources` is empty.
    NodeNotFound
        If any of `sources` is not in `G`.

    See Also
    --------
    multi_source_dijkstra_path
    multi_source_dijkstra_path_length

    """
    if not sources:
        raise ValueError("sources must not be empty")
    for s in sources:
        if s not in G:
            raise nx.NodeNotFound(f"Node {s} not found in graph")
    # A target that is itself a source is trivially at distance zero.
    if target in sources:
        return (0, [target])
    weight_fn = _weight_function(G, weight)
    # Seed each source with the trivial path to itself; the search
    # extends these in place.
    paths = {s: [s] for s in sources}
    dist = _dijkstra_multisource(
        G, sources, weight_fn, paths=paths, cutoff=cutoff, target=target
    )
    if target is None:
        return (dist, paths)
    try:
        return (dist[target], paths[target])
    except KeyError as err:
        raise nx.NetworkXNoPath(f"No path to {target}.") from err
+
+
def _dijkstra(G, source, weight, pred=None, paths=None, cutoff=None, target=None):
    """Single-source wrapper around :func:`_dijkstra_multisource`.

    Identical to calling :func:`_dijkstra_multisource` with the `sources`
    argument set to the one-element list ``[source]``; all other keyword
    arguments are forwarded unchanged.
    """
    return _dijkstra_multisource(
        G, [source], weight, pred=pred, paths=paths, cutoff=cutoff, target=target
    )
+
+
+def _dijkstra_multisource(
+    G, sources, weight, pred=None, paths=None, cutoff=None, target=None
+):
+    """Uses Dijkstra's algorithm to find shortest weighted paths
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    sources : non-empty iterable of nodes
+        Starting nodes for paths. If this is just an iterable containing
+        a single node, then all paths computed by this function will
+        start from that node. If there are two or more nodes in this
+        iterable, the computed paths may begin from any one of the start
+        nodes.
+
+    weight: function
+        Function with (u, v, data) input that returns that edge's weight
+        or None to indicate a hidden edge
+
+    pred: dict of lists, optional(default=None)
+        dict to store a list of predecessors keyed by that node
+        If None, predecessors are not stored.
+
+    paths: dict, optional (default=None)
+        dict to store the path list from source to each node, keyed by node.
+        If None, paths are not stored.
+
+    target : node label, optional
+        Ending node for path. Search is halted when target is found.
+
+    cutoff : integer or float, optional
+        Length (sum of edge weights) at which the search is stopped.
+        If cutoff is provided, only return paths with summed weight <= cutoff.
+
+    Returns
+    -------
+    distance : dictionary
+        A mapping from node to shortest distance to that node from one
+        of the source nodes.
+
+    Raises
+    ------
+    NodeNotFound
+        If any of `sources` is not in `G`.
+
+    Notes
+    -----
+    The optional predecessor and path dictionaries can be accessed by
+    the caller through the original pred and paths objects passed
+    as arguments. No need to explicitly return pred or paths.
+
+    """
+    G_succ = G._adj  # For speed-up (and works for both directed and undirected graphs)
+
+    push = heappush
+    pop = heappop
+    dist = {}  # dictionary of final distances
+    seen = {}
+    # fringe is heapq with 3-tuples (distance,c,node)
+    # use the count c to avoid comparing nodes (may not be able to)
+    c = count()
+    fringe = []
+    for source in sources:
+        seen[source] = 0
+        push(fringe, (0, next(c), source))
+    while fringe:
+        (d, _, v) = pop(fringe)
+        if v in dist:
+            continue  # already searched this node.
+        dist[v] = d
+        if v == target:
+            break
+        for u, e in G_succ[v].items():
+            cost = weight(v, u, e)
+            if cost is None:
+                continue
+            vu_dist = dist[v] + cost
+            if cutoff is not None:
+                if vu_dist > cutoff:
+                    continue
+            if u in dist:
+                u_dist = dist[u]
+                if vu_dist < u_dist:
+                    raise ValueError("Contradictory paths found:", "negative weights?")
+                elif pred is not None and vu_dist == u_dist:
+                    pred[u].append(v)
+            elif u not in seen or vu_dist < seen[u]:
+                seen[u] = vu_dist
+                push(fringe, (vu_dist, next(c), u))
+                if paths is not None:
+                    paths[u] = paths[v] + [u]
+                if pred is not None:
+                    pred[u] = [v]
+            elif vu_dist == seen[u]:
+                if pred is not None:
+                    pred[u].append(v)
+
+    # The optional predecessor and path dictionaries can be accessed
+    # by the caller via the pred and paths objects passed as arguments.
+    return dist
+
+
@nx._dispatchable(edge_attrs="weight")
def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight="weight"):
    """Compute weighted shortest path lengths and predecessors.

    Runs Dijkstra's algorithm from `source` and returns, for every
    reached node, both its list of predecessors on shortest paths and
    its distance from the source.

    Parameters
    ----------
    G : NetworkX graph

    source : node label
        Starting node for paths.

    cutoff : integer or float, optional
        Length (sum of edge weights) at which the search is stopped.
        If given, only nodes whose summed path weight is <= cutoff are
        returned.

    weight : string or function
        If a string, edge weights are read from the edge attribute with
        that key (``G.edges[u, v][weight]``); a missing attribute counts
        as weight one. If a function, it must accept the two endpoints of
        an edge and the edge-attribute dictionary, and return a number or
        None (None hides the edge).

    Returns
    -------
    pred, distance : dictionaries
        Two dicts keyed by node: the first maps each node to its list of
        predecessors, the second to its distance from the source.

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    Notes
    -----
    Edge weight attributes must be numerical; distances are sums of edge
    weights. A predecessor list holds more than one node exactly when
    several shortest paths reach the key node.

    Examples
    --------
    >>> G = nx.path_graph(5, create_using=nx.DiGraph())
    >>> pred, dist = nx.dijkstra_predecessor_and_distance(G, 0)
    >>> sorted(pred.items())
    [(0, []), (1, [0]), (2, [1]), (3, [2]), (4, [3])]
    >>> sorted(dist.items())
    [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)]
    """
    if source not in G:
        raise nx.NodeNotFound(f"Node {source} is not found in the graph")
    weight_fn = _weight_function(G, weight)
    # The source has no predecessors; _dijkstra fills in the rest in place.
    pred = {source: []}
    dist = _dijkstra(G, source, weight_fn, pred=pred, cutoff=cutoff)
    return (pred, dist)
+
+
@nx._dispatchable(edge_attrs="weight")
def all_pairs_dijkstra(G, cutoff=None, weight="weight"):
    """Find shortest weighted paths and lengths between all nodes.

    Parameters
    ----------
    G : NetworkX graph

    cutoff : integer or float, optional
        Length (sum of edge weights) at which the search is stopped.
        If cutoff is provided, only return paths with summed weight <= cutoff.

    weight : string or function
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number or None to indicate a hidden edge.

    Yields
    ------
    (node, (distance, path)) : (node obj, (dict, dict))
        Each source node has two associated dicts. The first holds distance
        keyed by target and the second holds paths keyed by target.
        (See single_source_dijkstra for the source/target node terminology.)
        If desired you can apply `dict()` to this function to create a dict
        keyed by source node to the two dicts.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> len_path = dict(nx.all_pairs_dijkstra(G))
    >>> len_path[3][0][1]
    2
    >>> for node in [0, 1, 2, 3, 4]:
    ...     print(f"3 - {node}: {len_path[3][0][node]}")
    3 - 0: 3
    3 - 1: 2
    3 - 2: 1
    3 - 3: 0
    3 - 4: 1
    >>> len_path[3][1][1]
    [3, 2, 1]
    >>> for n, (dist, path) in nx.all_pairs_dijkstra(G):
    ...     print(path[1])
    [0, 1]
    [1]
    [2, 1]
    [3, 2, 1]
    [4, 3, 2, 1]

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    The yielded dicts only have keys for reachable nodes.
    """
    # One single-source Dijkstra run per node; lazy so callers can stop early.
    for n in G:
        dist, path = single_source_dijkstra(G, n, cutoff=cutoff, weight=weight)
        yield (n, (dist, path))
+
+
@nx._dispatchable(edge_attrs="weight")
def all_pairs_dijkstra_path_length(G, cutoff=None, weight="weight"):
    """Compute shortest path lengths between all nodes in a weighted graph.

    Parameters
    ----------
    G : NetworkX graph

    cutoff : integer or float, optional
        Length (sum of edge weights) at which the search is stopped.
        If cutoff is provided, only return paths with summed weight <= cutoff.

    weight : string or function
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number or None to indicate a hidden edge.

    Returns
    -------
    distance : iterator
        (source, dictionary) iterator with dictionary keyed by target and
        shortest path length as the key value.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> length = dict(nx.all_pairs_dijkstra_path_length(G))
    >>> for node in [0, 1, 2, 3, 4]:
    ...     print(f"1 - {node}: {length[1][node]}")
    1 - 0: 1
    1 - 1: 0
    1 - 2: 1
    1 - 3: 2
    1 - 4: 3
    >>> length[3][2]
    1
    >>> length[2][2]
    0

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    The dictionary returned only has keys for reachable node pairs.
    """
    # Run a single-source Dijkstra from every node, lazily.
    for src in G:
        yield (
            src,
            single_source_dijkstra_path_length(G, src, cutoff=cutoff, weight=weight),
        )
+
+
@nx._dispatchable(edge_attrs="weight")
def all_pairs_dijkstra_path(G, cutoff=None, weight="weight"):
    """Compute shortest paths between all nodes in a weighted graph.

    Parameters
    ----------
    G : NetworkX graph

    cutoff : integer or float, optional
        Length (sum of edge weights) at which the search is stopped.
        If cutoff is provided, only return paths with summed weight <= cutoff.

    weight : string or function
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number or None to indicate a hidden edge.

    Returns
    -------
    paths : iterator
        (source, dictionary) iterator with dictionary keyed by target and
        shortest path as the key value.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> path = dict(nx.all_pairs_dijkstra_path(G))
    >>> path[0][4]
    [0, 1, 2, 3, 4]

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    See Also
    --------
    floyd_warshall, all_pairs_bellman_ford_path

    """
    # TODO This can be trivially parallelized.
    # One single-source run per node; yielded lazily.
    for src in G:
        yield (src, single_source_dijkstra_path(G, src, cutoff=cutoff, weight=weight))
+
+
@nx._dispatchable(edge_attrs="weight")
def bellman_ford_predecessor_and_distance(
    G, source, target=None, weight="weight", heuristic=False
):
    """Compute shortest path lengths and predecessors on shortest paths
    in weighted graphs.

    The algorithm has a running time of $O(mn)$ where $n$ is the number of
    nodes and $m$ is the number of edges.  It is slower than Dijkstra but
    can handle negative edge weights.

    If a negative cycle is detected, you can use :func:`find_negative_cycle`
    to return the cycle and examine it. Shortest paths are not defined when
    a negative cycle exists because once reached, the path can cycle forever
    to build up arbitrarily low weights.

    Parameters
    ----------
    G : NetworkX graph
        The algorithm works for all types of graphs, including directed
        graphs and multigraphs.

    source: node label
        Starting node for path

    target : node label, optional
        Ending node for path

    weight : string or function
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

    heuristic : bool
        Determines whether to use a heuristic to early detect negative
        cycles at a hopefully negligible cost.

    Returns
    -------
    pred, dist : dictionaries
        Returns two dictionaries keyed by node to predecessor in the
        path and to the distance from the source respectively.

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    NetworkXUnbounded
        If the (di)graph contains a negative (di)cycle, the
        algorithm raises an exception to indicate the presence of the
        negative (di)cycle.  Note: any negative weight edge in an
        undirected graph is a negative cycle.

    Examples
    --------
    >>> G = nx.path_graph(5, create_using=nx.DiGraph())
    >>> pred, dist = nx.bellman_ford_predecessor_and_distance(G, 0)
    >>> sorted(pred.items())
    [(0, []), (1, [0]), (2, [1]), (3, [2]), (4, [3])]
    >>> sorted(dist.items())
    [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)]

    >>> pred, dist = nx.bellman_ford_predecessor_and_distance(G, 0, 1)
    >>> sorted(pred.items())
    [(0, []), (1, [0]), (2, [1]), (3, [2]), (4, [3])]
    >>> sorted(dist.items())
    [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)]

    >>> G = nx.cycle_graph(5, create_using=nx.DiGraph())
    >>> G[1][2]["weight"] = -7
    >>> nx.bellman_ford_predecessor_and_distance(G, 0)
    Traceback (most recent call last):
        ...
    networkx.exception.NetworkXUnbounded: Negative cycle detected.

    See Also
    --------
    find_negative_cycle

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    The dictionaries returned only have keys for nodes reachable from
    the source.

    In the case where the (di)graph is not connected, if a component
    not containing the source contains a negative (di)cycle, it
    will not be detected.

    In NetworkX v2.1 and prior, the source node had predecessor `[None]`.
    In NetworkX v2.2 this changed to the source node having predecessor `[]`
    """
    if source not in G:
        raise nx.NodeNotFound(f"Node {source} is not found in the graph")
    # Normalize `weight` to a callable once; _bellman_ford expects a function.
    weight = _weight_function(G, weight)
    # A negative self-loop is by itself a negative cycle -- check up front,
    # since the main relaxation loop only visits nodes reachable from source.
    if G.is_multigraph():
        if any(
            weight(u, v, {k: d}) < 0
            for u, v, k, d in nx.selfloop_edges(G, keys=True, data=True)
        ):
            raise nx.NetworkXUnbounded("Negative cycle detected.")
    else:
        if any(weight(u, v, d) < 0 for u, v, d in nx.selfloop_edges(G, data=True)):
            raise nx.NetworkXUnbounded("Negative cycle detected.")

    dist = {source: 0}
    pred = {source: []}

    # Single-node graph: nothing to relax.
    if len(G) == 1:
        return pred, dist

    dist = _bellman_ford(
        G, [source], weight, pred=pred, dist=dist, target=target, heuristic=heuristic
    )
    return (pred, dist)
+
+
def _bellman_ford(
    G,
    source,
    weight,
    pred=None,
    paths=None,
    dist=None,
    target=None,
    heuristic=True,
):
    """Run the Bellman-Ford relaxation loop and optionally build paths.

    This is an implementation of the SPFA variant.
    See https://en.wikipedia.org/wiki/Shortest_Path_Faster_Algorithm

    Parameters
    ----------
    G : NetworkX graph

    source: list
        List of source nodes. The shortest path from any of the source
        nodes will be found if multiple sources are provided.

    weight : function
        The weight of an edge is the value returned by the function. The
        function must accept exactly three positional arguments: the two
        endpoints of an edge and the dictionary of edge attributes for
        that edge. The function must return a number.

    pred: dict of lists, optional (default=None)
        dict to store a list of predecessors keyed by that node
        If None, predecessors are not stored

    paths: dict, optional (default=None)
        dict to store the path list from source to each node, keyed by node
        If None, paths are not stored

    dist: dict, optional (default=None)
        dict to store distance from source to the keyed node
        If None, returned dist dict contents default to 0 for every node in the
        source list

    target: node label, optional
        Ending node for path. Path lengths to other destinations may (and
        probably will) be incorrect.

    heuristic : bool
        Determines whether to use a heuristic to early detect negative
        cycles at a hopefully negligible cost.

    Returns
    -------
    dist : dict
        Returns a dict keyed by node to the distance from the source.
        Dicts for paths and pred are in the mutated input dicts by those names.

    Raises
    ------
    NodeNotFound
        If any of `source` is not in `G`.

    NetworkXUnbounded
        If the (di)graph contains a negative (di)cycle, the
        algorithm raises an exception to indicate the presence of the
        negative (di)cycle.  Note: any negative weight edge in an
        undirected graph is a negative cycle
    """
    # Default predecessor lists and zero distances for every source.
    pred = {v: [] for v in source} if pred is None else pred
    dist = {v: 0 for v in source} if dist is None else dist

    bad_node = _inner_bellman_ford(G, source, weight, pred, dist, heuristic)
    if bad_node is not None:
        raise nx.NetworkXUnbounded("Negative cycle detected.")

    if paths is not None:
        # Materialize one shortest path per requested destination.
        src_set = set(source)
        destinations = pred if target is None else [target]
        for dst in destinations:
            paths[dst] = next(_build_paths_from_predecessors(src_set, dst, pred))

    return dist
+
+
def _inner_bellman_ford(
    G,
    sources,
    weight,
    pred,
    dist=None,
    heuristic=True,
):
    """Inner Relaxation loop for Bellman–Ford algorithm.

    This is an implementation of the SPFA variant.
    See https://en.wikipedia.org/wiki/Shortest_Path_Faster_Algorithm

    Parameters
    ----------
    G : NetworkX graph

    sources : list
        List of source nodes. The shortest path from any of the source
        nodes will be found if multiple sources are provided.

    weight : function
        The weight of an edge is the value returned by the function. The
        function must accept exactly three positional arguments: the two
        endpoints of an edge and the dictionary of edge attributes for
        that edge. The function must return a number.

    pred: dict of lists
        dict to store a list of predecessors keyed by that node.
        Mutated in place during relaxation.

    dist: dict, optional (default=None)
        dict to store distance from source to the keyed node.
        Mutated in place during relaxation.
        If None, returned dist dict contents default to 0 for every node in the
        source list

    heuristic : bool
        Determines whether to use a heuristic to early detect negative
        cycles at a hopefully negligible cost.

    Returns
    -------
    node or None
        Return a node `v` where processing discovered a negative cycle.
        If no negative cycle found, return None.

    Raises
    ------
    NodeNotFound
        If any of `source` is not in `G`.
    """
    for s in sources:
        if s not in G:
            raise nx.NodeNotFound(f"Source {s} not in G")

    if pred is None:
        pred = {v: [] for v in sources}

    if dist is None:
        dist = {v: 0 for v in sources}

    # Heuristic Storage setup. Note: use None because nodes cannot be None
    nonexistent_edge = (None, None)
    pred_edge = {v: None for v in sources}
    recent_update = {v: nonexistent_edge for v in sources}

    G_succ = G._adj  # For speed-up (and works for both directed and undirected graphs)
    inf = float("inf")
    n = len(G)

    # count[v]: number of times v has entered the queue; reaching n means a
    # negative cycle (a node can be relaxed at most n-1 times otherwise).
    count = {}
    q = deque(sources)
    in_q = set(sources)  # membership mirror of q for O(1) "already queued" tests
    while q:
        u = q.popleft()
        in_q.remove(u)

        # Skip relaxations if any of the predecessors of u is in the queue.
        # (dist[u] would be improved again anyway once that predecessor is
        # processed, so relaxing from u now would be wasted work.)
        if all(pred_u not in in_q for pred_u in pred[u]):
            dist_u = dist[u]
            for v, e in G_succ[u].items():
                dist_v = dist_u + weight(u, v, e)

                if dist_v < dist.get(v, inf):
                    # In this conditional branch we are updating the path with v.
                    # If it happens that some earlier update also added node v
                    # that implies the existence of a negative cycle since
                    # after the update node v would lie on the update path twice.
                    # The update path is stored up to one of the source nodes,
                    # therefore u is always in the dict recent_update
                    if heuristic:
                        if v in recent_update[u]:
                            # Negative cycle found!
                            pred[v].append(u)
                            return v

                        # Transfer the recent update info from u to v if the
                        # same source node is the head of the update path.
                        # If the source node is responsible for the cost update,
                        # then clear the history and use it instead.
                        if v in pred_edge and pred_edge[v] == u:
                            recent_update[v] = recent_update[u]
                        else:
                            recent_update[v] = (u, v)

                    if v not in in_q:
                        q.append(v)
                        in_q.add(v)
                        count_v = count.get(v, 0) + 1
                        if count_v == n:
                            # Negative cycle found!
                            return v

                        count[v] = count_v
                    dist[v] = dist_v
                    pred[v] = [u]
                    pred_edge[v] = u

                elif dist.get(v) is not None and dist_v == dist.get(v):
                    # Equal-cost alternative: record u as an extra predecessor.
                    pred[v].append(u)

    # successfully found shortest_path. No negative cycles found.
    return None
+
+
@nx._dispatchable(edge_attrs="weight")
def bellman_ford_path(G, source, target, weight="weight"):
    """Returns the shortest path from source to target in a weighted graph G.

    Parameters
    ----------
    G : NetworkX graph

    source : node
        Starting node

    target : node
        Ending node

    weight : string or function (default="weight")
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

    Returns
    -------
    path : list
        List of nodes in a shortest path.

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    NetworkXNoPath
        If no path exists between source and target.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> nx.bellman_ford_path(G, 0, 4)
    [0, 1, 2, 3, 4]

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    See Also
    --------
    dijkstra_path, bellman_ford_path_length
    """
    # Delegate to the combined routine and discard the length.
    _, path = single_source_bellman_ford(G, source, target=target, weight=weight)
    return path
+
+
@nx._dispatchable(edge_attrs="weight")
def bellman_ford_path_length(G, source, target, weight="weight"):
    """Returns the shortest path length from source to target
    in a weighted graph.

    Parameters
    ----------
    G : NetworkX graph

    source : node label
        starting node for path

    target : node label
        ending node for path

    weight : string or function (default="weight")
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

    Returns
    -------
    length : number
        Shortest path length.

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    NetworkXNoPath
        If no path exists between source and target.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> nx.bellman_ford_path_length(G, 0, 4)
    4

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    See Also
    --------
    dijkstra_path_length, bellman_ford_path
    """
    # Trivial case: distance from a node to itself is zero.
    if source == target:
        if source not in G:
            raise nx.NodeNotFound(f"Node {source} not found in graph")
        return 0

    weight_fn = _weight_function(G, weight)
    lengths = _bellman_ford(G, [source], weight_fn, target=target)

    try:
        return lengths[target]
    except KeyError as err:
        raise nx.NetworkXNoPath(f"node {target} not reachable from {source}") from err
+
+
@nx._dispatchable(edge_attrs="weight")
def single_source_bellman_ford_path(G, source, weight="weight"):
    """Compute shortest path between source and all other reachable
    nodes for a weighted graph.

    Parameters
    ----------
    G : NetworkX graph

    source : node
        Starting node for path.

    weight : string or function (default="weight")
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

    Returns
    -------
    paths : dictionary
        Dictionary of shortest paths keyed by target.

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> path = nx.single_source_bellman_ford_path(G, 0)
    >>> path[4]
    [0, 1, 2, 3, 4]

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    See Also
    --------
    single_source_dijkstra, single_source_bellman_ford

    """
    (length, path) = single_source_bellman_ford(G, source, weight=weight)
    return path
+
+
@nx._dispatchable(edge_attrs="weight")
def single_source_bellman_ford_path_length(G, source, weight="weight"):
    """Compute the shortest path length between source and all other
    reachable nodes for a weighted graph.

    Parameters
    ----------
    G : NetworkX graph

    source : node label
        Starting node for path

    weight : string or function (default="weight")
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

    Returns
    -------
    length : dictionary
        Dictionary of shortest path length keyed by target

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> length = nx.single_source_bellman_ford_path_length(G, 0)
    >>> length[4]
    4
    >>> for node in [0, 1, 2, 3, 4]:
    ...     print(f"{node}: {length[node]}")
    0: 0
    1: 1
    2: 2
    3: 3
    4: 4

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    See Also
    --------
    single_source_dijkstra, single_source_bellman_ford

    """
    # Resolve the weight spec to a callable, then run the relaxation loop.
    return _bellman_ford(G, [source], _weight_function(G, weight))
+
+
@nx._dispatchable(edge_attrs="weight")
def single_source_bellman_ford(G, source, target=None, weight="weight"):
    """Compute shortest paths and lengths in a weighted graph G.

    Uses Bellman-Ford algorithm for shortest paths.

    Parameters
    ----------
    G : NetworkX graph

    source : node label
        Starting node for path

    target : node label, optional
        Ending node for path

    weight : string or function
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

    Returns
    -------
    distance, path : pair of dictionaries, or numeric and list
        If target is None, returns a tuple of two dictionaries keyed by node.
        The first dictionary stores distance from one of the source nodes.
        The second stores the path from one of the sources to that node.
        If target is not None, returns a tuple of (distance, path) where
        distance is the distance from source to target and path is a list
        representing the path from source to target.

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> length, path = nx.single_source_bellman_ford(G, 0)
    >>> length[4]
    4
    >>> for node in [0, 1, 2, 3, 4]:
    ...     print(f"{node}: {length[node]}")
    0: 0
    1: 1
    2: 2
    3: 3
    4: 4
    >>> path[4]
    [0, 1, 2, 3, 4]
    >>> length, path = nx.single_source_bellman_ford(G, 0, 1)
    >>> length
    1
    >>> path
    [0, 1]

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    See Also
    --------
    single_source_dijkstra
    single_source_bellman_ford_path
    single_source_bellman_ford_path_length
    """
    # Degenerate case: source and target coincide.
    if source == target:
        if source not in G:
            raise nx.NodeNotFound(f"Node {source} is not found in the graph")
        return (0, [source])

    weight_fn = _weight_function(G, weight)

    # _bellman_ford fills `path_map` in place, keyed by destination.
    path_map = {source: [source]}
    dist_map = _bellman_ford(G, [source], weight_fn, paths=path_map, target=target)
    if target is None:
        return (dist_map, path_map)
    try:
        return (dist_map[target], path_map[target])
    except KeyError as err:
        msg = f"Node {target} not reachable from {source}"
        raise nx.NetworkXNoPath(msg) from err
+
+
@nx._dispatchable(edge_attrs="weight")
def all_pairs_bellman_ford_path_length(G, weight="weight"):
    """Compute shortest path lengths between all nodes in a weighted graph.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or function (default="weight")
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

    Returns
    -------
    distance : iterator
        (source, dictionary) iterator with dictionary keyed by target and
        shortest path length as the key value.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> length = dict(nx.all_pairs_bellman_ford_path_length(G))
    >>> for node in [0, 1, 2, 3, 4]:
    ...     print(f"1 - {node}: {length[1][node]}")
    1 - 0: 1
    1 - 1: 0
    1 - 2: 1
    1 - 3: 2
    1 - 4: 3
    >>> length[3][2]
    1
    >>> length[2][2]
    0

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    The dictionary returned only has keys for reachable node pairs.
    """
    # One single-source Bellman-Ford run per node, yielded lazily.
    for src in G:
        yield (src, dict(single_source_bellman_ford_path_length(G, src, weight=weight)))
+
+
@nx._dispatchable(edge_attrs="weight")
def all_pairs_bellman_ford_path(G, weight="weight"):
    """Compute shortest paths between all nodes in a weighted graph.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or function (default="weight")
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

    Returns
    -------
    paths : iterator
        (source, dictionary) iterator with dictionary keyed by target and
        shortest path as the key value.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> path = dict(nx.all_pairs_bellman_ford_path(G))
    >>> path[0][4]
    [0, 1, 2, 3, 4]

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    See Also
    --------
    floyd_warshall, all_pairs_dijkstra_path

    """
    # One single-source Bellman-Ford run per node, yielded lazily.
    for src in G:
        yield (src, single_source_bellman_ford_path(G, src, weight=weight))
+
+
@nx._dispatchable(edge_attrs="weight")
def goldberg_radzik(G, source, weight="weight"):
    """Compute shortest path lengths and predecessors on shortest paths
    in weighted graphs.

    The algorithm has a running time of $O(mn)$ where $n$ is the number of
    nodes and $m$ is the number of edges.  It is slower than Dijkstra but
    can handle negative edge weights.

    Parameters
    ----------
    G : NetworkX graph
        The algorithm works for all types of graphs, including directed
        graphs and multigraphs.

    source: node label
        Starting node for path

    weight : string or function
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

    Returns
    -------
    pred, dist : dictionaries
        Returns two dictionaries keyed by node to predecessor in the
        path and to the distance from the source respectively.

    Raises
    ------
    NodeNotFound
        If `source` is not in `G`.

    NetworkXUnbounded
        If the (di)graph contains a negative (di)cycle, the
        algorithm raises an exception to indicate the presence of the
        negative (di)cycle.  Note: any negative weight edge in an
        undirected graph is a negative cycle.

        As of NetworkX v3.2, a zero weight cycle is no longer
        incorrectly reported as a negative weight cycle.


    Examples
    --------
    >>> G = nx.path_graph(5, create_using=nx.DiGraph())
    >>> pred, dist = nx.goldberg_radzik(G, 0)
    >>> sorted(pred.items())
    [(0, None), (1, 0), (2, 1), (3, 2), (4, 3)]
    >>> sorted(dist.items())
    [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)]

    >>> G = nx.cycle_graph(5, create_using=nx.DiGraph())
    >>> G[1][2]["weight"] = -7
    >>> nx.goldberg_radzik(G, 0)
    Traceback (most recent call last):
        ...
    networkx.exception.NetworkXUnbounded: Negative cycle detected.

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    The dictionaries returned only have keys for nodes reachable from
    the source.

    In the case where the (di)graph is not connected, if a component
    not containing the source contains a negative (di)cycle, it
    will not be detected.

    """
    if source not in G:
        raise nx.NodeNotFound(f"Node {source} is not found in the graph")
    weight = _weight_function(G, weight)
    # A negative self-loop is by itself a negative cycle; detect it up front
    # since the DFS below never follows self-loops.
    if G.is_multigraph():
        if any(
            weight(u, v, {k: d}) < 0
            for u, v, k, d in nx.selfloop_edges(G, keys=True, data=True)
        ):
            raise nx.NetworkXUnbounded("Negative cycle detected.")
    else:
        if any(weight(u, v, d) < 0 for u, v, d in nx.selfloop_edges(G, data=True)):
            raise nx.NetworkXUnbounded("Negative cycle detected.")

    if len(G) == 1:
        return {source: None}, {source: 0}

    G_succ = G._adj  # For speed-up (and works for both directed and undirected graphs)

    inf = float("inf")
    d = {u: inf for u in G}
    d[source] = 0
    pred = {source: None}

    def topo_sort(relabeled):
        """Topologically sort nodes relabeled in the previous round and detect
        negative cycles.
        """
        # List of nodes to scan in this round. Denoted by A in Goldberg and
        # Radzik's paper.
        to_scan = []
        # In the DFS in the loop below, neg_count records for each node the
        # number of edges of negative reduced costs on the path from a DFS root
        # to the node in the DFS forest. The reduced cost of an edge (u, v) is
        # defined as d[u] + weight[u][v] - d[v].
        #
        # neg_count also doubles as the DFS visit marker array.
        neg_count = {}
        for u in relabeled:
            # Skip visited nodes.
            if u in neg_count:
                continue
            d_u = d[u]
            # Skip nodes without out-edges of negative reduced costs.
            if all(d_u + weight(u, v, e) >= d[v] for v, e in G_succ[u].items()):
                continue
            # Nonrecursive DFS that inserts nodes reachable from u via edges of
            # nonpositive reduced costs into to_scan in (reverse) topological
            # order.
            stack = [(u, iter(G_succ[u].items()))]
            in_stack = {u}
            neg_count[u] = 0
            while stack:
                u, it = stack[-1]
                try:
                    v, e = next(it)
                except StopIteration:
                    to_scan.append(u)
                    stack.pop()
                    in_stack.remove(u)
                    continue
                t = d[u] + weight(u, v, e)
                d_v = d[v]
                # Follow edges of *nonpositive* reduced cost (t <= d_v), but
                # count only the strictly negative ones (is_neg) toward cycle
                # detection.  A strict comparison here would make is_neg
                # trivially True and skip zero-reduced-cost edges, which both
                # breaks the traversal described above and misreports zero
                # weight cycles as negative.
                if t <= d_v:
                    is_neg = t < d_v
                    d[v] = t
                    pred[v] = u
                    if v not in neg_count:
                        neg_count[v] = neg_count[u] + int(is_neg)
                        stack.append((v, iter(G_succ[v].items())))
                        in_stack.add(v)
                    elif v in in_stack and neg_count[u] + int(is_neg) > neg_count[v]:
                        # (u, v) is a back edge, and the cycle formed by the
                        # path v to u and (u, v) contains at least one edge of
                        # negative reduced cost. The cycle must be of negative
                        # cost.
                        raise nx.NetworkXUnbounded("Negative cycle detected.")
        to_scan.reverse()
        return to_scan

    def relax(to_scan):
        """Relax out-edges of relabeled nodes."""
        relabeled = set()
        # Scan nodes in to_scan in topological order and relax incident
        # out-edges. Collect the relabeled nodes for the next round.
        for u in to_scan:
            d_u = d[u]
            for v, e in G_succ[u].items():
                w_e = weight(u, v, e)
                if d_u + w_e < d[v]:
                    d[v] = d_u + w_e
                    pred[v] = u
                    relabeled.add(v)
        return relabeled

    # Set of nodes relabeled in the last round of scan operations. Denoted by B
    # in Goldberg and Radzik's paper.
    relabeled = {source}

    while relabeled:
        to_scan = topo_sort(relabeled)
        relabeled = relax(to_scan)

    # Only nodes with a predecessor entry were ever reached; restrict the
    # distance dict accordingly so unreachable nodes (still at inf) are absent.
    d = {u: d[u] for u in pred}
    return pred, d
+
+
@nx._dispatchable(edge_attrs="weight")
def negative_edge_cycle(G, weight="weight", heuristic=True):
    """Returns True if there exists a negative edge cycle anywhere in G.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or function
        If a string, edge weights are read from the edge attribute with
        that key (that is, the weight of the edge joining `u` to `v` is
        ``G.edges[u, v][weight]``); edges missing the attribute are
        assumed to have weight one.

        If a function, it is called with exactly three positional
        arguments -- the two endpoints of an edge and the dictionary of
        edge attributes for that edge -- and must return a number.

    heuristic : bool
        Whether to use a cheap heuristic for early negative-cycle
        detection.  On graphs that do contain a negative cycle this
        speeds up detection by at least an order of magnitude.

    Returns
    -------
    negative_cycle : bool
        True if a negative edge cycle exists, otherwise False.

    Examples
    --------
    >>> G = nx.cycle_graph(5, create_using=nx.DiGraph())
    >>> print(nx.negative_edge_cycle(G))
    False
    >>> G[1][2]["weight"] = -7
    >>> print(nx.negative_edge_cycle(G))
    True

    Notes
    -----
    Edge weight attributes must be numerical; distances are sums of the
    weights of traversed edges.

    Internally a temporary node connected to every node of `G` is added,
    so that a single run of bellman_ford_predecessor_and_distance from
    that node reaches -- and therefore checks -- every component of `G`.
    The temporary node is removed before returning.
    """
    if G.size() == 0:
        return False

    # Pick an integer label guaranteed not to collide with existing nodes.
    probe = -1
    while probe in G:
        probe -= 1
    # Materialize the edge list *before* mutating G, then attach the probe
    # node to every node so every component becomes reachable.
    G.add_edges_from([(probe, n) for n in G])

    try:
        bellman_ford_predecessor_and_distance(
            G, probe, weight=weight, heuristic=heuristic
        )
    except nx.NetworkXUnbounded:
        return True
    finally:
        # Always restore G, whether or not a negative cycle was found.
        G.remove_node(probe)
    return False
+
+
@nx._dispatchable(edge_attrs="weight")
def find_negative_cycle(G, source, weight="weight"):
    """Returns a cycle with negative total weight if it exists.

    Bellman-Ford is used to find shortest_paths. That algorithm
    stops if there exists a negative cycle. This algorithm
    picks up from there and returns the found negative cycle.

    The cycle consists of a list of nodes in the cycle order. The last
    node equals the first to make it a cycle.
    You can look up the edge weights in the original graph. In the case
    of multigraphs the relevant edge is the minimal weight edge between
    the nodes in the 2-tuple.

    If the graph has no negative cycle, a NetworkXError is raised.

    Parameters
    ----------
    G : NetworkX graph

    source: node label
        The search for the negative cycle will start from this node.

    weight : string or function
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> G.add_weighted_edges_from(
    ...     [(0, 1, 2), (1, 2, 2), (2, 0, 1), (1, 4, 2), (4, 0, -5)]
    ... )
    >>> nx.find_negative_cycle(G, 0)
    [4, 0, 1, 4]

    Returns
    -------
    cycle : list
        A list of nodes in the order of the cycle found. The last node
        equals the first to indicate a cycle.

    Raises
    ------
    NetworkXError
        If no negative cycle is found.
    """
    weight = _weight_function(G, weight)
    pred = {source: []}

    # _inner_bellman_ford returns a node on/into a negative cycle, or None
    # when every distance stabilized (i.e. no negative cycle is reachable).
    v = _inner_bellman_ford(G, [source], weight, pred=pred)
    if v is None:
        raise nx.NetworkXError("No negative cycles detected.")

    # negative cycle detected... walk predecessor lists (DFS) until we loop
    # back to v, which closes the cycle.
    neg_cycle = []
    stack = [(v, list(pred[v]))]
    seen = {v}
    while stack:
        node, preds = stack[-1]
        if v in preds:
            # found the cycle
            neg_cycle.extend([node, v])
            neg_cycle = list(reversed(neg_cycle))
            return neg_cycle

        if preds:
            nbr = preds.pop()
            if nbr not in seen:
                stack.append((nbr, list(pred[nbr])))
                neg_cycle.append(node)
                seen.add(nbr)
        else:
            stack.pop()
            if neg_cycle:
                neg_cycle.pop()
            else:
                # Fallback: the "cycle" may be a negative self-loop on v.
                # The weight callable takes (u, v, data); G[v][v] supplies the
                # attribute dict (or, for multigraphs, the key->data mapping,
                # for which _weight_function takes the minimum over parallel
                # edges).  The previous call weight(G, v, v) passed the graph
                # as an endpoint and a node as the data dict and would crash
                # here.
                if v in G[v] and weight(v, v, G[v][v]) < 0:
                    return [v, v]
                # should not reach here
                raise nx.NetworkXError("Negative cycle is detected but not found")
    # should not get here...
    msg = "negative cycle detected but not identified"
    raise nx.NetworkXUnbounded(msg)
+
+
@nx._dispatchable(edge_attrs="weight")
def bidirectional_dijkstra(G, source, target, weight="weight"):
    r"""Dijkstra's algorithm for shortest paths using bidirectional search.

    Parameters
    ----------
    G : NetworkX graph

    source : node
        Starting node.

    target : node
        Ending node.

    weight : string or function
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number or None to indicate a hidden edge.

    Returns
    -------
    length, path : number and list
        length is the distance from source to target.
        path is a list of nodes on a path from source to target.

    Raises
    ------
    NodeNotFound
        If `source` or `target` is not in `G`.

    NetworkXNoPath
        If no path exists between source and target.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> length, path = nx.bidirectional_dijkstra(G, 0, 4)
    >>> print(length)
    4
    >>> print(path)
    [0, 1, 2, 3, 4]

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    The weight function can be used to hide edges by returning None.
    So ``weight = lambda u, v, d: 1 if d['color']=="red" else None``
    will find the shortest red path.

    In practice  bidirectional Dijkstra is much more than twice as fast as
    ordinary Dijkstra.

    Ordinary Dijkstra expands nodes in a sphere-like manner from the
    source. The radius of this sphere will eventually be the length
    of the shortest path. Bidirectional Dijkstra will expand nodes
    from both the source and the target, making two spheres of half
    this radius. Volume of the first sphere is `\pi*r*r` while the
    others are `2*\pi*r/2*r/2`, making up half the volume.

    This algorithm is not guaranteed to work if edge weights
    are negative or are floating point numbers
    (overflows and roundoff errors can cause problems).

    See Also
    --------
    shortest_path
    shortest_path_length
    """
    if source not in G:
        raise nx.NodeNotFound(f"Source {source} is not in G")

    if target not in G:
        raise nx.NodeNotFound(f"Target {target} is not in G")

    if source == target:
        return (0, [source])

    weight = _weight_function(G, weight)
    push = heappush
    pop = heappop
    # Two searches run in lockstep; index 0 is the forward search from
    # `source`, index 1 the backward search from `target`.
    # Init:  [Forward, Backward]
    dists = [{}, {}]  # dictionary of final (settled) distances
    paths = [{source: [source]}, {target: [target]}]  # dictionary of paths
    fringe = [[], []]  # heap of (distance, node) for choosing node to expand
    seen = [{source: 0}, {target: 0}]  # dict of tentative distances to seen nodes
    c = count()  # tie-breaker so the heap never compares node objects
    # initialize fringe heap
    push(fringe[0], (0, next(c), source))
    push(fringe[1], (0, next(c), target))
    # neighs for extracting correct neighbor information:
    # the backward search must traverse edges against their direction.
    if G.is_directed():
        neighs = [G._succ, G._pred]
    else:
        neighs = [G._adj, G._adj]
    # Best meeting path discovered so far.  finaldist is only read after
    # finalpath has been set (both are assigned together below), so it needs
    # no initial value.
    finalpath = []
    dir = 1
    while fringe[0] and fringe[1]:
        # choose direction
        # dir == 0 is forward direction and dir == 1 is back
        dir = 1 - dir
        # extract closest to expand
        (dist, _, v) = pop(fringe[dir])
        if v in dists[dir]:
            # Shortest path to v has already been found
            continue
        # update distance
        dists[dir][v] = dist  # equal to seen[dir][v]
        if v in dists[1 - dir]:
            # if we have scanned v in both directions we are done
            # we have now discovered the shortest path.
            # (v was necessarily *seen* by both searches earlier, which set
            # finalpath/finaldist in the relaxation step below.)
            return (finaldist, finalpath)

        for w, d in neighs[dir][v].items():
            # weight(v, w, d) for forward and weight(w, v, d) for back direction
            cost = weight(v, w, d) if dir == 0 else weight(w, v, d)
            if cost is None:
                # hidden edge: the weight function vetoed it
                continue
            vwLength = dists[dir][v] + cost
            if w in dists[dir]:
                if vwLength < dists[dir][w]:
                    # A settled node got a shorter path: impossible with
                    # nonnegative weights.
                    raise ValueError("Contradictory paths found: negative weights?")
            elif w not in seen[dir] or vwLength < seen[dir][w]:
                # relaxing
                seen[dir][w] = vwLength
                push(fringe[dir], (vwLength, next(c), w))
                paths[dir][w] = paths[dir][v] + [w]
                if w in seen[0] and w in seen[1]:
                    # see if this path is better than the already
                    # discovered shortest path
                    totaldist = seen[0][w] + seen[1][w]
                    if finalpath == [] or finaldist > totaldist:
                        finaldist = totaldist
                        # backward path is stored target-first; reverse and
                        # drop the duplicated meeting node w.
                        revpath = paths[1][w][:]
                        revpath.reverse()
                        finalpath = paths[0][w] + revpath[1:]
    raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
+
+
@nx._dispatchable(edge_attrs="weight")
def johnson(G, weight="weight"):
    r"""Compute all-pairs shortest paths using Johnson's algorithm.

    Johnson's algorithm finds a shortest path between each pair of
    nodes in a weighted graph and remains correct when some edge
    weights are negative (provided there is no negative cycle).

    Parameters
    ----------
    G : NetworkX graph

    weight : string or function
        If a string, edge weights are read from the edge attribute with
        that key (that is, the weight of the edge joining `u` to `v` is
        ``G.edges[u, v][weight]``); edges missing the attribute are
        assumed to have weight one.

        If a function, it is called with exactly three positional
        arguments -- the two endpoints of an edge and the dictionary of
        edge attributes for that edge -- and must return a number.

    Returns
    -------
    distance : dictionary
        Dictionary, keyed by source and target, of shortest paths.

    Examples
    --------
    >>> graph = nx.DiGraph()
    >>> graph.add_weighted_edges_from(
    ...     [("0", "3", 3), ("0", "1", -5), ("0", "2", 2), ("1", "2", 4), ("2", "3", 1)]
    ... )
    >>> paths = nx.johnson(graph, weight="weight")
    >>> paths["0"]["2"]
    ['0', '1', '2']

    Notes
    -----
    The algorithm first runs Bellman-Ford (seeded with every node at
    distance zero, equivalent to a virtual super-source) to obtain a
    potential for each node.  Reweighting every edge by the difference
    of its endpoints' potentials makes all weights nonnegative without
    changing which paths are shortest, so Dijkstra's algorithm can then
    be run from each node on the reweighted graph.

    The time complexity is $O(n^2 \log n + n m)$, where $n$ is the
    number of nodes and $m$ the number of edges.  For dense graphs this
    may be faster than the Floyd-Warshall algorithm.

    See Also
    --------
    floyd_warshall_predecessor_and_distance
    floyd_warshall_numpy
    all_pairs_shortest_path
    all_pairs_shortest_path_length
    all_pairs_dijkstra_path
    bellman_ford_predecessor_and_distance
    all_pairs_bellman_ford_path
    all_pairs_bellman_ford_path_length

    """
    dist = dict.fromkeys(G, 0)
    pred = {node: [] for node in G}
    get_weight = _weight_function(G, weight)

    # Bellman-Ford relaxation from all nodes at once yields the node
    # potentials h(v) used for reweighting.
    h = _bellman_ford(G, list(G), get_weight, pred=pred, dist=dist)

    def reweighted(u, v, data):
        # w'(u, v) = w(u, v) + h(u) - h(v); nonnegative after Bellman-Ford,
        # and preserves shortest paths between any fixed pair of nodes.
        return get_weight(u, v, data) + h[u] - h[v]

    def shortest_paths_from(node):
        paths = {node: [node]}
        _dijkstra(G, node, reweighted, paths=paths)
        return paths

    return {node: shortest_paths_from(node) for node in G}
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/similarity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/similarity.py
new file mode 100644
index 00000000..3c601a72
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/similarity.py
@@ -0,0 +1,1780 @@
+"""Functions measuring similarity using graph edit distance.
+
+The graph edit distance is the number of edge/node changes needed
+to make two graphs isomorphic.
+
+The default algorithm/implementation is sub-optimal for some graphs.
+The problem of finding the exact Graph Edit Distance (GED) is NP-hard
+so it is often slow. If the simple interface `graph_edit_distance`
+takes too long for your graph, try `optimize_graph_edit_distance`
+and/or `optimize_edit_paths`.
+
+At the same time, I encourage capable people to investigate
+alternative GED algorithms, in order to improve the choices available.
+"""
+
+import math
+import time
+import warnings
+from dataclasses import dataclass
+from itertools import product
+
+import networkx as nx
+from networkx.utils import np_random_state
+
+__all__ = [
+    "graph_edit_distance",
+    "optimal_edit_paths",
+    "optimize_graph_edit_distance",
+    "optimize_edit_paths",
+    "simrank_similarity",
+    "panther_similarity",
+    "generate_random_paths",
+]
+
+
def debug_print(*args, **kwargs):
    """Forward all arguments to :func:`print`; hook point for debug output."""
    print(*args, **kwargs)
+
+
@nx._dispatchable(
    graphs={"G1": 0, "G2": 1}, preserve_edge_attrs=True, preserve_node_attrs=True
)
def graph_edit_distance(
    G1,
    G2,
    node_match=None,
    edge_match=None,
    node_subst_cost=None,
    node_del_cost=None,
    node_ins_cost=None,
    edge_subst_cost=None,
    edge_del_cost=None,
    edge_ins_cost=None,
    roots=None,
    upper_bound=None,
    timeout=None,
):
    """Return the graph edit distance (GED) between graphs `G1` and `G2`.

    Graph edit distance is a graph similarity measure analogous to
    Levenshtein distance for strings: the minimum total cost of an edit
    path -- a sequence of node and edge substitutions, deletions, and
    insertions -- transforming `G1` into a graph isomorphic to `G2`.

    Parameters
    ----------
    G1, G2: graphs
        The two graphs G1 and G2 must be of the same type.

    node_match : callable
        ``node_match(G1.nodes[n1], G2.nodes[n2])`` returns True when the
        node attribute dictionaries of ``n1`` and ``n2`` should be
        considered equal during matching.

        Ignored if `node_subst_cost` is specified.  If neither
        `node_match` nor `node_subst_cost` are specified then node
        attributes are not considered.

    edge_match : callable
        ``edge_match(G1[u1][v1], G2[u2][v2])`` returns True when the
        edge attribute dictionaries of the two edges under consideration
        should be considered equal during matching.

        Ignored if `edge_subst_cost` is specified.  If neither
        `edge_match` nor `edge_subst_cost` are specified then edge
        attributes are not considered.

    node_subst_cost, node_del_cost, node_ins_cost : callable
        Functions returning the (positive numeric) costs of node
        substitution, deletion, and insertion, called as

           node_subst_cost(G1.nodes[n1], G2.nodes[n2]),
           node_del_cost(G1.nodes[n1]),
           node_ins_cost(G2.nodes[n2]).

        `node_subst_cost` overrides `node_match` if specified.  If
        neither is specified, the default node substitution cost is 0
        (node attributes are not considered during matching).  Default
        deletion and insertion costs are 1 each.

    edge_subst_cost, edge_del_cost, edge_ins_cost : callable
        Functions returning the (positive numeric) costs of edge
        substitution, deletion, and insertion, called as

           edge_subst_cost(G1[u1][v1], G2[u2][v2]),
           edge_del_cost(G1[u1][v1]),
           edge_ins_cost(G2[u2][v2]).

        `edge_subst_cost` overrides `edge_match` if specified.  If
        neither is specified, the default edge substitution cost is 0
        (edge attributes are not considered during matching).  Default
        deletion and insertion costs are 1 each.

    roots : 2-tuple
        Tuple ``(root1, root2)`` where ``root1`` is a node in `G1` and
        ``root2`` a node in `G2`.  These nodes are forced to be matched,
        allowing comparison between rooted graphs.

    upper_bound : numeric
        Maximum edit distance to consider.  Return None if no edit
        distance under or equal to `upper_bound` exists.

    timeout : numeric
        Maximum number of seconds to execute.  After the timeout is met,
        the current best GED is returned.

    Examples
    --------
    >>> G1 = nx.cycle_graph(6)
    >>> G2 = nx.wheel_graph(7)
    >>> nx.graph_edit_distance(G1, G2)
    7.0

    >>> G1 = nx.star_graph(5)
    >>> G2 = nx.star_graph(5)
    >>> nx.graph_edit_distance(G1, G2, roots=(0, 0))
    0.0
    >>> nx.graph_edit_distance(G1, G2, roots=(1, 0))
    8.0

    See Also
    --------
    optimal_edit_paths, optimize_graph_edit_distance,

    is_isomorphic: test for graph edit distance of 0

    References
    ----------
    .. [1] Zeina Abu-Aisheh, Romain Raveaux, Jean-Yves Ramel, Patrick
       Martineau. An Exact Graph Edit Distance Algorithm for Solving
       Pattern Recognition Problems. 4th International Conference on
       Pattern Recognition Applications and Methods 2015, Jan 2015,
       Lisbon, Portugal. 2015,
       <10.5220/0005209202710278>. <hal-01168816>
       https://hal.archives-ouvertes.fr/hal-01168816

    """
    # With strictly-decreasing costs requested (the True flag below), the
    # generator yields successively cheaper edit paths; the cost of the last
    # path yielded is therefore the minimum found.
    best = None
    candidates = optimize_edit_paths(
        G1,
        G2,
        node_match,
        edge_match,
        node_subst_cost,
        node_del_cost,
        node_ins_cost,
        edge_subst_cost,
        edge_del_cost,
        edge_ins_cost,
        upper_bound,
        True,
        roots,
        timeout,
    )
    for _node_path, _edge_path, cost in candidates:
        best = cost
    return best
+
+
+@nx._dispatchable(graphs={"G1": 0, "G2": 1})
+def optimal_edit_paths(
+    G1,
+    G2,
+    node_match=None,
+    edge_match=None,
+    node_subst_cost=None,
+    node_del_cost=None,
+    node_ins_cost=None,
+    edge_subst_cost=None,
+    edge_del_cost=None,
+    edge_ins_cost=None,
+    upper_bound=None,
+):
+    """Returns all minimum-cost edit paths transforming G1 to G2.
+
+    Graph edit path is a sequence of node and edge edit operations
+    transforming graph G1 to graph isomorphic to G2.  Edit operations
+    include substitutions, deletions, and insertions.
+
+    Parameters
+    ----------
+    G1, G2: graphs
+        The two graphs G1 and G2 must be of the same type.
+
+    node_match : callable
+        A function that returns True if node n1 in G1 and n2 in G2
+        should be considered equal during matching.
+
+        The function will be called like
+
+           node_match(G1.nodes[n1], G2.nodes[n2]).
+
+        That is, the function will receive the node attribute
+        dictionaries for n1 and n2 as inputs.
+
+        Ignored if node_subst_cost is specified.  If neither
+        node_match nor node_subst_cost are specified then node
+        attributes are not considered.
+
+    edge_match : callable
+        A function that returns True if the edge attribute dictionaries
+        for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
+        be considered equal during matching.
+
+        The function will be called like
+
+           edge_match(G1[u1][v1], G2[u2][v2]).
+
+        That is, the function will receive the edge attribute
+        dictionaries of the edges under consideration.
+
+        Ignored if edge_subst_cost is specified.  If neither
+        edge_match nor edge_subst_cost are specified then edge
+        attributes are not considered.
+
+    node_subst_cost, node_del_cost, node_ins_cost : callable
+        Functions that return the costs of node substitution, node
+        deletion, and node insertion, respectively.
+
+        The functions will be called like
+
+           node_subst_cost(G1.nodes[n1], G2.nodes[n2]),
+           node_del_cost(G1.nodes[n1]),
+           node_ins_cost(G2.nodes[n2]).
+
+        That is, the functions will receive the node attribute
+        dictionaries as inputs.  The functions are expected to return
+        positive numeric values.
+
+        Function node_subst_cost overrides node_match if specified.
+        If neither node_match nor node_subst_cost are specified then
+        default node substitution cost of 0 is used (node attributes
+        are not considered during matching).
+
+        If node_del_cost is not specified then default node deletion
+        cost of 1 is used.  If node_ins_cost is not specified then
+        default node insertion cost of 1 is used.
+
+    edge_subst_cost, edge_del_cost, edge_ins_cost : callable
+        Functions that return the costs of edge substitution, edge
+        deletion, and edge insertion, respectively.
+
+        The functions will be called like
+
+           edge_subst_cost(G1[u1][v1], G2[u2][v2]),
+           edge_del_cost(G1[u1][v1]),
+           edge_ins_cost(G2[u2][v2]).
+
+        That is, the functions will receive the edge attribute
+        dictionaries as inputs.  The functions are expected to return
+        positive numeric values.
+
+        Function edge_subst_cost overrides edge_match if specified.
+        If neither edge_match nor edge_subst_cost are specified then
+        default edge substitution cost of 0 is used (edge attributes
+        are not considered during matching).
+
+        If edge_del_cost is not specified then default edge deletion
+        cost of 1 is used.  If edge_ins_cost is not specified then
+        default edge insertion cost of 1 is used.
+
+    upper_bound : numeric
+        Maximum edit distance to consider.
+
+    Returns
+    -------
+    edit_paths : list of tuples (node_edit_path, edge_edit_path)
+       - node_edit_path : list of tuples ``(u, v)`` indicating node transformations
+         between `G1` and `G2`. ``u`` is `None` for insertion, ``v`` is `None`
+         for deletion.
+       - edge_edit_path : list of tuples ``((u1, v1), (u2, v2))`` indicating edge
+         transformations between `G1` and `G2`. ``(None, (u2,v2))`` for insertion
+         and ``((u1,v1), None)`` for deletion.
+
+    cost : numeric
+        Optimal edit path cost (graph edit distance). When the cost
+        is zero, it indicates that `G1` and `G2` are isomorphic.
+
+    Examples
+    --------
+    >>> G1 = nx.cycle_graph(4)
+    >>> G2 = nx.wheel_graph(5)
+    >>> paths, cost = nx.optimal_edit_paths(G1, G2)
+    >>> len(paths)
+    40
+    >>> cost
+    5.0
+
+    Notes
+    -----
+    To transform `G1` into a graph isomorphic to `G2`, apply the node
+    and edge edits in the returned ``edit_paths``.
+    In the case of isomorphic graphs, the cost is zero, and the paths
+    represent different isomorphic mappings (isomorphisms). That is, the
+    edits involve renaming nodes and edges to match the structure of `G2`.
+
+    See Also
+    --------
+    graph_edit_distance, optimize_edit_paths
+
+    References
+    ----------
+    .. [1] Zeina Abu-Aisheh, Romain Raveaux, Jean-Yves Ramel, Patrick
+       Martineau. An Exact Graph Edit Distance Algorithm for Solving
+       Pattern Recognition Problems. 4th International Conference on
+       Pattern Recognition Applications and Methods 2015, Jan 2015,
+       Lisbon, Portugal. 2015,
+       <10.5220/0005209202710278>. <hal-01168816>
+       https://hal.archives-ouvertes.fr/hal-01168816
+
+    """
+    paths = []
+    bestcost = None
+    for vertex_path, edge_path, cost in optimize_edit_paths(
+        G1,
+        G2,
+        node_match,
+        edge_match,
+        node_subst_cost,
+        node_del_cost,
+        node_ins_cost,
+        edge_subst_cost,
+        edge_del_cost,
+        edge_ins_cost,
+        upper_bound,
+        False,
+    ):
+        # assert bestcost is None or cost <= bestcost
+        if bestcost is not None and cost < bestcost:
+            paths = []
+        paths.append((vertex_path, edge_path))
+        bestcost = cost
+    return paths, bestcost
+
+
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
def optimize_graph_edit_distance(
    G1,
    G2,
    node_match=None,
    edge_match=None,
    node_subst_cost=None,
    node_del_cost=None,
    node_ins_cost=None,
    edge_subst_cost=None,
    edge_del_cost=None,
    edge_ins_cost=None,
    upper_bound=None,
):
    """Yield successively better approximations of the graph edit
    distance (GED) between graphs G1 and G2.

    GED is a graph similarity measure analogous to Levenshtein distance
    for strings: the minimum total cost of an edit path -- a sequence of
    node and edge edit operations -- transforming G1 into a graph
    isomorphic to G2.  Each value yielded is strictly smaller than the
    previous one; the final value is the exact distance.

    Parameters
    ----------
    G1, G2: graphs
        The two graphs G1 and G2 must be of the same type.

    node_match : callable
        A function that returns True if node n1 in G1 and n2 in G2
        should be considered equal during matching.  It is called like

           node_match(G1.nodes[n1], G2.nodes[n2]),

        i.e. it receives the node attribute dictionaries for n1 and n2.

        Ignored if node_subst_cost is specified.  If neither
        node_match nor node_subst_cost are specified then node
        attributes are not considered.

    edge_match : callable
        A function that returns True if the edge attribute dictionaries
        for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
        be considered equal during matching.  It is called like

           edge_match(G1[u1][v1], G2[u2][v2]),

        i.e. it receives the edge attribute dictionaries of the edges
        under consideration.

        Ignored if edge_subst_cost is specified.  If neither
        edge_match nor edge_subst_cost are specified then edge
        attributes are not considered.

    node_subst_cost, node_del_cost, node_ins_cost : callable
        Functions returning the (positive numeric) costs of node
        substitution, deletion, and insertion, respectively.  They are
        called like

           node_subst_cost(G1.nodes[n1], G2.nodes[n2]),
           node_del_cost(G1.nodes[n1]),
           node_ins_cost(G2.nodes[n2]),

        i.e. they receive node attribute dictionaries as inputs.

        Function node_subst_cost overrides node_match if specified.
        If neither node_match nor node_subst_cost are specified then a
        default node substitution cost of 0 is used (node attributes
        are not considered during matching).  Default deletion and
        insertion costs are 1 each.

    edge_subst_cost, edge_del_cost, edge_ins_cost : callable
        Functions returning the (positive numeric) costs of edge
        substitution, deletion, and insertion, respectively.  They are
        called like

           edge_subst_cost(G1[u1][v1], G2[u2][v2]),
           edge_del_cost(G1[u1][v1]),
           edge_ins_cost(G2[u2][v2]),

        i.e. they receive edge attribute dictionaries as inputs.

        Function edge_subst_cost overrides edge_match if specified.
        If neither edge_match nor edge_subst_cost are specified then a
        default edge substitution cost of 0 is used (edge attributes
        are not considered during matching).  Default deletion and
        insertion costs are 1 each.

    upper_bound : numeric
        Maximum edit distance to consider.

    Returns
    -------
    Generator of consecutive approximations of graph edit distance.

    Examples
    --------
    >>> G1 = nx.cycle_graph(6)
    >>> G2 = nx.wheel_graph(7)
    >>> for v in nx.optimize_graph_edit_distance(G1, G2):
    ...     minv = v
    >>> minv
    7.0

    See Also
    --------
    graph_edit_distance, optimize_edit_paths

    References
    ----------
    .. [1] Zeina Abu-Aisheh, Romain Raveaux, Jean-Yves Ramel, Patrick
       Martineau. An Exact Graph Edit Distance Algorithm for Solving
       Pattern Recognition Problems. 4th International Conference on
       Pattern Recognition Applications and Methods 2015, Jan 2015,
       Lisbon, Portugal. 2015,
       <10.5220/0005209202710278>. <hal-01168816>
       https://hal.archives-ouvertes.fr/hal-01168816
    """
    # Delegate to the advanced interface; strictly_decreasing=True makes
    # every yielded cost strictly better than the previous one, so only
    # the cost component of each (vertex_path, edge_path, cost) triple
    # is surfaced to the caller.
    approximations = optimize_edit_paths(
        G1,
        G2,
        node_match=node_match,
        edge_match=edge_match,
        node_subst_cost=node_subst_cost,
        node_del_cost=node_del_cost,
        node_ins_cost=node_ins_cost,
        edge_subst_cost=edge_subst_cost,
        edge_del_cost=edge_del_cost,
        edge_ins_cost=edge_ins_cost,
        upper_bound=upper_bound,
        strictly_decreasing=True,
    )
    yield from (cost for _vertex_path, _edge_path, cost in approximations)
+
+
@nx._dispatchable(
    graphs={"G1": 0, "G2": 1}, preserve_edge_attrs=True, preserve_node_attrs=True
)
def optimize_edit_paths(
    G1,
    G2,
    node_match=None,
    edge_match=None,
    node_subst_cost=None,
    node_del_cost=None,
    node_ins_cost=None,
    edge_subst_cost=None,
    edge_del_cost=None,
    edge_ins_cost=None,
    upper_bound=None,
    strictly_decreasing=True,
    roots=None,
    timeout=None,
):
    """GED (graph edit distance) calculation: advanced interface.

    Graph edit path is a sequence of node and edge edit operations
    transforming graph G1 to graph isomorphic to G2.  Edit operations
    include substitutions, deletions, and insertions.

    Graph edit distance is defined as minimum cost of edit path.

    Parameters
    ----------
    G1, G2: graphs
        The two graphs G1 and G2 must be of the same type.

    node_match : callable
        A function that returns True if node n1 in G1 and n2 in G2
        should be considered equal during matching.

        The function will be called like

           node_match(G1.nodes[n1], G2.nodes[n2]).

        That is, the function will receive the node attribute
        dictionaries for n1 and n2 as inputs.

        Ignored if node_subst_cost is specified.  If neither
        node_match nor node_subst_cost are specified then node
        attributes are not considered.

    edge_match : callable
        A function that returns True if the edge attribute dictionaries
        for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
        be considered equal during matching.

        The function will be called like

           edge_match(G1[u1][v1], G2[u2][v2]).

        That is, the function will receive the edge attribute
        dictionaries of the edges under consideration.

        Ignored if edge_subst_cost is specified.  If neither
        edge_match nor edge_subst_cost are specified then edge
        attributes are not considered.

    node_subst_cost, node_del_cost, node_ins_cost : callable
        Functions that return the costs of node substitution, node
        deletion, and node insertion, respectively.

        The functions will be called like

           node_subst_cost(G1.nodes[n1], G2.nodes[n2]),
           node_del_cost(G1.nodes[n1]),
           node_ins_cost(G2.nodes[n2]).

        That is, the functions will receive the node attribute
        dictionaries as inputs.  The functions are expected to return
        positive numeric values.

        Function node_subst_cost overrides node_match if specified.
        If neither node_match nor node_subst_cost are specified then
        default node substitution cost of 0 is used (node attributes
        are not considered during matching).

        If node_del_cost is not specified then default node deletion
        cost of 1 is used.  If node_ins_cost is not specified then
        default node insertion cost of 1 is used.

    edge_subst_cost, edge_del_cost, edge_ins_cost : callable
        Functions that return the costs of edge substitution, edge
        deletion, and edge insertion, respectively.

        The functions will be called like

           edge_subst_cost(G1[u1][v1], G2[u2][v2]),
           edge_del_cost(G1[u1][v1]),
           edge_ins_cost(G2[u2][v2]).

        That is, the functions will receive the edge attribute
        dictionaries as inputs.  The functions are expected to return
        positive numeric values.

        Function edge_subst_cost overrides edge_match if specified.
        If neither edge_match nor edge_subst_cost are specified then
        default edge substitution cost of 0 is used (edge attributes
        are not considered during matching).

        If edge_del_cost is not specified then default edge deletion
        cost of 1 is used.  If edge_ins_cost is not specified then
        default edge insertion cost of 1 is used.

    upper_bound : numeric
        Maximum edit distance to consider.

    strictly_decreasing : bool
        If True, return consecutive approximations of strictly
        decreasing cost.  Otherwise, return all edit paths of cost
        less than or equal to the previous minimum cost.

    roots : 2-tuple
        Tuple where first element is a node in G1 and the second
        is a node in G2.
        These nodes are forced to be matched in the comparison to
        allow comparison between rooted graphs.

    timeout : numeric
        Maximum number of seconds to execute.
        After timeout is met, the current best GED is returned.

    Returns
    -------
    Generator of tuples (node_edit_path, edge_edit_path, cost)
        node_edit_path : list of tuples (u, v)
        edge_edit_path : list of tuples ((u1, v1), (u2, v2))
        cost : numeric

    See Also
    --------
    graph_edit_distance, optimize_graph_edit_distance, optimal_edit_paths

    References
    ----------
    .. [1] Zeina Abu-Aisheh, Romain Raveaux, Jean-Yves Ramel, Patrick
       Martineau. An Exact Graph Edit Distance Algorithm for Solving
       Pattern Recognition Problems. 4th International Conference on
       Pattern Recognition Applications and Methods 2015, Jan 2015,
       Lisbon, Portugal. 2015,
       <10.5220/0005209202710278>. <hal-01168816>
       https://hal.archives-ouvertes.fr/hal-01168816

    """
    # TODO: support DiGraph

    import numpy as np
    import scipy as sp

    # A square cost matrix bundled with its optimal linear sum assignment
    # (LSA).  `ls`, the assignment's total cost, is used throughout as a
    # lower bound for branch-and-bound pruning.
    @dataclass
    class CostMatrix:
        C: ...  # (m+n) x (m+n) numpy cost matrix
        lsa_row_ind: ...  # row indices of the optimal assignment
        lsa_col_ind: ...  # column indices of the optimal assignment
        ls: ...  # total cost of the optimal assignment (lower bound)

    def make_CostMatrix(C, m, n):
        # Solve the LSAP for C, then normalize the solution so that each
        # substitution i<->j is mirrored by the dummy assignment m+j<->n+i.
        # assert(C.shape == (m + n, m + n))
        lsa_row_ind, lsa_col_ind = sp.optimize.linear_sum_assignment(C)

        # Fixup dummy assignments:
        # each substitution i<->j should have dummy assignment m+j<->n+i
        # NOTE: fast reduce of Cv relies on it
        # Create masks for substitution and dummy indices
        is_subst = (lsa_row_ind < m) & (lsa_col_ind < n)
        is_dummy = (lsa_row_ind >= m) & (lsa_col_ind >= n)

        # Map dummy assignments to the correct indices
        lsa_row_ind[is_dummy] = lsa_col_ind[is_subst] + m
        lsa_col_ind[is_dummy] = lsa_row_ind[is_subst] + n

        return CostMatrix(
            C, lsa_row_ind, lsa_col_ind, C[lsa_row_ind, lsa_col_ind].sum()
        )

    def extract_C(C, i, j, m, n):
        # Submatrix of C restricted to entries i (G1 side) and j (G2 side),
        # together with their dummy counterparts in the del/ins quadrants.
        # assert(C.shape == (m + n, m + n))
        row_ind = [k in i or k - m in j for k in range(m + n)]
        col_ind = [k in j or k - n in i for k in range(m + n)]
        return C[row_ind, :][:, col_ind]

    def reduce_C(C, i, j, m, n):
        # Complement of extract_C: drop rows/cols for entries i, j and
        # their dummy counterparts.
        # assert(C.shape == (m + n, m + n))
        row_ind = [k not in i and k - m not in j for k in range(m + n)]
        col_ind = [k not in j and k - n not in i for k in range(m + n)]
        return C[row_ind, :][:, col_ind]

    def reduce_ind(ind, i):
        # Renumber LSA index arrays after the entries in i are removed, so
        # they remain valid for the reduced matrix.
        # assert set(ind) == set(range(len(ind)))
        rind = ind[[k not in i for k in ind]]
        for k in set(i):
            rind[rind >= k] -= 1
        return rind

    def match_edges(u, v, pending_g, pending_h, Ce, matched_uv=None):
        """
        Parameters:
            u, v: matched vertices, u=None or v=None for
               deletion/insertion
            pending_g, pending_h: lists of edges not yet mapped
            Ce: CostMatrix of pending edge mappings
            matched_uv: partial vertex edit path
                list of tuples (u, v) of previously matched vertex
                    mappings u<->v, u=None or v=None for
                    deletion/insertion

        Returns:
            list of (i, j): indices of edge mappings g<->h
            localCe: local CostMatrix of edge mappings
                (basically submatrix of Ce at cross of rows i, cols j)
        """
        M = len(pending_g)
        N = len(pending_h)
        # assert Ce.C.shape == (M + N, M + N)

        # only attempt to match edges after one node match has been made
        # this will stop self-edges on the first node being automatically deleted
        # even when a substitution is the better option
        if matched_uv is None or len(matched_uv) == 0:
            g_ind = []
            h_ind = []
        else:
            # Candidate edges: those incident to u/v or to an already
            # matched vertex (including self-loops on matched vertices).
            g_ind = [
                i
                for i in range(M)
                if pending_g[i][:2] == (u, u)
                or any(
                    pending_g[i][:2] in ((p, u), (u, p), (p, p)) for p, q in matched_uv
                )
            ]
            h_ind = [
                j
                for j in range(N)
                if pending_h[j][:2] == (v, v)
                or any(
                    pending_h[j][:2] in ((q, v), (v, q), (q, q)) for p, q in matched_uv
                )
            ]

        m = len(g_ind)
        n = len(h_ind)

        if m or n:
            C = extract_C(Ce.C, g_ind, h_ind, M, N)
            # assert C.shape == (m + n, m + n)

            # Forbid structurally invalid matches
            # NOTE: inf remembered from Ce construction
            for k, i in enumerate(g_ind):
                g = pending_g[i][:2]
                for l, j in enumerate(h_ind):
                    h = pending_h[j][:2]
                    if nx.is_directed(G1) or nx.is_directed(G2):
                        if any(
                            g == (p, u) and h == (q, v) or g == (u, p) and h == (v, q)
                            for p, q in matched_uv
                        ):
                            continue
                    else:
                        if any(
                            g in ((p, u), (u, p)) and h in ((q, v), (v, q))
                            for p, q in matched_uv
                        ):
                            continue
                    if g == (u, u) or any(g == (p, p) for p, q in matched_uv):
                        continue
                    if h == (v, v) or any(h == (q, q) for p, q in matched_uv):
                        continue
                    # Endpoints are not consistently mapped -> disallow.
                    C[k, l] = inf

            localCe = make_CostMatrix(C, m, n)
            # Translate local indices back to Ce's global indexing,
            # skipping pure dummy<->dummy pairs.
            ij = [
                (
                    g_ind[k] if k < m else M + h_ind[l],
                    h_ind[l] if l < n else N + g_ind[k],
                )
                for k, l in zip(localCe.lsa_row_ind, localCe.lsa_col_ind)
                if k < m or l < n
            ]

        else:
            ij = []
            localCe = CostMatrix(np.empty((0, 0)), [], [], 0)

        return ij, localCe

    def reduce_Ce(Ce, ij, m, n):
        # Remove the edge mappings in ij from Ce and re-solve the LSAP on
        # the reduced matrix; returns Ce unchanged when ij is empty.
        if len(ij):
            i, j = zip(*ij)
            m_i = m - sum(1 for t in i if t < m)
            n_j = n - sum(1 for t in j if t < n)
            return make_CostMatrix(reduce_C(Ce.C, i, j, m, n), m_i, n_j)
        return Ce

    def get_edit_ops(
        matched_uv, pending_u, pending_v, Cv, pending_g, pending_h, Ce, matched_cost
    ):
        """
        Parameters:
            matched_uv: partial vertex edit path
                list of tuples (u, v) of vertex mappings u<->v,
                u=None or v=None for deletion/insertion
            pending_u, pending_v: lists of vertices not yet mapped
            Cv: CostMatrix of pending vertex mappings
            pending_g, pending_h: lists of edges not yet mapped
            Ce: CostMatrix of pending edge mappings
            matched_cost: cost of partial edit path

        Returns:
            sequence of
                (i, j): indices of vertex mapping u<->v
                Cv_ij: reduced CostMatrix of pending vertex mappings
                    (basically Cv with row i, col j removed)
                list of (x, y): indices of edge mappings g<->h
                Ce_xy: reduced CostMatrix of pending edge mappings
                    (basically Ce with rows x, cols y removed)
                cost: total cost of edit operation
            NOTE: most promising ops first
        """
        m = len(pending_u)
        n = len(pending_v)
        # assert Cv.C.shape == (m + n, m + n)

        # 1) a vertex mapping from optimal linear sum assignment
        i, j = min(
            (k, l) for k, l in zip(Cv.lsa_row_ind, Cv.lsa_col_ind) if k < m or l < n
        )
        xy, localCe = match_edges(
            pending_u[i] if i < m else None,
            pending_v[j] if j < n else None,
            pending_g,
            pending_h,
            Ce,
            matched_uv,
        )
        Ce_xy = reduce_Ce(Ce, xy, len(pending_g), len(pending_h))
        # assert Ce.ls <= localCe.ls + Ce_xy.ls
        if prune(matched_cost + Cv.ls + localCe.ls + Ce_xy.ls):
            pass
        else:
            # get reduced Cv efficiently
            Cv_ij = CostMatrix(
                reduce_C(Cv.C, (i,), (j,), m, n),
                reduce_ind(Cv.lsa_row_ind, (i, m + j)),
                reduce_ind(Cv.lsa_col_ind, (j, n + i)),
                Cv.ls - Cv.C[i, j],
            )
            yield (i, j), Cv_ij, xy, Ce_xy, Cv.C[i, j] + localCe.ls

        # 2) other candidates, sorted by lower-bound cost estimate
        # Alternatives vary one side of the fixed LSA choice (whichever
        # side is smaller), keeping the search tree complete.
        other = []
        fixed_i, fixed_j = i, j
        if m <= n:
            candidates = (
                (t, fixed_j)
                for t in range(m + n)
                if t != fixed_i and (t < m or t == m + fixed_j)
            )
        else:
            candidates = (
                (fixed_i, t)
                for t in range(m + n)
                if t != fixed_j and (t < n or t == n + fixed_i)
            )
        for i, j in candidates:
            # Successively tighter lower bounds; bail out as early as
            # possible before paying for the more expensive computations.
            if prune(matched_cost + Cv.C[i, j] + Ce.ls):
                continue
            Cv_ij = make_CostMatrix(
                reduce_C(Cv.C, (i,), (j,), m, n),
                m - 1 if i < m else m,
                n - 1 if j < n else n,
            )
            # assert Cv.ls <= Cv.C[i, j] + Cv_ij.ls
            if prune(matched_cost + Cv.C[i, j] + Cv_ij.ls + Ce.ls):
                continue
            xy, localCe = match_edges(
                pending_u[i] if i < m else None,
                pending_v[j] if j < n else None,
                pending_g,
                pending_h,
                Ce,
                matched_uv,
            )
            if prune(matched_cost + Cv.C[i, j] + Cv_ij.ls + localCe.ls):
                continue
            Ce_xy = reduce_Ce(Ce, xy, len(pending_g), len(pending_h))
            # assert Ce.ls <= localCe.ls + Ce_xy.ls
            if prune(matched_cost + Cv.C[i, j] + Cv_ij.ls + localCe.ls + Ce_xy.ls):
                continue
            other.append(((i, j), Cv_ij, xy, Ce_xy, Cv.C[i, j] + localCe.ls))

        yield from sorted(other, key=lambda t: t[4] + t[1].ls + t[3].ls)

    def get_edit_paths(
        matched_uv,
        pending_u,
        pending_v,
        Cv,
        matched_gh,
        pending_g,
        pending_h,
        Ce,
        matched_cost,
    ):
        """
        Parameters:
            matched_uv: partial vertex edit path
                list of tuples (u, v) of vertex mappings u<->v,
                u=None or v=None for deletion/insertion
            pending_u, pending_v: lists of vertices not yet mapped
            Cv: CostMatrix of pending vertex mappings
            matched_gh: partial edge edit path
                list of tuples (g, h) of edge mappings g<->h,
                g=None or h=None for deletion/insertion
            pending_g, pending_h: lists of edges not yet mapped
            Ce: CostMatrix of pending edge mappings
            matched_cost: cost of partial edit path

        Returns:
            sequence of (vertex_path, edge_path, cost)
                vertex_path: complete vertex edit path
                    list of tuples (u, v) of vertex mappings u<->v,
                    u=None or v=None for deletion/insertion
                edge_path: complete edge edit path
                    list of tuples (g, h) of edge mappings g<->h,
                    g=None or h=None for deletion/insertion
                cost: total cost of edit path
            NOTE: path costs are non-increasing
        """
        # debug_print('matched-uv:', matched_uv)
        # debug_print('matched-gh:', matched_gh)
        # debug_print('matched-cost:', matched_cost)
        # debug_print('pending-u:', pending_u)
        # debug_print('pending-v:', pending_v)
        # debug_print(Cv.C)
        # assert list(sorted(G1.nodes)) == list(sorted(list(u for u, v in matched_uv if u is not None) + pending_u))
        # assert list(sorted(G2.nodes)) == list(sorted(list(v for u, v in matched_uv if v is not None) + pending_v))
        # debug_print('pending-g:', pending_g)
        # debug_print('pending-h:', pending_h)
        # debug_print(Ce.C)
        # assert list(sorted(G1.edges)) == list(sorted(list(g for g, h in matched_gh if g is not None) + pending_g))
        # assert list(sorted(G2.edges)) == list(sorted(list(h for g, h in matched_gh if h is not None) + pending_h))
        # debug_print()

        if prune(matched_cost + Cv.ls + Ce.ls):
            return

        if not max(len(pending_u), len(pending_v)):
            # assert not len(pending_g)
            # assert not len(pending_h)
            # path completed!
            # assert matched_cost <= maxcost_value
            nonlocal maxcost_value
            maxcost_value = min(maxcost_value, matched_cost)
            yield matched_uv, matched_gh, matched_cost

        else:
            edit_ops = get_edit_ops(
                matched_uv,
                pending_u,
                pending_v,
                Cv,
                pending_g,
                pending_h,
                Ce,
                matched_cost,
            )
            for ij, Cv_ij, xy, Ce_xy, edit_cost in edit_ops:
                i, j = ij
                # assert Cv.C[i, j] + sum(Ce.C[t] for t in xy) == edit_cost
                if prune(matched_cost + edit_cost + Cv_ij.ls + Ce_xy.ls):
                    continue

                # dive deeper
                # Shared pending_* lists and matched_* paths are mutated in
                # place; every mutation below is undone in the backtrack
                # section so siblings in the search tree see clean state.
                u = pending_u.pop(i) if i < len(pending_u) else None
                v = pending_v.pop(j) if j < len(pending_v) else None
                matched_uv.append((u, v))
                for x, y in xy:
                    len_g = len(pending_g)
                    len_h = len(pending_h)
                    matched_gh.append(
                        (
                            pending_g[x] if x < len_g else None,
                            pending_h[y] if y < len_h else None,
                        )
                    )
                sortedx = sorted(x for x, y in xy)
                sortedy = sorted(y for x, y in xy)
                # Pop in descending index order so earlier pops do not
                # shift the positions of later ones.
                G = [
                    (pending_g.pop(x) if x < len(pending_g) else None)
                    for x in reversed(sortedx)
                ]
                H = [
                    (pending_h.pop(y) if y < len(pending_h) else None)
                    for y in reversed(sortedy)
                ]

                yield from get_edit_paths(
                    matched_uv,
                    pending_u,
                    pending_v,
                    Cv_ij,
                    matched_gh,
                    pending_g,
                    pending_h,
                    Ce_xy,
                    matched_cost + edit_cost,
                )

                # backtrack
                if u is not None:
                    pending_u.insert(i, u)
                if v is not None:
                    pending_v.insert(j, v)
                matched_uv.pop()
                for x, g in zip(sortedx, reversed(G)):
                    if g is not None:
                        pending_g.insert(x, g)
                for y, h in zip(sortedy, reversed(H)):
                    if h is not None:
                        pending_h.insert(y, h)
                for _ in xy:
                    matched_gh.pop()

    # Initialization

    pending_u = list(G1.nodes)
    pending_v = list(G2.nodes)

    initial_cost = 0
    if roots:
        root_u, root_v = roots
        if root_u not in pending_u or root_v not in pending_v:
            raise nx.NodeNotFound("Root node not in graph.")

        # remove roots from pending
        pending_u.remove(root_u)
        pending_v.remove(root_v)

    # cost matrix of vertex mappings
    m = len(pending_u)
    n = len(pending_v)
    C = np.zeros((m + n, m + n))
    if node_subst_cost:
        C[0:m, 0:n] = np.array(
            [
                node_subst_cost(G1.nodes[u], G2.nodes[v])
                for u in pending_u
                for v in pending_v
            ]
        ).reshape(m, n)
        if roots:
            initial_cost = node_subst_cost(G1.nodes[root_u], G2.nodes[root_v])
    elif node_match:
        C[0:m, 0:n] = np.array(
            [
                1 - int(node_match(G1.nodes[u], G2.nodes[v]))
                for u in pending_u
                for v in pending_v
            ]
        ).reshape(m, n)
        if roots:
            initial_cost = 1 - node_match(G1.nodes[root_u], G2.nodes[root_v])
    else:
        # all zeroes
        pass
    # assert not min(m, n) or C[0:m, 0:n].min() >= 0
    if node_del_cost:
        del_costs = [node_del_cost(G1.nodes[u]) for u in pending_u]
    else:
        del_costs = [1] * len(pending_u)
    # assert not m or min(del_costs) >= 0
    if node_ins_cost:
        ins_costs = [node_ins_cost(G2.nodes[v]) for v in pending_v]
    else:
        ins_costs = [1] * len(pending_v)
    # assert not n or min(ins_costs) >= 0
    # `inf` sentinel: strictly larger than any feasible total cost, used
    # to forbid invalid pairings in the deletion/insertion quadrants.
    inf = C[0:m, 0:n].sum() + sum(del_costs) + sum(ins_costs) + 1
    C[0:m, n : n + m] = np.array(
        [del_costs[i] if i == j else inf for i in range(m) for j in range(m)]
    ).reshape(m, m)
    C[m : m + n, 0:n] = np.array(
        [ins_costs[i] if i == j else inf for i in range(n) for j in range(n)]
    ).reshape(n, n)
    Cv = make_CostMatrix(C, m, n)
    # debug_print(f"Cv: {m} x {n}")
    # debug_print(Cv.C)

    pending_g = list(G1.edges)
    pending_h = list(G2.edges)

    # cost matrix of edge mappings
    m = len(pending_g)
    n = len(pending_h)
    C = np.zeros((m + n, m + n))
    if edge_subst_cost:
        C[0:m, 0:n] = np.array(
            [
                edge_subst_cost(G1.edges[g], G2.edges[h])
                for g in pending_g
                for h in pending_h
            ]
        ).reshape(m, n)
    elif edge_match:
        C[0:m, 0:n] = np.array(
            [
                1 - int(edge_match(G1.edges[g], G2.edges[h]))
                for g in pending_g
                for h in pending_h
            ]
        ).reshape(m, n)
    else:
        # all zeroes
        pass
    # assert not min(m, n) or C[0:m, 0:n].min() >= 0
    if edge_del_cost:
        del_costs = [edge_del_cost(G1.edges[g]) for g in pending_g]
    else:
        del_costs = [1] * len(pending_g)
    # assert not m or min(del_costs) >= 0
    if edge_ins_cost:
        ins_costs = [edge_ins_cost(G2.edges[h]) for h in pending_h]
    else:
        ins_costs = [1] * len(pending_h)
    # assert not n or min(ins_costs) >= 0
    # Same sentinel construction as for the vertex matrix above.
    inf = C[0:m, 0:n].sum() + sum(del_costs) + sum(ins_costs) + 1
    C[0:m, n : n + m] = np.array(
        [del_costs[i] if i == j else inf for i in range(m) for j in range(m)]
    ).reshape(m, m)
    C[m : m + n, 0:n] = np.array(
        [ins_costs[i] if i == j else inf for i in range(n) for j in range(n)]
    ).reshape(n, n)
    Ce = make_CostMatrix(C, m, n)
    # debug_print(f'Ce: {m} x {n}')
    # debug_print(Ce.C)
    # debug_print()

    # Upper bound on any complete path cost; shrinks as complete paths
    # are found (see get_edit_paths).
    maxcost_value = Cv.C.sum() + Ce.C.sum() + 1

    if timeout is not None:
        if timeout <= 0:
            raise nx.NetworkXError("Timeout value must be greater than 0")
        start = time.perf_counter()

    def prune(cost):
        # Branch-and-bound cutoff: True aborts exploration of the current
        # partial path.  Triggers on timeout, on exceeding the caller's
        # upper_bound, and on costs that cannot beat the best complete
        # path found so far (strict improvement required when
        # strictly_decreasing is set).
        if timeout is not None:
            if time.perf_counter() - start > timeout:
                return True
        if upper_bound is not None:
            if cost > upper_bound:
                return True
        if cost > maxcost_value:
            return True
        if strictly_decreasing and cost >= maxcost_value:
            return True
        return False

    # Now go!

    # Pre-matched root pair, if a rooted comparison was requested.
    done_uv = [] if roots is None else [roots]

    for vertex_path, edge_path, cost in get_edit_paths(
        done_uv, pending_u, pending_v, Cv, [], pending_g, pending_h, Ce, initial_cost
    ):
        # assert sorted(G1.nodes) == sorted(u for u, v in vertex_path if u is not None)
        # assert sorted(G2.nodes) == sorted(v for u, v in vertex_path if v is not None)
        # assert sorted(G1.edges) == sorted(g for g, h in edge_path if g is not None)
        # assert sorted(G2.edges) == sorted(h for g, h in edge_path if h is not None)
        # print(vertex_path, edge_path, cost, file = sys.stderr)
        # assert cost == maxcost_value
        yield list(vertex_path), list(edge_path), float(cost)
+
+
+@nx._dispatchable
+def simrank_similarity(
+    G,
+    source=None,
+    target=None,
+    importance_factor=0.9,
+    max_iterations=1000,
+    tolerance=1e-4,
+):
+    """Returns the SimRank similarity of nodes in the graph ``G``.
+
+    SimRank is a similarity metric that says "two objects are considered
+    to be similar if they are referenced by similar objects." [1]_.
+
+    The pseudo-code definition from the paper is::
+
+        def simrank(G, u, v):
+            in_neighbors_u = G.predecessors(u)
+            in_neighbors_v = G.predecessors(v)
+            scale = C / (len(in_neighbors_u) * len(in_neighbors_v))
+            return scale * sum(
+                simrank(G, w, x) for w, x in product(in_neighbors_u, in_neighbors_v)
+            )
+
+    where ``G`` is the graph, ``u`` is the source, ``v`` is the target,
+    and ``C`` is a float decay or importance factor between 0 and 1.
+
+    The SimRank algorithm for determining node similarity is defined in
+    [2]_.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A NetworkX graph
+
+    source : node
+        If this is specified, the returned dictionary maps each node
+        ``v`` in the graph to the similarity between ``source`` and
+        ``v``.
+
+    target : node
+        If both ``source`` and ``target`` are specified, the similarity
+        value between ``source`` and ``target`` is returned. If
+        ``target`` is specified but ``source`` is not, this argument is
+        ignored.
+
+    importance_factor : float
+        The relative importance of indirect neighbors with respect to
+        direct neighbors.
+
+    max_iterations : integer
+        Maximum number of iterations.
+
+    tolerance : float
+        Error tolerance used to check convergence. When an iteration of
+        the algorithm finds that no similarity value changes more than
+        this amount, the algorithm halts.
+
+    Returns
+    -------
+    similarity : dictionary or float
+        If ``source`` and ``target`` are both ``None``, this returns a
+        dictionary of dictionaries, where keys are node pairs and value
+        are similarity of the pair of nodes.
+
+        If ``source`` is not ``None`` but ``target`` is, this returns a
+        dictionary mapping node to the similarity of ``source`` and that
+        node.
+
+        If neither ``source`` nor ``target`` is ``None``, this returns
+        the similarity value for the given pair of nodes.
+
+    Raises
+    ------
+    ExceededMaxIterations
+        If the algorithm does not converge within ``max_iterations``.
+
+    NodeNotFound
+        If either ``source`` or ``target`` is not in `G`.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(2)
+    >>> nx.simrank_similarity(G)
+    {0: {0: 1.0, 1: 0.0}, 1: {0: 0.0, 1: 1.0}}
+    >>> nx.simrank_similarity(G, source=0)
+    {0: 1.0, 1: 0.0}
+    >>> nx.simrank_similarity(G, source=0, target=0)
+    1.0
+
+    The result of this function can be converted to a numpy array
+    representing the SimRank matrix by using the node order of the
+    graph to determine which row and column represent each node.
+    Other ordering of nodes is also possible.
+
+    >>> import numpy as np
+    >>> sim = nx.simrank_similarity(G)
+    >>> np.array([[sim[u][v] for v in G] for u in G])
+    array([[1., 0.],
+           [0., 1.]])
+    >>> sim_1d = nx.simrank_similarity(G, source=0)
+    >>> np.array([sim_1d[v] for v in G])
+    array([1., 0.])
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/SimRank
+    .. [2] G. Jeh and J. Widom.
+           "SimRank: a measure of structural-context similarity",
+           In KDD'02: Proceedings of the Eighth ACM SIGKDD
+           International Conference on Knowledge Discovery and Data Mining,
+           pp. 538--543. ACM Press, 2002.
+    """
+    import numpy as np
+
+    # Translate node labels to integer positions so the numpy helper can
+    # work purely with array indices.
+    nodelist = list(G)
+    if source is not None:
+        if source not in nodelist:
+            raise nx.NodeNotFound(f"Source node {source} not in G")
+        else:
+            s_indx = nodelist.index(source)
+    else:
+        s_indx = None
+
+    if target is not None:
+        if target not in nodelist:
+            raise nx.NodeNotFound(f"Target node {target} not in G")
+        else:
+            t_indx = nodelist.index(target)
+    else:
+        t_indx = None
+
+    # All computation is delegated to the matrix-based implementation.
+    x = _simrank_similarity_numpy(
+        G, s_indx, t_indx, importance_factor, max_iterations, tolerance
+    )
+
+    # Map integer-indexed results back onto the original node labels.
+    if isinstance(x, np.ndarray):
+        if x.ndim == 1:
+            return dict(zip(G, x.tolist()))
+        # else x.ndim == 2
+        return {u: dict(zip(G, row)) for u, row in zip(G, x.tolist())}
+    return float(x)
+
+
+def _simrank_similarity_python(
+    G,
+    source=None,
+    target=None,
+    importance_factor=0.9,
+    max_iterations=1000,
+    tolerance=1e-4,
+):
+    """Returns the SimRank similarity of nodes in the graph ``G``.
+
+    This pure Python version is provided for pedagogical purposes.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(2)
+    >>> nx.similarity._simrank_similarity_python(G)
+    {0: {0: 1, 1: 0.0}, 1: {0: 0.0, 1: 1}}
+    >>> nx.similarity._simrank_similarity_python(G, source=0)
+    {0: 1, 1: 0.0}
+    >>> nx.similarity._simrank_similarity_python(G, source=0, target=0)
+    1
+    """
+    # build up our similarity adjacency dictionary output
+    newsim = {u: {v: 1 if u == v else 0 for v in G} for u in G}
+
+    # These functions compute the update to the similarity value of the nodes
+    # `u` and `v` with respect to the previous similarity values.
+    def avg_sim(s):
+        return sum(newsim[w][x] for (w, x) in s) / len(s) if s else 0.0
+
+    # SimRank is defined over in-neighbors, so use predecessors for
+    # directed graphs and plain neighbors for undirected graphs.
+    Gadj = G.pred if G.is_directed() else G.adj
+
+    def sim(u, v):
+        return importance_factor * avg_sim(list(product(Gadj[u], Gadj[v])))
+
+    for its in range(max_iterations):
+        oldsim = newsim
+        newsim = {u: {v: sim(u, v) if u != v else 1 for v in G} for u in G}
+        # Relative/absolute tolerance check against the previous iterate.
+        is_close = all(
+            all(
+                abs(newsim[u][v] - old) <= tolerance * (1 + abs(old))
+                for v, old in nbrs.items()
+            )
+            for u, nbrs in oldsim.items()
+        )
+        if is_close:
+            break
+
+    # NOTE(review): if convergence happens exactly on the last allowed
+    # iteration this still raises; also assumes max_iterations >= 1,
+    # otherwise ``its`` is unbound — confirm intended contract.
+    if its + 1 == max_iterations:
+        raise nx.ExceededMaxIterations(
+            f"simrank did not converge after {max_iterations} iterations."
+        )
+
+    if source is not None and target is not None:
+        return newsim[source][target]
+    if source is not None:
+        return newsim[source]
+    return newsim
+
+
+def _simrank_similarity_numpy(
+    G,
+    source=None,
+    target=None,
+    importance_factor=0.9,
+    max_iterations=1000,
+    tolerance=1e-4,
+):
+    """Calculate SimRank of nodes in ``G`` using matrices with ``numpy``.
+
+    The SimRank algorithm for determining node similarity is defined in
+    [1]_.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A NetworkX graph
+
+    source : node
+        If this is specified, the returned dictionary maps each node
+        ``v`` in the graph to the similarity between ``source`` and
+        ``v``.
+
+    target : node
+        If both ``source`` and ``target`` are specified, the similarity
+        value between ``source`` and ``target`` is returned. If
+        ``target`` is specified but ``source`` is not, this argument is
+        ignored.
+
+    importance_factor : float
+        The relative importance of indirect neighbors with respect to
+        direct neighbors.
+
+    max_iterations : integer
+        Maximum number of iterations.
+
+    tolerance : float
+        Error tolerance used to check convergence. When an iteration of
+        the algorithm finds that no similarity value changes more than
+        this amount, the algorithm halts.
+
+    Returns
+    -------
+    similarity : numpy array or float
+        If ``source`` and ``target`` are both ``None``, this returns a
+        2D array containing SimRank scores of the nodes.
+
+        If ``source`` is not ``None`` but ``target`` is, this returns an
+        1D array containing SimRank scores of ``source`` and that
+        node.
+
+        If neither ``source`` nor ``target`` is ``None``, this returns
+        the similarity value for the given pair of nodes.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(2)
+    >>> nx.similarity._simrank_similarity_numpy(G)
+    array([[1., 0.],
+           [0., 1.]])
+    >>> nx.similarity._simrank_similarity_numpy(G, source=0)
+    array([1., 0.])
+    >>> nx.similarity._simrank_similarity_numpy(G, source=0, target=0)
+    1.0
+
+    References
+    ----------
+    .. [1] G. Jeh and J. Widom.
+           "SimRank: a measure of structural-context similarity",
+           In KDD'02: Proceedings of the Eighth ACM SIGKDD
+           International Conference on Knowledge Discovery and Data Mining,
+           pp. 538--543. ACM Press, 2002.
+    """
+    # This algorithm follows roughly
+    #
+    #     S = max{C * (A.T * S * A), I}
+    #
+    # where C is the importance factor, A is the column normalized
+    # adjacency matrix, and I is the identity matrix.
+    import numpy as np
+
+    adjacency_matrix = nx.to_numpy_array(G)
+
+    # column-normalize the ``adjacency_matrix``
+    s = np.array(adjacency_matrix.sum(axis=0))
+    # guard against division by zero for nodes with no incoming edges
+    s[s == 0] = 1
+    adjacency_matrix /= s  # adjacency_matrix.sum(axis=0)
+
+    # Start from the identity: every node is fully similar to itself.
+    newsim = np.eye(len(G), dtype=np.float64)
+    for its in range(max_iterations):
+        prevsim = newsim.copy()
+        newsim = importance_factor * ((adjacency_matrix.T @ prevsim) @ adjacency_matrix)
+        # The diagonal is pinned to 1 per the SimRank definition.
+        np.fill_diagonal(newsim, 1.0)
+
+        if np.allclose(prevsim, newsim, atol=tolerance):
+            break
+
+    # NOTE(review): converging exactly on the final allowed iteration
+    # still raises here — same edge case as the pure-Python variant.
+    if its + 1 == max_iterations:
+        raise nx.ExceededMaxIterations(
+            f"simrank did not converge after {max_iterations} iterations."
+        )
+
+    if source is not None and target is not None:
+        return float(newsim[source, target])
+    if source is not None:
+        return newsim[source]
+    return newsim
+
+
+@nx._dispatchable(edge_attrs="weight")
+def panther_similarity(
+    G, source, k=5, path_length=5, c=0.5, delta=0.1, eps=None, weight="weight"
+):
+    r"""Returns the Panther similarity of nodes in the graph `G` to node ``v``.
+
+    Panther is a similarity metric that says "two objects are considered
+    to be similar if they frequently appear on the same paths." [1]_.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A NetworkX graph
+    source : node
+        Source node for which to find the top `k` similar other nodes
+    k : int (default = 5)
+        The number of most similar nodes to return.
+    path_length : int (default = 5)
+        How long the randomly generated paths should be (``T`` in [1]_)
+    c : float (default = 0.5)
+        A universal positive constant used to scale the number
+        of sample random paths to generate.
+    delta : float (default = 0.1)
+        The probability that the similarity $S$ is not an epsilon-approximation to (R, phi),
+        where $R$ is the number of random paths and $\phi$ is the probability
+        that an element sampled from a set $A \subseteq D$, where $D$ is the domain.
+    eps : float or None (default = None)
+        The error bound. Per [1]_, a good value is ``sqrt(1/|E|)``. Therefore,
+        if no value is provided, the recommended computed value will be used.
+    weight : string or None, optional (default="weight")
+        The name of an edge attribute that holds the numerical value
+        used as a weight. If None then each edge has weight 1.
+
+    Returns
+    -------
+    similarity : dictionary
+        Dictionary of nodes to similarity scores (as floats). Note:
+        the self-similarity (i.e., ``v``) will not be included in
+        the returned dictionary. So, for ``k = 5``, a dictionary of
+        top 4 nodes and their similarity scores will be returned.
+
+    Raises
+    ------
+    NetworkXUnfeasible
+        If `source` is an isolated node.
+
+    NodeNotFound
+        If `source` is not in `G`.
+
+    Notes
+    -----
+        The isolated nodes in `G` are ignored.
+
+    Examples
+    --------
+    >>> G = nx.star_graph(10)
+    >>> sim = nx.panther_similarity(G, 0)
+
+    References
+    ----------
+    .. [1] Zhang, J., Tang, J., Ma, C., Tong, H., Jing, Y., & Li, J.
+           Panther: Fast top-k similarity search on large networks.
+           In Proceedings of the ACM SIGKDD International Conference
+           on Knowledge Discovery and Data Mining (Vol. 2015-August, pp. 1445–1454).
+           Association for Computing Machinery. https://doi.org/10.1145/2783258.2783267.
+    """
+    import numpy as np
+
+    if source not in G:
+        raise nx.NodeNotFound(f"Source node {source} not in G")
+
+    isolates = set(nx.isolates(G))
+
+    if source in isolates:
+        raise nx.NetworkXUnfeasible(
+            f"Panther similarity is not defined for the isolated source node {source}."
+        )
+
+    # Isolated nodes can never share a path with ``source``, so drop them
+    # before generating random walks.
+    G = G.subgraph([node for node in G.nodes if node not in isolates]).copy()
+
+    num_nodes = G.number_of_nodes()
+    if num_nodes < k:
+        warnings.warn(
+            f"Number of nodes is {num_nodes}, but requested k is {k}. "
+            "Setting k to number of nodes."
+        )
+        k = num_nodes
+    # According to [1], they empirically determined
+    # a good value for ``eps`` to be sqrt( 1 / |E| )
+    if eps is None:
+        eps = np.sqrt(1.0 / G.number_of_edges())
+
+    inv_node_map = {name: index for index, name in enumerate(G.nodes)}
+    node_map = np.array(G)
+
+    # Calculate the sample size ``R`` for how many paths
+    # to randomly generate
+    t_choose_2 = math.comb(path_length, 2)
+    sample_size = int((c / eps**2) * (np.log2(t_choose_2) + 1 + np.log(1 / delta)))
+    index_map = {}
+    # Exhaust the generator purely for its side effect of filling index_map
+    # (node -> set of path indices containing that node).
+    _ = list(
+        generate_random_paths(
+            G, sample_size, path_length=path_length, index_map=index_map, weight=weight
+        )
+    )
+    S = np.zeros(num_nodes)
+
+    inv_sample_size = 1 / sample_size
+
+    source_paths = set(index_map[source])
+
+    # Calculate the path similarities
+    # between ``source`` (v) and ``node`` (v_j)
+    # using our inverted index mapping of
+    # vertices to paths
+    for node, paths in index_map.items():
+        # Only consider paths where both
+        # ``node`` and ``source`` are present
+        common_paths = source_paths.intersection(paths)
+        S[inv_node_map[node]] = len(common_paths) * inv_sample_size
+
+    # Retrieve top ``k`` similar
+    # Note: the below performed anywhere from 4-10x faster
+    # (depending on input sizes) vs the equivalent ``np.argsort(S)[::-1]``
+    top_k_unsorted = np.argpartition(S, -k)[-k:]
+    top_k_sorted = top_k_unsorted[np.argsort(S[top_k_unsorted])][::-1]
+
+    # Add back the similarity scores
+    top_k_with_val = dict(
+        zip(node_map[top_k_sorted].tolist(), S[top_k_sorted].tolist())
+    )
+
+    # Remove the self-similarity, so at most k - 1 nodes are returned when
+    # ``source`` itself ranks among the top k.
+    top_k_with_val.pop(source, None)
+    return top_k_with_val
+
+
+@np_random_state(5)
+@nx._dispatchable(edge_attrs="weight")
+def generate_random_paths(
+    G, sample_size, path_length=5, index_map=None, weight="weight", seed=None
+):
+    """Randomly generate `sample_size` paths of length `path_length`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A NetworkX graph
+    sample_size : integer
+        The number of paths to generate. This is ``R`` in [1]_.
+    path_length : integer (default = 5)
+        The maximum size of the path to randomly generate.
+        This is ``T`` in [1]_. According to the paper, ``T >= 5`` is
+        recommended.
+    index_map : dictionary, optional
+        If provided, this will be populated with the inverted
+        index of nodes mapped to the set of generated random path
+        indices within ``paths``.
+    weight : string or None, optional (default="weight")
+        The name of an edge attribute that holds the numerical value
+        used as a weight. If None then each edge has weight 1.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    paths : generator of lists
+        Generator of `sample_size` paths each with length `path_length`.
+
+    Examples
+    --------
+    Note that the return value is the list of paths:
+
+    >>> G = nx.star_graph(3)
+    >>> random_path = nx.generate_random_paths(G, 2)
+
+    By passing a dictionary into `index_map`, it will build an
+    inverted index mapping of nodes to the paths in which that node is present:
+
+    >>> G = nx.star_graph(3)
+    >>> index_map = {}
+    >>> random_path = nx.generate_random_paths(G, 3, index_map=index_map)
+    >>> paths_containing_node_0 = [
+    ...     random_path[path_idx] for path_idx in index_map.get(0, [])
+    ... ]
+
+    References
+    ----------
+    .. [1] Zhang, J., Tang, J., Ma, C., Tong, H., Jing, Y., & Li, J.
+           Panther: Fast top-k similarity search on large networks.
+           In Proceedings of the ACM SIGKDD International Conference
+           on Knowledge Discovery and Data Mining (Vol. 2015-August, pp. 1445–1454).
+           Association for Computing Machinery. https://doi.org/10.1145/2783258.2783267.
+    """
+    import numpy as np
+
+    # Support both the modern numpy Generator API (``integers``) and the
+    # legacy RandomState API (``randint``) for uniform node sampling.
+    randint_fn = (
+        seed.integers if isinstance(seed, np.random.Generator) else seed.randint
+    )
+
+    # Calculate transition probabilities between
+    # every pair of vertices according to Eq. (3)
+    adj_mat = nx.to_numpy_array(G, weight=weight)
+    # NOTE(review): a node with zero total (out-)weight produces inf here
+    # and an invalid probability row; callers (e.g. panther_similarity)
+    # appear to remove isolated nodes beforehand — confirm for other uses.
+    inv_row_sums = np.reciprocal(adj_mat.sum(axis=1)).reshape(-1, 1)
+    transition_probabilities = adj_mat * inv_row_sums
+
+    node_map = list(G)
+    num_nodes = G.number_of_nodes()
+
+    for path_index in range(sample_size):
+        # Sample current vertex v = v_i uniformly at random
+        node_index = randint_fn(num_nodes)
+        node = node_map[node_index]
+
+        # Add v into p_r and add p_r into the path set
+        # of v, i.e., P_v
+        path = [node]
+
+        # Build the inverted index (P_v) of vertices to paths
+        if index_map is not None:
+            if node in index_map:
+                index_map[node].add(path_index)
+            else:
+                index_map[node] = {path_index}
+
+        starting_index = node_index
+        for _ in range(path_length):
+            # Randomly sample a neighbor (v_j) according
+            # to transition probabilities from ``node`` (v) to its neighbors
+            nbr_index = seed.choice(
+                num_nodes, p=transition_probabilities[starting_index]
+            )
+
+            # Set current vertex (v = v_j)
+            starting_index = nbr_index
+
+            # Add v into p_r
+            nbr_node = node_map[nbr_index]
+            path.append(nbr_node)
+
+            # Add p_r into P_v
+            if index_map is not None:
+                if nbr_node in index_map:
+                    index_map[nbr_node].add(path_index)
+                else:
+                    index_map[nbr_node] = {path_index}
+
+        yield path
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/simple_paths.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/simple_paths.py
new file mode 100644
index 00000000..3605522f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/simple_paths.py
@@ -0,0 +1,950 @@
+from heapq import heappop, heappush
+from itertools import count
+
+import networkx as nx
+from networkx.algorithms.shortest_paths.weighted import _weight_function
+from networkx.utils import not_implemented_for, pairwise
+
+__all__ = [
+    "all_simple_paths",
+    "is_simple_path",
+    "shortest_simple_paths",
+    "all_simple_edge_paths",
+]
+
+
+@nx._dispatchable
+def is_simple_path(G, nodes):
+    """Returns True if and only if `nodes` form a simple path in `G`.
+
+    A *simple path* in a graph is a nonempty sequence of nodes in which
+    no node appears more than once in the sequence, and each adjacent
+    pair of nodes in the sequence is adjacent in the graph.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX graph.
+    nodes : list
+        A list of one or more nodes in the graph `G`.
+
+    Returns
+    -------
+    bool
+        Whether the given list of nodes represents a simple path in `G`.
+
+    Notes
+    -----
+    An empty list of nodes is not a path but a list of one node is a
+    path. Here's an explanation why.
+
+    This function operates on *node paths*. One could also consider
+    *edge paths*. There is a bijection between node paths and edge
+    paths.
+
+    The *length of a path* is the number of edges in the path, so a list
+    of nodes of length *n* corresponds to a path of length *n* - 1.
+    Thus the smallest edge path would be a list of zero edges, the empty
+    path. This corresponds to a list of one node.
+
+    To convert between a node path and an edge path, you can use code
+    like the following::
+
+        >>> from networkx.utils import pairwise
+        >>> nodes = [0, 1, 2, 3]
+        >>> edges = list(pairwise(nodes))
+        >>> edges
+        [(0, 1), (1, 2), (2, 3)]
+        >>> nodes = [edges[0][0]] + [v for u, v in edges]
+        >>> nodes
+        [0, 1, 2, 3]
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(4)
+    >>> nx.is_simple_path(G, [2, 3, 0])
+    True
+    >>> nx.is_simple_path(G, [0, 2])
+    False
+
+    """
+    # The empty list is not a valid path. Could also return
+    # NetworkXPointlessConcept here.
+    if len(nodes) == 0:
+        return False
+
+    # If the list is a single node, just check that the node is actually
+    # in the graph.
+    if len(nodes) == 1:
+        return nodes[0] in G
+
+    # check that all nodes in the list are in the graph, if at least one
+    # is not in the graph, then this is not a simple path
+    if not all(n in G for n in nodes):
+        return False
+
+    # If the list contains repeated nodes, then it's not a simple path
+    # (duplicate detection relies on nodes being hashable, which NetworkX
+    # requires of all nodes anyway).
+    if len(set(nodes)) != len(nodes):
+        return False
+
+    # Test that each adjacent pair of nodes is adjacent.
+    return all(v in G[u] for u, v in pairwise(nodes))
+
+
+@nx._dispatchable
+def all_simple_paths(G, source, target, cutoff=None):
+    """Generate all simple paths in the graph G from source to target.
+
+    A simple path is a path with no repeated nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node
+       Starting node for path
+
+    target : nodes
+       Single node or iterable of nodes at which to end path
+
+    cutoff : integer, optional
+        Depth to stop the search. Only paths of length <= cutoff are returned.
+
+    Returns
+    -------
+    path_generator: generator
+       A generator that produces lists of simple paths.  If there are no paths
+       between the source and target within the given cutoff the generator
+       produces no output. If it is possible to traverse the same sequence of
+       nodes in multiple ways, namely through parallel edges, then it will be
+       returned multiple times (once for each viable edge combination).
+
+    Examples
+    --------
+    This iterator generates lists of nodes::
+
+        >>> G = nx.complete_graph(4)
+        >>> for path in nx.all_simple_paths(G, source=0, target=3):
+        ...     print(path)
+        ...
+        [0, 1, 2, 3]
+        [0, 1, 3]
+        [0, 2, 1, 3]
+        [0, 2, 3]
+        [0, 3]
+
+    You can generate only those paths that are shorter than a certain
+    length by using the `cutoff` keyword argument::
+
+        >>> paths = nx.all_simple_paths(G, source=0, target=3, cutoff=2)
+        >>> print(list(paths))
+        [[0, 1, 3], [0, 2, 3], [0, 3]]
+
+    To get each path as the corresponding list of edges, you can use the
+    :func:`networkx.utils.pairwise` helper function::
+
+        >>> paths = nx.all_simple_paths(G, source=0, target=3)
+        >>> for path in map(nx.utils.pairwise, paths):
+        ...     print(list(path))
+        [(0, 1), (1, 2), (2, 3)]
+        [(0, 1), (1, 3)]
+        [(0, 2), (2, 1), (1, 3)]
+        [(0, 2), (2, 3)]
+        [(0, 3)]
+
+    Pass an iterable of nodes as target to generate all paths ending in any of several nodes::
+
+        >>> G = nx.complete_graph(4)
+        >>> for path in nx.all_simple_paths(G, source=0, target=[3, 2]):
+        ...     print(path)
+        ...
+        [0, 1, 2]
+        [0, 1, 2, 3]
+        [0, 1, 3]
+        [0, 1, 3, 2]
+        [0, 2]
+        [0, 2, 1, 3]
+        [0, 2, 3]
+        [0, 3]
+        [0, 3, 1, 2]
+        [0, 3, 2]
+
+    The singleton path from ``source`` to itself is considered a simple path and is
+    included in the results:
+
+        >>> G = nx.empty_graph(5)
+        >>> list(nx.all_simple_paths(G, source=0, target=0))
+        [[0]]
+
+        >>> G = nx.path_graph(3)
+        >>> list(nx.all_simple_paths(G, source=0, target={0, 1, 2}))
+        [[0], [0, 1], [0, 1, 2]]
+
+    Iterate over each path from the root nodes to the leaf nodes in a
+    directed acyclic graph using a functional programming approach::
+
+        >>> from itertools import chain
+        >>> from itertools import product
+        >>> from itertools import starmap
+        >>> from functools import partial
+        >>>
+        >>> chaini = chain.from_iterable
+        >>>
+        >>> G = nx.DiGraph([(0, 1), (1, 2), (0, 3), (3, 2)])
+        >>> roots = (v for v, d in G.in_degree() if d == 0)
+        >>> leaves = (v for v, d in G.out_degree() if d == 0)
+        >>> all_paths = partial(nx.all_simple_paths, G)
+        >>> list(chaini(starmap(all_paths, product(roots, leaves))))
+        [[0, 1, 2], [0, 3, 2]]
+
+    The same list computed using an iterative approach::
+
+        >>> G = nx.DiGraph([(0, 1), (1, 2), (0, 3), (3, 2)])
+        >>> roots = (v for v, d in G.in_degree() if d == 0)
+        >>> leaves = (v for v, d in G.out_degree() if d == 0)
+        >>> all_paths = []
+        >>> for root in roots:
+        ...     for leaf in leaves:
+        ...         paths = nx.all_simple_paths(G, root, leaf)
+        ...         all_paths.extend(paths)
+        >>> all_paths
+        [[0, 1, 2], [0, 3, 2]]
+
+    Iterate over each path from the root nodes to the leaf nodes in a
+    directed acyclic graph passing all leaves together to avoid unnecessary
+    compute::
+
+        >>> G = nx.DiGraph([(0, 1), (2, 1), (1, 3), (1, 4)])
+        >>> roots = (v for v, d in G.in_degree() if d == 0)
+        >>> leaves = [v for v, d in G.out_degree() if d == 0]
+        >>> all_paths = []
+        >>> for root in roots:
+        ...     paths = nx.all_simple_paths(G, root, leaves)
+        ...     all_paths.extend(paths)
+        >>> all_paths
+        [[0, 1, 3], [0, 1, 4], [2, 1, 3], [2, 1, 4]]
+
+    If parallel edges offer multiple ways to traverse a given sequence of
+    nodes, this sequence of nodes will be returned multiple times:
+
+        >>> G = nx.MultiDiGraph([(0, 1), (0, 1), (1, 2)])
+        >>> list(nx.all_simple_paths(G, 0, 2))
+        [[0, 1, 2], [0, 1, 2]]
+
+    Notes
+    -----
+    This algorithm uses a modified depth-first search to generate the
+    paths [1]_.  A single path can be found in $O(V+E)$ time but the
+    number of simple paths in a graph can be very large, e.g. $O(n!)$ in
+    the complete graph of order $n$.
+
+    This function does not check that a path exists between `source` and
+    `target`. For large graphs, this may result in very long runtimes.
+    Consider using `has_path` to check that a path exists between `source` and
+    `target` before calling this function on large graphs.
+
+    References
+    ----------
+    .. [1] R. Sedgewick, "Algorithms in C, Part 5: Graph Algorithms",
+       Addison Wesley Professional, 3rd ed., 2001.
+
+    See Also
+    --------
+    all_shortest_paths, shortest_path, has_path
+
+    """
+    # A node path is recovered from an edge path by taking each edge's head
+    # node; the trivial path ``[source]`` arises from the empty edge path.
+    for edge_path in all_simple_edge_paths(G, source, target, cutoff):
+        yield [source] + [edge[1] for edge in edge_path]
+
+
+@nx._dispatchable
+def all_simple_edge_paths(G, source, target, cutoff=None):
+    """Generate lists of edges for all simple paths in G from source to target.
+
+    A simple path is a path with no repeated nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node
+       Starting node for path
+
+    target : nodes
+       Single node or iterable of nodes at which to end path
+
+    cutoff : integer, optional
+        Depth to stop the search. Only paths of length <= cutoff are returned.
+
+    Returns
+    -------
+    path_generator: generator
+       A generator that produces lists of simple paths.  If there are no paths
+       between the source and target within the given cutoff the generator
+       produces no output.
+       For multigraphs, the list of edges have elements of the form `(u,v,k)`.
+       Where `k` corresponds to the edge key.
+
+    Examples
+    --------
+
+    Print the simple path edges of a Graph::
+
+        >>> g = nx.Graph([(1, 2), (2, 4), (1, 3), (3, 4)])
+        >>> for path in sorted(nx.all_simple_edge_paths(g, 1, 4)):
+        ...     print(path)
+        [(1, 2), (2, 4)]
+        [(1, 3), (3, 4)]
+
+    Print the simple path edges of a MultiGraph. Returned edges come with
+    their associated keys::
+
+        >>> mg = nx.MultiGraph()
+        >>> mg.add_edge(1, 2, key="k0")
+        'k0'
+        >>> mg.add_edge(1, 2, key="k1")
+        'k1'
+        >>> mg.add_edge(2, 3, key="k0")
+        'k0'
+        >>> for path in sorted(nx.all_simple_edge_paths(mg, 1, 3)):
+        ...     print(path)
+        [(1, 2, 'k0'), (2, 3, 'k0')]
+        [(1, 2, 'k1'), (2, 3, 'k0')]
+
+    When ``source`` is one of the targets, the empty path starting and ending at
+    ``source`` without traversing any edge is considered a valid simple edge path
+    and is included in the results:
+
+        >>> G = nx.Graph()
+        >>> G.add_node(0)
+        >>> paths = list(nx.all_simple_edge_paths(G, 0, 0))
+        >>> for path in paths:
+        ...     print(path)
+        []
+        >>> len(paths)
+        1
+
+
+    Notes
+    -----
+    This algorithm uses a modified depth-first search to generate the
+    paths [1]_.  A single path can be found in $O(V+E)$ time but the
+    number of simple paths in a graph can be very large, e.g. $O(n!)$ in
+    the complete graph of order $n$.
+
+    References
+    ----------
+    .. [1] R. Sedgewick, "Algorithms in C, Part 5: Graph Algorithms",
+       Addison Wesley Professional, 3rd ed., 2001.
+
+    See Also
+    --------
+    all_shortest_paths, shortest_path, all_simple_paths
+
+    """
+    if source not in G:
+        raise nx.NodeNotFound(f"source node {source} not in graph")
+
+    # ``target`` may be a single node or an iterable of nodes; a node that
+    # is itself in G takes precedence over interpreting it as an iterable.
+    if target in G:
+        targets = {target}
+    else:
+        try:
+            targets = set(target)
+        except TypeError as err:
+            raise nx.NodeNotFound(f"target node {target} not in graph") from err
+
+    # A simple path visits at most len(G) nodes, i.e. len(G) - 1 edges.
+    cutoff = cutoff if cutoff is not None else len(G) - 1
+
+    # A negative cutoff or an empty target set can never yield a path.
+    if cutoff >= 0 and targets:
+        yield from _all_simple_edge_paths(G, source, targets, cutoff)
+
+
def _all_simple_edge_paths(G, source, targets, cutoff):
    """Yield every simple edge path from ``source`` to any node in ``targets``.

    Internal worker for :func:`all_simple_edge_paths`; inputs are assumed
    validated: ``targets`` is a set of target nodes and ``cutoff`` is the
    maximum number of edges allowed in a yielded path.
    """
    # We simulate recursion with a stack, keeping the current path being explored
    # and the outgoing edge iterators at each point in the stack.
    # To avoid unnecessary checks, the loop is structured in a way such that a path
    # is considered for yielding only after a new node/edge is added.
    # We bootstrap the search by adding a dummy iterator to the stack that only yields
    # a dummy edge to source (so that the trivial path has a chance of being included).

    # Multigraph edges carry their key so parallel edges yield distinct paths.
    get_edges = (
        (lambda node: G.edges(node, keys=True))
        if G.is_multigraph()
        else (lambda node: G.edges(node))
    )

    # The current_path is a dictionary that maps nodes in the path to the edge that was
    # used to enter that node (instead of a list of edges) because we want both a fast
    # membership test for nodes in the path and the preservation of insertion order.
    current_path = {None: None}
    stack = [iter([(None, source)])]

    while stack:
        # 1. Try to extend the current path.
        # Skip edges leading to nodes already on the path (keeps the path simple).
        next_edge = next((e for e in stack[-1] if e[1] not in current_path), None)
        if next_edge is None:
            # All edges of the last node in the current path have been explored.
            stack.pop()
            current_path.popitem()
            continue
        previous_node, next_node, *_ = next_edge

        # 2. Check if we've reached a target.
        if next_node in targets:
            yield (list(current_path.values()) + [next_edge])[2:]  # remove dummy edge

        # 3. Only expand the search through the next node if it makes sense.
        # (depth is below cutoff AND some target remains outside the current path)
        if len(current_path) - 1 < cutoff and (
            targets - current_path.keys() - {next_node}
        ):
            current_path[next_node] = next_edge
            stack.append(iter(get_edges(next_node)))
+
+
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def shortest_simple_paths(G, source, target, weight=None):
    """Generate all simple paths in the graph G from source to target,
       starting from shortest ones.

    A simple path is a path with no repeated nodes.

    If a weighted shortest path search is to be used, no negative weights
    are allowed.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Starting node for path

    target : node
       Ending node for path

    weight : string or function
        If it is a string, it is the name of the edge attribute to be
        used as a weight.

        If it is a function, the weight of an edge is the value returned
        by the function. The function must accept exactly three positional
        arguments: the two endpoints of an edge and the dictionary of edge
        attributes for that edge. The function must return a number or None.
        The weight function can be used to hide edges by returning None.
        So ``weight = lambda u, v, d: 1 if d['color']=="red" else None``
        will find the shortest red path.

        If None all edges are considered to have unit weight. Default
        value None.

    Returns
    -------
    path_generator: generator
       A generator that produces lists of simple paths, in order from
       shortest to longest.

    Raises
    ------
    NetworkXNoPath
       If no path exists between source and target.

    NetworkXError
       If source or target nodes are not in the input graph.

    NetworkXNotImplemented
       If the input graph is a Multi[Di]Graph.

    Examples
    --------

    >>> G = nx.cycle_graph(7)
    >>> paths = list(nx.shortest_simple_paths(G, 0, 3))
    >>> print(paths)
    [[0, 1, 2, 3], [0, 6, 5, 4, 3]]

    You can use this function to efficiently compute the k shortest/best
    paths between two nodes.

    >>> from itertools import islice
    >>> def k_shortest_paths(G, source, target, k, weight=None):
    ...     return list(
    ...         islice(nx.shortest_simple_paths(G, source, target, weight=weight), k)
    ...     )
    >>> for path in k_shortest_paths(G, 0, 3, 2):
    ...     print(path)
    [0, 1, 2, 3]
    [0, 6, 5, 4, 3]

    Notes
    -----
    This procedure is based on algorithm by Jin Y. Yen [1]_.  Finding
    the first $K$ paths requires $O(KN^3)$ operations.

    See Also
    --------
    all_shortest_paths
    shortest_path
    all_simple_paths

    References
    ----------
    .. [1] Jin Y. Yen, "Finding the K Shortest Loopless Paths in a
       Network", Management Science, Vol. 17, No. 11, Theory Series
       (Jul., 1971), pp. 712-716.

    """
    if source not in G:
        raise nx.NodeNotFound(f"source node {source} not in graph")

    if target not in G:
        raise nx.NodeNotFound(f"target node {target} not in graph")

    if weight is None:
        # Unweighted search: a path's length is its node count, found via
        # bidirectional BFS.
        length_func = len
        shortest_path_func = _bidirectional_shortest_path
    else:
        wt = _weight_function(G, weight)

        def length_func(path):
            # Weighted length: sum edge weights over consecutive node pairs.
            return sum(
                wt(u, v, G.get_edge_data(u, v)) for (u, v) in zip(path, path[1:])
            )

        shortest_path_func = _bidirectional_dijkstra

    # Yen's algorithm: listA holds the paths already yielded (shortest first),
    # listB is a priority queue of candidate paths not yet yielded.
    listA = []
    listB = PathBuffer()
    prev_path = None
    while True:
        if not prev_path:
            # First iteration: seed the candidate queue with the single
            # shortest path.
            length, path = shortest_path_func(G, source, target, weight=weight)
            listB.push(length, path)
        else:
            # Derive new candidates by deviating ("spurring") from each prefix
            # (the "root") of the most recently yielded path.
            ignore_nodes = set()
            ignore_edges = set()
            for i in range(1, len(prev_path)):
                root = prev_path[:i]
                root_length = length_func(root)
                # Ban the continuation edge of every known path that shares
                # this root, so the spur cannot recreate a found path.
                for path in listA:
                    if path[:i] == root:
                        ignore_edges.add((path[i - 1], path[i]))
                try:
                    length, spur = shortest_path_func(
                        G,
                        root[-1],
                        target,
                        ignore_nodes=ignore_nodes,
                        ignore_edges=ignore_edges,
                        weight=weight,
                    )
                    path = root[:-1] + spur
                    listB.push(root_length + length, path)
                except nx.NetworkXNoPath:
                    pass
                # Root nodes must not reappear in spurs of longer roots.
                ignore_nodes.add(root[-1])

        if listB:
            # Yield the cheapest candidate; it becomes the next deviation root.
            path = listB.pop()
            yield path
            listA.append(path)
            prev_path = path
        else:
            break
+
+
class PathBuffer:
    """Min-heap of candidate paths with duplicate suppression.

    ``pop`` returns paths in order of increasing cost; ties are broken by
    insertion order via a monotonically increasing counter so that paths
    (which are lists, and therefore unorderable against each other) are
    never compared directly.
    """

    def __init__(self):
        # Set of tuple(path) giving O(1) duplicate detection.
        self.paths = set()
        # Heap of (cost, tie_breaker, path) entries.
        self.sortedpaths = []
        self.counter = count()

    def __len__(self):
        return len(self.sortedpaths)

    def push(self, cost, path):
        """Queue *path* with *cost* unless an identical path is already queued."""
        key = tuple(path)
        if key in self.paths:
            return
        self.paths.add(key)
        heappush(self.sortedpaths, (cost, next(self.counter), path))

    def pop(self):
        """Remove and return the cheapest queued path."""
        _, _, path = heappop(self.sortedpaths)
        self.paths.remove(tuple(path))
        return path
+
+
def _bidirectional_shortest_path(
    G, source, target, ignore_nodes=None, ignore_edges=None, weight=None
):
    """Returns the shortest path between source and target ignoring
       nodes and edges in the containers ignore_nodes and ignore_edges.

    This is a custom modification of the standard bidirectional shortest
    path implementation at networkx.algorithms.unweighted

    Parameters
    ----------
    G : NetworkX graph

    source : node
       starting node for path

    target : node
       ending node for path

    ignore_nodes : container of nodes
       nodes to ignore, optional

    ignore_edges : container of edges
       edges to ignore, optional

    weight : None
       This function accepts a weight argument for convenience of
       shortest_simple_paths function. It will be ignored.

    Returns
    -------
    path: list
       List of nodes in a path from source to target.

    Raises
    ------
    NetworkXNoPath
       If no path exists between source and target.

    See Also
    --------
    shortest_path

    """
    # The helper performs the actual meet-in-the-middle search.
    pred, succ, meeting_node = _bidirectional_pred_succ(
        G, source, target, ignore_nodes, ignore_edges
    )

    # Walk forward from the meeting node to the target via succ.
    forward = []
    node = meeting_node
    while node is not None:
        forward.append(node)
        node = succ[node]

    # Walk backward from the meeting node to the source via pred,
    # then flip so the prefix runs source -> meeting node.
    backward = []
    node = pred[meeting_node]
    while node is not None:
        backward.append(node)
        node = pred[node]
    backward.reverse()

    path = backward + forward
    return len(path), path
+
+
def _bidirectional_pred_succ(G, source, target, ignore_nodes=None, ignore_edges=None):
    """Bidirectional shortest path helper.
    Returns (pred,succ,w) where
    pred is a dictionary of predecessors from w to the source, and
    succ is a dictionary of successors from w to the target.
    """
    # does BFS from both source and target and meets in the middle
    if ignore_nodes and (source in ignore_nodes or target in ignore_nodes):
        raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
    if target == source:
        # Trivial case: both searches start (and therefore meet) at one node.
        return ({target: None}, {source: None}, source)

    # handle either directed or undirected
    if G.is_directed():
        Gpred = G.predecessors
        Gsucc = G.successors
    else:
        Gpred = G.neighbors
        Gsucc = G.neighbors

    # support optional nodes filter
    if ignore_nodes:

        def filter_iter(nodes):
            # Wrap a neighbor iterator so ignored nodes are skipped.
            def iterate(v):
                for w in nodes(v):
                    if w not in ignore_nodes:
                        yield w

            return iterate

        Gpred = filter_iter(Gpred)
        Gsucc = filter_iter(Gsucc)

    # support optional edges filter
    if ignore_edges:
        if G.is_directed():
            # Directed case: orientation matters, so the ignored-edge check is
            # (w, v) when scanning predecessors and (v, w) for successors.

            def filter_pred_iter(pred_iter):
                def iterate(v):
                    for w in pred_iter(v):
                        if (w, v) not in ignore_edges:
                            yield w

                return iterate

            def filter_succ_iter(succ_iter):
                def iterate(v):
                    for w in succ_iter(v):
                        if (v, w) not in ignore_edges:
                            yield w

                return iterate

            Gpred = filter_pred_iter(Gpred)
            Gsucc = filter_succ_iter(Gsucc)

        else:
            # Undirected case: an ignored edge blocks travel in either direction.

            def filter_iter(nodes):
                def iterate(v):
                    for w in nodes(v):
                        if (v, w) not in ignore_edges and (w, v) not in ignore_edges:
                            yield w

                return iterate

            Gpred = filter_iter(Gpred)
            Gsucc = filter_iter(Gsucc)

    # predecessor and successors in search
    pred = {source: None}
    succ = {target: None}

    # initialize fringes, start with forward
    forward_fringe = [source]
    reverse_fringe = [target]

    # Expand BFS levels alternately, always growing the smaller frontier;
    # stop as soon as some node has been reached from both directions.
    while forward_fringe and reverse_fringe:
        if len(forward_fringe) <= len(reverse_fringe):
            this_level = forward_fringe
            forward_fringe = []
            for v in this_level:
                for w in Gsucc(v):
                    if w not in pred:
                        forward_fringe.append(w)
                        pred[w] = v
                    if w in succ:
                        # found path
                        return pred, succ, w
        else:
            this_level = reverse_fringe
            reverse_fringe = []
            for v in this_level:
                for w in Gpred(v):
                    if w not in succ:
                        succ[w] = v
                        reverse_fringe.append(w)
                    if w in pred:
                        # found path
                        return pred, succ, w

    # One frontier emptied without the searches meeting: no path exists.
    raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
+
+
def _bidirectional_dijkstra(
    G, source, target, weight="weight", ignore_nodes=None, ignore_edges=None
):
    """Dijkstra's algorithm for shortest paths using bidirectional search.

    This function returns the shortest path between source and target
    ignoring nodes and edges in the containers ignore_nodes and
    ignore_edges.

    This is a custom modification of the standard Dijkstra bidirectional
    shortest path implementation at networkx.algorithms.weighted

    Parameters
    ----------
    G : NetworkX graph

    source : node
        Starting node.

    target : node
        Ending node.

    weight: string, function, optional (default='weight')
        Edge data key or weight function corresponding to the edge weight
        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number or None to indicate a hidden edge.

    ignore_nodes : container of nodes
        nodes to ignore, optional

    ignore_edges : container of edges
        edges to ignore, optional

    Returns
    -------
    length : number
        Shortest path length.

    path : list
        List of nodes in a shortest path from source to target.

    Raises
    ------
    NetworkXNoPath
        If no path exists between source and target.

    Notes
    -----
    Edge weight attributes must be numerical.
    Distances are calculated as sums of weighted edges traversed.

    The weight function can be used to hide edges by returning None.
    So ``weight = lambda u, v, d: 1 if d['color']=="red" else None``
    will find the shortest red path.

    In practice  bidirectional Dijkstra is much more than twice as fast as
    ordinary Dijkstra.

    Ordinary Dijkstra expands nodes in a sphere-like manner from the
    source. The radius of this sphere will eventually be the length
    of the shortest path. Bidirectional Dijkstra will expand nodes
    from both the source and the target, making two spheres of half
    this radius. Volume of the first sphere is pi*r*r while the
    others are 2*pi*r/2*r/2, making up half the volume.

    This algorithm is not guaranteed to work if edge weights
    are negative or are floating point numbers
    (overflows and roundoff errors can cause problems).

    See Also
    --------
    shortest_path
    shortest_path_length
    """
    if ignore_nodes and (source in ignore_nodes or target in ignore_nodes):
        raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
    if source == target:
        if source not in G:
            raise nx.NodeNotFound(f"Node {source} not in graph")
        # Trivial zero-length path.
        return (0, [source])

    # handle either directed or undirected
    if G.is_directed():
        Gpred = G.predecessors
        Gsucc = G.successors
    else:
        Gpred = G.neighbors
        Gsucc = G.neighbors

    # support optional nodes filter
    if ignore_nodes:

        def filter_iter(nodes):
            # Wrap a neighbor iterator so ignored nodes are skipped.
            def iterate(v):
                for w in nodes(v):
                    if w not in ignore_nodes:
                        yield w

            return iterate

        Gpred = filter_iter(Gpred)
        Gsucc = filter_iter(Gsucc)

    # support optional edges filter
    if ignore_edges:
        if G.is_directed():
            # Directed case: ignored edge orientation is (w, v) when scanning
            # predecessors and (v, w) for successors.

            def filter_pred_iter(pred_iter):
                def iterate(v):
                    for w in pred_iter(v):
                        if (w, v) not in ignore_edges:
                            yield w

                return iterate

            def filter_succ_iter(succ_iter):
                def iterate(v):
                    for w in succ_iter(v):
                        if (v, w) not in ignore_edges:
                            yield w

                return iterate

            Gpred = filter_pred_iter(Gpred)
            Gsucc = filter_succ_iter(Gsucc)

        else:
            # Undirected case: an ignored edge blocks travel either way.

            def filter_iter(nodes):
                def iterate(v):
                    for w in nodes(v):
                        if (v, w) not in ignore_edges and (w, v) not in ignore_edges:
                            yield w

                return iterate

            Gpred = filter_iter(Gpred)
            Gsucc = filter_iter(Gsucc)

    wt = _weight_function(G, weight)
    push = heappush
    pop = heappop
    # Init:   Forward             Backward
    dists = [{}, {}]  # dictionary of final distances
    paths = [{source: [source]}, {target: [target]}]  # dictionary of paths
    fringe = [[], []]  # heap of (distance, node) tuples for
    # extracting next node to expand
    seen = [{source: 0}, {target: 0}]  # dictionary of distances to
    # nodes seen
    c = count()  # tie-breaker so heap entries never compare nodes directly
    # initialize fringe heap
    push(fringe[0], (0, next(c), source))
    push(fringe[1], (0, next(c), target))
    # neighs for extracting correct neighbor information
    neighs = [Gsucc, Gpred]
    # variables to hold shortest discovered path
    # finaldist = 1e30000
    finalpath = []
    dir = 1
    while fringe[0] and fringe[1]:
        # choose direction
        # dir == 0 is forward direction and dir == 1 is back
        dir = 1 - dir
        # extract closest to expand
        (dist, _, v) = pop(fringe[dir])
        if v in dists[dir]:
            # Shortest path to v has already been found
            continue
        # update distance
        dists[dir][v] = dist  # equal to seen[dir][v]
        if v in dists[1 - dir]:
            # if we have scanned v in both directions we are done
            # we have now discovered the shortest path
            # (finalpath/finaldist were recorded when v was first seen from
            # both sides during relaxation below)
            return (finaldist, finalpath)

        for w in neighs[dir](v):
            if dir == 0:  # forward
                minweight = wt(v, w, G.get_edge_data(v, w))
            else:  # back, must remember to change v,w->w,v
                minweight = wt(w, v, G.get_edge_data(w, v))
            if minweight is None:
                # Hidden edge (weight function returned None): skip it.
                continue
            vwLength = dists[dir][v] + minweight

            if w in dists[dir]:
                if vwLength < dists[dir][w]:
                    raise ValueError("Contradictory paths found: negative weights?")
            elif w not in seen[dir] or vwLength < seen[dir][w]:
                # relaxing
                seen[dir][w] = vwLength
                push(fringe[dir], (vwLength, next(c), w))
                paths[dir][w] = paths[dir][v] + [w]
                if w in seen[0] and w in seen[1]:
                    # see if this path is better than the already
                    # discovered shortest path
                    totaldist = seen[0][w] + seen[1][w]
                    if finalpath == [] or finaldist > totaldist:
                        finaldist = totaldist
                        revpath = paths[1][w][:]
                        revpath.reverse()
                        finalpath = paths[0][w] + revpath[1:]
    raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/smallworld.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/smallworld.py
new file mode 100644
index 00000000..456a4ca1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/smallworld.py
@@ -0,0 +1,404 @@
+"""Functions for estimating the small-world-ness of graphs.
+
+A small world network is characterized by a small average shortest path length,
+and a large clustering coefficient.
+
+Small-worldness is commonly measured with the coefficient sigma or omega.
+
+Both coefficients compare the average clustering coefficient and shortest path
+length of a given graph against the same quantities for an equivalent random
+or lattice graph.
+
+For more information, see the Wikipedia article on small-world network [1]_.
+
.. [1] Small-world network: https://en.wikipedia.org/wiki/Small-world_network
+
+"""
+
+import networkx as nx
+from networkx.utils import not_implemented_for, py_random_state
+
+__all__ = ["random_reference", "lattice_reference", "sigma", "omega"]
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@py_random_state(3)
@nx._dispatchable(returns_graph=True)
def random_reference(G, niter=1, connectivity=True, seed=None):
    """Compute a random graph by swapping edges of a given graph.

    Parameters
    ----------
    G : graph
        An undirected graph with 4 or more nodes.

    niter : integer (optional, default=1)
        An edge is rewired approximately `niter` times.

    connectivity : boolean (optional, default=True)
        When True, ensure connectivity for the randomized graph.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    G : graph
        The randomized graph.

    Raises
    ------
    NetworkXError
        If there are fewer than 4 nodes or 2 edges in `G`

    Notes
    -----
    The implementation is adapted from the algorithm by Maslov and Sneppen
    (2002) [1]_.

    References
    ----------
    .. [1] Maslov, Sergei, and Kim Sneppen.
           "Specificity and stability in topology of protein networks."
           Science 296.5569 (2002): 910-913.
    """
    if len(G) < 4:
        raise nx.NetworkXError("Graph has fewer than four nodes.")
    if len(G.edges) < 2:
        # Fixed typo in the error message ("fewer that" -> "fewer than").
        raise nx.NetworkXError("Graph has fewer than 2 edges")

    from networkx.utils import cumulative_distribution, discrete_sequence

    local_conn = nx.connectivity.local_edge_connectivity

    # Work on a copy so the caller's graph is untouched.
    G = G.copy()
    keys, degrees = zip(*G.degree())  # keys, degree
    cdf = cumulative_distribution(degrees)  # cdf of degree
    nnodes = len(G)
    nedges = nx.number_of_edges(G)
    niter = niter * nedges
    # Attempt budget per rewiring round, scaled by graph density.
    ntries = int(nnodes * nedges / (nnodes * (nnodes - 1) / 2))
    swapcount = 0

    for _ in range(niter):
        n = 0
        while n < ntries:
            # pick two random edges without creating edge list
            # choose source node indices from discrete distribution
            # (degree-weighted, so stubs are chosen uniformly at random)
            (ai, ci) = discrete_sequence(2, cdistribution=cdf, seed=seed)
            if ai == ci:
                continue  # same source, skip
            a = keys[ai]  # convert index to label
            c = keys[ci]
            # choose target uniformly from neighbors
            b = seed.choice(list(G.neighbors(a)))
            d = seed.choice(list(G.neighbors(c)))
            if b in [a, c, d] or d in [a, b, c]:
                continue  # all vertices should be different

            # don't create parallel edges
            if (d not in G[a]) and (b not in G[c]):
                # Swap: replace (a, b) and (c, d) with (a, d) and (c, b),
                # preserving every node's degree.
                G.add_edge(a, d)
                G.add_edge(c, b)
                G.remove_edge(a, b)
                G.remove_edge(c, d)

                # Check if the graph is still connected
                if connectivity and local_conn(G, a, b) == 0:
                    # Not connected, revert the swap
                    G.remove_edge(a, d)
                    G.remove_edge(c, b)
                    G.add_edge(a, b)
                    G.add_edge(c, d)
                else:
                    swapcount += 1
                    break
            n += 1
    return G
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@py_random_state(4)
@nx._dispatchable(returns_graph=True)
def lattice_reference(G, niter=5, D=None, connectivity=True, seed=None):
    """Latticize the given graph by swapping edges.

    Parameters
    ----------
    G : graph
        An undirected graph.

    niter : integer (optional, default=5)
        An edge is rewired approximately niter times.

    D : numpy.array (optional, default=None)
        Distance to the diagonal matrix.

    connectivity : boolean (optional, default=True)
        Ensure connectivity for the latticized graph when set to True.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    G : graph
        The latticized graph.

    Raises
    ------
    NetworkXError
        If there are fewer than 4 nodes or 2 edges in `G`

    Notes
    -----
    The implementation is adapted from the algorithm by Sporns et al. [1]_.
    which is inspired from the original work by Maslov and Sneppen(2002) [2]_.

    References
    ----------
    .. [1] Sporns, Olaf, and Jonathan D. Zwi.
       "The small world of the cerebral cortex."
       Neuroinformatics 2.2 (2004): 145-162.
    .. [2] Maslov, Sergei, and Kim Sneppen.
       "Specificity and stability in topology of protein networks."
       Science 296.5569 (2002): 910-913.
    """
    import numpy as np

    from networkx.utils import cumulative_distribution, discrete_sequence

    local_conn = nx.connectivity.local_edge_connectivity

    if len(G) < 4:
        raise nx.NetworkXError("Graph has fewer than four nodes.")
    if len(G.edges) < 2:
        # Fixed typo in the error message ("fewer that" -> "fewer than").
        raise nx.NetworkXError("Graph has fewer than 2 edges")
    # Instead of choosing uniformly at random from a generated edge list,
    # this algorithm chooses nonuniformly from the set of nodes with
    # probability weighted by degree.
    G = G.copy()
    keys, degrees = zip(*G.degree())  # keys, degree
    cdf = cumulative_distribution(degrees)  # cdf of degree

    nnodes = len(G)
    nedges = nx.number_of_edges(G)
    if D is None:
        # Default D[i, j]: circular distance of (i, j) from the diagonal.
        D = np.zeros((nnodes, nnodes))
        un = np.arange(1, nnodes)
        um = np.arange(nnodes - 1, 0, -1)
        u = np.append((0,), np.where(un < um, un, um))

        for v in range(int(np.ceil(nnodes / 2))):
            D[nnodes - v - 1, :] = np.append(u[v + 1 :], u[: v + 1])
            D[v, :] = D[nnodes - v - 1, :][::-1]

    niter = niter * nedges
    # maximal number of rewiring attempts per 'niter'
    max_attempts = int(nnodes * nedges / (nnodes * (nnodes - 1) / 2))

    for _ in range(niter):
        n = 0
        while n < max_attempts:
            # pick two random edges without creating edge list
            # choose source node indices from discrete distribution
            (ai, ci) = discrete_sequence(2, cdistribution=cdf, seed=seed)
            if ai == ci:
                continue  # same source, skip
            a = keys[ai]  # convert index to label
            c = keys[ci]
            # choose target uniformly from neighbors
            b = seed.choice(list(G.neighbors(a)))
            d = seed.choice(list(G.neighbors(c)))

            if b in [a, c, d] or d in [a, b, c]:
                continue  # all vertices should be different

            # Resolve matrix indices only for candidates that passed the
            # distinctness check above; list.index is O(n) per call.
            bi = keys.index(b)
            di = keys.index(d)

            # don't create parallel edges
            if (d not in G[a]) and (b not in G[c]):
                if D[ai, bi] + D[ci, di] >= D[ai, ci] + D[bi, di]:
                    # only swap if we get closer to the diagonal
                    G.add_edge(a, d)
                    G.add_edge(c, b)
                    G.remove_edge(a, b)
                    G.remove_edge(c, d)

                    # Check if the graph is still connected
                    if connectivity and local_conn(G, a, b) == 0:
                        # Not connected, revert the swap
                        G.remove_edge(a, d)
                        G.remove_edge(c, b)
                        G.add_edge(a, b)
                        G.add_edge(c, d)
                    else:
                        break
            n += 1

    return G
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@py_random_state(3)
@nx._dispatchable
def sigma(G, niter=100, nrand=10, seed=None):
    """Returns the small-world coefficient (sigma) of the given graph.

    The small-world coefficient is defined as:
    sigma = C/Cr / L/Lr
    where C and L are respectively the average clustering coefficient and
    average shortest path length of G. Cr and Lr are respectively the average
    clustering coefficient and average shortest path length of an equivalent
    random graph.

    A graph is commonly classified as small-world if sigma>1.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.
    niter : integer (optional, default=100)
        Approximate number of rewiring per edge to compute the equivalent
        random graph.
    nrand : integer (optional, default=10)
        Number of random graphs generated to compute the average clustering
        coefficient (Cr) and average shortest path length (Lr).
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    sigma : float
        The small-world coefficient of G.

    Notes
    -----
    The implementation is adapted from Humphries et al. [1]_ [2]_.

    References
    ----------
    .. [1] The brainstem reticular formation is a small-world, not scale-free,
           network M. D. Humphries, K. Gurney and T. J. Prescott,
           Proc. Roy. Soc. B 2006 273, 503-511, doi:10.1098/rspb.2005.3354.
    .. [2] Humphries and Gurney (2008).
           "Network 'Small-World-Ness': A Quantitative Method for Determining
           Canonical Network Equivalence".
           PLoS One. 3 (4). PMID 18446219. doi:10.1371/journal.pone.0002051.
    """
    import numpy as np

    # Sample the clustering coefficient and average shortest path length of
    # nrand randomized versions of G.
    rand_clustering = []
    rand_path_length = []
    for _ in range(nrand):
        Gr = random_reference(G, niter=niter, seed=seed)
        rand_clustering.append(nx.transitivity(Gr))
        rand_path_length.append(nx.average_shortest_path_length(Gr))

    C = nx.transitivity(G)
    L = nx.average_shortest_path_length(G)
    Cr = np.mean(rand_clustering)
    Lr = np.mean(rand_path_length)

    # Normalized clustering over normalized path length.
    return float((C / Cr) / (L / Lr))
+
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@py_random_state(3)
@nx._dispatchable
def omega(G, niter=5, nrand=10, seed=None):
    """Returns the small-world coefficient (omega) of a graph

    The small-world coefficient of a graph G is:

    omega = Lr/L - C/Cl

    where C and L are respectively the average clustering coefficient and
    average shortest path length of G. Lr is the average shortest path length
    of an equivalent random graph and Cl is the average clustering coefficient
    of an equivalent lattice graph.

    The small-world coefficient (omega) measures how much G is like a lattice
    or a random graph. Negative values mean G is similar to a lattice whereas
    positive values mean G is a random graph.
    Values close to 0 mean that G has small-world characteristics.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    niter: integer (optional, default=5)
        Approximate number of rewiring per edge to compute the equivalent
        random graph.

    nrand: integer (optional, default=10)
        Number of random graphs generated to compute the maximal clustering
        coefficient (Cr) and average shortest path length (Lr).

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.


    Returns
    -------
    omega : float
        The small-world coefficient (omega)

    Notes
    -----
    The implementation is adapted from the algorithm by Telesford et al. [1]_.

    References
    ----------
    .. [1] Telesford, Joyce, Hayasaka, Burdette, and Laurienti (2011).
           "The Ubiquity of Small-World Networks".
           Brain Connectivity. 1 (0038): 367-75.  PMC 3604768. PMID 22432451.
           doi:10.1089/brain.2011.0038.
    """
    import numpy as np

    # Cl starts at G's own clustering coefficient and is replaced whenever a
    # generated lattice reference achieves a higher value.
    Cl = nx.average_clustering(G)

    niter_lattice_reference = niter
    niter_random_reference = niter * 2

    # Average shortest path lengths of the random references.
    rand_path_lengths = []
    for _ in range(nrand):
        # Random reference for the Lr term.
        Gr = random_reference(G, niter=niter_random_reference, seed=seed)
        rand_path_lengths.append(nx.average_shortest_path_length(Gr))

        # Lattice reference for the Cl term; keep the best clustering seen.
        Gl = lattice_reference(G, niter=niter_lattice_reference, seed=seed)
        Cl = max(Cl, nx.average_clustering(Gl))

    C = nx.average_clustering(G)
    L = nx.average_shortest_path_length(G)
    Lr = np.mean(rand_path_lengths)

    return float((Lr / L) - (C / Cl))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/smetric.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/smetric.py
new file mode 100644
index 00000000..d985aa80
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/smetric.py
@@ -0,0 +1,30 @@
+import networkx as nx
+
+__all__ = ["s_metric"]
+
+
+@nx._dispatchable
+def s_metric(G):
+    """Returns the s-metric [1]_ of graph.
+
+    The s-metric is defined as the sum of the products ``deg(u) * deg(v)``
+    for every edge ``(u, v)`` in `G`.
+
+    Parameters
+    ----------
+    G : graph
+        The graph used to compute the s-metric.
+
+    Returns
+    -------
+    s : float
+        The s-metric of the graph.
+
+    References
+    ----------
+    .. [1] Lun Li, David Alderson, John C. Doyle, and Walter Willinger,
+           Towards a Theory of Scale-Free Graphs:
+           Definition, Properties, and  Implications (Extended Version), 2005.
+           https://arxiv.org/abs/cond-mat/0501169
+    """
+    return float(sum(G.degree(u) * G.degree(v) for (u, v) in G.edges()))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/sparsifiers.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/sparsifiers.py
new file mode 100644
index 00000000..59322372
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/sparsifiers.py
@@ -0,0 +1,296 @@
+"""Functions for computing sparsifiers of graphs."""
+
+import math
+
+import networkx as nx
+from networkx.utils import not_implemented_for, py_random_state
+
+__all__ = ["spanner"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@py_random_state(3)
+@nx._dispatchable(edge_attrs="weight", returns_graph=True)
+def spanner(G, stretch, weight=None, seed=None):
+    """Returns a spanner of the given graph with the given stretch.
+
+    A spanner of a graph G = (V, E) with stretch t is a subgraph
+    H = (V, E_S) such that E_S is a subset of E and the distance between
+    any pair of nodes in H is at most t times the distance between the
+    nodes in G.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected simple graph.
+
+    stretch : float
+        The stretch of the spanner.
+
+    weight : object
+        The edge attribute to use as distance.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    NetworkX graph
+        A spanner of the given graph with the given stretch.
+
+    Raises
+    ------
+    ValueError
+        If a stretch less than 1 is given.
+
+    Notes
+    -----
+    This function implements the spanner algorithm by Baswana and Sen,
+    see [1].
+
+    This algorithm is a randomized Las Vegas algorithm: The expected
+    running time is O(km) where k = (stretch + 1) // 2 and m is the
+    number of edges in G. The returned graph is always a spanner of the
+    given graph with the specified stretch. For weighted graphs the
+    number of edges in the spanner is O(k * n^(1 + 1 / k)) where k is
+    defined as above and n is the number of nodes in G. For unweighted
+    graphs the number of edges is O(n^(1 + 1 / k) + kn).
+
+    References
+    ----------
+    [1] S. Baswana, S. Sen. A Simple and Linear Time Randomized
+    Algorithm for Computing Sparse Spanners in Weighted Graphs.
+    Random Struct. Algorithms 30(4): 532-563 (2007).
+    """
+    if stretch < 1:
+        raise ValueError("stretch must be at least 1")
+
+    k = (stretch + 1) // 2
+
+    # initialize spanner H with empty edge set
+    H = nx.empty_graph()
+    H.add_nodes_from(G.nodes)
+
+    # phase 1: forming the clusters
+    # the residual graph has V' from the paper as its node set
+    # and E' from the paper as its edge set
+    residual_graph = _setup_residual_graph(G, weight)
+    # clustering is a dictionary that maps nodes in a cluster to the
+    # cluster center
+    clustering = {v: v for v in G.nodes}
+    sample_prob = math.pow(G.number_of_nodes(), -1 / k)
+    size_limit = 2 * math.pow(G.number_of_nodes(), 1 + 1 / k)
+
+    i = 0
+    while i < k - 1:
+        # step 1: sample centers
+        sampled_centers = set()
+        for center in set(clustering.values()):
+            if seed.random() < sample_prob:
+                sampled_centers.add(center)
+
+        # combined loop for steps 2 and 3
+        edges_to_add = set()
+        edges_to_remove = set()
+        new_clustering = {}
+        for v in residual_graph.nodes:
+            if clustering[v] in sampled_centers:
+                continue
+
+            # step 2: find neighboring (sampled) clusters and
+            # lightest edges to them
+            lightest_edge_neighbor, lightest_edge_weight = _lightest_edge_dicts(
+                residual_graph, clustering, v
+            )
+            neighboring_sampled_centers = (
+                set(lightest_edge_weight.keys()) & sampled_centers
+            )
+
+            # step 3: add edges to spanner
+            if not neighboring_sampled_centers:
+                # connect to each neighboring center via lightest edge
+                for neighbor in lightest_edge_neighbor.values():
+                    edges_to_add.add((v, neighbor))
+                # remove all incident edges
+                for neighbor in residual_graph.adj[v]:
+                    edges_to_remove.add((v, neighbor))
+
+            else:  # there is a neighboring sampled center
+                closest_center = min(
+                    neighboring_sampled_centers, key=lightest_edge_weight.get
+                )
+                closest_center_weight = lightest_edge_weight[closest_center]
+                closest_center_neighbor = lightest_edge_neighbor[closest_center]
+
+                edges_to_add.add((v, closest_center_neighbor))
+                new_clustering[v] = closest_center
+
+                # connect to centers with edge weight less than
+                # closest_center_weight
+                for center, edge_weight in lightest_edge_weight.items():
+                    if edge_weight < closest_center_weight:
+                        neighbor = lightest_edge_neighbor[center]
+                        edges_to_add.add((v, neighbor))
+
+                # remove edges to centers with edge weight less than
+                # closest_center_weight
+                for neighbor in residual_graph.adj[v]:
+                    nbr_cluster = clustering[neighbor]
+                    nbr_weight = lightest_edge_weight[nbr_cluster]
+                    if (
+                        nbr_cluster == closest_center
+                        or nbr_weight < closest_center_weight
+                    ):
+                        edges_to_remove.add((v, neighbor))
+
+        # check whether iteration added too many edges to spanner,
+        # if so repeat
+        if len(edges_to_add) > size_limit:
+            # an iteration is repeated O(1) times on expectation
+            continue
+
+        # iteration succeeded
+        i = i + 1
+
+        # actually add edges to spanner
+        for u, v in edges_to_add:
+            _add_edge_to_spanner(H, residual_graph, u, v, weight)
+
+        # actually delete edges from residual graph
+        residual_graph.remove_edges_from(edges_to_remove)
+
+        # copy old clustering data to new_clustering
+        for node, center in clustering.items():
+            if center in sampled_centers:
+                new_clustering[node] = center
+        clustering = new_clustering
+
+        # step 4: remove intra-cluster edges
+        for u in residual_graph.nodes:
+            for v in list(residual_graph.adj[u]):
+                if clustering[u] == clustering[v]:
+                    residual_graph.remove_edge(u, v)
+
+        # update residual graph node set
+        for v in list(residual_graph.nodes):
+            if v not in clustering:
+                residual_graph.remove_node(v)
+
+    # phase 2: vertex-cluster joining
+    for v in residual_graph.nodes:
+        lightest_edge_neighbor, _ = _lightest_edge_dicts(residual_graph, clustering, v)
+        for neighbor in lightest_edge_neighbor.values():
+            _add_edge_to_spanner(H, residual_graph, v, neighbor, weight)
+
+    return H
+
+
+def _setup_residual_graph(G, weight):
+    """Set up residual graph as a copy of G with unique edge weights.
+
+    The node set of the residual graph corresponds to the set V' from
+    the Baswana-Sen paper and the edge set corresponds to the set E'
+    from the paper.
+
+    This function associates distinct weights to the edges of the
+    residual graph (even for unweighted input graphs), as required by
+    the algorithm.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected simple graph.
+
+    weight : object
+        The edge attribute to use as distance.
+
+    Returns
+    -------
+    NetworkX graph
+        The residual graph used for the Baswana-Sen algorithm.
+    """
+    residual_graph = G.copy()
+
+    # establish unique edge weights, even for unweighted graphs
+    for u, v in G.edges():
+        if not weight:
+            residual_graph[u][v]["weight"] = (id(u), id(v))
+        else:
+            residual_graph[u][v]["weight"] = (G[u][v][weight], id(u), id(v))
+
+    return residual_graph
+
+
+def _lightest_edge_dicts(residual_graph, clustering, node):
+    """Find the lightest edge to each cluster.
+
+    Searches for the minimum-weight edge to each cluster adjacent to
+    the given node.
+
+    Parameters
+    ----------
+    residual_graph : NetworkX graph
+        The residual graph used by the Baswana-Sen algorithm.
+
+    clustering : dictionary
+        The current clustering of the nodes.
+
+    node : node
+        The node from which the search originates.
+
+    Returns
+    -------
+    lightest_edge_neighbor, lightest_edge_weight : dictionary, dictionary
+        lightest_edge_neighbor is a dictionary that maps a center C to
+        a node v in the corresponding cluster such that the edge from
+        the given node to v is the lightest edge from the given node to
+        any node in cluster. lightest_edge_weight maps a center C to the
+        weight of the aforementioned edge.
+
+    Notes
+    -----
+    If a cluster has no node that is adjacent to the given node in the
+    residual graph then the center of the cluster is not a key in the
+    returned dictionaries.
+    """
+    lightest_edge_neighbor = {}
+    lightest_edge_weight = {}
+    for neighbor in residual_graph.adj[node]:
+        nbr_center = clustering[neighbor]
+        weight = residual_graph[node][neighbor]["weight"]
+        if (
+            nbr_center not in lightest_edge_weight
+            or weight < lightest_edge_weight[nbr_center]
+        ):
+            lightest_edge_neighbor[nbr_center] = neighbor
+            lightest_edge_weight[nbr_center] = weight
+    return lightest_edge_neighbor, lightest_edge_weight
+
+
+def _add_edge_to_spanner(H, residual_graph, u, v, weight):
+    """Add the edge {u, v} to the spanner H and take weight from
+    the residual graph.
+
+    Parameters
+    ----------
+    H : NetworkX graph
+        The spanner under construction.
+
+    residual_graph : NetworkX graph
+        The residual graph used by the Baswana-Sen algorithm. The weight
+        for the edge is taken from this graph.
+
+    u : node
+        One endpoint of the edge.
+
+    v : node
+        The other endpoint of the edge.
+
+    weight : object
+        The edge attribute to use as distance.
+    """
+    H.add_edge(u, v)
+    if weight:
+        H[u][v][weight] = residual_graph[u][v]["weight"][0]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/structuralholes.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/structuralholes.py
new file mode 100644
index 00000000..bae42d06
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/structuralholes.py
@@ -0,0 +1,283 @@
+"""Functions for computing measures of structural holes."""
+
+import networkx as nx
+
+__all__ = ["constraint", "local_constraint", "effective_size"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def mutual_weight(G, u, v, weight=None):
+    """Returns the sum of the weights of the edge from `u` to `v` and
+    the edge from `v` to `u` in `G`.
+
+    `weight` is the edge data key that represents the edge weight. If
+    the specified key is `None` or is not in the edge data for an edge,
+    that edge is assumed to have weight 1.
+
+    Pre-conditions: `u` and `v` must both be in `G`.
+
+    """
+    try:
+        a_uv = G[u][v].get(weight, 1)
+    except KeyError:
+        a_uv = 0
+    try:
+        a_vu = G[v][u].get(weight, 1)
+    except KeyError:
+        a_vu = 0
+    return a_uv + a_vu
+
+
+@nx._dispatchable(edge_attrs="weight")
+def normalized_mutual_weight(G, u, v, norm=sum, weight=None):
+    """Returns normalized mutual weight of the edges from `u` to `v`
+    with respect to the mutual weights of the neighbors of `u` in `G`.
+
+    `norm` specifies how the normalization factor is computed. It must
+    be a function that takes a single argument and returns a number.
+    The argument will be an iterable of mutual weights
+    of pairs ``(u, w)``, where ``w`` ranges over each (in- and
+    out-)neighbor of ``u``. Common values for `norm` are
+    ``sum`` and ``max``.
+
+    `weight` can be ``None`` or a string, if None, all edge weights
+    are considered equal. Otherwise holds the name of the edge
+    attribute used as weight.
+
+    """
+    scale = norm(mutual_weight(G, u, w, weight) for w in set(nx.all_neighbors(G, u)))
+    return 0 if scale == 0 else mutual_weight(G, u, v, weight) / scale
+
+
+@nx._dispatchable(edge_attrs="weight")
+def effective_size(G, nodes=None, weight=None):
+    r"""Returns the effective size of all nodes in the graph ``G``.
+
+    The *effective size* of a node's ego network is based on the concept
+    of redundancy. A person's ego network has redundancy to the extent
+    that her contacts are connected to each other as well. The
+    nonredundant part of a person's relationships is the effective
+    size of her ego network [1]_.  Formally, the effective size of a
+    node $u$, denoted $e(u)$, is defined by
+
+    .. math::
+
+       e(u) = \sum_{v \in N(u) \setminus \{u\}}
+       \left(1 - \sum_{w \in N(v)} p_{uw} m_{vw}\right)
+
+    where $N(u)$ is the set of neighbors of $u$ and $p_{uw}$ is the
+    normalized mutual weight of the (directed or undirected) edges
+    joining $u$ and $v$, for each vertex $u$ and $v$ [1]_. And $m_{vw}$
+    is the mutual weight of $v$ and $w$ divided by $v$ highest mutual
+    weight with any of its neighbors. The *mutual weight* of $u$ and $v$
+    is the sum of the weights of edges joining them (edge weights are
+    assumed to be one if the graph is unweighted).
+
+    For the case of unweighted and undirected graphs, Borgatti proposed
+    a simplified formula to compute effective size [2]_
+
+    .. math::
+
+       e(u) = n - \frac{2t}{n}
+
+    where `t` is the number of ties in the ego network (not including
+    ties to ego) and `n` is the number of nodes (excluding ego).
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The graph containing ``v``. Directed graphs are treated like
+        undirected graphs when computing neighbors of ``v``.
+
+    nodes : container, optional
+        Container of nodes in the graph ``G`` to compute the effective size.
+        If None, the effective size of every node is computed.
+
+    weight : None or string, optional
+      If None, all edge weights are considered equal.
+      Otherwise holds the name of the edge attribute used as weight.
+
+    Returns
+    -------
+    dict
+        Dictionary with nodes as keys and the effective size of the node as values.
+
+    Notes
+    -----
+    Burt also defined the related concept of *efficiency* of a node's ego
+    network, which is its effective size divided by the degree of that
+    node [1]_. So you can easily compute efficiency:
+
+    >>> G = nx.DiGraph()
+    >>> G.add_edges_from([(0, 1), (0, 2), (1, 0), (2, 1)])
+    >>> esize = nx.effective_size(G)
+    >>> efficiency = {n: v / G.degree(n) for n, v in esize.items()}
+
+    See also
+    --------
+    constraint
+
+    References
+    ----------
+    .. [1] Burt, Ronald S.
+           *Structural Holes: The Social Structure of Competition.*
+           Cambridge: Harvard University Press, 1995.
+
+    .. [2] Borgatti, S.
+           "Structural Holes: Unpacking Burt's Redundancy Measures"
+           CONNECTIONS 20(1):35-38.
+           http://www.analytictech.com/connections/v20(1)/holes.htm
+
+    """
+
+    def redundancy(G, u, v, weight=None):
+        nmw = normalized_mutual_weight
+        r = sum(
+            nmw(G, u, w, weight=weight) * nmw(G, v, w, norm=max, weight=weight)
+            for w in set(nx.all_neighbors(G, u))
+        )
+        return 1 - r
+
+    effective_size = {}
+    if nodes is None:
+        nodes = G
+    # Use Borgatti's simplified formula for unweighted and undirected graphs
+    if not G.is_directed() and weight is None:
+        for v in nodes:
+            # Effective size is not defined for isolated nodes
+            if len(G[v]) == 0:
+                effective_size[v] = float("nan")
+                continue
+            E = nx.ego_graph(G, v, center=False, undirected=True)
+            effective_size[v] = len(E) - (2 * E.size()) / len(E)
+    else:
+        for v in nodes:
+            # Effective size is not defined for isolated nodes
+            if len(G[v]) == 0:
+                effective_size[v] = float("nan")
+                continue
+            effective_size[v] = sum(
+                redundancy(G, v, u, weight) for u in set(nx.all_neighbors(G, v))
+            )
+    return effective_size
+
+
+@nx._dispatchable(edge_attrs="weight")
+def constraint(G, nodes=None, weight=None):
+    r"""Returns the constraint on all nodes in the graph ``G``.
+
+    The *constraint* is a measure of the extent to which a node *v* is
+    invested in those nodes that are themselves invested in the
+    neighbors of *v*. Formally, the *constraint on v*, denoted `c(v)`,
+    is defined by
+
+    .. math::
+
+       c(v) = \sum_{w \in N(v) \setminus \{v\}} \ell(v, w)
+
+    where $N(v)$ is the subset of the neighbors of `v` that are either
+    predecessors or successors of `v` and $\ell(v, w)$ is the local
+    constraint on `v` with respect to `w` [1]_. For the definition of local
+    constraint, see :func:`local_constraint`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The graph containing ``v``. This can be either directed or undirected.
+
+    nodes : container, optional
+        Container of nodes in the graph ``G`` to compute the constraint. If
+        None, the constraint of every node is computed.
+
+    weight : None or string, optional
+      If None, all edge weights are considered equal.
+      Otherwise holds the name of the edge attribute used as weight.
+
+    Returns
+    -------
+    dict
+        Dictionary with nodes as keys and the constraint on the node as values.
+
+    See also
+    --------
+    local_constraint
+
+    References
+    ----------
+    .. [1] Burt, Ronald S.
+           "Structural holes and good ideas".
+           American Journal of Sociology (110): 349–399.
+
+    """
+    if nodes is None:
+        nodes = G
+    constraint = {}
+    for v in nodes:
+        # Constraint is not defined for isolated nodes
+        if len(G[v]) == 0:
+            constraint[v] = float("nan")
+            continue
+        constraint[v] = sum(
+            local_constraint(G, v, n, weight) for n in set(nx.all_neighbors(G, v))
+        )
+    return constraint
+
+
+@nx._dispatchable(edge_attrs="weight")
+def local_constraint(G, u, v, weight=None):
+    r"""Returns the local constraint on the node ``u`` with respect to
+    the node ``v`` in the graph ``G``.
+
+    Formally, the *local constraint on u with respect to v*, denoted
+    $\ell(u, v)$, is defined by
+
+    .. math::
+
+       \ell(u, v) = \left(p_{uv} + \sum_{w \in N(v)} p_{uw} p_{wv}\right)^2,
+
+    where $N(v)$ is the set of neighbors of $v$ and $p_{uv}$ is the
+    normalized mutual weight of the (directed or undirected) edges
+    joining $u$ and $v$, for each vertex $u$ and $v$ [1]_. The *mutual
+    weight* of $u$ and $v$ is the sum of the weights of edges joining
+    them (edge weights are assumed to be one if the graph is
+    unweighted).
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The graph containing ``u`` and ``v``. This can be either
+        directed or undirected.
+
+    u : node
+        A node in the graph ``G``.
+
+    v : node
+        A node in the graph ``G``.
+
+    weight : None or string, optional
+      If None, all edge weights are considered equal.
+      Otherwise holds the name of the edge attribute used as weight.
+
+    Returns
+    -------
+    float
+        The local constraint of ``u`` with respect to ``v`` in the graph ``G``.
+
+    See also
+    --------
+    constraint
+
+    References
+    ----------
+    .. [1] Burt, Ronald S.
+           "Structural holes and good ideas".
+           American Journal of Sociology (110): 349–399.
+
+    """
+    nmw = normalized_mutual_weight
+    direct = nmw(G, u, v, weight=weight)
+    indirect = sum(
+        nmw(G, u, w, weight=weight) * nmw(G, w, v, weight=weight)
+        for w in set(nx.all_neighbors(G, u))
+    )
+    return (direct + indirect) ** 2
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/summarization.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/summarization.py
new file mode 100644
index 00000000..23db8da4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/summarization.py
@@ -0,0 +1,564 @@
+"""
+Graph summarization finds smaller representations of graphs resulting in faster
+runtime of algorithms, reduced storage needs, and noise reduction.
+Summarization has applications in areas such as visualization, pattern mining,
+clustering and community detection, and more.  Core graph summarization
+techniques are grouping/aggregation, bit-compression,
+simplification/sparsification, and influence based. Graph summarization
+algorithms often produce either summary graphs in the form of supergraphs or
+sparsified graphs, or a list of independent structures. Supergraphs are the
+most common product, which consist of supernodes and original nodes and are
+connected by edges and superedges, which represent aggregate edges between
+nodes and supernodes.
+
+Grouping/aggregation based techniques compress graphs by representing
+close/connected nodes and edges in a graph by a single node/edge in a
+supergraph. Nodes can be grouped together into supernodes based on their
+structural similarities or proximity within a graph to reduce the total number
+of nodes in a graph. Edge-grouping techniques group edges into lossy/lossless
+nodes called compressor or virtual nodes to reduce the total number of edges in
+a graph. Edge-grouping techniques can be lossless, meaning that they can be
+used to re-create the original graph, or techniques can be lossy, requiring
+less space to store the summary graph, but at the expense of lower
+reconstruction accuracy of the original graph.
+
+Bit-compression techniques minimize the amount of information needed to
+describe the original graph, while revealing structural patterns in the
+original graph.  The two-part minimum description length (MDL) is often used to
+represent the model and the original graph in terms of the model.  A key
+difference between graph compression and graph summarization is that graph
+summarization focuses on finding structural patterns within the original graph,
+whereas graph compression focuses on compressing the original graph to be as
+small as possible.  **NOTE**: Some bit-compression methods exist solely to
+compress a graph without creating a summary graph or finding comprehensible
+structural patterns.
+
+Simplification/Sparsification techniques attempt to create a sparse
+representation of a graph by removing unimportant nodes and edges from the
+graph.  Sparsified graphs differ from supergraphs created by
+grouping/aggregation by only containing a subset of the original nodes and
+edges of the original graph.
+
+Influence based techniques aim to find a high-level description of influence
+propagation in a large graph.  These methods are scarce and have been mostly
+applied to social graphs.
+
+*dedensification* is a grouping/aggregation based technique to compress the
+neighborhoods around high-degree nodes in unweighted graphs by adding
+compressor nodes that summarize multiple edges of the same type to
+high-degree nodes (nodes with a degree greater than a given threshold).
+Dedensification was developed for the purpose of increasing performance of
+query processing around high-degree nodes in graph databases and enables direct
+operations on the compressed graph.  The structural patterns surrounding
+high-degree nodes in the original graph is preserved while using fewer edges and
+adding a small number of compressor nodes.  The degree of nodes present in the
+original graph is also preserved. The current implementation of dedensification
+supports graphs with one edge type.
+
+For more information on graph summarization, see `Graph Summarization Methods
+and Applications: A Survey <https://dl.acm.org/doi/abs/10.1145/3186727>`_
+"""
+
+from collections import Counter, defaultdict
+
+import networkx as nx
+
+__all__ = ["dedensify", "snap_aggregation"]
+
+
+@nx._dispatchable(mutates_input={"not copy": 3}, returns_graph=True)
+def dedensify(G, threshold, prefix=None, copy=True):
+    """Compresses neighborhoods around high-degree nodes
+
+    Reduces the number of edges to high-degree nodes by adding compressor nodes
+    that summarize multiple edges of the same type to high-degree nodes (nodes
+    with a degree greater than a given threshold).  Dedensification also has
+    the added benefit of reducing the number of edges around high-degree nodes.
+    The implementation currently supports graphs with a single edge type.
+
+    Parameters
+    ----------
+    G: graph
+       A networkx graph
+    threshold: int
+       Minimum degree threshold of a node to be considered a high degree node.
+       The threshold must be greater than or equal to 2.
+    prefix: str or None, optional (default: None)
+       An optional prefix for denoting compressor nodes
+    copy: bool, optional (default: True)
+       Indicates if dedensification should be done inplace
+
+    Returns
+    -------
+    dedensified networkx graph : (graph, set)
+        2-tuple of the dedensified graph and set of compressor nodes
+
+    Notes
+    -----
+    According to the algorithm in [1]_, removes edges in a graph by
+    compressing/decompressing the neighborhoods around high degree nodes by
+    adding compressor nodes that summarize multiple edges of the same type
+    to high-degree nodes.  Dedensification will only add a compressor node when
+    doing so will reduce the total number of edges in the given graph. This
+    implementation currently supports graphs with a single edge type.
+
+    Examples
+    --------
+    Dedensification will only add compressor nodes when doing so would result
+    in fewer edges::
+
+        >>> original_graph = nx.DiGraph()
+        >>> original_graph.add_nodes_from(
+        ...     ["1", "2", "3", "4", "5", "6", "A", "B", "C"]
+        ... )
+        >>> original_graph.add_edges_from(
+        ...     [
+        ...         ("1", "C"), ("1", "B"),
+        ...         ("2", "C"), ("2", "B"), ("2", "A"),
+        ...         ("3", "B"), ("3", "A"), ("3", "6"),
+        ...         ("4", "C"), ("4", "B"), ("4", "A"),
+        ...         ("5", "B"), ("5", "A"),
+        ...         ("6", "5"),
+        ...         ("A", "6")
+        ...     ]
+        ... )
+        >>> c_graph, c_nodes = nx.dedensify(original_graph, threshold=2)
+        >>> original_graph.number_of_edges()
+        15
+        >>> c_graph.number_of_edges()
+        14
+
+    A dedensified, directed graph can be "densified" to reconstruct the
+    original graph::
+
+        >>> original_graph = nx.DiGraph()
+        >>> original_graph.add_nodes_from(
+        ...     ["1", "2", "3", "4", "5", "6", "A", "B", "C"]
+        ... )
+        >>> original_graph.add_edges_from(
+        ...     [
+        ...         ("1", "C"), ("1", "B"),
+        ...         ("2", "C"), ("2", "B"), ("2", "A"),
+        ...         ("3", "B"), ("3", "A"), ("3", "6"),
+        ...         ("4", "C"), ("4", "B"), ("4", "A"),
+        ...         ("5", "B"), ("5", "A"),
+        ...         ("6", "5"),
+        ...         ("A", "6")
+        ...     ]
+        ... )
+        >>> c_graph, c_nodes = nx.dedensify(original_graph, threshold=2)
+        >>> # re-densifies the compressed graph into the original graph
+        >>> for c_node in c_nodes:
+        ...     all_neighbors = set(nx.all_neighbors(c_graph, c_node))
+        ...     out_neighbors = set(c_graph.neighbors(c_node))
+        ...     for out_neighbor in out_neighbors:
+        ...         c_graph.remove_edge(c_node, out_neighbor)
+        ...     in_neighbors = all_neighbors - out_neighbors
+        ...     for in_neighbor in in_neighbors:
+        ...         c_graph.remove_edge(in_neighbor, c_node)
+        ...         for out_neighbor in out_neighbors:
+        ...             c_graph.add_edge(in_neighbor, out_neighbor)
+        ...     c_graph.remove_node(c_node)
+        ...
+        >>> nx.is_isomorphic(original_graph, c_graph)
+        True
+
+    References
+    ----------
+    .. [1] Maccioni, A., & Abadi, D. J. (2016, August).
+       Scalable pattern matching over compressed graphs via dedensification.
+       In Proceedings of the 22nd ACM SIGKDD International Conference on
+       Knowledge Discovery and Data Mining (pp. 1755-1764).
+       http://www.cs.umd.edu/~abadi/papers/graph-dedense.pdf
+    """
+    if threshold < 2:
+        raise nx.NetworkXError("The degree threshold must be >= 2")
+
+    degrees = G.in_degree if G.is_directed() else G.degree
+    # Group nodes based on degree threshold
+    high_degree_nodes = {n for n, d in degrees if d > threshold}
+    low_degree_nodes = G.nodes() - high_degree_nodes
+
+    auxiliary = {}
+    for node in G:
+        high_degree_nbrs = frozenset(high_degree_nodes & set(G[node]))
+        if high_degree_nbrs:
+            if high_degree_nbrs in auxiliary:
+                auxiliary[high_degree_nbrs].add(node)
+            else:
+                auxiliary[high_degree_nbrs] = {node}
+
+    if copy:
+        G = G.copy()
+
+    compressor_nodes = set()
+    for index, (high_degree_nodes, low_degree_nodes) in enumerate(auxiliary.items()):
+        low_degree_node_count = len(low_degree_nodes)
+        high_degree_node_count = len(high_degree_nodes)
+        old_edges = high_degree_node_count * low_degree_node_count
+        new_edges = high_degree_node_count + low_degree_node_count
+        if old_edges <= new_edges:
+            continue
+        compression_node = "".join(str(node) for node in high_degree_nodes)
+        if prefix:
+            compression_node = str(prefix) + compression_node
+        for node in low_degree_nodes:
+            for high_node in high_degree_nodes:
+                if G.has_edge(node, high_node):
+                    G.remove_edge(node, high_node)
+
+            G.add_edge(node, compression_node)
+        for node in high_degree_nodes:
+            G.add_edge(compression_node, node)
+        compressor_nodes.add(compression_node)
+    return G, compressor_nodes
+
+
+def _snap_build_graph(
+    G,
+    groups,
+    node_attributes,
+    edge_attributes,
+    neighbor_info,
+    edge_types,
+    prefix,
+    supernode_attribute,
+    superedge_attribute,
+):
+    """
+    Build the summary graph from the data structures produced in the SNAP aggregation algorithm
+
+    Used in the SNAP aggregation algorithm to build the output summary graph and supernode
+    lookup dictionary.  This process uses the original graph and the data structures to
+    create the supernodes with the correct node attributes, and the superedges with the correct
+    edge attributes
+
+    Parameters
+    ----------
+    G: networkx.Graph
+        the original graph to be summarized
+    groups: dict
+        A dictionary of unique group IDs and their corresponding node groups
+    node_attributes: iterable
+        An iterable of the node attributes considered in the summarization process
+    edge_attributes: iterable
+        An iterable of the edge attributes considered in the summarization process
+    neighbor_info: dict
+        A data structure indicating the number of edges a node has with the
+        groups in the current summarization of each edge type
+    edge_types: dict
+        dictionary of edges in the graph and their corresponding attributes recognized
+        in the summarization
+    prefix: string
+        The prefix to be added to all supernodes
+    supernode_attribute: str
+        The node attribute for recording the supernode groupings of nodes
+    superedge_attribute: str
+        The edge attribute for recording the edge types represented by superedges
+
+    Returns
+    -------
+    summary graph: Networkx graph
+    """
+    # The summary graph has the same class (directed / multigraph) as the input.
+    output = G.__class__()
+    node_label_lookup = {}
+    # Create one supernode per group.  All members of a group share the same
+    # node-attribute values, so any member (next(iter(group_set))) can supply
+    # them; the member set itself is stored under supernode_attribute.
+    for index, group_id in enumerate(groups):
+        group_set = groups[group_id]
+        supernode = f"{prefix}{index}"
+        node_label_lookup[group_id] = supernode
+        supernode_attributes = {
+            attr: G.nodes[next(iter(group_set))][attr] for attr in node_attributes
+        }
+        supernode_attributes[supernode_attribute] = group_set
+        output.add_node(supernode, **supernode_attributes)
+
+    # Add superedges.  A single representative member's neighbor_info entry is
+    # used per group, which assumes all members have identical group-level
+    # connectivity (the AR-compatibility the splitting loop establishes).
+    for group_id in groups:
+        group_set = groups[group_id]
+        source_supernode = node_label_lookup[group_id]
+        for other_group, group_edge_types in neighbor_info[
+            next(iter(group_set))
+        ].items():
+            if group_edge_types:
+                target_supernode = node_label_lookup[other_group]
+                summary_graph_edge = (source_supernode, target_supernode)
+
+                # NOTE(review): this rebinding shadows the `edge_types`
+                # parameter for the remainder of the loop — confirm the
+                # parameter is intentionally unused past this point.
+                edge_types = [
+                    dict(zip(edge_attributes, edge_type))
+                    for edge_type in group_edge_types
+                ]
+
+                has_edge = output.has_edge(*summary_graph_edge)
+                if output.is_multigraph():
+                    # Multigraph: one parallel superedge per edge type.  For
+                    # undirected multigraphs, skip types already recorded when
+                    # the reverse orientation of this edge was processed.
+                    if not has_edge:
+                        for edge_type in edge_types:
+                            output.add_edge(*summary_graph_edge, **edge_type)
+                    elif not output.is_directed():
+                        existing_edge_data = output.get_edge_data(*summary_graph_edge)
+                        for edge_type in edge_types:
+                            if edge_type not in existing_edge_data.values():
+                                output.add_edge(*summary_graph_edge, **edge_type)
+                else:
+                    # Simple graph: a single superedge carrying the list of
+                    # represented edge types as an attribute.
+                    superedge_attributes = {superedge_attribute: edge_types}
+                    output.add_edge(*summary_graph_edge, **superedge_attributes)
+
+    return output
+
+
+def _snap_eligible_group(G, groups, group_lookup, edge_types):
+    """
+    Determines if a group is eligible to be split.
+
+    A group is eligible to be split if all nodes in the group have edges of the same type(s)
+    with the same other groups.
+
+    Parameters
+    ----------
+    G: graph
+        graph to be summarized
+    groups: dict
+        A dictionary of unique group IDs and their corresponding node groups
+    group_lookup: dict
+        dictionary of nodes and their current corresponding group ID
+    edge_types: dict
+        dictionary of edges in the graph and their corresponding attributes recognized
+        in the summarization
+
+    Returns
+    -------
+    tuple: group ID to split, and neighbor-groups participation_counts data structure
+    """
+    # NOTE(review): every node in group_lookup belongs to some group, and the
+    # loop below rebuilds each node's entry before using it, so this
+    # pre-initialization appears redundant — confirm before removing.
+    nbr_info = {node: {gid: Counter() for gid in groups} for node in group_lookup}
+    for group_id in groups:
+        current_group = groups[group_id]
+
+        # build nbr_info for nodes in group
+        for node in current_group:
+            nbr_info[node] = {group_id: Counter() for group_id in groups}
+            # Multigraphs need keys=True so each parallel edge matches its
+            # distinct entry in edge_types.
+            edges = G.edges(node, keys=True) if G.is_multigraph() else G.edges(node)
+            for edge in edges:
+                neighbor = edge[1]
+                edge_type = edge_types[edge]
+                neighbor_group_id = group_lookup[neighbor]
+                nbr_info[node][neighbor_group_id][edge_type] += 1
+
+        # check if group_id is eligible to be split
+        group_size = len(current_group)
+        for other_group_id in groups:
+            edge_counts = Counter()
+            for node in current_group:
+                edge_counts.update(nbr_info[node][other_group_id].keys())
+
+            # Homogeneous w.r.t. other_group_id only if every member exhibits
+            # every edge type observed toward that group.
+            if not all(count == group_size for count in edge_counts.values()):
+                # only the nbr_info of the returned group_id is required for handling group splits
+                return group_id, nbr_info
+
+    # if no eligible groups, complete nbr_info is calculated
+    return None, nbr_info
+
+
+def _snap_split(groups, neighbor_info, group_lookup, group_id):
+    """
+    Splits a group based on edge types and updates the groups accordingly
+
+    Splits the group with the given group_id based on the edge types
+    of the nodes so that each new grouping will all have the same
+    edges with other nodes.
+
+    Parameters
+    ----------
+    groups: dict
+        A dictionary of unique group IDs and their corresponding node groups
+    neighbor_info: dict
+        A data structure indicating the number of edges a node has with the
+        groups in the current summarization of each edge type
+    group_lookup: dict
+        dictionary of nodes and their current corresponding group ID
+    group_id: object
+        ID of group to be split
+
+    Returns
+    -------
+    dict
+        The updated groups based on the split
+    """
+    # Partition the group's members: nodes whose per-group sets of edge types
+    # (the "signature") coincide stay together.
+    new_group_mappings = defaultdict(set)
+    for node in groups[group_id]:
+        signature = tuple(
+            frozenset(edge_types) for edge_types in neighbor_info[node].values()
+        )
+        new_group_mappings[signature].add(node)
+
+    # leave the biggest new_group as the original group
+    new_groups = sorted(new_group_mappings.values(), key=len)
+    for new_group in new_groups[:-1]:
+        # Assign unused integer as the new_group_id
+        # ids are tuples, so will not interact with the original group_ids
+        new_group_id = len(groups)
+        groups[new_group_id] = new_group
+        groups[group_id] -= new_group
+        for node in new_group:
+            group_lookup[node] = new_group_id
+
+    return groups
+
+
+@nx._dispatchable(
+    node_attrs="[node_attributes]", edge_attrs="[edge_attributes]", returns_graph=True
+)
+def snap_aggregation(
+    G,
+    node_attributes,
+    edge_attributes=(),
+    prefix="Supernode-",
+    supernode_attribute="group",
+    superedge_attribute="types",
+):
+    """Creates a summary graph based on attributes and connectivity.
+
+    This function uses the Summarization by Grouping Nodes on Attributes
+    and Pairwise edges (SNAP) algorithm for summarizing a given
+    graph by grouping nodes by node attributes and their edge attributes
+    into supernodes in a summary graph.  This name SNAP should not be
+    confused with the Stanford Network Analysis Project (SNAP).
+
+    Here is a high-level view of how this algorithm works:
+
+    1) Group nodes by node attribute values.
+
+    2) Iteratively split groups until all nodes in each group have edges
+    to nodes in the same groups. That is, until all the groups are homogeneous
+    in their member nodes' edges to other groups.  For example,
+    if all the nodes in group A only have edge to nodes in group B, then the
+    group is homogeneous and does not need to be split. If all nodes in group B
+    have edges with nodes in groups {A, C}, but some also have edges with other
+    nodes in B, then group B is not homogeneous and needs to be split into
+    groups have edges with {A, C} and a group of nodes having
+    edges with {A, B, C}.  This way, viewers of the summary graph can
+    assume that all nodes in the group have the exact same node attributes and
+    the exact same edges.
+
+    3) Build the output summary graph, where the groups are represented by
+    super-nodes. Edges represent the edges shared between all the nodes in each
+    respective groups.
+
+    A SNAP summary graph can be used to visualize graphs that are too large to display
+    or visually analyze, or to efficiently identify sets of similar nodes with similar connectivity
+    patterns to other sets of similar nodes based on specified node and/or edge attributes in a graph.
+
+    Parameters
+    ----------
+    G: graph
+        Networkx Graph to be summarized
+    node_attributes: iterable, required
+        An iterable of the node attributes used to group nodes in the summarization process. Nodes
+        with the same values for these attributes will be grouped together in the summary graph.
+    edge_attributes: iterable, optional
+        An iterable of the edge attributes considered in the summarization process.  If provided, unique
+        combinations of the attribute values found in the graph are used to
+        determine the edge types in the graph.  If not provided, all edges
+        are considered to be of the same type.
+    prefix: str
+        The prefix used to denote supernodes in the summary graph. Defaults to 'Supernode-'.
+    supernode_attribute: str
+        The node attribute for recording the supernode groupings of nodes. Defaults to 'group'.
+    superedge_attribute: str
+        The edge attribute for recording the edge types of multiple edges. Defaults to 'types'.
+
+    Returns
+    -------
+    networkx.Graph: summary graph
+
+    Examples
+    --------
+    SNAP aggregation takes a graph and summarizes it in the context of user-provided
+    node and edge attributes such that a viewer can more easily extract and
+    analyze the information represented by the graph
+
+    >>> nodes = {
+    ...     "A": dict(color="Red"),
+    ...     "B": dict(color="Red"),
+    ...     "C": dict(color="Red"),
+    ...     "D": dict(color="Red"),
+    ...     "E": dict(color="Blue"),
+    ...     "F": dict(color="Blue"),
+    ... }
+    >>> edges = [
+    ...     ("A", "E", "Strong"),
+    ...     ("B", "F", "Strong"),
+    ...     ("C", "E", "Weak"),
+    ...     ("D", "F", "Weak"),
+    ... ]
+    >>> G = nx.Graph()
+    >>> for node in nodes:
+    ...     attributes = nodes[node]
+    ...     G.add_node(node, **attributes)
+    >>> for source, target, type in edges:
+    ...     G.add_edge(source, target, type=type)
+    >>> node_attributes = ("color",)
+    >>> edge_attributes = ("type",)
+    >>> summary_graph = nx.snap_aggregation(
+    ...     G, node_attributes=node_attributes, edge_attributes=edge_attributes
+    ... )
+
+    Notes
+    -----
+    The summary graph produced is called a maximum Attribute-edge
+    compatible (AR-compatible) grouping.  According to [1]_, an
+    AR-compatible grouping means that all nodes in each group have the same
+    exact node attribute values and the same exact edges and
+    edge types to one or more nodes in the same groups.  The maximal
+    AR-compatible grouping is the grouping with the minimal cardinality.
+
+    The AR-compatible grouping is the most detailed grouping provided by
+    any of the SNAP algorithms.
+
+    References
+    ----------
+    .. [1] Y. Tian, R. A. Hankins, and J. M. Patel. Efficient aggregation
+       for graph summarization. In Proc. 2008 ACM-SIGMOD Int. Conf.
+       Management of Data (SIGMOD’08), pages 567–580, Vancouver, Canada,
+       June 2008.
+    """
+    # Tag every edge with the tuple of its edge-attribute values; with the
+    # default empty edge_attributes this is always (), i.e. one edge type.
+    edge_types = {
+        edge: tuple(attrs.get(attr) for attr in edge_attributes)
+        for edge, attrs in G.edges.items()
+    }
+    if not G.is_directed():
+        if G.is_multigraph():
+            # list is needed to avoid mutating while iterating
+            edges = [((v, u, k), etype) for (u, v, k), etype in edge_types.items()]
+        else:
+            # list is needed to avoid mutating while iterating
+            edges = [((v, u), etype) for (u, v), etype in edge_types.items()]
+        edge_types.update(edges)
+
+    # Initial grouping: nodes with identical node-attribute value tuples.
+    group_lookup = {
+        node: tuple(attrs[attr] for attr in node_attributes)
+        for node, attrs in G.nodes.items()
+    }
+    groups = defaultdict(set)
+    for node, node_type in group_lookup.items():
+        groups[node_type].add(node)
+
+    eligible_group_id, nbr_info = _snap_eligible_group(
+        G, groups, group_lookup, edge_types
+    )
+    # Keep splitting until no group is eligible (grouping is AR-compatible).
+    # NOTE(review): this tests truthiness, so a falsy-but-valid group id
+    # (e.g. the empty attribute tuple when node_attributes is empty) would
+    # end the loop early — confirm whether `is not None` is required.
+    while eligible_group_id:
+        groups = _snap_split(groups, nbr_info, group_lookup, eligible_group_id)
+        eligible_group_id, nbr_info = _snap_eligible_group(
+            G, groups, group_lookup, edge_types
+        )
+    return _snap_build_graph(
+        G,
+        groups,
+        node_attributes,
+        edge_attributes,
+        nbr_info,
+        edge_types,
+        prefix,
+        supernode_attribute,
+        superedge_attribute,
+    )
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/swap.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/swap.py
new file mode 100644
index 00000000..cb3cc1c0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/swap.py
@@ -0,0 +1,406 @@
+"""Swap edges in a graph."""
+
+import math
+
+import networkx as nx
+from networkx.utils import py_random_state
+
+__all__ = ["double_edge_swap", "connected_double_edge_swap", "directed_edge_swap"]
+
+
+@nx.utils.not_implemented_for("undirected")
+@py_random_state(3)
+@nx._dispatchable(mutates_input=True, returns_graph=True)
+def directed_edge_swap(G, *, nswap=1, max_tries=100, seed=None):
+    """Swap three edges in a directed graph while keeping the node degrees fixed.
+
+    A directed edge swap swaps three edges such that a -> b -> c -> d becomes
+    a -> c -> b -> d. This pattern of swapping allows all possible states with the
+    same in- and out-degree distribution in a directed graph to be reached.
+
+    If the swap would create parallel edges (e.g. if a -> c already existed in the
+    previous example), another attempt is made to find a suitable trio of edges.
+
+    Parameters
+    ----------
+    G : DiGraph
+       A directed graph
+
+    nswap : integer (optional, default=1)
+       Number of three-edge (directed) swaps to perform
+
+    max_tries : integer (optional, default=100)
+       Maximum number of attempts to swap edges
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    G : DiGraph
+       The graph after the edges are swapped.
+
+    Raises
+    ------
+    NetworkXError
+        If `G` is not directed, or
+        If nswap > max_tries, or
+        If there are fewer than 4 nodes or 3 edges in `G`.
+    NetworkXAlgorithmError
+        If the number of swap attempts exceeds `max_tries` before `nswap` swaps are made
+
+    Notes
+    -----
+    Does not enforce any connectivity constraints.
+
+    The graph G is modified in place.
+
+    A later swap is allowed to undo a previous swap.
+
+    References
+    ----------
+    .. [1] Erdős, Péter L., et al. “A Simple Havel-Hakimi Type Algorithm to Realize
+           Graphical Degree Sequences of Directed Graphs.” ArXiv:0905.4913 [Math],
+           Jan. 2010. https://doi.org/10.48550/arXiv.0905.4913.
+           Published  2010 in Elec. J. Combinatorics (17(1)). R66.
+           http://www.combinatorics.org/Volume_17/PDF/v17i1r66.pdf
+    .. [2] “Combinatorics - Reaching All Possible Simple Directed Graphs with a given
+           Degree Sequence with 2-Edge Swaps.” Mathematics Stack Exchange,
+           https://math.stackexchange.com/questions/22272/. Accessed 30 May 2022.
+    """
+    # Validate arguments before touching the graph (G is mutated in place).
+    if nswap > max_tries:
+        raise nx.NetworkXError("Number of swaps > number of tries allowed.")
+    if len(G) < 4:
+        raise nx.NetworkXError("DiGraph has fewer than four nodes.")
+    if len(G.edges) < 3:
+        raise nx.NetworkXError("DiGraph has fewer than 3 edges")
+
+    # Instead of choosing uniformly at random from a generated edge list,
+    # this algorithm chooses nonuniformly from the set of nodes with
+    # probability weighted by degree.
+    tries = 0
+    swapcount = 0
+    keys, degrees = zip(*G.degree())  # keys, degree
+    cdf = nx.utils.cumulative_distribution(degrees)  # cdf of degree
+    discrete_sequence = nx.utils.discrete_sequence
+
+    while swapcount < nswap:
+        # choose source node index from discrete distribution
+        start_index = discrete_sequence(1, cdistribution=cdf, seed=seed)[0]
+        start = keys[start_index]
+        # Every loop iteration counts as an attempt, even early-`continue`d
+        # ones, so the max_tries bound covers rejected candidates too.
+        tries += 1
+
+        if tries > max_tries:
+            msg = f"Maximum number of swap attempts ({tries}) exceeded before desired swaps achieved ({nswap})."
+            raise nx.NetworkXAlgorithmError(msg)
+
+        # If the given node doesn't have any out edges, then there isn't anything to swap
+        if G.out_degree(start) == 0:
+            continue
+        second = seed.choice(list(G.succ[start]))
+        if start == second:
+            continue
+
+        if G.out_degree(second) == 0:
+            continue
+        third = seed.choice(list(G.succ[second]))
+        if second == third:
+            continue
+
+        if G.out_degree(third) == 0:
+            continue
+        fourth = seed.choice(list(G.succ[third]))
+        if third == fourth:
+            continue
+
+        # Perform the swap only if none of the three new edges already
+        # exists (that would create parallel edges in a simple DiGraph).
+        if (
+            third not in G.succ[start]
+            and fourth not in G.succ[second]
+            and second not in G.succ[third]
+        ):
+            # Swap nodes
+            G.add_edge(start, third)
+            G.add_edge(third, second)
+            G.add_edge(second, fourth)
+            G.remove_edge(start, second)
+            G.remove_edge(second, third)
+            G.remove_edge(third, fourth)
+            swapcount += 1
+
+    return G
+
+
+@py_random_state(3)
+@nx._dispatchable(mutates_input=True, returns_graph=True)
+def double_edge_swap(G, nswap=1, max_tries=100, seed=None):
+    """Swap two edges in the graph while keeping the node degrees fixed.
+
+    A double-edge swap removes two randomly chosen edges u-v and x-y
+    and creates the new edges u-x and v-y::
+
+     u--v            u  v
+            becomes  |  |
+     x--y            x  y
+
+    If either the edge u-x or v-y already exist no swap is performed
+    and another attempt is made to find a suitable edge pair.
+
+    Parameters
+    ----------
+    G : graph
+       An undirected graph
+
+    nswap : integer (optional, default=1)
+       Number of double-edge swaps to perform
+
+    max_tries : integer (optional)
+       Maximum number of attempts to swap edges
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    G : graph
+       The graph after double edge swaps.
+
+    Raises
+    ------
+    NetworkXError
+        If `G` is directed, or
+        If `nswap` > `max_tries`, or
+        If there are fewer than 4 nodes or 2 edges in `G`.
+    NetworkXAlgorithmError
+        If the number of swap attempts exceeds `max_tries` before `nswap` swaps are made
+
+    Notes
+    -----
+    Does not enforce any connectivity constraints.
+
+    The graph G is modified in place.
+    """
+    if G.is_directed():
+        raise nx.NetworkXError(
+            "double_edge_swap() not defined for directed graphs. Use directed_edge_swap instead."
+        )
+    if nswap > max_tries:
+        raise nx.NetworkXError("Number of swaps > number of tries allowed.")
+    if len(G) < 4:
+        raise nx.NetworkXError("Graph has fewer than four nodes.")
+    if len(G.edges) < 2:
+        raise nx.NetworkXError("Graph has fewer than 2 edges")
+    # Instead of choosing uniformly at random from a generated edge list,
+    # this algorithm chooses nonuniformly from the set of nodes with
+    # probability weighted by degree.
+    n = 0  # number of swap attempts made so far (successful or not)
+    swapcount = 0
+    keys, degrees = zip(*G.degree())  # keys, degree
+    cdf = nx.utils.cumulative_distribution(degrees)  # cdf of degree
+    discrete_sequence = nx.utils.discrete_sequence
+    while swapcount < nswap:
+        #        if random.random() < 0.5: continue # trick to avoid periodicities?
+        # pick two random edges without creating edge list
+        # choose source node indices from discrete distribution
+        (ui, xi) = discrete_sequence(2, cdistribution=cdf, seed=seed)
+        if ui == xi:
+            continue  # same source, skip
+        u = keys[ui]  # convert index to label
+        x = keys[xi]
+        # choose target uniformly from neighbors
+        v = seed.choice(list(G[u]))
+        y = seed.choice(list(G[x]))
+        if v == y:
+            continue  # same target, skip
+        if (x not in G[u]) and (y not in G[v]):  # don't create parallel edges
+            # Rewire u-v, x-y into u-x, v-y; each node keeps its degree.
+            G.add_edge(u, x)
+            G.add_edge(v, y)
+            G.remove_edge(u, v)
+            G.remove_edge(x, y)
+            swapcount += 1
+        if n >= max_tries:
+            e = (
+                f"Maximum number of swap attempts ({n}) exceeded "
+                f"before desired swaps achieved ({nswap})."
+            )
+            raise nx.NetworkXAlgorithmError(e)
+        n += 1
+    return G
+
+
+@py_random_state(3)
+@nx._dispatchable(mutates_input=True)
+def connected_double_edge_swap(G, nswap=1, _window_threshold=3, seed=None):
+    """Attempts the specified number of double-edge swaps in the graph `G`.
+
+    A double-edge swap removes two randomly chosen edges `(u, v)` and `(x,
+    y)` and creates the new edges `(u, x)` and `(v, y)`::
+
+     u--v            u  v
+            becomes  |  |
+     x--y            x  y
+
+    If either `(u, x)` or `(v, y)` already exist, then no swap is performed
+    so the actual number of swapped edges is always *at most* `nswap`.
+
+    Parameters
+    ----------
+    G : graph
+       An undirected graph
+
+    nswap : integer (optional, default=1)
+       Number of double-edge swaps to perform
+
+    _window_threshold : integer
+
+       The window size below which connectedness of the graph will be checked
+       after each swap.
+
+       The "window" in this function is a dynamically updated integer that
+       represents the number of swap attempts to make before checking if the
+       graph remains connected. It is an optimization used to decrease the
+       running time of the algorithm in exchange for increased complexity of
+       implementation.
+
+       If the window size is below this threshold, then the algorithm checks
+       after each swap if the graph remains connected by checking if there is a
+       path joining the two nodes whose edge was just removed. If the window
+       size is above this threshold, then the algorithm performs do all the
+       swaps in the window and only then check if the graph is still connected.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    int
+       The number of successful swaps
+
+    Raises
+    ------
+
+    NetworkXError
+
+       If the input graph is not connected, or if the graph has fewer than four
+       nodes.
+
+    Notes
+    -----
+
+    The initial graph `G` must be connected, and the resulting graph is
+    connected. The graph `G` is modified in place.
+
+    References
+    ----------
+    .. [1] C. Gkantsidis and M. Mihail and E. Zegura,
+           The Markov chain simulation method for generating connected
+           power law random graphs, 2003.
+           http://citeseer.ist.psu.edu/gkantsidis03markov.html
+    """
+    if not nx.is_connected(G):
+        raise nx.NetworkXError("Graph not connected")
+    if len(G) < 4:
+        raise nx.NetworkXError("Graph has fewer than four nodes.")
+    n = 0  # number of swap attempts made so far
+    swapcount = 0
+    # NOTE(review): `deg` is never used below — candidate for removal.
+    deg = G.degree()
+    # Label key for nodes
+    dk = [n for n, d in G.degree()]
+    # Nodes are sampled with probability proportional to their degree.
+    cdf = nx.utils.cumulative_distribution([d for n, d in G.degree()])
+    discrete_sequence = nx.utils.discrete_sequence
+    window = 1
+    while n < nswap:
+        wcount = 0
+        swapped = []
+        # If the window is small, we just check each time whether the graph is
+        # connected by checking if the nodes that were just separated are still
+        # connected.
+        if window < _window_threshold:
+            # This Boolean keeps track of whether there was a failure or not.
+            fail = False
+            while wcount < window and n < nswap:
+                # Pick two random edges without creating the edge list. Choose
+                # source nodes from the discrete degree distribution.
+                (ui, xi) = discrete_sequence(2, cdistribution=cdf, seed=seed)
+                # If the source nodes are the same, skip this pair.
+                if ui == xi:
+                    continue
+                # Convert an index to a node label.
+                u = dk[ui]
+                x = dk[xi]
+                # Choose targets uniformly from neighbors.
+                v = seed.choice(list(G.neighbors(u)))
+                y = seed.choice(list(G.neighbors(x)))
+                # If the target nodes are the same, skip this pair.
+                if v == y:
+                    continue
+                if x not in G[u] and y not in G[v]:
+                    G.remove_edge(u, v)
+                    G.remove_edge(x, y)
+                    G.add_edge(u, x)
+                    G.add_edge(v, y)
+                    swapped.append((u, v, x, y))
+                    swapcount += 1
+                n += 1
+                # If G remains connected...
+                # (When no swap was performed above, the edge u-v is still in
+                # G, so this check trivially passes and wcount still advances.)
+                if nx.has_path(G, u, v):
+                    wcount += 1
+                # Otherwise, undo the changes.
+                else:
+                    G.add_edge(u, v)
+                    G.add_edge(x, y)
+                    G.remove_edge(u, x)
+                    G.remove_edge(v, y)
+                    swapcount -= 1
+                    fail = True
+            # If one of the swaps failed, reduce the window size.
+            if fail:
+                window = math.ceil(window / 2)
+            else:
+                window += 1
+        # If the window is large, then there is a good chance that a bunch of
+        # swaps will work. It's quicker to do all those swaps first and then
+        # check if the graph remains connected.
+        else:
+            while wcount < window and n < nswap:
+                # Pick two random edges without creating the edge list. Choose
+                # source nodes from the discrete degree distribution.
+                (ui, xi) = discrete_sequence(2, cdistribution=cdf, seed=seed)
+                # If the source nodes are the same, skip this pair.
+                if ui == xi:
+                    continue
+                # Convert an index to a node label.
+                u = dk[ui]
+                x = dk[xi]
+                # Choose targets uniformly from neighbors.
+                v = seed.choice(list(G.neighbors(u)))
+                y = seed.choice(list(G.neighbors(x)))
+                # If the target nodes are the same, skip this pair.
+                if v == y:
+                    continue
+                if x not in G[u] and y not in G[v]:
+                    G.remove_edge(u, v)
+                    G.remove_edge(x, y)
+                    G.add_edge(u, x)
+                    G.add_edge(v, y)
+                    swapped.append((u, v, x, y))
+                    swapcount += 1
+                n += 1
+                wcount += 1
+            # If the graph remains connected, increase the window size.
+            if nx.is_connected(G):
+                window += 1
+            # Otherwise, undo the changes from the previous window and decrease
+            # the window size.
+            else:
+                # Roll back in reverse order so earlier swaps are restored last.
+                while swapped:
+                    (u, v, x, y) = swapped.pop()
+                    G.add_edge(u, v)
+                    G.add_edge(x, y)
+                    G.remove_edge(u, x)
+                    G.remove_edge(v, y)
+                    swapcount -= 1
+                window = math.ceil(window / 2)
+    return swapcount
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_asteroidal.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_asteroidal.py
new file mode 100644
index 00000000..67131b2d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_asteroidal.py
@@ -0,0 +1,23 @@
+import networkx as nx
+
+
+def test_is_at_free():
+    """Check `is_at_free` on small graphs with known AT-free status."""
+    is_at_free = nx.asteroidal.is_at_free
+
+    cycle = nx.cycle_graph(6)
+    assert not is_at_free(cycle)
+
+    path = nx.path_graph(6)
+    assert is_at_free(path)
+
+    # Fewer than 3 independent vertices exist, so no asteroidal triple.
+    small_graph = nx.complete_graph(2)
+    assert is_at_free(small_graph)
+
+    petersen = nx.petersen_graph()
+    assert not is_at_free(petersen)
+
+    clique = nx.complete_graph(6)
+    assert is_at_free(clique)
+
+    line_clique = nx.line_graph(clique)
+    assert not is_at_free(line_clique)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_boundary.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_boundary.py
new file mode 100644
index 00000000..856be465
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_boundary.py
@@ -0,0 +1,154 @@
+"""Unit tests for the :mod:`networkx.algorithms.boundary` module."""
+
+from itertools import combinations
+
+import pytest
+
+import networkx as nx
+from networkx import convert_node_labels_to_integers as cnlti
+from networkx.utils import edges_equal
+
+
+class TestNodeBoundary:
+    """Unit tests for the :func:`~networkx.node_boundary` function."""
+
+    def test_null_graph(self):
+        """Tests that the null graph has empty node boundaries."""
+        null = nx.null_graph()
+        assert nx.node_boundary(null, []) == set()
+        assert nx.node_boundary(null, [], []) == set()
+        assert nx.node_boundary(null, [1, 2, 3]) == set()
+        assert nx.node_boundary(null, [1, 2, 3], [4, 5, 6]) == set()
+        assert nx.node_boundary(null, [1, 2, 3], [3, 4, 5]) == set()
+
+    def test_path_graph(self):
+        P10 = cnlti(nx.path_graph(10), first_label=1)
+        assert nx.node_boundary(P10, []) == set()
+        assert nx.node_boundary(P10, [], []) == set()
+        assert nx.node_boundary(P10, [1, 2, 3]) == {4}
+        assert nx.node_boundary(P10, [4, 5, 6]) == {3, 7}
+        assert nx.node_boundary(P10, [3, 4, 5, 6, 7]) == {2, 8}
+        assert nx.node_boundary(P10, [8, 9, 10]) == {7}
+        assert nx.node_boundary(P10, [4, 5, 6], [9, 10]) == set()
+
+    def test_complete_graph(self):
+        K10 = cnlti(nx.complete_graph(10), first_label=1)
+        assert nx.node_boundary(K10, []) == set()
+        assert nx.node_boundary(K10, [], []) == set()
+        assert nx.node_boundary(K10, [1, 2, 3]) == {4, 5, 6, 7, 8, 9, 10}
+        assert nx.node_boundary(K10, [4, 5, 6]) == {1, 2, 3, 7, 8, 9, 10}
+        assert nx.node_boundary(K10, [3, 4, 5, 6, 7]) == {1, 2, 8, 9, 10}
+        assert nx.node_boundary(K10, [4, 5, 6], []) == set()
+        assert nx.node_boundary(K10, K10) == set()
+        assert nx.node_boundary(K10, [1, 2, 3], [3, 4, 5]) == {4, 5}
+
+    def test_petersen(self):
+        """Check boundaries in the petersen graph
+
+        cheeger(G,k)=min(|bdy(S)|/|S| for |S|=k, 0<k<=|V(G)|/2)
+
+        """
+
+        def cheeger(G, k):
+            return min(len(nx.node_boundary(G, nn)) / k for nn in combinations(G, k))
+
+        P = nx.petersen_graph()
+        assert cheeger(P, 1) == pytest.approx(3.00, abs=1e-2)
+        assert cheeger(P, 2) == pytest.approx(2.00, abs=1e-2)
+        assert cheeger(P, 3) == pytest.approx(1.67, abs=1e-2)
+        assert cheeger(P, 4) == pytest.approx(1.00, abs=1e-2)
+        assert cheeger(P, 5) == pytest.approx(0.80, abs=1e-2)
+
+    def test_directed(self):
+        """Tests the node boundary of a directed graph."""
+        G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)])
+        S = {0, 1}
+        boundary = nx.node_boundary(G, S)
+        expected = {2}
+        assert boundary == expected
+
+    def test_multigraph(self):
+        """Tests the node boundary of a multigraph."""
+        G = nx.MultiGraph(list(nx.cycle_graph(5).edges()) * 2)
+        S = {0, 1}
+        boundary = nx.node_boundary(G, S)
+        expected = {2, 4}
+        assert boundary == expected
+
+    def test_multidigraph(self):
+        """Tests the edge boundary of a multidigraph."""
+        edges = [(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)]
+        G = nx.MultiDiGraph(edges * 2)
+        S = {0, 1}
+        boundary = nx.node_boundary(G, S)
+        expected = {2}
+        assert boundary == expected
+
+
+class TestEdgeBoundary:
+    """Unit tests for the :func:`~networkx.edge_boundary` function."""
+
+    def test_null_graph(self):
+        null = nx.null_graph()
+        assert list(nx.edge_boundary(null, [])) == []
+        assert list(nx.edge_boundary(null, [], [])) == []
+        assert list(nx.edge_boundary(null, [1, 2, 3])) == []
+        assert list(nx.edge_boundary(null, [1, 2, 3], [4, 5, 6])) == []
+        assert list(nx.edge_boundary(null, [1, 2, 3], [3, 4, 5])) == []
+
+    def test_path_graph(self):
+        P10 = cnlti(nx.path_graph(10), first_label=1)
+        assert list(nx.edge_boundary(P10, [])) == []
+        assert list(nx.edge_boundary(P10, [], [])) == []
+        assert list(nx.edge_boundary(P10, [1, 2, 3])) == [(3, 4)]
+        assert sorted(nx.edge_boundary(P10, [4, 5, 6])) == [(4, 3), (6, 7)]
+        assert sorted(nx.edge_boundary(P10, [3, 4, 5, 6, 7])) == [(3, 2), (7, 8)]
+        assert list(nx.edge_boundary(P10, [8, 9, 10])) == [(8, 7)]
+        assert sorted(nx.edge_boundary(P10, [4, 5, 6], [9, 10])) == []
+        assert list(nx.edge_boundary(P10, [1, 2, 3], [3, 4, 5])) == [(2, 3), (3, 4)]
+
+    def test_complete_graph(self):
+        K10 = cnlti(nx.complete_graph(10), first_label=1)
+
+        def ilen(iterable):
+            return sum(1 for i in iterable)
+
+        assert list(nx.edge_boundary(K10, [])) == []
+        assert list(nx.edge_boundary(K10, [], [])) == []
+        assert ilen(nx.edge_boundary(K10, [1, 2, 3])) == 21
+        assert ilen(nx.edge_boundary(K10, [4, 5, 6, 7])) == 24
+        assert ilen(nx.edge_boundary(K10, [3, 4, 5, 6, 7])) == 25
+        assert ilen(nx.edge_boundary(K10, [8, 9, 10])) == 21
+        assert edges_equal(
+            nx.edge_boundary(K10, [4, 5, 6], [9, 10]),
+            [(4, 9), (4, 10), (5, 9), (5, 10), (6, 9), (6, 10)],
+        )
+        assert edges_equal(
+            nx.edge_boundary(K10, [1, 2, 3], [3, 4, 5]),
+            [(1, 3), (1, 4), (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (3, 5)],
+        )
+
+    def test_directed(self):
+        """Tests the edge boundary of a directed graph."""
+        G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)])
+        S = {0, 1}
+        boundary = list(nx.edge_boundary(G, S))
+        expected = [(1, 2)]
+        assert boundary == expected
+
+    def test_multigraph(self):
+        """Tests the edge boundary of a multigraph."""
+        G = nx.MultiGraph(list(nx.cycle_graph(5).edges()) * 2)
+        S = {0, 1}
+        boundary = list(nx.edge_boundary(G, S))
+        expected = [(0, 4), (0, 4), (1, 2), (1, 2)]
+        assert boundary == expected
+
+    def test_multidigraph(self):
+        """Tests the edge boundary of a multidigraph."""
+        edges = [(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)]
+        G = nx.MultiDiGraph(edges * 2)
+        S = {0, 1}
+        boundary = list(nx.edge_boundary(G, S))
+        expected = [(1, 2), (1, 2)]
+        assert boundary == expected
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_bridges.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_bridges.py
new file mode 100644
index 00000000..b47f5860
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_bridges.py
@@ -0,0 +1,144 @@
+"""Unit tests for bridge-finding algorithms."""
+
+import pytest
+
+import networkx as nx
+
+
+class TestBridges:
+    """Unit tests for the bridge-finding function."""
+
+    def test_single_bridge(self):
+        edges = [
+            # DFS tree edges.
+            (1, 2),
+            (2, 3),
+            (3, 4),
+            (3, 5),
+            (5, 6),
+            (6, 7),
+            (7, 8),
+            (5, 9),
+            (9, 10),
+            # Nontree edges.
+            (1, 3),
+            (1, 4),
+            (2, 5),
+            (5, 10),
+            (6, 8),
+        ]
+        G = nx.Graph(edges)
+        source = 1
+        bridges = list(nx.bridges(G, source))
+        assert bridges == [(5, 6)]
+
+    def test_barbell_graph(self):
+        # The (3, 0) barbell graph has two triangles joined by a single edge.
+        G = nx.barbell_graph(3, 0)
+        source = 0
+        bridges = list(nx.bridges(G, source))
+        assert bridges == [(2, 3)]
+
+    def test_multiedge_bridge(self):
+        edges = [
+            (0, 1),
+            (0, 2),
+            (1, 2),
+            (1, 2),
+            (2, 3),
+            (3, 4),
+            (3, 4),
+        ]
+        G = nx.MultiGraph(edges)
+        assert list(nx.bridges(G)) == [(2, 3)]
+
+
+class TestHasBridges:
+    """Unit tests for the has bridges function."""
+
+    def test_single_bridge(self):
+        edges = [
+            # DFS tree edges.
+            (1, 2),
+            (2, 3),
+            (3, 4),
+            (3, 5),
+            (5, 6),  # The only bridge edge
+            (6, 7),
+            (7, 8),
+            (5, 9),
+            (9, 10),
+            # Nontree edges.
+            (1, 3),
+            (1, 4),
+            (2, 5),
+            (5, 10),
+            (6, 8),
+        ]
+        G = nx.Graph(edges)
+        assert nx.has_bridges(G)  # Default root
+        assert nx.has_bridges(G, root=1)  # arbitrary root in G
+
+    def test_has_bridges_raises_root_not_in_G(self):
+        G = nx.Graph()
+        G.add_nodes_from([1, 2, 3])
+        with pytest.raises(nx.NodeNotFound):
+            nx.has_bridges(G, root=6)
+
+    def test_multiedge_bridge(self):
+        edges = [
+            (0, 1),
+            (0, 2),
+            (1, 2),
+            (1, 2),
+            (2, 3),
+            (3, 4),
+            (3, 4),
+        ]
+        G = nx.MultiGraph(edges)
+        assert nx.has_bridges(G)
+        # Make every edge a multiedge
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        assert not nx.has_bridges(G)
+
+    def test_bridges_multiple_components(self):
+        G = nx.Graph()
+        nx.add_path(G, [0, 1, 2])  # One connected component
+        nx.add_path(G, [4, 5, 6])  # Another connected component
+        assert list(nx.bridges(G, root=4)) == [(4, 5), (5, 6)]
+
+
+class TestLocalBridges:
+    """Unit tests for the local_bridge function."""
+
+    @classmethod
+    def setup_class(cls):
+        cls.BB = nx.barbell_graph(4, 0)
+        cls.square = nx.cycle_graph(4)
+        cls.tri = nx.cycle_graph(3)
+
+    def test_nospan(self):
+        expected = {(3, 4), (4, 3)}
+        assert next(nx.local_bridges(self.BB, with_span=False)) in expected
+        assert set(nx.local_bridges(self.square, with_span=False)) == self.square.edges
+        assert list(nx.local_bridges(self.tri, with_span=False)) == []
+
+    def test_no_weight(self):
+        inf = float("inf")
+        expected = {(3, 4, inf), (4, 3, inf)}
+        assert next(nx.local_bridges(self.BB)) in expected
+        expected = {(u, v, 3) for u, v in self.square.edges}
+        assert set(nx.local_bridges(self.square)) == expected
+        assert list(nx.local_bridges(self.tri)) == []
+
+    def test_weight(self):
+        inf = float("inf")
+        G = self.square.copy()
+
+        G.edges[1, 2]["weight"] = 2
+        expected = {(u, v, 5 - wt) for u, v, wt in G.edges(data="weight", default=1)}
+        assert set(nx.local_bridges(G, weight="weight")) == expected
+
+        expected = {(u, v, 6) for u, v in G.edges}
+        lb = nx.local_bridges(G, weight=lambda u, v, d: 2)
+        assert set(lb) == expected
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_broadcasting.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_broadcasting.py
new file mode 100644
index 00000000..73bf83c8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_broadcasting.py
@@ -0,0 +1,82 @@
+"""Unit tests for the broadcasting module."""
+
+import math
+
+import networkx as nx
+
+
+def test_example_tree_broadcast():
+    """
+    Test the BROADCAST algorithm on the example in the paper titled: "Information Dissemination in Trees"
+    """
+    edge_list = [
+        (0, 1),
+        (1, 2),
+        (2, 7),
+        (3, 4),
+        (5, 4),
+        (4, 7),
+        (6, 7),
+        (7, 9),
+        (8, 9),
+        (9, 13),
+        (13, 14),
+        (14, 15),
+        (14, 16),
+        (14, 17),
+        (13, 11),
+        (11, 10),
+        (11, 12),
+        (13, 18),
+        (18, 19),
+        (18, 20),
+    ]
+    G = nx.Graph(edge_list)
+    b_T, b_C = nx.tree_broadcast_center(G)
+    assert b_T == 6
+    assert b_C == {13, 9}
+    # test broadcast time from specific vertex
+    assert nx.tree_broadcast_time(G, 17) == 8
+    assert nx.tree_broadcast_time(G, 3) == 9
+    # test broadcast time of entire tree
+    assert nx.tree_broadcast_time(G) == 10
+
+
+def test_path_broadcast():
+    for i in range(2, 12):
+        G = nx.path_graph(i)
+        b_T, b_C = nx.tree_broadcast_center(G)
+        assert b_T == math.ceil(i / 2)
+        assert b_C == {
+            math.ceil(i / 2),
+            math.floor(i / 2),
+            math.ceil(i / 2 - 1),
+            math.floor(i / 2 - 1),
+        }
+        assert nx.tree_broadcast_time(G) == i - 1
+
+
+def test_empty_graph_broadcast():
+    H = nx.empty_graph(1)
+    b_T, b_C = nx.tree_broadcast_center(H)
+    assert b_T == 0
+    assert b_C == {0}
+    assert nx.tree_broadcast_time(H) == 0
+
+
+def test_star_broadcast():
+    for i in range(4, 12):
+        G = nx.star_graph(i)
+        b_T, b_C = nx.tree_broadcast_center(G)
+        assert b_T == i
+        assert b_C == set(G.nodes())
+        assert nx.tree_broadcast_time(G) == b_T
+
+
+def test_binomial_tree_broadcast():
+    for i in range(2, 8):
+        G = nx.binomial_tree(i)
+        b_T, b_C = nx.tree_broadcast_center(G)
+        assert b_T == i
+        assert b_C == {0, 2 ** (i - 1)}
+        assert nx.tree_broadcast_time(G) == 2 * i - 1
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chains.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chains.py
new file mode 100644
index 00000000..09b4c734
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chains.py
@@ -0,0 +1,141 @@
+"""Unit tests for the chain decomposition functions."""
+
+from itertools import cycle, islice
+
+import pytest
+
+import networkx as nx
+
+
+def cycles(seq):
+    """Yields cyclic permutations of the given sequence.
+
+    For example::
+
+        >>> list(cycles("abc"))
+        [('a', 'b', 'c'), ('b', 'c', 'a'), ('c', 'a', 'b')]
+
+    """
+    n = len(seq)
+    cycled_seq = cycle(seq)
+    for x in seq:
+        yield tuple(islice(cycled_seq, n))
+        next(cycled_seq)
+
+
+def cyclic_equals(seq1, seq2):
+    """Decide whether two sequences are equal up to cyclic permutations.
+
+    For example::
+
+        >>> cyclic_equals("xyz", "zxy")
+        True
+        >>> cyclic_equals("xyz", "zyx")
+        False
+
+    """
+    # Cast seq2 to a tuple since `cycles()` yields tuples.
+    seq2 = tuple(seq2)
+    return any(x == tuple(seq2) for x in cycles(seq1))
+
+
+class TestChainDecomposition:
+    """Unit tests for the chain decomposition function."""
+
+    def assertContainsChain(self, chain, expected):
+        # A cycle could be expressed in two different orientations, one
+        # forward and one backward, so we need to check for cyclic
+        # equality in both orientations.
+        reversed_chain = list(reversed([tuple(reversed(e)) for e in chain]))
+        for candidate in expected:
+            if cyclic_equals(chain, candidate):
+                break
+            if cyclic_equals(reversed_chain, candidate):
+                break
+        else:
+            self.fail("chain not found")
+
+    def test_decomposition(self):
+        edges = [
+            # DFS tree edges.
+            (1, 2),
+            (2, 3),
+            (3, 4),
+            (3, 5),
+            (5, 6),
+            (6, 7),
+            (7, 8),
+            (5, 9),
+            (9, 10),
+            # Nontree edges.
+            (1, 3),
+            (1, 4),
+            (2, 5),
+            (5, 10),
+            (6, 8),
+        ]
+        G = nx.Graph(edges)
+        expected = [
+            [(1, 3), (3, 2), (2, 1)],
+            [(1, 4), (4, 3)],
+            [(2, 5), (5, 3)],
+            [(5, 10), (10, 9), (9, 5)],
+            [(6, 8), (8, 7), (7, 6)],
+        ]
+        chains = list(nx.chain_decomposition(G, root=1))
+        assert len(chains) == len(expected)
+
+    # This chain decomposition isn't unique
+    #        for chain in chains:
+    #            print(chain)
+    #            self.assertContainsChain(chain, expected)
+
+    def test_barbell_graph(self):
+        # The (3, 0) barbell graph has two triangles joined by a single edge.
+        G = nx.barbell_graph(3, 0)
+        chains = list(nx.chain_decomposition(G, root=0))
+        expected = [[(0, 1), (1, 2), (2, 0)], [(3, 4), (4, 5), (5, 3)]]
+        assert len(chains) == len(expected)
+        for chain in chains:
+            self.assertContainsChain(chain, expected)
+
+    def test_disconnected_graph(self):
+        """Test for a graph with multiple connected components."""
+        G = nx.barbell_graph(3, 0)
+        H = nx.barbell_graph(3, 0)
+        mapping = dict(zip(range(6), "abcdef"))
+        nx.relabel_nodes(H, mapping, copy=False)
+        G = nx.union(G, H)
+        chains = list(nx.chain_decomposition(G))
+        expected = [
+            [(0, 1), (1, 2), (2, 0)],
+            [(3, 4), (4, 5), (5, 3)],
+            [("a", "b"), ("b", "c"), ("c", "a")],
+            [("d", "e"), ("e", "f"), ("f", "d")],
+        ]
+        assert len(chains) == len(expected)
+        for chain in chains:
+            self.assertContainsChain(chain, expected)
+
+    def test_disconnected_graph_root_node(self):
+        """Test for a single component of a disconnected graph."""
+        G = nx.barbell_graph(3, 0)
+        H = nx.barbell_graph(3, 0)
+        mapping = dict(zip(range(6), "abcdef"))
+        nx.relabel_nodes(H, mapping, copy=False)
+        G = nx.union(G, H)
+        chains = list(nx.chain_decomposition(G, root="a"))
+        expected = [
+            [("a", "b"), ("b", "c"), ("c", "a")],
+            [("d", "e"), ("e", "f"), ("f", "d")],
+        ]
+        assert len(chains) == len(expected)
+        for chain in chains:
+            self.assertContainsChain(chain, expected)
+
+    def test_chain_decomposition_root_not_in_G(self):
+        """Test chain decomposition when root is not in graph"""
+        G = nx.Graph()
+        G.add_nodes_from([1, 2, 3])
+        with pytest.raises(nx.NodeNotFound):
+            nx.has_bridges(G, root=6)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chordal.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chordal.py
new file mode 100644
index 00000000..148b22f2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chordal.py
@@ -0,0 +1,129 @@
+import pytest
+
+import networkx as nx
+
+
+class TestMCS:
+    @classmethod
+    def setup_class(cls):
+        # simple graph
+        connected_chordal_G = nx.Graph()
+        connected_chordal_G.add_edges_from(
+            [
+                (1, 2),
+                (1, 3),
+                (2, 3),
+                (2, 4),
+                (3, 4),
+                (3, 5),
+                (3, 6),
+                (4, 5),
+                (4, 6),
+                (5, 6),
+            ]
+        )
+        cls.connected_chordal_G = connected_chordal_G
+
+        chordal_G = nx.Graph()
+        chordal_G.add_edges_from(
+            [
+                (1, 2),
+                (1, 3),
+                (2, 3),
+                (2, 4),
+                (3, 4),
+                (3, 5),
+                (3, 6),
+                (4, 5),
+                (4, 6),
+                (5, 6),
+                (7, 8),
+            ]
+        )
+        chordal_G.add_node(9)
+        cls.chordal_G = chordal_G
+
+        non_chordal_G = nx.Graph()
+        non_chordal_G.add_edges_from([(1, 2), (1, 3), (2, 4), (2, 5), (3, 4), (3, 5)])
+        cls.non_chordal_G = non_chordal_G
+
+        self_loop_G = nx.Graph()
+        self_loop_G.add_edges_from([(1, 1)])
+        cls.self_loop_G = self_loop_G
+
+    @pytest.mark.parametrize("G", (nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()))
+    def test_is_chordal_not_implemented(self, G):
+        with pytest.raises(nx.NetworkXNotImplemented):
+            nx.is_chordal(G)
+
+    def test_is_chordal(self):
+        assert not nx.is_chordal(self.non_chordal_G)
+        assert nx.is_chordal(self.chordal_G)
+        assert nx.is_chordal(self.connected_chordal_G)
+        assert nx.is_chordal(nx.Graph())
+        assert nx.is_chordal(nx.complete_graph(3))
+        assert nx.is_chordal(nx.cycle_graph(3))
+        assert not nx.is_chordal(nx.cycle_graph(5))
+        assert nx.is_chordal(self.self_loop_G)
+
+    def test_induced_nodes(self):
+        G = nx.generators.classic.path_graph(10)
+        Induced_nodes = nx.find_induced_nodes(G, 1, 9, 2)
+        assert Induced_nodes == {1, 2, 3, 4, 5, 6, 7, 8, 9}
+        pytest.raises(
+            nx.NetworkXTreewidthBoundExceeded, nx.find_induced_nodes, G, 1, 9, 1
+        )
+        Induced_nodes = nx.find_induced_nodes(self.chordal_G, 1, 6)
+        assert Induced_nodes == {1, 2, 4, 6}
+        pytest.raises(nx.NetworkXError, nx.find_induced_nodes, self.non_chordal_G, 1, 5)
+
+    def test_graph_treewidth(self):
+        with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"):
+            nx.chordal_graph_treewidth(self.non_chordal_G)
+
+    def test_chordal_find_cliques(self):
+        cliques = {
+            frozenset([9]),
+            frozenset([7, 8]),
+            frozenset([1, 2, 3]),
+            frozenset([2, 3, 4]),
+            frozenset([3, 4, 5, 6]),
+        }
+        assert set(nx.chordal_graph_cliques(self.chordal_G)) == cliques
+        with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"):
+            set(nx.chordal_graph_cliques(self.non_chordal_G))
+        with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"):
+            set(nx.chordal_graph_cliques(self.self_loop_G))
+
+    def test_chordal_find_cliques_path(self):
+        G = nx.path_graph(10)
+        cliqueset = nx.chordal_graph_cliques(G)
+        for u, v in G.edges():
+            assert frozenset([u, v]) in cliqueset or frozenset([v, u]) in cliqueset
+
+    def test_chordal_find_cliquesCC(self):
+        cliques = {frozenset([1, 2, 3]), frozenset([2, 3, 4]), frozenset([3, 4, 5, 6])}
+        cgc = nx.chordal_graph_cliques
+        assert set(cgc(self.connected_chordal_G)) == cliques
+
+    def test_complete_to_chordal_graph(self):
+        fgrg = nx.fast_gnp_random_graph
+        test_graphs = [
+            nx.barbell_graph(6, 2),
+            nx.cycle_graph(15),
+            nx.wheel_graph(20),
+            nx.grid_graph([10, 4]),
+            nx.ladder_graph(15),
+            nx.star_graph(5),
+            nx.bull_graph(),
+            fgrg(20, 0.3, seed=1),
+        ]
+        for G in test_graphs:
+            H, a = nx.complete_to_chordal_graph(G)
+            assert nx.is_chordal(H)
+            assert len(a) == H.number_of_nodes()
+            if nx.is_chordal(G):
+                assert G.number_of_edges() == H.number_of_edges()
+                assert set(a.values()) == {0}
+            else:
+                assert len(set(a.values())) == H.number_of_nodes()
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_clique.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_clique.py
new file mode 100644
index 00000000..3bee2109
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_clique.py
@@ -0,0 +1,291 @@
+import pytest
+
+import networkx as nx
+from networkx import convert_node_labels_to_integers as cnlti
+
+
+class TestCliques:
+    def setup_method(self):
+        z = [3, 4, 3, 4, 2, 4, 2, 1, 1, 1, 1]
+        self.G = cnlti(nx.generators.havel_hakimi_graph(z), first_label=1)
+        self.cl = list(nx.find_cliques(self.G))
+        H = nx.complete_graph(6)
+        H = nx.relabel_nodes(H, {i: i + 1 for i in range(6)})
+        H.remove_edges_from([(2, 6), (2, 5), (2, 4), (1, 3), (5, 3)])
+        self.H = H
+
+    def test_find_cliques1(self):
+        cl = list(nx.find_cliques(self.G))
+        rcl = nx.find_cliques_recursive(self.G)
+        expected = [[2, 6, 1, 3], [2, 6, 4], [5, 4, 7], [8, 9], [10, 11]]
+        assert sorted(map(sorted, cl)) == sorted(map(sorted, rcl))
+        assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
+
+    def test_selfloops(self):
+        self.G.add_edge(1, 1)
+        cl = list(nx.find_cliques(self.G))
+        rcl = list(nx.find_cliques_recursive(self.G))
+        assert set(map(frozenset, cl)) == set(map(frozenset, rcl))
+        answer = [{2, 6, 1, 3}, {2, 6, 4}, {5, 4, 7}, {8, 9}, {10, 11}]
+        assert len(answer) == len(cl)
+        assert all(set(c) in answer for c in cl)
+
+    def test_find_cliques2(self):
+        hcl = list(nx.find_cliques(self.H))
+        assert sorted(map(sorted, hcl)) == [[1, 2], [1, 4, 5, 6], [2, 3], [3, 4, 6]]
+
+    def test_find_cliques3(self):
+        # all cliques are [[2, 6, 1, 3], [2, 6, 4], [5, 4, 7], [8, 9], [10, 11]]
+
+        cl = list(nx.find_cliques(self.G, [2]))
+        rcl = nx.find_cliques_recursive(self.G, [2])
+        expected = [[2, 6, 1, 3], [2, 6, 4]]
+        assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected))
+        assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
+
+        cl = list(nx.find_cliques(self.G, [2, 3]))
+        rcl = nx.find_cliques_recursive(self.G, [2, 3])
+        expected = [[2, 6, 1, 3]]
+        assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected))
+        assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
+
+        cl = list(nx.find_cliques(self.G, [2, 6, 4]))
+        rcl = nx.find_cliques_recursive(self.G, [2, 6, 4])
+        expected = [[2, 6, 4]]
+        assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected))
+        assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
+
+        cl = list(nx.find_cliques(self.G, [2, 6, 4]))
+        rcl = nx.find_cliques_recursive(self.G, [2, 6, 4])
+        expected = [[2, 6, 4]]
+        assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected))
+        assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
+
+        with pytest.raises(ValueError):
+            list(nx.find_cliques(self.G, [2, 6, 4, 1]))
+
+        with pytest.raises(ValueError):
+            list(nx.find_cliques_recursive(self.G, [2, 6, 4, 1]))
+
+    def test_number_of_cliques(self):
+        G = self.G
+        assert nx.number_of_cliques(G, 1) == 1
+        assert list(nx.number_of_cliques(G, [1]).values()) == [1]
+        assert list(nx.number_of_cliques(G, [1, 2]).values()) == [1, 2]
+        assert nx.number_of_cliques(G, [1, 2]) == {1: 1, 2: 2}
+        assert nx.number_of_cliques(G, 2) == 2
+        assert nx.number_of_cliques(G) == {
+            1: 1,
+            2: 2,
+            3: 1,
+            4: 2,
+            5: 1,
+            6: 2,
+            7: 1,
+            8: 1,
+            9: 1,
+            10: 1,
+            11: 1,
+        }
+        assert nx.number_of_cliques(G, nodes=list(G)) == {
+            1: 1,
+            2: 2,
+            3: 1,
+            4: 2,
+            5: 1,
+            6: 2,
+            7: 1,
+            8: 1,
+            9: 1,
+            10: 1,
+            11: 1,
+        }
+        assert nx.number_of_cliques(G, nodes=[2, 3, 4]) == {2: 2, 3: 1, 4: 2}
+        assert nx.number_of_cliques(G, cliques=self.cl) == {
+            1: 1,
+            2: 2,
+            3: 1,
+            4: 2,
+            5: 1,
+            6: 2,
+            7: 1,
+            8: 1,
+            9: 1,
+            10: 1,
+            11: 1,
+        }
+        assert nx.number_of_cliques(G, list(G), cliques=self.cl) == {
+            1: 1,
+            2: 2,
+            3: 1,
+            4: 2,
+            5: 1,
+            6: 2,
+            7: 1,
+            8: 1,
+            9: 1,
+            10: 1,
+            11: 1,
+        }
+
+    def test_node_clique_number(self):
+        G = self.G
+        assert nx.node_clique_number(G, 1) == 4
+        assert list(nx.node_clique_number(G, [1]).values()) == [4]
+        assert list(nx.node_clique_number(G, [1, 2]).values()) == [4, 4]
+        assert nx.node_clique_number(G, [1, 2]) == {1: 4, 2: 4}
+        assert nx.node_clique_number(G, 1) == 4
+        assert nx.node_clique_number(G) == {
+            1: 4,
+            2: 4,
+            3: 4,
+            4: 3,
+            5: 3,
+            6: 4,
+            7: 3,
+            8: 2,
+            9: 2,
+            10: 2,
+            11: 2,
+        }
+        assert nx.node_clique_number(G, cliques=self.cl) == {
+            1: 4,
+            2: 4,
+            3: 4,
+            4: 3,
+            5: 3,
+            6: 4,
+            7: 3,
+            8: 2,
+            9: 2,
+            10: 2,
+            11: 2,
+        }
+        assert nx.node_clique_number(G, [1, 2], cliques=self.cl) == {1: 4, 2: 4}
+        assert nx.node_clique_number(G, 1, cliques=self.cl) == 4
+
+    def test_make_clique_bipartite(self):
+        G = self.G
+        B = nx.make_clique_bipartite(G)
+        assert sorted(B) == [-5, -4, -3, -2, -1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
+        # Project onto the nodes of the original graph.
+        H = nx.projected_graph(B, range(1, 12))
+        assert H.adj == G.adj
+        # Project onto the nodes representing the cliques.
+        H1 = nx.projected_graph(B, range(-5, 0))
+        # Relabel the negative numbers as positive ones.
+        H1 = nx.relabel_nodes(H1, {-v: v for v in range(1, 6)})
+        assert sorted(H1) == [1, 2, 3, 4, 5]
+
+    def test_make_max_clique_graph(self):
+        """Tests that the maximal clique graph is the same as the bipartite
+        clique graph after being projected onto the nodes representing the
+        cliques.
+
+        """
+        G = self.G
+        B = nx.make_clique_bipartite(G)
+        # Project onto the nodes representing the cliques.
+        H1 = nx.projected_graph(B, range(-5, 0))
+        # Relabel the negative numbers as nonnegative ones, starting at
+        # 0.
+        H1 = nx.relabel_nodes(H1, {-v: v - 1 for v in range(1, 6)})
+        H2 = nx.make_max_clique_graph(G)
+        assert H1.adj == H2.adj
+
+    def test_directed(self):
+        with pytest.raises(nx.NetworkXNotImplemented):
+            next(nx.find_cliques(nx.DiGraph()))
+
+    def test_find_cliques_trivial(self):
+        G = nx.Graph()
+        assert sorted(nx.find_cliques(G)) == []
+        assert sorted(nx.find_cliques_recursive(G)) == []
+
+    def test_make_max_clique_graph_create_using(self):
+        G = nx.Graph([(1, 2), (3, 1), (4, 1), (5, 6)])
+        E = nx.Graph([(0, 1), (0, 2), (1, 2)])
+        E.add_node(3)
+        assert nx.is_isomorphic(nx.make_max_clique_graph(G, create_using=nx.Graph), E)
+
+
+class TestEnumerateAllCliques:
+    def test_paper_figure_4(self):
+        # Same graph as given in Fig. 4 of paper enumerate_all_cliques is
+        # based on.
+        # http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=1559964&isnumber=33129
+        G = nx.Graph()
+        edges_fig_4 = [
+            ("a", "b"),
+            ("a", "c"),
+            ("a", "d"),
+            ("a", "e"),
+            ("b", "c"),
+            ("b", "d"),
+            ("b", "e"),
+            ("c", "d"),
+            ("c", "e"),
+            ("d", "e"),
+            ("f", "b"),
+            ("f", "c"),
+            ("f", "g"),
+            ("g", "f"),
+            ("g", "c"),
+            ("g", "d"),
+            ("g", "e"),
+        ]
+        G.add_edges_from(edges_fig_4)
+
+        cliques = list(nx.enumerate_all_cliques(G))
+        clique_sizes = list(map(len, cliques))
+        assert sorted(clique_sizes) == clique_sizes
+
+        expected_cliques = [
+            ["a"],
+            ["b"],
+            ["c"],
+            ["d"],
+            ["e"],
+            ["f"],
+            ["g"],
+            ["a", "b"],
+            ["a", "b", "d"],
+            ["a", "b", "d", "e"],
+            ["a", "b", "e"],
+            ["a", "c"],
+            ["a", "c", "d"],
+            ["a", "c", "d", "e"],
+            ["a", "c", "e"],
+            ["a", "d"],
+            ["a", "d", "e"],
+            ["a", "e"],
+            ["b", "c"],
+            ["b", "c", "d"],
+            ["b", "c", "d", "e"],
+            ["b", "c", "e"],
+            ["b", "c", "f"],
+            ["b", "d"],
+            ["b", "d", "e"],
+            ["b", "e"],
+            ["b", "f"],
+            ["c", "d"],
+            ["c", "d", "e"],
+            ["c", "d", "e", "g"],
+            ["c", "d", "g"],
+            ["c", "e"],
+            ["c", "e", "g"],
+            ["c", "f"],
+            ["c", "f", "g"],
+            ["c", "g"],
+            ["d", "e"],
+            ["d", "e", "g"],
+            ["d", "g"],
+            ["e", "g"],
+            ["f", "g"],
+            ["a", "b", "c"],
+            ["a", "b", "c", "d"],
+            ["a", "b", "c", "d", "e"],
+            ["a", "b", "c", "e"],
+        ]
+
+        assert sorted(map(sorted, cliques)) == sorted(map(sorted, expected_cliques))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cluster.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cluster.py
new file mode 100644
index 00000000..b656ba81
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cluster.py
@@ -0,0 +1,549 @@
+import pytest
+
+import networkx as nx
+
+
+class TestTriangles:
+    def test_empty(self):
+        G = nx.Graph()
+        assert list(nx.triangles(G).values()) == []
+
+    def test_path(self):
+        G = nx.path_graph(10)
+        assert list(nx.triangles(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
+        assert nx.triangles(G) == {
+            0: 0,
+            1: 0,
+            2: 0,
+            3: 0,
+            4: 0,
+            5: 0,
+            6: 0,
+            7: 0,
+            8: 0,
+            9: 0,
+        }
+
+    def test_cubical(self):
+        G = nx.cubical_graph()
+        assert list(nx.triangles(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0]
+        assert nx.triangles(G, 1) == 0
+        assert list(nx.triangles(G, [1, 2]).values()) == [0, 0]
+        assert nx.triangles(G, 1) == 0
+        assert nx.triangles(G, [1, 2]) == {1: 0, 2: 0}
+
+    def test_k5(self):
+        G = nx.complete_graph(5)
+        assert list(nx.triangles(G).values()) == [6, 6, 6, 6, 6]
+        assert sum(nx.triangles(G).values()) / 3 == 10
+        assert nx.triangles(G, 1) == 6
+        G.remove_edge(1, 2)
+        assert list(nx.triangles(G).values()) == [5, 3, 3, 5, 5]
+        assert nx.triangles(G, 1) == 3
+        G.add_edge(3, 3)  # ignore self-edges
+        assert list(nx.triangles(G).values()) == [5, 3, 3, 5, 5]
+        assert nx.triangles(G, 3) == 5
+
+
+class TestDirectedClustering:
+    def test_clustering(self):
+        G = nx.DiGraph()
+        assert list(nx.clustering(G).values()) == []
+        assert nx.clustering(G) == {}
+
+    def test_path(self):
+        G = nx.path_graph(10, create_using=nx.DiGraph())
+        assert list(nx.clustering(G).values()) == [
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+        ]
+        assert nx.clustering(G) == {
+            0: 0,
+            1: 0,
+            2: 0,
+            3: 0,
+            4: 0,
+            5: 0,
+            6: 0,
+            7: 0,
+            8: 0,
+            9: 0,
+        }
+        assert nx.clustering(G, 0) == 0
+
+    def test_k5(self):
+        G = nx.complete_graph(5, create_using=nx.DiGraph())
+        assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1]
+        assert nx.average_clustering(G) == 1
+        G.remove_edge(1, 2)
+        assert list(nx.clustering(G).values()) == [
+            11 / 12,
+            1,
+            1,
+            11 / 12,
+            11 / 12,
+        ]
+        assert nx.clustering(G, [1, 4]) == {1: 1, 4: 11 / 12}
+        G.remove_edge(2, 1)
+        assert list(nx.clustering(G).values()) == [
+            5 / 6,
+            1,
+            1,
+            5 / 6,
+            5 / 6,
+        ]
+        assert nx.clustering(G, [1, 4]) == {1: 1, 4: 0.83333333333333337}
+        assert nx.clustering(G, 4) == 5 / 6
+
+    def test_triangle_and_edge(self):
+        G = nx.cycle_graph(3, create_using=nx.DiGraph())
+        G.add_edge(0, 4)
+        assert nx.clustering(G)[0] == 1 / 6
+
+
+class TestDirectedWeightedClustering:
+    @classmethod
+    def setup_class(cls):
+        global np
+        np = pytest.importorskip("numpy")
+
+    def test_clustering(self):
+        G = nx.DiGraph()
+        assert list(nx.clustering(G, weight="weight").values()) == []
+        assert nx.clustering(G) == {}
+
+    def test_path(self):
+        G = nx.path_graph(10, create_using=nx.DiGraph())
+        assert list(nx.clustering(G, weight="weight").values()) == [
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+        ]
+        assert nx.clustering(G, weight="weight") == {
+            0: 0,
+            1: 0,
+            2: 0,
+            3: 0,
+            4: 0,
+            5: 0,
+            6: 0,
+            7: 0,
+            8: 0,
+            9: 0,
+        }
+
+    def test_k5(self):
+        G = nx.complete_graph(5, create_using=nx.DiGraph())
+        assert list(nx.clustering(G, weight="weight").values()) == [1, 1, 1, 1, 1]
+        assert nx.average_clustering(G, weight="weight") == 1
+        G.remove_edge(1, 2)
+        assert list(nx.clustering(G, weight="weight").values()) == [
+            11 / 12,
+            1,
+            1,
+            11 / 12,
+            11 / 12,
+        ]
+        assert nx.clustering(G, [1, 4], weight="weight") == {1: 1, 4: 11 / 12}
+        G.remove_edge(2, 1)
+        assert list(nx.clustering(G, weight="weight").values()) == [
+            5 / 6,
+            1,
+            1,
+            5 / 6,
+            5 / 6,
+        ]
+        assert nx.clustering(G, [1, 4], weight="weight") == {
+            1: 1,
+            4: 0.83333333333333337,
+        }
+
+    def test_triangle_and_edge(self):
+        G = nx.cycle_graph(3, create_using=nx.DiGraph())
+        G.add_edge(0, 4, weight=2)
+        assert nx.clustering(G)[0] == 1 / 6
+        # Relaxed comparisons to allow graphblas-algorithms to pass tests
+        np.testing.assert_allclose(nx.clustering(G, weight="weight")[0], 1 / 12)
+        np.testing.assert_allclose(nx.clustering(G, 0, weight="weight"), 1 / 12)
+
+
+class TestWeightedClustering:
+    @classmethod
+    def setup_class(cls):
+        global np
+        np = pytest.importorskip("numpy")
+
+    def test_clustering(self):
+        G = nx.Graph()
+        assert list(nx.clustering(G, weight="weight").values()) == []
+        assert nx.clustering(G) == {}
+
+    def test_path(self):
+        G = nx.path_graph(10)
+        assert list(nx.clustering(G, weight="weight").values()) == [
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+        ]
+        assert nx.clustering(G, weight="weight") == {
+            0: 0,
+            1: 0,
+            2: 0,
+            3: 0,
+            4: 0,
+            5: 0,
+            6: 0,
+            7: 0,
+            8: 0,
+            9: 0,
+        }
+
+    def test_cubical(self):
+        G = nx.cubical_graph()
+        assert list(nx.clustering(G, weight="weight").values()) == [
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+        ]
+        assert nx.clustering(G, 1) == 0
+        assert list(nx.clustering(G, [1, 2], weight="weight").values()) == [0, 0]
+        assert nx.clustering(G, 1, weight="weight") == 0
+        assert nx.clustering(G, [1, 2], weight="weight") == {1: 0, 2: 0}
+
+    def test_k5(self):
+        G = nx.complete_graph(5)
+        assert list(nx.clustering(G, weight="weight").values()) == [1, 1, 1, 1, 1]
+        assert nx.average_clustering(G, weight="weight") == 1
+        G.remove_edge(1, 2)
+        assert list(nx.clustering(G, weight="weight").values()) == [
+            5 / 6,
+            1,
+            1,
+            5 / 6,
+            5 / 6,
+        ]
+        assert nx.clustering(G, [1, 4], weight="weight") == {
+            1: 1,
+            4: 0.83333333333333337,
+        }
+
+    def test_triangle_and_edge(self):
+        G = nx.cycle_graph(3)
+        G.add_edge(0, 4, weight=2)
+        assert nx.clustering(G)[0] == 1 / 3
+        np.testing.assert_allclose(nx.clustering(G, weight="weight")[0], 1 / 6)
+        np.testing.assert_allclose(nx.clustering(G, 0, weight="weight"), 1 / 6)
+
+    def test_triangle_and_signed_edge(self):
+        G = nx.cycle_graph(3)
+        G.add_edge(0, 1, weight=-1)
+        G.add_edge(3, 0, weight=0)
+        assert nx.clustering(G)[0] == 1 / 3
+        assert nx.clustering(G, weight="weight")[0] == -1 / 3
+
+
+class TestClustering:
+    @classmethod
+    def setup_class(cls):
+        pytest.importorskip("numpy")
+
+    def test_clustering(self):
+        G = nx.Graph()
+        assert list(nx.clustering(G).values()) == []
+        assert nx.clustering(G) == {}
+
+    def test_path(self):
+        G = nx.path_graph(10)
+        assert list(nx.clustering(G).values()) == [
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+        ]
+        assert nx.clustering(G) == {
+            0: 0,
+            1: 0,
+            2: 0,
+            3: 0,
+            4: 0,
+            5: 0,
+            6: 0,
+            7: 0,
+            8: 0,
+            9: 0,
+        }
+
+    def test_cubical(self):
+        G = nx.cubical_graph()
+        assert list(nx.clustering(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0]
+        assert nx.clustering(G, 1) == 0
+        assert list(nx.clustering(G, [1, 2]).values()) == [0, 0]
+        assert nx.clustering(G, 1) == 0
+        assert nx.clustering(G, [1, 2]) == {1: 0, 2: 0}
+
+    def test_k5(self):
+        G = nx.complete_graph(5)
+        assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1]
+        assert nx.average_clustering(G) == 1
+        G.remove_edge(1, 2)
+        assert list(nx.clustering(G).values()) == [
+            5 / 6,
+            1,
+            1,
+            5 / 6,
+            5 / 6,
+        ]
+        assert nx.clustering(G, [1, 4]) == {1: 1, 4: 0.83333333333333337}
+
+    def test_k5_signed(self):
+        G = nx.complete_graph(5)
+        assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1]
+        assert nx.average_clustering(G) == 1
+        G.remove_edge(1, 2)
+        G.add_edge(0, 1, weight=-1)
+        assert list(nx.clustering(G, weight="weight").values()) == [
+            1 / 6,
+            -1 / 3,
+            1,
+            3 / 6,
+            3 / 6,
+        ]
+
+
+class TestTransitivity:
+    def test_transitivity(self):
+        G = nx.Graph()
+        assert nx.transitivity(G) == 0
+
+    def test_path(self):
+        G = nx.path_graph(10)
+        assert nx.transitivity(G) == 0
+
+    def test_cubical(self):
+        G = nx.cubical_graph()
+        assert nx.transitivity(G) == 0
+
+    def test_k5(self):
+        G = nx.complete_graph(5)
+        assert nx.transitivity(G) == 1
+        G.remove_edge(1, 2)
+        assert nx.transitivity(G) == 0.875
+
+
+class TestSquareClustering:
+    def test_clustering(self):
+        G = nx.Graph()
+        assert list(nx.square_clustering(G).values()) == []
+        assert nx.square_clustering(G) == {}
+
+    def test_path(self):
+        G = nx.path_graph(10)
+        assert list(nx.square_clustering(G).values()) == [
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+        ]
+        assert nx.square_clustering(G) == {
+            0: 0,
+            1: 0,
+            2: 0,
+            3: 0,
+            4: 0,
+            5: 0,
+            6: 0,
+            7: 0,
+            8: 0,
+            9: 0,
+        }
+
+    def test_cubical(self):
+        G = nx.cubical_graph()
+        assert list(nx.square_clustering(G).values()) == [
+            1 / 3,
+            1 / 3,
+            1 / 3,
+            1 / 3,
+            1 / 3,
+            1 / 3,
+            1 / 3,
+            1 / 3,
+        ]
+        assert list(nx.square_clustering(G, [1, 2]).values()) == [1 / 3, 1 / 3]
+        assert nx.square_clustering(G, [1])[1] == 1 / 3
+        assert nx.square_clustering(G, 1) == 1 / 3
+        assert nx.square_clustering(G, [1, 2]) == {1: 1 / 3, 2: 1 / 3}
+
+    def test_k5(self):
+        G = nx.complete_graph(5)
+        assert list(nx.square_clustering(G).values()) == [1, 1, 1, 1, 1]
+
+    def test_bipartite_k5(self):
+        G = nx.complete_bipartite_graph(5, 5)
+        assert list(nx.square_clustering(G).values()) == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
+
+    def test_lind_square_clustering(self):
+        """Test C4 for figure 1 Lind et al (2005)"""
+        G = nx.Graph(
+            [
+                (1, 2),
+                (1, 3),
+                (1, 6),
+                (1, 7),
+                (2, 4),
+                (2, 5),
+                (3, 4),
+                (3, 5),
+                (6, 7),
+                (7, 8),
+                (6, 8),
+                (7, 9),
+                (7, 10),
+                (6, 11),
+                (6, 12),
+                (2, 13),
+                (2, 14),
+                (3, 15),
+                (3, 16),
+            ]
+        )
+        G1 = G.subgraph([1, 2, 3, 4, 5, 13, 14, 15, 16])
+        G2 = G.subgraph([1, 6, 7, 8, 9, 10, 11, 12])
+        assert nx.square_clustering(G, [1])[1] == 3 / 43
+        assert nx.square_clustering(G1, [1])[1] == 2 / 6
+        assert nx.square_clustering(G2, [1])[1] == 1 / 5
+
+    def test_peng_square_clustering(self):
+        """Test eq2 for figure 1 Peng et al (2008)"""
+        G = nx.Graph([(1, 2), (1, 3), (2, 4), (3, 4), (3, 5), (3, 6)])
+        assert nx.square_clustering(G, [1])[1] == 1 / 3
+
+    def test_self_loops_square_clustering(self):
+        G = nx.path_graph(5)
+        assert nx.square_clustering(G) == {0: 0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0}
+        G.add_edges_from([(0, 0), (1, 1), (2, 2)])
+        assert nx.square_clustering(G) == {0: 1, 1: 0.5, 2: 0.2, 3: 0.0, 4: 0}
+
+
+class TestAverageClustering:
+    @classmethod
+    def setup_class(cls):
+        pytest.importorskip("numpy")
+
+    def test_empty(self):
+        G = nx.Graph()
+        with pytest.raises(ZeroDivisionError):
+            nx.average_clustering(G)
+
+    def test_average_clustering(self):
+        G = nx.cycle_graph(3)
+        G.add_edge(2, 3)
+        assert nx.average_clustering(G) == (1 + 1 + 1 / 3) / 4
+        assert nx.average_clustering(G, count_zeros=True) == (1 + 1 + 1 / 3) / 4
+        assert nx.average_clustering(G, count_zeros=False) == (1 + 1 + 1 / 3) / 3
+        assert nx.average_clustering(G, [1, 2, 3]) == (1 + 1 / 3) / 3
+        assert nx.average_clustering(G, [1, 2, 3], count_zeros=True) == (1 + 1 / 3) / 3
+        assert nx.average_clustering(G, [1, 2, 3], count_zeros=False) == (1 + 1 / 3) / 2
+
+    def test_average_clustering_signed(self):
+        G = nx.cycle_graph(3)
+        G.add_edge(2, 3)
+        G.add_edge(0, 1, weight=-1)
+        assert nx.average_clustering(G, weight="weight") == (-1 - 1 - 1 / 3) / 4
+        assert (
+            nx.average_clustering(G, weight="weight", count_zeros=True)
+            == (-1 - 1 - 1 / 3) / 4
+        )
+        assert (
+            nx.average_clustering(G, weight="weight", count_zeros=False)
+            == (-1 - 1 - 1 / 3) / 3
+        )
+
+
+class TestDirectedAverageClustering:
+    @classmethod
+    def setup_class(cls):
+        pytest.importorskip("numpy")
+
+    def test_empty(self):
+        G = nx.DiGraph()
+        with pytest.raises(ZeroDivisionError):
+            nx.average_clustering(G)
+
+    def test_average_clustering(self):
+        G = nx.cycle_graph(3, create_using=nx.DiGraph())
+        G.add_edge(2, 3)
+        assert nx.average_clustering(G) == (1 + 1 + 1 / 3) / 8
+        assert nx.average_clustering(G, count_zeros=True) == (1 + 1 + 1 / 3) / 8
+        assert nx.average_clustering(G, count_zeros=False) == (1 + 1 + 1 / 3) / 6
+        assert nx.average_clustering(G, [1, 2, 3]) == (1 + 1 / 3) / 6
+        assert nx.average_clustering(G, [1, 2, 3], count_zeros=True) == (1 + 1 / 3) / 6
+        assert nx.average_clustering(G, [1, 2, 3], count_zeros=False) == (1 + 1 / 3) / 4
+
+
+class TestGeneralizedDegree:
+    def test_generalized_degree(self):
+        G = nx.Graph()
+        assert nx.generalized_degree(G) == {}
+
+    def test_path(self):
+        G = nx.path_graph(5)
+        assert nx.generalized_degree(G, 0) == {0: 1}
+        assert nx.generalized_degree(G, 1) == {0: 2}
+
+    def test_cubical(self):
+        G = nx.cubical_graph()
+        assert nx.generalized_degree(G, 0) == {0: 3}
+
+    def test_k5(self):
+        G = nx.complete_graph(5)
+        assert nx.generalized_degree(G, 0) == {3: 4}
+        G.remove_edge(0, 1)
+        assert nx.generalized_degree(G, 0) == {2: 3}
+        assert nx.generalized_degree(G, [1, 2]) == {1: {2: 3}, 2: {2: 2, 3: 2}}
+        assert nx.generalized_degree(G) == {
+            0: {2: 3},
+            1: {2: 3},
+            2: {2: 2, 3: 2},
+            3: {2: 2, 3: 2},
+            4: {2: 2, 3: 2},
+        }
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_communicability.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_communicability.py
new file mode 100644
index 00000000..0f447094
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_communicability.py
@@ -0,0 +1,80 @@
+from collections import defaultdict
+
+import pytest
+
+pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+import networkx as nx
+from networkx.algorithms.communicability_alg import communicability, communicability_exp
+
+
+class TestCommunicability:
+    def test_communicability(self):
+        answer = {
+            0: {0: 1.5430806348152435, 1: 1.1752011936438012},
+            1: {0: 1.1752011936438012, 1: 1.5430806348152435},
+        }
+        #        answer={(0, 0): 1.5430806348152435,
+        #                (0, 1): 1.1752011936438012,
+        #                (1, 0): 1.1752011936438012,
+        #                (1, 1): 1.5430806348152435}
+
+        result = communicability(nx.path_graph(2))
+        for k1, val in result.items():
+            for k2 in val:
+                assert answer[k1][k2] == pytest.approx(result[k1][k2], abs=1e-7)
+
+    def test_communicability2(self):
+        answer_orig = {
+            ("1", "1"): 1.6445956054135658,
+            ("1", "Albert"): 0.7430186221096251,
+            ("1", "Aric"): 0.7430186221096251,
+            ("1", "Dan"): 1.6208126320442937,
+            ("1", "Franck"): 0.42639707170035257,
+            ("Albert", "1"): 0.7430186221096251,
+            ("Albert", "Albert"): 2.4368257358712189,
+            ("Albert", "Aric"): 1.4368257358712191,
+            ("Albert", "Dan"): 2.0472097037446453,
+            ("Albert", "Franck"): 1.8340111678944691,
+            ("Aric", "1"): 0.7430186221096251,
+            ("Aric", "Albert"): 1.4368257358712191,
+            ("Aric", "Aric"): 2.4368257358712193,
+            ("Aric", "Dan"): 2.0472097037446457,
+            ("Aric", "Franck"): 1.8340111678944691,
+            ("Dan", "1"): 1.6208126320442937,
+            ("Dan", "Albert"): 2.0472097037446453,
+            ("Dan", "Aric"): 2.0472097037446457,
+            ("Dan", "Dan"): 3.1306328496328168,
+            ("Dan", "Franck"): 1.4860372442192515,
+            ("Franck", "1"): 0.42639707170035257,
+            ("Franck", "Albert"): 1.8340111678944691,
+            ("Franck", "Aric"): 1.8340111678944691,
+            ("Franck", "Dan"): 1.4860372442192515,
+            ("Franck", "Franck"): 2.3876142275231915,
+        }
+
+        answer = defaultdict(dict)
+        for (k1, k2), v in answer_orig.items():
+            answer[k1][k2] = v
+
+        G1 = nx.Graph(
+            [
+                ("Franck", "Aric"),
+                ("Aric", "Dan"),
+                ("Dan", "Albert"),
+                ("Albert", "Franck"),
+                ("Dan", "1"),
+                ("Franck", "Albert"),
+            ]
+        )
+
+        result = communicability(G1)
+        for k1, val in result.items():
+            for k2 in val:
+                assert answer[k1][k2] == pytest.approx(result[k1][k2], abs=1e-7)
+
+        result = communicability_exp(G1)
+        for k1, val in result.items():
+            for k2 in val:
+                assert answer[k1][k2] == pytest.approx(result[k1][k2], abs=1e-7)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_core.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_core.py
new file mode 100644
index 00000000..726e98a7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_core.py
@@ -0,0 +1,266 @@
+import pytest
+
+import networkx as nx
+from networkx.utils import nodes_equal
+
+
+class TestCore:
+    @classmethod
+    def setup_class(cls):
+        # G is the example graph in Figure 1 from Batagelj and
+        # Zaversnik's paper titled An O(m) Algorithm for Cores
+        # Decomposition of Networks, 2003,
+        # http://arXiv.org/abs/cs/0310049.  With nodes labeled as
+        # shown, the 3-core is given by nodes 1-8, the 2-core by nodes
+        # 9-16, the 1-core by nodes 17-20 and node 21 is in the
+        # 0-core.
+        t1 = nx.convert_node_labels_to_integers(nx.tetrahedral_graph(), 1)
+        t2 = nx.convert_node_labels_to_integers(t1, 5)
+        G = nx.union(t1, t2)
+        G.add_edges_from(
+            [
+                (3, 7),
+                (2, 11),
+                (11, 5),
+                (11, 12),
+                (5, 12),
+                (12, 19),
+                (12, 18),
+                (3, 9),
+                (7, 9),
+                (7, 10),
+                (9, 10),
+                (9, 20),
+                (17, 13),
+                (13, 14),
+                (14, 15),
+                (15, 16),
+                (16, 13),
+            ]
+        )
+        G.add_node(21)
+        cls.G = G
+
+        # Create the graph H resulting from the degree sequence
+        # [0, 1, 2, 2, 2, 2, 3] when using the Havel-Hakimi algorithm.
+
+        degseq = [0, 1, 2, 2, 2, 2, 3]
+        H = nx.havel_hakimi_graph(degseq)
+        mapping = {6: 0, 0: 1, 4: 3, 5: 6, 3: 4, 1: 2, 2: 5}
+        cls.H = nx.relabel_nodes(H, mapping)
+
+    def test_trivial(self):
+        """Empty graph"""
+        G = nx.Graph()
+        assert nx.core_number(G) == {}
+
+    def test_core_number(self):
+        core = nx.core_number(self.G)
+        nodes_by_core = [sorted(n for n in core if core[n] == val) for val in range(4)]
+        assert nodes_equal(nodes_by_core[0], [21])
+        assert nodes_equal(nodes_by_core[1], [17, 18, 19, 20])
+        assert nodes_equal(nodes_by_core[2], [9, 10, 11, 12, 13, 14, 15, 16])
+        assert nodes_equal(nodes_by_core[3], [1, 2, 3, 4, 5, 6, 7, 8])
+
+    def test_core_number2(self):
+        core = nx.core_number(self.H)
+        nodes_by_core = [sorted(n for n in core if core[n] == val) for val in range(3)]
+        assert nodes_equal(nodes_by_core[0], [0])
+        assert nodes_equal(nodes_by_core[1], [1, 3])
+        assert nodes_equal(nodes_by_core[2], [2, 4, 5, 6])
+
+    def test_core_number_multigraph(self):
+        G = nx.complete_graph(3)
+        G = nx.MultiGraph(G)
+        G.add_edge(1, 2)
+        with pytest.raises(
+            nx.NetworkXNotImplemented, match="not implemented for multigraph type"
+        ):
+            nx.core_number(G)
+
+    def test_core_number_self_loop(self):
+        G = nx.cycle_graph(3)
+        G.add_edge(0, 0)
+        with pytest.raises(
+            nx.NetworkXNotImplemented, match="Input graph has self loops"
+        ):
+            nx.core_number(G)
+
+    def test_directed_core_number(self):
+        """core number had a bug for directed graphs found in issue #1959"""
+        # small example where too timid edge removal can make cn[2] = 3
+        G = nx.DiGraph()
+        edges = [(1, 2), (2, 1), (2, 3), (2, 4), (3, 4), (4, 3)]
+        G.add_edges_from(edges)
+        assert nx.core_number(G) == {1: 2, 2: 2, 3: 2, 4: 2}
+        # small example where too aggressive edge removal can make cn[2] = 2
+        more_edges = [(1, 5), (3, 5), (4, 5), (3, 6), (4, 6), (5, 6)]
+        G.add_edges_from(more_edges)
+        assert nx.core_number(G) == {1: 3, 2: 3, 3: 3, 4: 3, 5: 3, 6: 3}
+
+    def test_main_core(self):
+        main_core_subgraph = nx.k_core(self.H)
+        assert sorted(main_core_subgraph.nodes()) == [2, 4, 5, 6]
+
+    def test_k_core(self):
+        # k=0
+        k_core_subgraph = nx.k_core(self.H, k=0)
+        assert sorted(k_core_subgraph.nodes()) == sorted(self.H.nodes())
+        # k=1
+        k_core_subgraph = nx.k_core(self.H, k=1)
+        assert sorted(k_core_subgraph.nodes()) == [1, 2, 3, 4, 5, 6]
+        # k = 2
+        k_core_subgraph = nx.k_core(self.H, k=2)
+        assert sorted(k_core_subgraph.nodes()) == [2, 4, 5, 6]
+
+    def test_k_core_multigraph(self):
+        core_number = nx.core_number(self.H)
+        H = nx.MultiGraph(self.H)
+        with pytest.deprecated_call():
+            nx.k_core(H, k=0, core_number=core_number)
+
+    def test_main_crust(self):
+        main_crust_subgraph = nx.k_crust(self.H)
+        assert sorted(main_crust_subgraph.nodes()) == [0, 1, 3]
+
+    def test_k_crust(self):
+        # k = 0
+        k_crust_subgraph = nx.k_crust(self.H, k=2)
+        assert sorted(k_crust_subgraph.nodes()) == sorted(self.H.nodes())
+        # k=1
+        k_crust_subgraph = nx.k_crust(self.H, k=1)
+        assert sorted(k_crust_subgraph.nodes()) == [0, 1, 3]
+        # k=2
+        k_crust_subgraph = nx.k_crust(self.H, k=0)
+        assert sorted(k_crust_subgraph.nodes()) == [0]
+
+    def test_k_crust_multigraph(self):
+        core_number = nx.core_number(self.H)
+        H = nx.MultiGraph(self.H)
+        with pytest.deprecated_call():
+            nx.k_crust(H, k=0, core_number=core_number)
+
+    def test_main_shell(self):
+        main_shell_subgraph = nx.k_shell(self.H)
+        assert sorted(main_shell_subgraph.nodes()) == [2, 4, 5, 6]
+
+    def test_k_shell(self):
+        # k=0
+        k_shell_subgraph = nx.k_shell(self.H, k=2)
+        assert sorted(k_shell_subgraph.nodes()) == [2, 4, 5, 6]
+        # k=1
+        k_shell_subgraph = nx.k_shell(self.H, k=1)
+        assert sorted(k_shell_subgraph.nodes()) == [1, 3]
+        # k=2
+        k_shell_subgraph = nx.k_shell(self.H, k=0)
+        assert sorted(k_shell_subgraph.nodes()) == [0]
+
+    def test_k_shell_multigraph(self):
+        core_number = nx.core_number(self.H)
+        H = nx.MultiGraph(self.H)
+        with pytest.deprecated_call():
+            nx.k_shell(H, k=0, core_number=core_number)
+
+    def test_k_corona(self):
+        # k=0
+        k_corona_subgraph = nx.k_corona(self.H, k=2)
+        assert sorted(k_corona_subgraph.nodes()) == [2, 4, 5, 6]
+        # k=1
+        k_corona_subgraph = nx.k_corona(self.H, k=1)
+        assert sorted(k_corona_subgraph.nodes()) == [1]
+        # k=2
+        k_corona_subgraph = nx.k_corona(self.H, k=0)
+        assert sorted(k_corona_subgraph.nodes()) == [0]
+
+    def test_k_corona_multigraph(self):
+        core_number = nx.core_number(self.H)
+        H = nx.MultiGraph(self.H)
+        with pytest.deprecated_call():
+            nx.k_corona(H, k=0, core_number=core_number)
+
+    def test_k_truss(self):
+        # k=-1
+        k_truss_subgraph = nx.k_truss(self.G, -1)
+        assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21))
+        # k=0
+        k_truss_subgraph = nx.k_truss(self.G, 0)
+        assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21))
+        # k=1
+        k_truss_subgraph = nx.k_truss(self.G, 1)
+        assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21))
+        # k=2
+        k_truss_subgraph = nx.k_truss(self.G, 2)
+        assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21))
+        # k=3
+        k_truss_subgraph = nx.k_truss(self.G, 3)
+        assert sorted(k_truss_subgraph.nodes()) == list(range(1, 13))
+
+        k_truss_subgraph = nx.k_truss(self.G, 4)
+        assert sorted(k_truss_subgraph.nodes()) == list(range(1, 9))
+
+        k_truss_subgraph = nx.k_truss(self.G, 5)
+        assert sorted(k_truss_subgraph.nodes()) == []
+
+    def test_k_truss_digraph(self):
+        G = nx.complete_graph(3)
+        G = nx.DiGraph(G)
+        G.add_edge(2, 1)
+        with pytest.raises(
+            nx.NetworkXNotImplemented, match="not implemented for directed type"
+        ):
+            nx.k_truss(G, k=1)
+
+    def test_k_truss_multigraph(self):
+        G = nx.complete_graph(3)
+        G = nx.MultiGraph(G)
+        G.add_edge(1, 2)
+        with pytest.raises(
+            nx.NetworkXNotImplemented, match="not implemented for multigraph type"
+        ):
+            nx.k_truss(G, k=1)
+
+    def test_k_truss_self_loop(self):
+        G = nx.cycle_graph(3)
+        G.add_edge(0, 0)
+        with pytest.raises(
+            nx.NetworkXNotImplemented, match="Input graph has self loops"
+        ):
+            nx.k_truss(G, k=1)
+
+    def test_onion_layers(self):
+        layers = nx.onion_layers(self.G)
+        nodes_by_layer = [
+            sorted(n for n in layers if layers[n] == val) for val in range(1, 7)
+        ]
+        assert nodes_equal(nodes_by_layer[0], [21])
+        assert nodes_equal(nodes_by_layer[1], [17, 18, 19, 20])
+        assert nodes_equal(nodes_by_layer[2], [10, 12, 13, 14, 15, 16])
+        assert nodes_equal(nodes_by_layer[3], [9, 11])
+        assert nodes_equal(nodes_by_layer[4], [1, 2, 4, 5, 6, 8])
+        assert nodes_equal(nodes_by_layer[5], [3, 7])
+
+    def test_onion_digraph(self):
+        G = nx.complete_graph(3)
+        G = nx.DiGraph(G)
+        G.add_edge(2, 1)
+        with pytest.raises(
+            nx.NetworkXNotImplemented, match="not implemented for directed type"
+        ):
+            nx.onion_layers(G)
+
+    def test_onion_multigraph(self):
+        G = nx.complete_graph(3)
+        G = nx.MultiGraph(G)
+        G.add_edge(1, 2)
+        with pytest.raises(
+            nx.NetworkXNotImplemented, match="not implemented for multigraph type"
+        ):
+            nx.onion_layers(G)
+
+    def test_onion_self_loop(self):
+        G = nx.cycle_graph(3)
+        G.add_edge(0, 0)
+        with pytest.raises(
+            nx.NetworkXNotImplemented, match="Input graph contains self loops"
+        ):
+            nx.onion_layers(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_covering.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_covering.py
new file mode 100644
index 00000000..b2f97a86
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_covering.py
@@ -0,0 +1,85 @@
+import pytest
+
+import networkx as nx
+
+
+class TestMinEdgeCover:
+    """Tests for :func:`networkx.algorithms.min_edge_cover`"""
+
+    def test_empty_graph(self):
+        G = nx.Graph()
+        assert nx.min_edge_cover(G) == set()
+
+    def test_graph_with_loop(self):
+        G = nx.Graph()
+        G.add_edge(0, 0)
+        assert nx.min_edge_cover(G) == {(0, 0)}
+
+    def test_graph_with_isolated_v(self):
+        G = nx.Graph()
+        G.add_node(1)
+        with pytest.raises(
+            nx.NetworkXException,
+            match="Graph has a node with no edge incident on it, so no edge cover exists.",
+        ):
+            nx.min_edge_cover(G)
+
+    def test_graph_single_edge(self):
+        G = nx.Graph([(0, 1)])
+        assert nx.min_edge_cover(G) in ({(0, 1)}, {(1, 0)})
+
+    def test_graph_two_edge_path(self):
+        G = nx.path_graph(3)
+        min_cover = nx.min_edge_cover(G)
+        assert len(min_cover) == 2
+        for u, v in G.edges:
+            assert (u, v) in min_cover or (v, u) in min_cover
+
+    def test_bipartite_explicit(self):
+        G = nx.Graph()
+        G.add_nodes_from([1, 2, 3, 4], bipartite=0)
+        G.add_nodes_from(["a", "b", "c"], bipartite=1)
+        G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
+        # Use bipartite method by prescribing the algorithm
+        min_cover = nx.min_edge_cover(
+            G, nx.algorithms.bipartite.matching.eppstein_matching
+        )
+        assert nx.is_edge_cover(G, min_cover)
+        assert len(min_cover) == 8
+        # Use the default method which is not specialized for bipartite
+        min_cover2 = nx.min_edge_cover(G)
+        assert nx.is_edge_cover(G, min_cover2)
+        assert len(min_cover2) == 4
+
+    def test_complete_graph_even(self):
+        G = nx.complete_graph(10)
+        min_cover = nx.min_edge_cover(G)
+        assert nx.is_edge_cover(G, min_cover)
+        assert len(min_cover) == 5
+
+    def test_complete_graph_odd(self):
+        G = nx.complete_graph(11)
+        min_cover = nx.min_edge_cover(G)
+        assert nx.is_edge_cover(G, min_cover)
+        assert len(min_cover) == 6
+
+
+class TestIsEdgeCover:
+    """Tests for :func:`networkx.algorithms.is_edge_cover`"""
+
+    def test_empty_graph(self):
+        G = nx.Graph()
+        assert nx.is_edge_cover(G, set())
+
+    def test_graph_with_loop(self):
+        G = nx.Graph()
+        G.add_edge(1, 1)
+        assert nx.is_edge_cover(G, {(1, 1)})
+
+    def test_graph_single_edge(self):
+        G = nx.Graph()
+        G.add_edge(0, 1)
+        assert nx.is_edge_cover(G, {(0, 0), (1, 1)})
+        assert nx.is_edge_cover(G, {(0, 1), (1, 0)})
+        assert nx.is_edge_cover(G, {(0, 1)})
+        assert not nx.is_edge_cover(G, {(0, 0)})
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cuts.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cuts.py
new file mode 100644
index 00000000..923efa50
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cuts.py
@@ -0,0 +1,171 @@
+"""Unit tests for the :mod:`networkx.algorithms.cuts` module."""
+
+import networkx as nx
+
+
+class TestCutSize:
+    """Unit tests for the :func:`~networkx.cut_size` function."""
+
+    def test_symmetric(self):
+        """Tests that the cut size is symmetric."""
+        G = nx.barbell_graph(3, 0)
+        S = {0, 1, 4}
+        T = {2, 3, 5}
+        assert nx.cut_size(G, S, T) == 4
+        assert nx.cut_size(G, T, S) == 4
+
+    def test_single_edge(self):
+        """Tests for a cut of a single edge."""
+        G = nx.barbell_graph(3, 0)
+        S = {0, 1, 2}
+        T = {3, 4, 5}
+        assert nx.cut_size(G, S, T) == 1
+        assert nx.cut_size(G, T, S) == 1
+
+    def test_directed(self):
+        """Tests that each directed edge is counted once in the cut."""
+        G = nx.barbell_graph(3, 0).to_directed()
+        S = {0, 1, 2}
+        T = {3, 4, 5}
+        assert nx.cut_size(G, S, T) == 2
+        assert nx.cut_size(G, T, S) == 2
+
+    def test_directed_symmetric(self):
+        """Tests that a cut in a directed graph is symmetric."""
+        G = nx.barbell_graph(3, 0).to_directed()
+        S = {0, 1, 4}
+        T = {2, 3, 5}
+        assert nx.cut_size(G, S, T) == 8
+        assert nx.cut_size(G, T, S) == 8
+
+    def test_multigraph(self):
+        """Tests that parallel edges are each counted for a cut."""
+        G = nx.MultiGraph(["ab", "ab"])
+        assert nx.cut_size(G, {"a"}, {"b"}) == 2
+
+
+class TestVolume:
+    """Unit tests for the :func:`~networkx.volume` function."""
+
+    def test_graph(self):
+        G = nx.cycle_graph(4)
+        assert nx.volume(G, {0, 1}) == 4
+
+    def test_digraph(self):
+        G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 0)])
+        assert nx.volume(G, {0, 1}) == 2
+
+    def test_multigraph(self):
+        edges = list(nx.cycle_graph(4).edges())
+        G = nx.MultiGraph(edges * 2)
+        assert nx.volume(G, {0, 1}) == 8
+
+    def test_multidigraph(self):
+        edges = [(0, 1), (1, 2), (2, 3), (3, 0)]
+        G = nx.MultiDiGraph(edges * 2)
+        assert nx.volume(G, {0, 1}) == 4
+
+    def test_barbell(self):
+        G = nx.barbell_graph(3, 0)
+        assert nx.volume(G, {0, 1, 2}) == 7
+        assert nx.volume(G, {3, 4, 5}) == 7
+
+
+class TestNormalizedCutSize:
+    """Unit tests for the :func:`~networkx.normalized_cut_size` function."""
+
+    def test_graph(self):
+        G = nx.path_graph(4)
+        S = {1, 2}
+        T = set(G) - S
+        size = nx.normalized_cut_size(G, S, T)
+        # The cut looks like this: o-{-o--o-}-o
+        expected = 2 * ((1 / 4) + (1 / 2))
+        assert expected == size
+        # Test with no input T
+        assert expected == nx.normalized_cut_size(G, S)
+
+    def test_directed(self):
+        G = nx.DiGraph([(0, 1), (1, 2), (2, 3)])
+        S = {1, 2}
+        T = set(G) - S
+        size = nx.normalized_cut_size(G, S, T)
+        # The cut looks like this: o-{->o-->o-}->o
+        expected = 2 * ((1 / 2) + (1 / 1))
+        assert expected == size
+        # Test with no input T
+        assert expected == nx.normalized_cut_size(G, S)
+
+
+class TestConductance:
+    """Unit tests for the :func:`~networkx.conductance` function."""
+
+    def test_graph(self):
+        G = nx.barbell_graph(5, 0)
+        # Consider the singleton sets containing the "bridge" nodes.
+        # There is only one cut edge, and each set has volume five.
+        S = {4}
+        T = {5}
+        conductance = nx.conductance(G, S, T)
+        expected = 1 / 5
+        assert expected == conductance
+        # Test with no input T
+        G2 = nx.barbell_graph(3, 0)
+        # There is only one cut edge, and each set has volume seven.
+        S2 = {0, 1, 2}
+        assert nx.conductance(G2, S2) == 1 / 7
+
+
+class TestEdgeExpansion:
+    """Unit tests for the :func:`~networkx.edge_expansion` function."""
+
+    def test_graph(self):
+        G = nx.barbell_graph(5, 0)
+        S = set(range(5))
+        T = set(G) - S
+        expansion = nx.edge_expansion(G, S, T)
+        expected = 1 / 5
+        assert expected == expansion
+        # Test with no input T
+        assert expected == nx.edge_expansion(G, S)
+
+
+class TestNodeExpansion:
+    """Unit tests for the :func:`~networkx.node_expansion` function."""
+
+    def test_graph(self):
+        G = nx.path_graph(8)
+        S = {3, 4, 5}
+        expansion = nx.node_expansion(G, S)
+        # The neighborhood of S has cardinality five, and S has
+        # cardinality three.
+        expected = 5 / 3
+        assert expected == expansion
+
+
+class TestBoundaryExpansion:
+    """Unit tests for the :func:`~networkx.boundary_expansion` function."""
+
+    def test_graph(self):
+        G = nx.complete_graph(10)
+        S = set(range(4))
+        expansion = nx.boundary_expansion(G, S)
+        # The node boundary of S has cardinality six, and S has
+        # cardinality three.
+        expected = 6 / 4
+        assert expected == expansion
+
+
+class TestMixingExpansion:
+    """Unit tests for the :func:`~networkx.mixing_expansion` function."""
+
+    def test_graph(self):
+        G = nx.barbell_graph(5, 0)
+        S = set(range(5))
+        T = set(G) - S
+        expansion = nx.mixing_expansion(G, S, T)
+        # There is one cut edge, and the total number of edges in the
+        # graph is twice the total number of edges in a clique of size
+        # five, plus one more for the bridge.
+        expected = 1 / (2 * (5 * 4 + 1))
+        assert expected == expansion
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cycles.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cycles.py
new file mode 100644
index 00000000..dd21405f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cycles.py
@@ -0,0 +1,974 @@
+from itertools import chain, islice, tee
+from math import inf
+from random import shuffle
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms.traversal.edgedfs import FORWARD, REVERSE
+
+
+def check_independent(basis):
+    if len(basis) == 0:
+        return
+
+    np = pytest.importorskip("numpy")
+    sp = pytest.importorskip("scipy")  # Required by incidence_matrix
+
+    H = nx.Graph()
+    for b in basis:
+        nx.add_cycle(H, b)
+    inc = nx.incidence_matrix(H, oriented=True)
+    rank = np.linalg.matrix_rank(inc.toarray(), tol=None, hermitian=False)
+    assert inc.shape[1] - rank == len(basis)
+
+
+class TestCycles:
+    @classmethod
+    def setup_class(cls):
+        G = nx.Graph()
+        nx.add_cycle(G, [0, 1, 2, 3])
+        nx.add_cycle(G, [0, 3, 4, 5])
+        nx.add_cycle(G, [0, 1, 6, 7, 8])
+        G.add_edge(8, 9)
+        cls.G = G
+
+    def is_cyclic_permutation(self, a, b):
+        n = len(a)
+        if len(b) != n:
+            return False
+        l = a + a
+        return any(l[i : i + n] == b for i in range(n))
+
+    def test_cycle_basis(self):
+        G = self.G
+        cy = nx.cycle_basis(G, 0)
+        sort_cy = sorted(sorted(c) for c in cy)
+        assert sort_cy == [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]]
+        cy = nx.cycle_basis(G, 1)
+        sort_cy = sorted(sorted(c) for c in cy)
+        assert sort_cy == [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]]
+        cy = nx.cycle_basis(G, 9)
+        sort_cy = sorted(sorted(c) for c in cy)
+        assert sort_cy == [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]]
+        # test disconnected graphs
+        nx.add_cycle(G, "ABC")
+        cy = nx.cycle_basis(G, 9)
+        sort_cy = sorted(sorted(c) for c in cy[:-1]) + [sorted(cy[-1])]
+        assert sort_cy == [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5], ["A", "B", "C"]]
+
+    def test_cycle_basis2(self):
+        with pytest.raises(nx.NetworkXNotImplemented):
+            G = nx.DiGraph()
+            cy = nx.cycle_basis(G, 0)
+
+    def test_cycle_basis3(self):
+        with pytest.raises(nx.NetworkXNotImplemented):
+            G = nx.MultiGraph()
+            cy = nx.cycle_basis(G, 0)
+
+    def test_cycle_basis_ordered(self):
+        # see gh-6654 replace sets with (ordered) dicts
+        G = nx.cycle_graph(5)
+        G.update(nx.cycle_graph(range(3, 8)))
+        cbG = nx.cycle_basis(G)
+
+        perm = {1: 0, 0: 1}  # switch 0 and 1
+        H = nx.relabel_nodes(G, perm)
+        cbH = [[perm.get(n, n) for n in cyc] for cyc in nx.cycle_basis(H)]
+        assert cbG == cbH
+
+    def test_cycle_basis_self_loop(self):
+        """Tests the function for graphs with self loops"""
+        G = nx.Graph()
+        nx.add_cycle(G, [0, 1, 2, 3])
+        nx.add_cycle(G, [0, 0, 6, 2])
+        cy = nx.cycle_basis(G)
+        sort_cy = sorted(sorted(c) for c in cy)
+        assert sort_cy == [[0], [0, 1, 2], [0, 2, 3], [0, 2, 6]]
+
+    def test_simple_cycles(self):
+        edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]
+        G = nx.DiGraph(edges)
+        cc = sorted(nx.simple_cycles(G))
+        ca = [[0], [0, 1, 2], [0, 2], [1, 2], [2]]
+        assert len(cc) == len(ca)
+        for c in cc:
+            assert any(self.is_cyclic_permutation(c, rc) for rc in ca)
+
+    def test_simple_cycles_singleton(self):
+        G = nx.Graph([(0, 0)])  # self-loop
+        assert list(nx.simple_cycles(G)) == [[0]]
+
+    def test_unsortable(self):
+        # this test ensures that graphs whose nodes without an intrinsic
+        # ordering do not cause issues
+        G = nx.DiGraph()
+        nx.add_cycle(G, ["a", 1])
+        c = list(nx.simple_cycles(G))
+        assert len(c) == 1
+
+    def test_simple_cycles_small(self):
+        G = nx.DiGraph()
+        nx.add_cycle(G, [1, 2, 3])
+        c = sorted(nx.simple_cycles(G))
+        assert len(c) == 1
+        assert self.is_cyclic_permutation(c[0], [1, 2, 3])
+        nx.add_cycle(G, [10, 20, 30])
+        cc = sorted(nx.simple_cycles(G))
+        assert len(cc) == 2
+        ca = [[1, 2, 3], [10, 20, 30]]
+        for c in cc:
+            assert any(self.is_cyclic_permutation(c, rc) for rc in ca)
+
+    def test_simple_cycles_empty(self):
+        G = nx.DiGraph()
+        assert list(nx.simple_cycles(G)) == []
+
+    def worst_case_graph(self, k):
+        # see figure 1 in Johnson's paper
+        # this graph has exactly 3k simple cycles
+        G = nx.DiGraph()
+        for n in range(2, k + 2):
+            G.add_edge(1, n)
+            G.add_edge(n, k + 2)
+        G.add_edge(2 * k + 1, 1)
+        for n in range(k + 2, 2 * k + 2):
+            G.add_edge(n, 2 * k + 2)
+            G.add_edge(n, n + 1)
+        G.add_edge(2 * k + 3, k + 2)
+        for n in range(2 * k + 3, 3 * k + 3):
+            G.add_edge(2 * k + 2, n)
+            G.add_edge(n, 3 * k + 3)
+        G.add_edge(3 * k + 3, 2 * k + 2)
+        return G
+
+    def test_worst_case_graph(self):
+        # see figure 1 in Johnson's paper
+        for k in range(3, 10):
+            G = self.worst_case_graph(k)
+            l = len(list(nx.simple_cycles(G)))
+            assert l == 3 * k
+
+    def test_recursive_simple_and_not(self):
+        for k in range(2, 10):
+            G = self.worst_case_graph(k)
+            cc = sorted(nx.simple_cycles(G))
+            rcc = sorted(nx.recursive_simple_cycles(G))
+            assert len(cc) == len(rcc)
+            for c in cc:
+                assert any(self.is_cyclic_permutation(c, r) for r in rcc)
+            for rc in rcc:
+                assert any(self.is_cyclic_permutation(rc, c) for c in cc)
+
+    def test_simple_graph_with_reported_bug(self):
+        G = nx.DiGraph()
+        edges = [
+            (0, 2),
+            (0, 3),
+            (1, 0),
+            (1, 3),
+            (2, 1),
+            (2, 4),
+            (3, 2),
+            (3, 4),
+            (4, 0),
+            (4, 1),
+            (4, 5),
+            (5, 0),
+            (5, 1),
+            (5, 2),
+            (5, 3),
+        ]
+        G.add_edges_from(edges)
+        cc = sorted(nx.simple_cycles(G))
+        assert len(cc) == 26
+        rcc = sorted(nx.recursive_simple_cycles(G))
+        assert len(cc) == len(rcc)
+        for c in cc:
+            assert any(self.is_cyclic_permutation(c, rc) for rc in rcc)
+        for rc in rcc:
+            assert any(self.is_cyclic_permutation(rc, c) for c in cc)
+
+
+def pairwise(iterable):
+    a, b = tee(iterable)
+    next(b, None)
+    return zip(a, b)
+
+
+def cycle_edges(c):
+    return pairwise(chain(c, islice(c, 1)))
+
+
+def directed_cycle_edgeset(c):
+    return frozenset(cycle_edges(c))
+
+
+def undirected_cycle_edgeset(c):
+    if len(c) == 1:
+        return frozenset(cycle_edges(c))
+    return frozenset(map(frozenset, cycle_edges(c)))
+
+
+def multigraph_cycle_edgeset(c):
+    if len(c) <= 2:
+        return frozenset(cycle_edges(c))
+    else:
+        return frozenset(map(frozenset, cycle_edges(c)))
+
+
+class TestCycleEnumeration:
+    @staticmethod
+    def K(n):
+        return nx.complete_graph(n)
+
+    @staticmethod
+    def D(n):
+        return nx.complete_graph(n).to_directed()
+
+    @staticmethod
+    def edgeset_function(g):
+        if g.is_directed():
+            return directed_cycle_edgeset
+        elif g.is_multigraph():
+            return multigraph_cycle_edgeset
+        else:
+            return undirected_cycle_edgeset
+
+    def check_cycle(self, g, c, es, cache, source, original_c, length_bound, chordless):
+        if length_bound is not None and len(c) > length_bound:
+            raise RuntimeError(
+                f"computed cycle {original_c} exceeds length bound {length_bound}"
+            )
+        if source == "computed":
+            if es in cache:
+                raise RuntimeError(
+                    f"computed cycle {original_c} has already been found!"
+                )
+            else:
+                cache[es] = tuple(original_c)
+        else:
+            if es in cache:
+                cache.pop(es)
+            else:
+                raise RuntimeError(f"expected cycle {original_c} was not computed")
+
+        if not all(g.has_edge(*e) for e in es):
+            raise RuntimeError(
+                f"{source} claimed cycle {original_c} is not a cycle of g"
+            )
+        if chordless and len(g.subgraph(c).edges) > len(c):
+            raise RuntimeError(f"{source} cycle {original_c} is not chordless")
+
+    def check_cycle_algorithm(
+        self,
+        g,
+        expected_cycles,
+        length_bound=None,
+        chordless=False,
+        algorithm=None,
+    ):
+        if algorithm is None:
+            algorithm = nx.chordless_cycles if chordless else nx.simple_cycles
+
+        # note: we shuffle the labels of g to rule out accidentally-correct
+        # behavior which occurred during the development of chordless cycle
+        # enumeration algorithms
+
+        relabel = list(range(len(g)))
+        shuffle(relabel)
+        label = dict(zip(g, relabel))
+        unlabel = dict(zip(relabel, g))
+        h = nx.relabel_nodes(g, label, copy=True)
+
+        edgeset = self.edgeset_function(h)
+
+        params = {}
+        if length_bound is not None:
+            params["length_bound"] = length_bound
+
+        cycle_cache = {}
+        for c in algorithm(h, **params):
+            original_c = [unlabel[x] for x in c]
+            es = edgeset(c)
+            self.check_cycle(
+                h, c, es, cycle_cache, "computed", original_c, length_bound, chordless
+            )
+
+        if isinstance(expected_cycles, int):
+            if len(cycle_cache) != expected_cycles:
+                raise RuntimeError(
+                    f"expected {expected_cycles} cycles, got {len(cycle_cache)}"
+                )
+            return
+        for original_c in expected_cycles:
+            c = [label[x] for x in original_c]
+            es = edgeset(c)
+            self.check_cycle(
+                h, c, es, cycle_cache, "expected", original_c, length_bound, chordless
+            )
+
+        if len(cycle_cache):
+            for c in cycle_cache.values():
+                raise RuntimeError(
+                    f"computed cycle {c} is valid but not in the expected cycle set!"
+                )
+
+    def check_cycle_enumeration_integer_sequence(
+        self,
+        g_family,
+        cycle_counts,
+        length_bound=None,
+        chordless=False,
+        algorithm=None,
+    ):
+        for g, num_cycles in zip(g_family, cycle_counts):
+            self.check_cycle_algorithm(
+                g,
+                num_cycles,
+                length_bound=length_bound,
+                chordless=chordless,
+                algorithm=algorithm,
+            )
+
+    def test_directed_chordless_cycle_digons(self):
+        g = nx.DiGraph()
+        nx.add_cycle(g, range(5))
+        nx.add_cycle(g, range(5)[::-1])
+        g.add_edge(0, 0)
+        expected_cycles = [(0,), (1, 2), (2, 3), (3, 4)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True, length_bound=2)
+
+        expected_cycles = [c for c in expected_cycles if len(c) < 2]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True, length_bound=1)
+
+    def test_directed_chordless_cycle_undirected(self):
+        g = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 5), (5, 0), (5, 1), (0, 2)])
+        expected_cycles = [(0, 2, 3, 4, 5), (1, 2, 3, 4, 5)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+        g = nx.DiGraph()
+        nx.add_cycle(g, range(5))
+        nx.add_cycle(g, range(4, 9))
+        g.add_edge(7, 3)
+        expected_cycles = [(0, 1, 2, 3, 4), (3, 4, 5, 6, 7), (4, 5, 6, 7, 8)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+        g.add_edge(3, 7)
+        expected_cycles = [(0, 1, 2, 3, 4), (3, 7), (4, 5, 6, 7, 8)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+        expected_cycles = [(3, 7)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True, length_bound=4)
+
+        g.remove_edge(7, 3)
+        expected_cycles = [(0, 1, 2, 3, 4), (4, 5, 6, 7, 8)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+        g = nx.DiGraph((i, j) for i in range(10) for j in range(i))
+        expected_cycles = []
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+    def test_chordless_cycles_directed(self):
+        G = nx.DiGraph()
+        nx.add_cycle(G, range(5))
+        nx.add_cycle(G, range(4, 12))
+        expected = [[*range(5)], [*range(4, 12)]]
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True
+        )
+
+        G.add_edge(7, 3)
+        expected.append([*range(3, 8)])
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True
+        )
+
+        G.add_edge(3, 7)
+        expected[-1] = [7, 3]
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True
+        )
+
+        expected.pop()
+        G.remove_edge(7, 3)
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True
+        )
+
+    def test_directed_chordless_cycle_diclique(self):
+        g_family = [self.D(n) for n in range(10)]
+        expected_cycles = [(n * n - n) // 2 for n in range(10)]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected_cycles, chordless=True
+        )
+
+        expected_cycles = [(n * n - n) // 2 for n in range(10)]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected_cycles, length_bound=2
+        )
+
+    def test_directed_chordless_loop_blockade(self):
+        g = nx.DiGraph((i, i) for i in range(10))
+        nx.add_cycle(g, range(10))
+        expected_cycles = [(i,) for i in range(10)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+        self.check_cycle_algorithm(g, expected_cycles, length_bound=1)
+
+        g = nx.MultiDiGraph(g)
+        g.add_edges_from((i, i) for i in range(0, 10, 2))
+        expected_cycles = [(i,) for i in range(1, 10, 2)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+    def test_simple_cycles_notable_clique_sequences(self):
+        # A000292: Number of labeled graphs on n+3 nodes that are triangles.
+        g_family = [self.K(n) for n in range(2, 12)]
+        expected = [0, 1, 4, 10, 20, 35, 56, 84, 120, 165, 220]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected, length_bound=3
+        )
+
+        def triangles(g, **kwargs):
+            yield from (c for c in nx.simple_cycles(g, **kwargs) if len(c) == 3)
+
+        # directed complete graphs have twice as many triangles thanks to reversal
+        g_family = [self.D(n) for n in range(2, 12)]
+        expected = [2 * e for e in expected]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected, length_bound=3, algorithm=triangles
+        )
+
+        def four_cycles(g, **kwargs):
+            yield from (c for c in nx.simple_cycles(g, **kwargs) if len(c) == 4)
+
+        # A050534: the number of 4-cycles in the complete graph K_{n+1}
+        expected = [0, 0, 0, 3, 15, 45, 105, 210, 378, 630, 990]
+        g_family = [self.K(n) for n in range(1, 12)]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected, length_bound=4, algorithm=four_cycles
+        )
+
+        # directed complete graphs have twice as many 4-cycles thanks to reversal
+        expected = [2 * e for e in expected]
+        g_family = [self.D(n) for n in range(1, 15)]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected, length_bound=4, algorithm=four_cycles
+        )
+
+        # A006231: the number of elementary circuits in a complete directed graph with n nodes
+        expected = [0, 1, 5, 20, 84, 409, 2365]
+        g_family = [self.D(n) for n in range(1, 8)]
+        self.check_cycle_enumeration_integer_sequence(g_family, expected)
+
+        # A002807: Number of cycles in the complete graph on n nodes K_{n}.
+        expected = [0, 0, 0, 1, 7, 37, 197, 1172]
+        g_family = [self.K(n) for n in range(8)]
+        self.check_cycle_enumeration_integer_sequence(g_family, expected)
+
+    def test_directed_chordless_cycle_parallel_multiedges(self):
+        g = nx.MultiGraph()
+
+        nx.add_cycle(g, range(5))
+        expected = [[*range(5)]]
+        self.check_cycle_algorithm(g, expected, chordless=True)
+
+        nx.add_cycle(g, range(5))
+        expected = [*cycle_edges(range(5))]
+        self.check_cycle_algorithm(g, expected, chordless=True)
+
+        nx.add_cycle(g, range(5))
+        expected = []
+        self.check_cycle_algorithm(g, expected, chordless=True)
+
+        g = nx.MultiDiGraph()
+
+        nx.add_cycle(g, range(5))
+        expected = [[*range(5)]]
+        self.check_cycle_algorithm(g, expected, chordless=True)
+
+        nx.add_cycle(g, range(5))
+        self.check_cycle_algorithm(g, [], chordless=True)
+
+        nx.add_cycle(g, range(5))
+        self.check_cycle_algorithm(g, [], chordless=True)
+
+        g = nx.MultiDiGraph()
+
+        nx.add_cycle(g, range(5))
+        nx.add_cycle(g, range(5)[::-1])
+        expected = [*cycle_edges(range(5))]
+        self.check_cycle_algorithm(g, expected, chordless=True)
+
+        nx.add_cycle(g, range(5))
+        self.check_cycle_algorithm(g, [], chordless=True)
+
+    def test_chordless_cycles_graph(self):
+        G = nx.Graph()
+        nx.add_cycle(G, range(5))
+        nx.add_cycle(G, range(4, 12))
+        expected = [[*range(5)], [*range(4, 12)]]
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True
+        )
+
+        G.add_edge(7, 3)
+        expected.append([*range(3, 8)])
+        expected.append([4, 3, 7, 8, 9, 10, 11])
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True
+        )
+
+    def test_chordless_cycles_giant_hamiltonian(self):
+        # ... o - e - o - e - o ... # o = odd, e = even
+        # ... ---/ \-----/ \--- ... # <-- "long" edges
+        #
+        # each long edge belongs to exactly one triangle, and one giant cycle
+        # of length n/2.  The remaining edges each belong to a triangle
+
+        n = 1000
+        assert n % 2 == 0
+        G = nx.Graph()
+        for v in range(n):
+            if not v % 2:
+                G.add_edge(v, (v + 2) % n)
+            G.add_edge(v, (v + 1) % n)
+
+        expected = [[*range(0, n, 2)]] + [
+            [x % n for x in range(i, i + 3)] for i in range(0, n, 2)
+        ]
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 3], length_bound=3, chordless=True
+        )
+
+        # ... o -> e -> o -> e -> o ... # o = odd, e = even
+        # ... <---/ \---<---/ \---< ... # <-- "long" edges
+        #
+        # this time, we orient the short and long edges in opposition
+        # the cycle structure of this graph is the same, but we need to reverse
+        # the long one in our representation.  Also, we need to drop the size
+        # because our partitioning algorithm uses strongly connected components
+        # instead of separating graphs by their strong articulation points
+
+        n = 100
+        assert n % 2 == 0
+        G = nx.DiGraph()
+        for v in range(n):
+            G.add_edge(v, (v + 1) % n)
+            if not v % 2:
+                G.add_edge((v + 2) % n, v)
+
+        expected = [[*range(n - 2, -2, -2)]] + [
+            [x % n for x in range(i, i + 3)] for i in range(0, n, 2)
+        ]
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 3], length_bound=3, chordless=True
+        )
+
+    def test_simple_cycles_acyclic_tournament(self):
+        n = 10
+        G = nx.DiGraph((x, y) for x in range(n) for y in range(x))
+        self.check_cycle_algorithm(G, [])
+        self.check_cycle_algorithm(G, [], chordless=True)
+
+        for k in range(n + 1):
+            self.check_cycle_algorithm(G, [], length_bound=k)
+            self.check_cycle_algorithm(G, [], length_bound=k, chordless=True)
+
+    def test_simple_cycles_graph(self):
+        testG = nx.cycle_graph(8)
+        cyc1 = tuple(range(8))
+        self.check_cycle_algorithm(testG, [cyc1])
+
+        testG.add_edge(4, -1)
+        nx.add_path(testG, [3, -2, -3, -4])
+        self.check_cycle_algorithm(testG, [cyc1])
+
+        testG.update(nx.cycle_graph(range(8, 16)))
+        cyc2 = tuple(range(8, 16))
+        self.check_cycle_algorithm(testG, [cyc1, cyc2])
+
+        testG.update(nx.cycle_graph(range(4, 12)))
+        cyc3 = tuple(range(4, 12))
+        expected = {
+            (0, 1, 2, 3, 4, 5, 6, 7),  # cyc1
+            (8, 9, 10, 11, 12, 13, 14, 15),  # cyc2
+            (4, 5, 6, 7, 8, 9, 10, 11),  # cyc3
+            (4, 5, 6, 7, 8, 15, 14, 13, 12, 11),  # cyc2 + cyc3
+            (0, 1, 2, 3, 4, 11, 10, 9, 8, 7),  # cyc1 + cyc3
+            (0, 1, 2, 3, 4, 11, 12, 13, 14, 15, 8, 7),  # cyc1 + cyc2 + cyc3
+        }
+        self.check_cycle_algorithm(testG, expected)
+        assert len(expected) == (2**3 - 1) - 1  # 1 disjoint comb: cyc1 + cyc2
+
+        # Basis size = 5 (2 loops overlapping gives 5 small loops
+        #        E
+        #       / \         Note: A-F = 10-15
+        #    1-2-3-4-5
+        #    / |   |  \   cyc1=012DAB -- left
+        #   0  D   F  6   cyc2=234E   -- top
+        #   \  |   |  /   cyc3=45678F -- right
+        #    B-A-9-8-7    cyc4=89AC   -- bottom
+        #       \ /       cyc5=234F89AD -- middle
+        #        C
+        #
+        # combinations of 5 basis elements: 2^5 - 1  (one includes no cycles)
+        #
+        # disjoint combs: (11 total) not simple cycles
+        #   Any pair not including cyc5 => choose(4, 2) = 6
+        #   Any triple not including cyc5 => choose(4, 3) = 4
+        #   Any quad not including cyc5 => choose(4, 4) = 1
+        #
+        # we expect 31 - 11 = 20 simple cycles
+        #
+        testG = nx.cycle_graph(12)
+        testG.update(nx.cycle_graph([12, 10, 13, 2, 14, 4, 15, 8]).edges)
+        expected = (2**5 - 1) - 11  # 11 disjoint combinations
+        self.check_cycle_algorithm(testG, expected)
+
+    def test_simple_cycles_bounded(self):
+        # iteratively construct a cluster of nested cycles running in the same direction
+        # there should be one cycle of every length
+        d = nx.DiGraph()
+        expected = []
+        for n in range(10):
+            nx.add_cycle(d, range(n))
+            expected.append(n)
+            for k, e in enumerate(expected):
+                self.check_cycle_algorithm(d, e, length_bound=k)
+
+        # iteratively construct a path of undirected cycles, connected at articulation
+        # points.  there should be one cycle of every length except 2: no digons
+        g = nx.Graph()
+        top = 0
+        expected = []
+        for n in range(10):
+            expected.append(n if n < 2 else n - 1)
+            if n == 2:
+                # no digons in undirected graphs
+                continue
+            nx.add_cycle(g, range(top, top + n))
+            top += n
+            for k, e in enumerate(expected):
+                self.check_cycle_algorithm(g, e, length_bound=k)
+
+    def test_simple_cycles_bound_corner_cases(self):
+        G = nx.cycle_graph(4)
+        DG = nx.cycle_graph(4, create_using=nx.DiGraph)
+        assert list(nx.simple_cycles(G, length_bound=0)) == []
+        assert list(nx.simple_cycles(DG, length_bound=0)) == []
+        assert list(nx.chordless_cycles(G, length_bound=0)) == []
+        assert list(nx.chordless_cycles(DG, length_bound=0)) == []
+
+    def test_simple_cycles_bound_error(self):
+        with pytest.raises(ValueError):
+            G = nx.DiGraph()
+            for c in nx.simple_cycles(G, -1):
+                assert False
+
+        with pytest.raises(ValueError):
+            G = nx.Graph()
+            for c in nx.simple_cycles(G, -1):
+                assert False
+
+        with pytest.raises(ValueError):
+            G = nx.Graph()
+            for c in nx.chordless_cycles(G, -1):
+                assert False
+
+        with pytest.raises(ValueError):
+            G = nx.DiGraph()
+            for c in nx.chordless_cycles(G, -1):
+                assert False
+
+    def test_chordless_cycles_clique(self):
+        g_family = [self.K(n) for n in range(2, 15)]
+        expected = [0, 1, 4, 10, 20, 35, 56, 84, 120, 165, 220, 286, 364]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected, chordless=True
+        )
+
+        # directed cliques have as many digons as undirected graphs have edges
+        expected = [(n * n - n) // 2 for n in range(15)]
+        g_family = [self.D(n) for n in range(15)]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected, chordless=True
+        )
+
+
+# These tests might fail with hash randomization since they depend on
+# edge_dfs. For more information, see the comments in:
+#    networkx/algorithms/traversal/tests/test_edgedfs.py
+
+
class TestFindCycle:
    """Exercise ``nx.find_cycle`` across graph types and orientations."""

    @classmethod
    def setup_class(cls):
        # Shared fixture: parallel 0<->1 edges plus a few dangling tails.
        cls.nodes = [0, 1, 2, 3]
        cls.edges = [(-1, 0), (0, 1), (1, 0), (1, 0), (2, 1), (3, 1)]

    def test_graph_nocycle(self):
        G = nx.Graph(self.edges)
        with pytest.raises(nx.exception.NetworkXNoCycle):
            nx.find_cycle(G, self.nodes)

    def test_graph_cycle(self):
        G = nx.Graph(self.edges)
        G.add_edge(2, 0)
        found = list(nx.find_cycle(G, self.nodes))
        assert found == [(0, 1), (1, 2), (2, 0)]

    def test_graph_orientation_none(self):
        G = nx.Graph(self.edges)
        G.add_edge(2, 0)
        found = list(nx.find_cycle(G, self.nodes, orientation=None))
        assert found == [(0, 1), (1, 2), (2, 0)]

    def test_graph_orientation_original(self):
        G = nx.Graph(self.edges)
        G.add_edge(2, 0)
        found = list(nx.find_cycle(G, self.nodes, orientation="original"))
        assert found == [(0, 1, FORWARD), (1, 2, FORWARD), (2, 0, FORWARD)]

    def test_digraph(self):
        G = nx.DiGraph(self.edges)
        assert list(nx.find_cycle(G, self.nodes)) == [(0, 1), (1, 0)]

    def test_digraph_orientation_none(self):
        G = nx.DiGraph(self.edges)
        found = list(nx.find_cycle(G, self.nodes, orientation=None))
        assert found == [(0, 1), (1, 0)]

    def test_digraph_orientation_original(self):
        G = nx.DiGraph(self.edges)
        found = list(nx.find_cycle(G, self.nodes, orientation="original"))
        assert found == [(0, 1, FORWARD), (1, 0, FORWARD)]

    def test_multigraph(self):
        G = nx.MultiGraph(self.edges)
        found = list(nx.find_cycle(G, self.nodes))
        expected = [(0, 1, 0), (1, 0, 1)]  # the second key may also be 2
        # Hash randomization: any of the parallel edges may be chosen,
        # so only the endpoints of the second edge are compared.
        assert found[0] == expected[0]
        assert found[1][:2] == expected[1][:2]

    def test_multidigraph(self):
        G = nx.MultiDiGraph(self.edges)
        found = list(nx.find_cycle(G, self.nodes))
        expected = [(0, 1, 0), (1, 0, 0)]  # the second key may also be 1
        assert found[0] == expected[0]
        assert found[1][:2] == expected[1][:2]

    def test_digraph_ignore(self):
        G = nx.DiGraph(self.edges)
        found = list(nx.find_cycle(G, self.nodes, orientation="ignore"))
        assert found == [(0, 1, FORWARD), (1, 0, FORWARD)]

    def test_digraph_reverse(self):
        G = nx.DiGraph(self.edges)
        found = list(nx.find_cycle(G, self.nodes, orientation="reverse"))
        assert found == [(1, 0, REVERSE), (0, 1, REVERSE)]

    def test_multidigraph_ignore(self):
        G = nx.MultiDiGraph(self.edges)
        found = list(nx.find_cycle(G, self.nodes, orientation="ignore"))
        expected = [(0, 1, 0, FORWARD), (1, 0, 0, FORWARD)]  # key may vary
        assert found[0] == expected[0]
        assert found[1][:2] == expected[1][:2]
        assert found[1][3] == expected[1][3]

    def test_multidigraph_ignore2(self):
        # A parallel edge must be traversable even while its orientation
        # is being ignored.
        G = nx.MultiDiGraph([(0, 1), (1, 2), (1, 2)])
        found = list(nx.find_cycle(G, [0, 1, 2], orientation="ignore"))
        assert found == [(1, 2, 0, FORWARD), (1, 2, 1, REVERSE)]

    def test_multidigraph_original(self):
        # Regression guard: when node 2 is reached again via node 4, the
        # search must still respect edge orientation (4 must not be treated
        # as reachable from 2), so this graph contains no directed cycle.
        G = nx.MultiDiGraph([(0, 1), (1, 2), (2, 3), (4, 2)])
        with pytest.raises(nx.exception.NetworkXNoCycle):
            nx.find_cycle(G, [0, 1, 2, 3, 4], orientation="original")

    def test_dag(self):
        G = nx.DiGraph([(0, 1), (0, 2), (1, 2)])
        with pytest.raises(nx.exception.NetworkXNoCycle):
            nx.find_cycle(G, orientation="original")
        found = list(nx.find_cycle(G, orientation="ignore"))
        assert found == [(0, 1, FORWARD), (1, 2, FORWARD), (0, 2, REVERSE)]

    def test_prev_explored(self):
        # Regression test for gh-2323.
        G = nx.DiGraph()
        G.add_edges_from([(1, 0), (2, 0), (1, 2), (2, 1)])
        with pytest.raises(nx.NetworkXNoCycle):
            nx.find_cycle(G, source=0)

        assert list(nx.find_cycle(G, 1)) == [(1, 2), (2, 1)]
        assert list(nx.find_cycle(G, 2)) == [(2, 1), (1, 2)]
        assert list(nx.find_cycle(G)) == [(1, 2), (2, 1)]

    def test_no_cycle(self):
        # Regression test for gh-2439.
        G = nx.DiGraph()
        G.add_edges_from([(1, 2), (2, 0), (3, 1), (3, 2)])
        with pytest.raises(nx.NetworkXNoCycle):
            nx.find_cycle(G, source=0)
        with pytest.raises(nx.NetworkXNoCycle):
            nx.find_cycle(G)
+
+
def assert_basis_equal(a, b):
    """Assert that two cycle bases contain the same cycles, ignoring order."""
    left, right = sorted(a), sorted(b)
    assert left == right
+
+
class TestMinimumCycleBasis:
    """Tests for ``nx.minimum_cycle_basis``."""

    @classmethod
    def setup_class(cls):
        # A 4-cycle with one heavy chord: the "diamond" used by several tests.
        diamond = nx.Graph()
        nx.add_cycle(diamond, [1, 2, 3, 4], weight=1)
        diamond.add_edge(2, 4, weight=5)
        cls.diamond_graph = diamond

    def test_unweighted_diamond(self):
        basis = nx.minimum_cycle_basis(self.diamond_graph)
        assert_basis_equal(basis, [[2, 4, 1], [3, 4, 2]])

    def test_weighted_diamond(self):
        basis = nx.minimum_cycle_basis(self.diamond_graph, weight="weight")
        assert_basis_equal(basis, [[2, 4, 1], [4, 3, 2, 1]])

    def test_dimensionality(self):
        # The basis size must equal |E| - |V| + (number of components).
        for seed in range(1234, 1244):
            rg = nx.erdos_renyi_graph(10, 0.3, seed=seed)
            expected_rank = (
                rg.number_of_edges()
                - rg.number_of_nodes()
                + nx.number_connected_components(rg)
            )
            basis = nx.minimum_cycle_basis(rg)
            assert len(basis) == expected_rank
            check_independent(basis)

    def test_complete_graph(self):
        basis = nx.minimum_cycle_basis(nx.complete_graph(5))
        assert all(len(cycle) == 3 for cycle in basis)
        check_independent(basis)

    def test_tree_graph(self):
        # Trees are acyclic, so the basis is empty.
        assert not nx.minimum_cycle_basis(nx.balanced_tree(3, 3))

    def test_petersen_graph(self):
        G = nx.petersen_graph()
        basis = list(nx.minimum_cycle_basis(G))
        expected = [
            [4, 9, 7, 5, 0],
            [1, 2, 3, 4, 0],
            [1, 6, 8, 5, 0],
            [4, 3, 8, 5, 0],
            [1, 6, 9, 4, 0],
            [1, 2, 7, 5, 0],
        ]
        assert len(basis) == len(expected)
        assert all(cycle in expected for cycle in basis)

        # Each returned node order must trace an actual closed walk in G.
        for cycle in basis:
            assert all(
                G.has_edge(u, v) for u, v in nx.utils.pairwise(cycle, cyclic=True)
            )
        # The returned cycles must be linearly independent.
        check_independent(basis)

    def test_gh6787_variable_weighted_complete_graph(self):
        N = 8
        cg = nx.complete_graph(N)
        cg.add_weighted_edges_from([(u, v, 9) for u, v in cg.edges])
        cg.add_weighted_edges_from([(u, v, 1) for u, v in nx.cycle_graph(N).edges])
        check_independent(nx.minimum_cycle_basis(cg, weight="weight"))

    def test_gh6787_and_edge_attribute_names(self):
        G = nx.cycle_graph(4)
        G.add_weighted_edges_from([(0, 2, 10), (1, 3, 10)], weight="dist")

        # Minimizing over the "dist" attribute.
        expected = [[1, 3, 0], [3, 2, 1, 0], [1, 2, 0]]
        basis = list(nx.minimum_cycle_basis(G, weight="dist"))
        assert len(basis) == len(expected)
        assert all(cycle in expected for cycle in basis)

        # Without a weight argument the "dist" attributes play no role.
        expected = [[1, 3, 0], [1, 2, 0], [3, 2, 0]]
        basis = list(nx.minimum_cycle_basis(G))
        assert len(basis) == len(expected)
        assert all(cycle in expected for cycle in basis)
+
+
class TestGirth:
    """Parametrized checks of ``nx.girth`` against graphs with known girth.

    Acyclic inputs (a random tree, an empty graph) are expected to report
    ``inf``; the named small graphs have well-known girths.
    """

    @pytest.mark.parametrize(
        ("G", "expected"),
        (
            (nx.chvatal_graph(), 4),
            (nx.tutte_graph(), 4),
            (nx.petersen_graph(), 5),
            (nx.heawood_graph(), 6),
            (nx.pappus_graph(), 6),
            # No cycles at all: girth is infinite.
            (nx.random_labeled_tree(10, seed=42), inf),
            (nx.empty_graph(10), inf),
            # Two disjoint cycles (5 and 4 nodes); the shorter one sets the girth.
            (nx.Graph(chain(cycle_edges(range(5)), cycle_edges(range(6, 10)))), 4),
            (
                nx.Graph(
                    [
                        (0, 6),
                        (0, 8),
                        (0, 9),
                        (1, 8),
                        (2, 8),
                        (2, 9),
                        (4, 9),
                        (5, 9),
                        (6, 8),
                        (6, 9),
                        (7, 8),
                    ]
                ),
                3,  # e.g. triangle 0-6-9-0
            ),
        ),
    )
    def test_girth(self, G, expected):
        assert nx.girth(G) == expected
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_d_separation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_d_separation.py
new file mode 100644
index 00000000..6f629713
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_d_separation.py
@@ -0,0 +1,348 @@
+from itertools import combinations
+
+import pytest
+
+import networkx as nx
+
+
def path_graph():
    """Return a frozen directed three-node path (0 -> 1 -> 2)."""
    graph = nx.path_graph(3, create_using=nx.DiGraph)
    graph.graph["name"] = "path"
    nx.freeze(graph)
    return graph
+
+
def fork_graph():
    """Return a frozen three-node fork: 0 -> 1 and 0 -> 2."""
    graph = nx.DiGraph(name="fork")
    graph.add_edges_from([(0, 1), (0, 2)])
    nx.freeze(graph)
    return graph
+
+
def collider_graph():
    """Return a frozen collider / v-structure: 0 -> 2 <- 1."""
    graph = nx.DiGraph(name="collider")
    graph.add_edges_from([(0, 2), (1, 2)])
    nx.freeze(graph)
    return graph
+
+
def naive_bayes_graph():
    """Return a simple Naive Bayes PGM graph.

    Node 0 is the single parent of nodes 1 through 4.
    """
    # Fix: docstring read "a simply Naive Bayes" (typo).
    G = nx.DiGraph(name="naive_bayes")
    G.add_edges_from([(0, 1), (0, 2), (0, 3), (0, 4)])
    nx.freeze(G)
    return G
+
+
def asia_graph():
    """Return the frozen 'Asia' PGM graph."""
    edges = [
        ("asia", "tuberculosis"),
        ("smoking", "cancer"),
        ("smoking", "bronchitis"),
        ("tuberculosis", "either"),
        ("cancer", "either"),
        ("either", "xray"),
        ("either", "dyspnea"),
        ("bronchitis", "dyspnea"),
    ]
    graph = nx.DiGraph(name="asia")
    graph.add_edges_from(edges)
    nx.freeze(graph)
    return graph
+
+
@pytest.fixture(name="path_graph")
def path_graph_fixture():
    """Expose :func:`path_graph` as a pytest fixture of the same name."""
    return path_graph()
+
+
@pytest.fixture(name="fork_graph")
def fork_graph_fixture():
    """Expose :func:`fork_graph` as a pytest fixture of the same name."""
    return fork_graph()
+
+
@pytest.fixture(name="collider_graph")
def collider_graph_fixture():
    """Expose :func:`collider_graph` as a pytest fixture of the same name."""
    return collider_graph()
+
+
@pytest.fixture(name="naive_bayes_graph")
def naive_bayes_graph_fixture():
    """Expose :func:`naive_bayes_graph` as a pytest fixture of the same name."""
    return naive_bayes_graph()
+
+
@pytest.fixture(name="asia_graph")
def asia_graph_fixture():
    """Expose :func:`asia_graph` as a pytest fixture of the same name."""
    return asia_graph()
+
+
@pytest.fixture()
def large_collider_graph():
    """Collider A -> B <- C, extended by B -> D -> E, B -> F and G -> E."""
    return nx.DiGraph(
        [("A", "B"), ("C", "B"), ("B", "D"), ("D", "E"), ("B", "F"), ("G", "E")]
    )
+
+
@pytest.fixture()
def chain_and_fork_graph():
    """Chain A -> B -> C with the extra path B -> D -> C."""
    return nx.DiGraph([("A", "B"), ("B", "C"), ("B", "D"), ("D", "C")])
+
+
@pytest.fixture()
def no_separating_set_graph():
    """Single edge A -> B: adjacent nodes cannot be d-separated."""
    return nx.DiGraph([("A", "B")])
+
+
@pytest.fixture()
def large_no_separating_set_graph():
    """Edge A -> B together with the common cause C -> A, C -> B."""
    return nx.DiGraph([("A", "B"), ("C", "A"), ("C", "B")])
+
+
@pytest.fixture()
def collider_trek_graph():
    """Trek A -> B <- C -> D combining a collider with a fork."""
    return nx.DiGraph([("A", "B"), ("C", "B"), ("C", "D")])
+
+
@pytest.mark.parametrize(
    "graph",
    [path_graph(), fork_graph(), collider_graph(), naive_bayes_graph(), asia_graph()],
)
def test_markov_condition(graph):
    """Every node is d-separated from its non-descendants given its parents."""
    for node in graph.nodes:
        parents = set(graph.predecessors(node))
        descendants = nx.descendants(graph, node)
        others = graph.nodes - descendants - {node} - parents
        assert nx.is_d_separator(graph, {node}, others, parents)
+
+
def test_path_graph_dsep(path_graph):
    """In 0 -> 1 -> 2, conditioning on the middle node separates the ends."""
    G = path_graph
    assert nx.is_d_separator(G, {0}, {2}, {1})
    assert not nx.is_d_separator(G, {0}, {2}, set())
+
+
def test_fork_graph_dsep(fork_graph):
    """Children of a common cause are separated only given that cause."""
    G = fork_graph
    assert nx.is_d_separator(G, {1}, {2}, {0})
    assert not nx.is_d_separator(G, {1}, {2}, set())
+
+
def test_collider_graph_dsep(collider_graph):
    """Collider parents are separated marginally, but not given the child."""
    G = collider_graph
    assert nx.is_d_separator(G, {0}, {1}, set())
    assert not nx.is_d_separator(G, {0}, {1}, {2})
+
+
def test_naive_bayes_dsep(naive_bayes_graph):
    """Each pair of leaves is separated by the root, and by nothing less."""
    for u, v in combinations(range(1, 5), 2):
        assert nx.is_d_separator(naive_bayes_graph, {u}, {v}, {0})
        assert not nx.is_d_separator(naive_bayes_graph, {u}, {v}, set())
+
+
def test_asia_graph_dsep(asia_graph):
    """Two known conditional independencies of the Asia network hold."""
    assert nx.is_d_separator(
        asia_graph, {"asia", "smoking"}, {"dyspnea", "xray"}, {"bronchitis", "either"}
    )
    assert nx.is_d_separator(
        asia_graph, {"tuberculosis", "cancer"}, {"bronchitis"}, {"smoking", "xray"}
    )
+
+
def test_undirected_graphs_are_not_supported():
    """d-separation and its related algorithms reject undirected graphs."""
    g = nx.path_graph(3, nx.Graph)
    calls = (
        lambda: nx.is_d_separator(g, {0}, {1}, {2}),
        lambda: nx.is_minimal_d_separator(g, {0}, {1}, {2}),
        lambda: nx.find_minimal_d_separator(g, {0}, {1}),
    )
    for call in calls:
        with pytest.raises(nx.NetworkXNotImplemented):
            call()
+
+
def test_cyclic_graphs_raise_error():
    """All d-separation routines require a DAG and reject directed cycles."""
    g = nx.cycle_graph(3, nx.DiGraph)
    calls = (
        lambda: nx.is_d_separator(g, {0}, {1}, {2}),
        lambda: nx.find_minimal_d_separator(g, {0}, {1}),
        lambda: nx.is_minimal_d_separator(g, {0}, {1}, {2}),
    )
    for call in calls:
        with pytest.raises(nx.NetworkXError):
            call()
+
+
def test_invalid_nodes_raise_error(asia_graph):
    """Nodes absent from the graph raise NodeNotFound, as sets or singletons."""
    # The Asia graph uses string labels, so integers are unknown nodes.
    for args in (({0}, {1}, {2}), (0, 1, 2)):
        with pytest.raises(nx.NodeNotFound):
            nx.is_d_separator(asia_graph, *args)
        with pytest.raises(nx.NodeNotFound):
            nx.is_minimal_d_separator(asia_graph, *args)
        with pytest.raises(nx.NodeNotFound):
            nx.find_minimal_d_separator(asia_graph, *args[:2])
+
+
def test_nondisjoint_node_sets_raise_error(collider_graph):
    """Overlapping x/y/z arguments must be rejected with NetworkXError."""
    # is_d_separator: every way of repeating a node across the three sets.
    for x, y, z in ((0, 1, 0), (0, 2, 0), (0, 0, 1), (1, 0, 0)):
        with pytest.raises(nx.NetworkXError):
            nx.is_d_separator(collider_graph, x, y, z)

    # find_minimal_d_separator: overlap between x, y and `included`.
    with pytest.raises(nx.NetworkXError):
        nx.find_minimal_d_separator(collider_graph, 0, 0)
    with pytest.raises(nx.NetworkXError):
        nx.find_minimal_d_separator(collider_graph, 0, 1, included=0)
    with pytest.raises(nx.NetworkXError):
        nx.find_minimal_d_separator(collider_graph, 1, 0, included=0)

    # is_minimal_d_separator: the same overlaps.
    with pytest.raises(nx.NetworkXError):
        nx.is_minimal_d_separator(collider_graph, 0, 0, set())
    with pytest.raises(nx.NetworkXError):
        nx.is_minimal_d_separator(collider_graph, 0, 1, set(), included=0)
    with pytest.raises(nx.NetworkXError):
        nx.is_minimal_d_separator(collider_graph, 1, 0, set(), included=0)
+
+
def test_is_minimal_d_separator(
    large_collider_graph,
    chain_and_fork_graph,
    no_separating_set_graph,
    large_no_separating_set_graph,
    collider_trek_graph,
):
    """End-to-end checks of find/is_minimal_d_separator on five graphs."""
    # Case 1: A -> B <- C; B -> D -> E; B -> F; G -> E.
    assert not nx.is_d_separator(large_collider_graph, {"B"}, {"E"}, set())

    # The minimal separator of B and E here is {D}.
    sep = nx.find_minimal_d_separator(large_collider_graph, "B", "E")
    # It must genuinely d-separate ...
    assert nx.is_d_separator(large_collider_graph, "B", "E", sep)
    # ... and pass the minimality test, with node or set arguments alike.
    assert nx.is_minimal_d_separator(large_collider_graph, "B", "E", sep)
    assert nx.is_minimal_d_separator(large_collider_graph, {"A", "B"}, {"G", "E"}, sep)
    assert sep == {"D"}

    # Case 2: A -> B -> C together with B -> D -> C.
    assert not nx.is_d_separator(chain_and_fork_graph, {"A"}, {"C"}, set())
    sep = nx.find_minimal_d_separator(chain_and_fork_graph, "A", "C")
    assert nx.is_minimal_d_separator(chain_and_fork_graph, "A", "C", sep)
    assert sep == {"B"}
    # A strict superset of a separator is never minimal.
    not_minimal = sep.union({"D"})
    assert not nx.is_minimal_d_separator(chain_and_fork_graph, "A", "C", not_minimal)

    # Case 3: A -> B. No m-separating set exists between adjacent nodes,
    # so no minimal one can exist either.
    assert not nx.is_d_separator(no_separating_set_graph, {"A"}, {"B"}, set())
    assert nx.find_minimal_d_separator(no_separating_set_graph, "A", "B") is None

    # Case 4: A -> B with A <- C -> B. Still no separating set exists,
    # although the algorithm initially proposes the (invalid) candidate {C}.
    assert not nx.is_d_separator(large_no_separating_set_graph, {"A"}, {"B"}, {"C"})
    assert nx.find_minimal_d_separator(large_no_separating_set_graph, "A", "B") is None

    # `included` / `restricted` arguments on the trek A -> B <- C -> D.
    assert nx.find_minimal_d_separator(collider_trek_graph, "A", "D", included="B") == {
        "B",
        "C",
    }
    assert (
        nx.find_minimal_d_separator(
            collider_trek_graph, "A", "D", included="B", restricted="B"
        )
        is None
    )
+
+
def test_is_minimal_d_separator_checks_dsep():
    """A candidate that fails to d-separate is never reported as minimal."""
    edges = [
        ("A", "B"),
        ("A", "E"),
        ("B", "C"),
        ("B", "D"),
        ("D", "C"),
        ("D", "F"),
        ("E", "D"),
        ("E", "F"),
    ]
    g = nx.DiGraph(edges)

    assert not nx.is_d_separator(g, {"C"}, {"F"}, {"D"})

    # Neither {'D'} nor {} d-separates C from F, so both fail minimality too.
    assert not nx.is_minimal_d_separator(g, "C", "F", {"D"})
    assert not nx.is_minimal_d_separator(g, "C", "F", set())
+
+
def test__reachable(large_collider_graph):
    """Spot-check the private ``_reachable`` helper of the d-separation module."""
    reachable = nx.algorithms.d_separation._reachable
    start = {"F", "D"}
    ancestors = {"A", "B", "C", "D", "F"}
    assert reachable(large_collider_graph, start, ancestors, {"B"}) == {"B", "F", "D"}
    assert reachable(large_collider_graph, start, ancestors, set()) == ancestors
+
+
def test_deprecations():
    """The deprecated aliases still run but emit a deprecation warning."""
    # Fix: the result of minimal_d_separator was bound to an unused local.
    G = nx.DiGraph([(0, 1), (1, 2)])
    with pytest.deprecated_call():
        nx.d_separated(G, 0, 2, {1})
    with pytest.deprecated_call():
        nx.minimal_d_separator(G, 0, 2)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dag.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dag.py
new file mode 100644
index 00000000..e98a6a0e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dag.py
@@ -0,0 +1,835 @@
+from collections import deque
+from itertools import combinations, permutations
+
+import pytest
+
+import networkx as nx
+from networkx.utils import edges_equal, pairwise
+
+
+# Recipe from the itertools documentation.
+def _consume(iterator):
+    "Consume the iterator entirely."
+    # Feed the entire iterator into a zero-length deque.
+    deque(iterator, maxlen=0)
+
+
class TestDagLongestPath:
    """Unit tests for ``nx.dag_longest_path`` on DAGs and multi-DAGs."""

    def test_empty(self):
        assert nx.dag_longest_path(nx.DiGraph()) == []

    def test_unweighted1(self):
        G = nx.DiGraph([(1, 2), (2, 3), (2, 4), (3, 5), (5, 6), (3, 7)])
        assert nx.dag_longest_path(G) == [1, 2, 3, 5, 6]

    def test_unweighted2(self):
        G = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 5), (1, 3), (1, 5), (3, 5)])
        assert nx.dag_longest_path(G) == [1, 2, 3, 4, 5]

    def test_weighted(self):
        G = nx.DiGraph()
        G.add_weighted_edges_from(
            [(1, 2, -5), (2, 3, 1), (3, 4, 1), (4, 5, 0), (3, 5, 4), (1, 6, 2)]
        )
        # The -5 edge keeps node 1 off the heaviest path.
        assert nx.dag_longest_path(G) == [2, 3, 5]

    def test_undirected_not_implemented(self):
        with pytest.raises(nx.NetworkXNotImplemented):
            nx.dag_longest_path(nx.Graph())

    def test_unorderable_nodes(self):
        """The longest path must be computable without ordering nodes.

        Regression test for issue #1989.
        """
        # A diamond-shaped DAG whose nodes are plain (unorderable) objects.
        a, b, c, d = (object() for _ in range(4))
        G = nx.DiGraph()
        G.add_edge(a, b)
        G.add_edge(a, c)
        G.add_edge(c, d)
        G.add_edge(b, d)

        # Raises if the implementation ever needs to compare/order nodes.
        nx.dag_longest_path(G)

    def test_multigraph_unweighted(self):
        G = nx.MultiDiGraph(
            [(1, 2), (2, 3), (2, 3), (3, 4), (4, 5), (1, 3), (1, 5), (3, 5)]
        )
        assert nx.dag_longest_path(G) == [1, 2, 3, 4, 5]

    def test_multigraph_weighted(self):
        G = nx.MultiDiGraph()
        G.add_weighted_edges_from(
            [(1, 2, 2), (2, 3, 2), (1, 3, 1), (1, 3, 5), (1, 3, 2)]
        )
        assert nx.dag_longest_path(G) == [1, 3]

    def test_multigraph_weighted_default_weight(self):
        G = nx.MultiDiGraph([(1, 2), (2, 3)])  # edges without weight data
        G.add_weighted_edges_from([(1, 3, 1), (1, 3, 5), (1, 3, 2)])

        # Unweighted edges count as 1 by default ...
        assert nx.dag_longest_path(G) == [1, 3]
        # ... unless a larger default tips the balance to the longer chain.
        assert nx.dag_longest_path(G, default_weight=3) == [1, 2, 3]
+
+
class TestDagLongestPathLength:
    """Unit tests for ``nx.dag_longest_path_length``."""

    def test_unweighted(self):
        G = nx.DiGraph([(1, 2), (2, 3), (2, 4), (3, 5), (5, 6), (5, 7)])
        assert nx.dag_longest_path_length(G) == 4

        G = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 5), (1, 3), (1, 5), (3, 5)])
        assert nx.dag_longest_path_length(G) == 4

        # Degenerate case: a lone node yields a zero-length path.
        G = nx.DiGraph()
        G.add_node(1)
        assert nx.dag_longest_path_length(G) == 0

    def test_undirected_not_implemented(self):
        with pytest.raises(nx.NetworkXNotImplemented):
            nx.dag_longest_path_length(nx.Graph())

    def test_weighted(self):
        G = nx.DiGraph()
        G.add_weighted_edges_from(
            [(1, 2, -5), (2, 3, 1), (3, 4, 1), (4, 5, 0), (3, 5, 4), (1, 6, 2)]
        )
        assert nx.dag_longest_path_length(G) == 5

    def test_multigraph_unweighted(self):
        G = nx.MultiDiGraph(
            [(1, 2), (2, 3), (2, 3), (3, 4), (4, 5), (1, 3), (1, 5), (3, 5)]
        )
        assert nx.dag_longest_path_length(G) == 4

    def test_multigraph_weighted(self):
        G = nx.MultiDiGraph()
        G.add_weighted_edges_from(
            [(1, 2, 2), (2, 3, 2), (1, 3, 1), (1, 3, 5), (1, 3, 2)]
        )
        assert nx.dag_longest_path_length(G) == 5
+
+
+class TestDAG:
+    @classmethod
+    def setup_class(cls):
+        pass
+
+    def test_topological_sort1(self):
+        DG = nx.DiGraph([(1, 2), (1, 3), (2, 3)])
+
+        for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]:
+            assert tuple(algorithm(DG)) == (1, 2, 3)
+
+        DG.add_edge(3, 2)
+
+        for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]:
+            pytest.raises(nx.NetworkXUnfeasible, _consume, algorithm(DG))
+
+        DG.remove_edge(2, 3)
+
+        for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]:
+            assert tuple(algorithm(DG)) == (1, 3, 2)
+
+        DG.remove_edge(3, 2)
+
+        assert tuple(nx.topological_sort(DG)) in {(1, 2, 3), (1, 3, 2)}
+        assert tuple(nx.lexicographical_topological_sort(DG)) == (1, 2, 3)
+
+    def test_is_directed_acyclic_graph(self):
+        G = nx.generators.complete_graph(2)
+        assert not nx.is_directed_acyclic_graph(G)
+        assert not nx.is_directed_acyclic_graph(G.to_directed())
+        assert not nx.is_directed_acyclic_graph(nx.Graph([(3, 4), (4, 5)]))
+        assert nx.is_directed_acyclic_graph(nx.DiGraph([(3, 4), (4, 5)]))
+
+    def test_topological_sort2(self):
+        DG = nx.DiGraph(
+            {
+                1: [2],
+                2: [3],
+                3: [4],
+                4: [5],
+                5: [1],
+                11: [12],
+                12: [13],
+                13: [14],
+                14: [15],
+            }
+        )
+        pytest.raises(nx.NetworkXUnfeasible, _consume, nx.topological_sort(DG))
+
+        assert not nx.is_directed_acyclic_graph(DG)
+
+        DG.remove_edge(1, 2)
+        _consume(nx.topological_sort(DG))
+        assert nx.is_directed_acyclic_graph(DG)
+
+    def test_topological_sort3(self):
+        DG = nx.DiGraph()
+        DG.add_edges_from([(1, i) for i in range(2, 5)])
+        DG.add_edges_from([(2, i) for i in range(5, 9)])
+        DG.add_edges_from([(6, i) for i in range(9, 12)])
+        DG.add_edges_from([(4, i) for i in range(12, 15)])
+
+        def validate(order):
+            assert isinstance(order, list)
+            assert set(order) == set(DG)
+            for u, v in combinations(order, 2):
+                assert not nx.has_path(DG, v, u)
+
+        validate(list(nx.topological_sort(DG)))
+
+        DG.add_edge(14, 1)
+        pytest.raises(nx.NetworkXUnfeasible, _consume, nx.topological_sort(DG))
+
+    def test_topological_sort4(self):
+        G = nx.Graph()
+        G.add_edge(1, 2)
+        # Only directed graphs can be topologically sorted.
+        pytest.raises(nx.NetworkXError, _consume, nx.topological_sort(G))
+
+    def test_topological_sort5(self):
+        G = nx.DiGraph()
+        G.add_edge(0, 1)
+        assert list(nx.topological_sort(G)) == [0, 1]
+
    def test_topological_sort6(self):
        """Mutating the graph while consuming a topological sort must raise.

        Both sort variants are expected to raise RuntimeError when the graph
        is modified mid-iteration, and NetworkXUnfeasible when the removal
        leaves the generator unable to produce the remaining nodes.
        """
        for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]:

            def runtime_error():
                # Add an edge after the first yielded node.
                DG = nx.DiGraph([(1, 2), (2, 3), (3, 4)])
                first = True
                for x in algorithm(DG):
                    if first:
                        first = False
                        DG.add_edge(5 - x, 5)

            def unfeasible_error():
                # Remove the final node after the first yielded node.
                DG = nx.DiGraph([(1, 2), (2, 3), (3, 4)])
                first = True
                for x in algorithm(DG):
                    if first:
                        first = False
                        DG.remove_node(4)

            def runtime_error2():
                # Remove an intermediate node after the first yielded node.
                DG = nx.DiGraph([(1, 2), (2, 3), (3, 4)])
                first = True
                for x in algorithm(DG):
                    if first:
                        first = False
                        DG.remove_node(2)

            pytest.raises(RuntimeError, runtime_error)
            pytest.raises(RuntimeError, runtime_error2)
            pytest.raises(nx.NetworkXUnfeasible, unfeasible_error)
+
+    def test_all_topological_sorts_1(self):
+        DG = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 5)])
+        assert list(nx.all_topological_sorts(DG)) == [[1, 2, 3, 4, 5]]
+
+    def test_all_topological_sorts_2(self):
+        DG = nx.DiGraph([(1, 3), (2, 1), (2, 4), (4, 3), (4, 5)])
+        assert sorted(nx.all_topological_sorts(DG)) == [
+            [2, 1, 4, 3, 5],
+            [2, 1, 4, 5, 3],
+            [2, 4, 1, 3, 5],
+            [2, 4, 1, 5, 3],
+            [2, 4, 5, 1, 3],
+        ]
+
+    def test_all_topological_sorts_3(self):
+        def unfeasible():
+            DG = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 2), (4, 5)])
+            # convert to list to execute generator
+            list(nx.all_topological_sorts(DG))
+
+        def not_implemented():
+            G = nx.Graph([(1, 2), (2, 3)])
+            # convert to list to execute generator
+            list(nx.all_topological_sorts(G))
+
+        def not_implemented_2():
+            G = nx.MultiGraph([(1, 2), (1, 2), (2, 3)])
+            list(nx.all_topological_sorts(G))
+
+        pytest.raises(nx.NetworkXUnfeasible, unfeasible)
+        pytest.raises(nx.NetworkXNotImplemented, not_implemented)
+        pytest.raises(nx.NetworkXNotImplemented, not_implemented_2)
+
+    def test_all_topological_sorts_4(self):
+        DG = nx.DiGraph()
+        for i in range(7):
+            DG.add_node(i)
+        assert sorted(map(list, permutations(DG.nodes))) == sorted(
+            nx.all_topological_sorts(DG)
+        )
+
+    def test_all_topological_sorts_multigraph_1(self):
+        DG = nx.MultiDiGraph([(1, 2), (1, 2), (2, 3), (3, 4), (3, 5), (3, 5), (3, 5)])
+        assert sorted(nx.all_topological_sorts(DG)) == sorted(
+            [[1, 2, 3, 4, 5], [1, 2, 3, 5, 4]]
+        )
+
+    def test_all_topological_sorts_multigraph_2(self):
+        N = 9
+        edges = []
+        for i in range(1, N):
+            edges.extend([(i, i + 1)] * i)
+        DG = nx.MultiDiGraph(edges)
+        assert list(nx.all_topological_sorts(DG)) == [list(range(1, N + 1))]
+
+    def test_ancestors(self):
+        G = nx.DiGraph()
+        ancestors = nx.algorithms.dag.ancestors
+        G.add_edges_from([(1, 2), (1, 3), (4, 2), (4, 3), (4, 5), (2, 6), (5, 6)])
+        assert ancestors(G, 6) == {1, 2, 4, 5}
+        assert ancestors(G, 3) == {1, 4}
+        assert ancestors(G, 1) == set()
+        pytest.raises(nx.NetworkXError, ancestors, G, 8)
+
+    def test_descendants(self):
+        G = nx.DiGraph()
+        descendants = nx.algorithms.dag.descendants
+        G.add_edges_from([(1, 2), (1, 3), (4, 2), (4, 3), (4, 5), (2, 6), (5, 6)])
+        assert descendants(G, 1) == {2, 3, 6}
+        assert descendants(G, 4) == {2, 3, 5, 6}
+        assert descendants(G, 3) == set()
+        pytest.raises(nx.NetworkXError, descendants, G, 8)
+
+    def test_transitive_closure(self):
+        G = nx.DiGraph([(1, 2), (2, 3), (3, 4)])
+        solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]
+        assert edges_equal(nx.transitive_closure(G).edges(), solution)
+        G = nx.DiGraph([(1, 2), (2, 3), (2, 4)])
+        solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4)]
+        assert edges_equal(nx.transitive_closure(G).edges(), solution)
+        G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
+        solution = [(1, 2), (2, 1), (2, 3), (3, 2), (1, 3), (3, 1)]
+        soln = sorted(solution + [(n, n) for n in G])
+        assert edges_equal(sorted(nx.transitive_closure(G).edges()), soln)
+
+        G = nx.Graph([(1, 2), (2, 3), (3, 4)])
+        solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]
+        assert edges_equal(sorted(nx.transitive_closure(G).edges()), solution)
+
+        G = nx.MultiGraph([(1, 2), (2, 3), (3, 4)])
+        solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]
+        assert edges_equal(sorted(nx.transitive_closure(G).edges()), solution)
+
+        G = nx.MultiDiGraph([(1, 2), (2, 3), (3, 4)])
+        solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]
+        assert edges_equal(sorted(nx.transitive_closure(G).edges()), solution)
+
+        # test if edge data is copied
+        G = nx.DiGraph([(1, 2, {"a": 3}), (2, 3, {"b": 0}), (3, 4)])
+        H = nx.transitive_closure(G)
+        for u, v in G.edges():
+            assert G.get_edge_data(u, v) == H.get_edge_data(u, v)
+
+        k = 10
+        G = nx.DiGraph((i, i + 1, {"f": "b", "weight": i}) for i in range(k))
+        H = nx.transitive_closure(G)
+        for u, v in G.edges():
+            assert G.get_edge_data(u, v) == H.get_edge_data(u, v)
+
+        G = nx.Graph()
+        with pytest.raises(nx.NetworkXError):
+            nx.transitive_closure(G, reflexive="wrong input")
+
+    def test_reflexive_transitive_closure(self):
+        G = nx.DiGraph([(1, 2), (2, 3), (3, 4)])
+        solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]
+        soln = sorted(solution + [(n, n) for n in G])
+        assert edges_equal(nx.transitive_closure(G).edges(), solution)
+        assert edges_equal(nx.transitive_closure(G, False).edges(), solution)
+        assert edges_equal(nx.transitive_closure(G, True).edges(), soln)
+        assert edges_equal(nx.transitive_closure(G, None).edges(), solution)
+
+        G = nx.DiGraph([(1, 2), (2, 3), (2, 4)])
+        solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4)]
+        soln = sorted(solution + [(n, n) for n in G])
+        assert edges_equal(nx.transitive_closure(G).edges(), solution)
+        assert edges_equal(nx.transitive_closure(G, False).edges(), solution)
+        assert edges_equal(nx.transitive_closure(G, True).edges(), soln)
+        assert edges_equal(nx.transitive_closure(G, None).edges(), solution)
+
+        G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
+        solution = sorted([(1, 2), (2, 1), (2, 3), (3, 2), (1, 3), (3, 1)])
+        soln = sorted(solution + [(n, n) for n in G])
+        assert edges_equal(sorted(nx.transitive_closure(G).edges()), soln)
+        assert edges_equal(sorted(nx.transitive_closure(G, False).edges()), soln)
+        assert edges_equal(sorted(nx.transitive_closure(G, None).edges()), solution)
+        assert edges_equal(sorted(nx.transitive_closure(G, True).edges()), soln)
+
+        G = nx.Graph([(1, 2), (2, 3), (3, 4)])
+        solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]
+        soln = sorted(solution + [(n, n) for n in G])
+        assert edges_equal(nx.transitive_closure(G).edges(), solution)
+        assert edges_equal(nx.transitive_closure(G, False).edges(), solution)
+        assert edges_equal(nx.transitive_closure(G, True).edges(), soln)
+        assert edges_equal(nx.transitive_closure(G, None).edges(), solution)
+
+        G = nx.MultiGraph([(1, 2), (2, 3), (3, 4)])
+        solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]
+        soln = sorted(solution + [(n, n) for n in G])
+        assert edges_equal(nx.transitive_closure(G).edges(), solution)
+        assert edges_equal(nx.transitive_closure(G, False).edges(), solution)
+        assert edges_equal(nx.transitive_closure(G, True).edges(), soln)
+        assert edges_equal(nx.transitive_closure(G, None).edges(), solution)
+
+        G = nx.MultiDiGraph([(1, 2), (2, 3), (3, 4)])
+        solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]
+        soln = sorted(solution + [(n, n) for n in G])
+        assert edges_equal(nx.transitive_closure(G).edges(), solution)
+        assert edges_equal(nx.transitive_closure(G, False).edges(), solution)
+        assert edges_equal(nx.transitive_closure(G, True).edges(), soln)
+        assert edges_equal(nx.transitive_closure(G, None).edges(), solution)
+
+    def test_transitive_closure_dag(self):
+        G = nx.DiGraph([(1, 2), (2, 3), (3, 4)])
+        transitive_closure = nx.algorithms.dag.transitive_closure_dag
+        solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]
+        assert edges_equal(transitive_closure(G).edges(), solution)
+        G = nx.DiGraph([(1, 2), (2, 3), (2, 4)])
+        solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4)]
+        assert edges_equal(transitive_closure(G).edges(), solution)
+        G = nx.Graph([(1, 2), (2, 3), (3, 4)])
+        pytest.raises(nx.NetworkXNotImplemented, transitive_closure, G)
+
+        # test if edge data is copied
+        G = nx.DiGraph([(1, 2, {"a": 3}), (2, 3, {"b": 0}), (3, 4)])
+        H = transitive_closure(G)
+        for u, v in G.edges():
+            assert G.get_edge_data(u, v) == H.get_edge_data(u, v)
+
+        k = 10
+        G = nx.DiGraph((i, i + 1, {"foo": "bar", "weight": i}) for i in range(k))
+        H = transitive_closure(G)
+        for u, v in G.edges():
+            assert G.get_edge_data(u, v) == H.get_edge_data(u, v)
+
+    def test_transitive_reduction(self):
+        G = nx.DiGraph([(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)])
+        transitive_reduction = nx.algorithms.dag.transitive_reduction
+        solution = [(1, 2), (2, 3), (3, 4)]
+        assert edges_equal(transitive_reduction(G).edges(), solution)
+        G = nx.DiGraph([(1, 2), (1, 3), (1, 4), (2, 3), (2, 4)])
+        transitive_reduction = nx.algorithms.dag.transitive_reduction
+        solution = [(1, 2), (2, 3), (2, 4)]
+        assert edges_equal(transitive_reduction(G).edges(), solution)
+        G = nx.Graph([(1, 2), (2, 3), (3, 4)])
+        pytest.raises(nx.NetworkXNotImplemented, transitive_reduction, G)
+
+    def _check_antichains(self, solution, result):
+        sol = [frozenset(a) for a in solution]
+        res = [frozenset(a) for a in result]
+        assert set(sol) == set(res)
+
+    def test_antichains(self):
+        antichains = nx.algorithms.dag.antichains
+        G = nx.DiGraph([(1, 2), (2, 3), (3, 4)])
+        solution = [[], [4], [3], [2], [1]]
+        self._check_antichains(list(antichains(G)), solution)
+        G = nx.DiGraph([(1, 2), (2, 3), (2, 4), (3, 5), (5, 6), (5, 7)])
+        solution = [
+            [],
+            [4],
+            [7],
+            [7, 4],
+            [6],
+            [6, 4],
+            [6, 7],
+            [6, 7, 4],
+            [5],
+            [5, 4],
+            [3],
+            [3, 4],
+            [2],
+            [1],
+        ]
+        self._check_antichains(list(antichains(G)), solution)
+        G = nx.DiGraph([(1, 2), (1, 3), (3, 4), (3, 5), (5, 6)])
+        solution = [
+            [],
+            [6],
+            [5],
+            [4],
+            [4, 6],
+            [4, 5],
+            [3],
+            [2],
+            [2, 6],
+            [2, 5],
+            [2, 4],
+            [2, 4, 6],
+            [2, 4, 5],
+            [2, 3],
+            [1],
+        ]
+        self._check_antichains(list(antichains(G)), solution)
+        G = nx.DiGraph({0: [1, 2], 1: [4], 2: [3], 3: [4]})
+        solution = [[], [4], [3], [2], [1], [1, 3], [1, 2], [0]]
+        self._check_antichains(list(antichains(G)), solution)
+        G = nx.DiGraph()
+        self._check_antichains(list(antichains(G)), [[]])
+        G = nx.DiGraph()
+        G.add_nodes_from([0, 1, 2])
+        solution = [[], [0], [1], [1, 0], [2], [2, 0], [2, 1], [2, 1, 0]]
+        self._check_antichains(list(antichains(G)), solution)
+
+        def f(x):
+            return list(antichains(x))
+
+        G = nx.Graph([(1, 2), (2, 3), (3, 4)])
+        pytest.raises(nx.NetworkXNotImplemented, f, G)
+        G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
+        pytest.raises(nx.NetworkXUnfeasible, f, G)
+
+    def test_lexicographical_topological_sort(self):
+        G = nx.DiGraph([(1, 2), (2, 3), (1, 4), (1, 5), (2, 6)])
+        assert list(nx.lexicographical_topological_sort(G)) == [1, 2, 3, 4, 5, 6]
+        assert list(nx.lexicographical_topological_sort(G, key=lambda x: x)) == [
+            1,
+            2,
+            3,
+            4,
+            5,
+            6,
+        ]
+        assert list(nx.lexicographical_topological_sort(G, key=lambda x: -x)) == [
+            1,
+            5,
+            4,
+            2,
+            6,
+            3,
+        ]
+
+    def test_lexicographical_topological_sort2(self):
+        """
+        Check the case of two or more nodes with same key value.
+        Want to avoid exception raised due to comparing nodes directly.
+        See Issue #3493
+        """
+
+        class Test_Node:
+            def __init__(self, n):
+                self.label = n
+                self.priority = 1
+
+            def __repr__(self):
+                return f"Node({self.label})"
+
+        def sorting_key(node):
+            return node.priority
+
+        test_nodes = [Test_Node(n) for n in range(4)]
+        G = nx.DiGraph()
+        edges = [(0, 1), (0, 2), (0, 3), (2, 3)]
+        G.add_edges_from((test_nodes[a], test_nodes[b]) for a, b in edges)
+
+        sorting = list(nx.lexicographical_topological_sort(G, key=sorting_key))
+        assert sorting == test_nodes
+
+
+def test_topological_generations():
+    G = nx.DiGraph(
+        {1: [2, 3], 2: [4, 5], 3: [7], 4: [], 5: [6, 7], 6: [], 7: []}
+    ).reverse()
+    # order within each generation is inconsequential
+    generations = [sorted(gen) for gen in nx.topological_generations(G)]
+    expected = [[4, 6, 7], [3, 5], [2], [1]]
+    assert generations == expected
+
+    MG = nx.MultiDiGraph(G.edges)
+    MG.add_edge(2, 1)
+    generations = [sorted(gen) for gen in nx.topological_generations(MG)]
+    assert generations == expected
+
+
+def test_topological_generations_empty():
+    G = nx.DiGraph()
+    assert list(nx.topological_generations(G)) == []
+
+
+def test_topological_generations_cycle():
+    G = nx.DiGraph([[2, 1], [3, 1], [1, 2]])
+    with pytest.raises(nx.NetworkXUnfeasible):
+        list(nx.topological_generations(G))
+
+
+def test_is_aperiodic_cycle():
+    G = nx.DiGraph()
+    nx.add_cycle(G, [1, 2, 3, 4])
+    assert not nx.is_aperiodic(G)
+
+
+def test_is_aperiodic_cycle2():
+    G = nx.DiGraph()
+    nx.add_cycle(G, [1, 2, 3, 4])
+    nx.add_cycle(G, [3, 4, 5, 6, 7])
+    assert nx.is_aperiodic(G)
+
+
+def test_is_aperiodic_cycle3():
+    G = nx.DiGraph()
+    nx.add_cycle(G, [1, 2, 3, 4])
+    nx.add_cycle(G, [3, 4, 5, 6])
+    assert not nx.is_aperiodic(G)
+
+
+def test_is_aperiodic_cycle4():
+    G = nx.DiGraph()
+    nx.add_cycle(G, [1, 2, 3, 4])
+    G.add_edge(1, 3)
+    assert nx.is_aperiodic(G)
+
+
+def test_is_aperiodic_selfloop():
+    G = nx.DiGraph()
+    nx.add_cycle(G, [1, 2, 3, 4])
+    G.add_edge(1, 1)
+    assert nx.is_aperiodic(G)
+
+
+def test_is_aperiodic_undirected_raises():
+    G = nx.Graph()
+    pytest.raises(nx.NetworkXError, nx.is_aperiodic, G)
+
+
+def test_is_aperiodic_empty_graph():
+    G = nx.empty_graph(create_using=nx.DiGraph)
+    with pytest.raises(nx.NetworkXPointlessConcept, match="Graph has no nodes."):
+        nx.is_aperiodic(G)
+
+
+def test_is_aperiodic_bipartite():
+    # Bipartite graph
+    G = nx.DiGraph(nx.davis_southern_women_graph())
+    assert not nx.is_aperiodic(G)
+
+
+def test_is_aperiodic_rary_tree():
+    G = nx.full_rary_tree(3, 27, create_using=nx.DiGraph())
+    assert not nx.is_aperiodic(G)
+
+
+def test_is_aperiodic_disconnected():
+    # disconnected graph
+    G = nx.DiGraph()
+    nx.add_cycle(G, [1, 2, 3, 4])
+    nx.add_cycle(G, [5, 6, 7, 8])
+    assert not nx.is_aperiodic(G)
+    G.add_edge(1, 3)
+    G.add_edge(5, 7)
+    assert nx.is_aperiodic(G)
+
+
+def test_is_aperiodic_disconnected2():
+    G = nx.DiGraph()
+    nx.add_cycle(G, [0, 1, 2])
+    G.add_edge(3, 3)
+    assert not nx.is_aperiodic(G)
+
+
+class TestDagToBranching:
+    """Unit tests for the :func:`networkx.dag_to_branching` function."""
+
+    def test_single_root(self):
+        """Tests that a directed acyclic graph with a single degree
+        zero node produces an arborescence.
+
+        """
+        G = nx.DiGraph([(0, 1), (0, 2), (1, 3), (2, 3)])
+        B = nx.dag_to_branching(G)
+        expected = nx.DiGraph([(0, 1), (1, 3), (0, 2), (2, 4)])
+        assert nx.is_arborescence(B)
+        assert nx.is_isomorphic(B, expected)
+
+    def test_multiple_roots(self):
+        """Tests that a directed acyclic graph with multiple degree zero
+        nodes creates an arborescence with multiple (weakly) connected
+        components.
+
+        """
+        G = nx.DiGraph([(0, 1), (0, 2), (1, 3), (2, 3), (5, 2)])
+        B = nx.dag_to_branching(G)
+        expected = nx.DiGraph([(0, 1), (1, 3), (0, 2), (2, 4), (5, 6), (6, 7)])
+        assert nx.is_branching(B)
+        assert not nx.is_arborescence(B)
+        assert nx.is_isomorphic(B, expected)
+
+    # # Attributes are not copied by this function. If they were, this would
+    # # be a good test to uncomment.
+    # def test_copy_attributes(self):
+    #     """Tests that node attributes are copied in the branching."""
+    #     G = nx.DiGraph([(0, 1), (0, 2), (1, 3), (2, 3)])
+    #     for v in G:
+    #         G.node[v]['label'] = str(v)
+    #     B = nx.dag_to_branching(G)
+    #     # Determine the root node of the branching.
+    #     root = next(v for v, d in B.in_degree() if d == 0)
+    #     assert_equal(B.node[root]['label'], '0')
+    #     children = B[root]
+    #     # Get the left and right children, nodes 1 and 2, respectively.
+    #     left, right = sorted(children, key=lambda v: B.node[v]['label'])
+    #     assert_equal(B.node[left]['label'], '1')
+    #     assert_equal(B.node[right]['label'], '2')
+    #     # Get the left grandchild.
+    #     children = B[left]
+    #     assert_equal(len(children), 1)
+    #     left_grandchild = arbitrary_element(children)
+    #     assert_equal(B.node[left_grandchild]['label'], '3')
+    #     # Get the right grandchild.
+    #     children = B[right]
+    #     assert_equal(len(children), 1)
+    #     right_grandchild = arbitrary_element(children)
+    #     assert_equal(B.node[right_grandchild]['label'], '3')
+
+    def test_already_arborescence(self):
+        """Tests that a directed acyclic graph that is already an
+        arborescence produces an isomorphic arborescence as output.
+
+        """
+        A = nx.balanced_tree(2, 2, create_using=nx.DiGraph())
+        B = nx.dag_to_branching(A)
+        assert nx.is_isomorphic(A, B)
+
+    def test_already_branching(self):
+        """Tests that a directed acyclic graph that is already a
+        branching produces an isomorphic branching as output.
+
+        """
+        T1 = nx.balanced_tree(2, 2, create_using=nx.DiGraph())
+        T2 = nx.balanced_tree(2, 2, create_using=nx.DiGraph())
+        G = nx.disjoint_union(T1, T2)
+        B = nx.dag_to_branching(G)
+        assert nx.is_isomorphic(G, B)
+
+    def test_not_acyclic(self):
+        """Tests that a non-acyclic graph causes an exception."""
+        with pytest.raises(nx.HasACycle):
+            G = nx.DiGraph(pairwise("abc", cyclic=True))
+            nx.dag_to_branching(G)
+
+    def test_undirected(self):
+        with pytest.raises(nx.NetworkXNotImplemented):
+            nx.dag_to_branching(nx.Graph())
+
+    def test_multigraph(self):
+        with pytest.raises(nx.NetworkXNotImplemented):
+            nx.dag_to_branching(nx.MultiGraph())
+
+    def test_multidigraph(self):
+        with pytest.raises(nx.NetworkXNotImplemented):
+            nx.dag_to_branching(nx.MultiDiGraph())
+
+
+def test_ancestors_descendants_undirected():
+    """Regression test to ensure ancestors and descendants work as expected on
+    undirected graphs."""
+    G = nx.path_graph(5)
+    nx.ancestors(G, 2) == nx.descendants(G, 2) == {0, 1, 3, 4}
+
+
+def test_compute_v_structures_raise():
+    G = nx.Graph()
+    with pytest.raises(nx.NetworkXNotImplemented, match="for undirected type"):
+        nx.compute_v_structures(G)
+
+
+def test_compute_v_structures():
+    edges = [(0, 1), (0, 2), (3, 2)]
+    G = nx.DiGraph(edges)
+
+    v_structs = set(nx.compute_v_structures(G))
+    assert len(v_structs) == 1
+    assert (0, 2, 3) in v_structs
+
+    edges = [("A", "B"), ("C", "B"), ("B", "D"), ("D", "E"), ("G", "E")]
+    G = nx.DiGraph(edges)
+    v_structs = set(nx.compute_v_structures(G))
+    assert len(v_structs) == 2
+
+
+def test_compute_v_structures_deprecated():
+    G = nx.DiGraph()
+    with pytest.deprecated_call():
+        nx.compute_v_structures(G)
+
+
+def test_v_structures_raise():
+    G = nx.Graph()
+    with pytest.raises(nx.NetworkXNotImplemented, match="for undirected type"):
+        nx.dag.v_structures(G)
+
+
+@pytest.mark.parametrize(
+    ("edgelist", "expected"),
+    (
+        (
+            [(0, 1), (0, 2), (3, 2)],
+            {(0, 2, 3)},
+        ),
+        (
+            [("A", "B"), ("C", "B"), ("D", "G"), ("D", "E"), ("G", "E")],
+            {("A", "B", "C")},
+        ),
+        ([(0, 1), (2, 1), (0, 2)], set()),  # adjacent parents case: see gh-7385
+    ),
+)
+def test_v_structures(edgelist, expected):
+    G = nx.DiGraph(edgelist)
+    v_structs = set(nx.dag.v_structures(G))
+    assert v_structs == expected
+
+
+def test_colliders_raise():
+    G = nx.Graph()
+    with pytest.raises(nx.NetworkXNotImplemented, match="for undirected type"):
+        nx.dag.colliders(G)
+
+
+@pytest.mark.parametrize(
+    ("edgelist", "expected"),
+    (
+        (
+            [(0, 1), (0, 2), (3, 2)],
+            {(0, 2, 3)},
+        ),
+        (
+            [("A", "B"), ("C", "B"), ("D", "G"), ("D", "E"), ("G", "E")],
+            {("A", "B", "C"), ("D", "E", "G")},
+        ),
+    ),
+)
+def test_colliders(edgelist, expected):
+    G = nx.DiGraph(edgelist)
+    colliders = set(nx.dag.colliders(G))
+    assert colliders == expected
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_measures.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_measures.py
new file mode 100644
index 00000000..0b3840fd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_measures.py
@@ -0,0 +1,774 @@
+import math
+from random import Random
+
+import pytest
+
+import networkx as nx
+from networkx import convert_node_labels_to_integers as cnlti
+from networkx.algorithms.distance_measures import _extrema_bounding
+
+
+def test__extrema_bounding_invalid_compute_kwarg():
+    G = nx.path_graph(3)
+    with pytest.raises(ValueError, match="compute must be one of"):
+        _extrema_bounding(G, compute="spam")
+
+
+class TestDistance:
+    def setup_method(self):
+        G = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted")
+        self.G = G
+
+    def test_eccentricity(self):
+        assert nx.eccentricity(self.G, 1) == 6
+        e = nx.eccentricity(self.G)
+        assert e[1] == 6
+
+        sp = dict(nx.shortest_path_length(self.G))
+        e = nx.eccentricity(self.G, sp=sp)
+        assert e[1] == 6
+
+        e = nx.eccentricity(self.G, v=1)
+        assert e == 6
+
+        # This behavior changed in version 1.8 (ticket #739)
+        e = nx.eccentricity(self.G, v=[1, 1])
+        assert e[1] == 6
+        e = nx.eccentricity(self.G, v=[1, 2])
+        assert e[1] == 6
+
+        # test against graph with one node
+        G = nx.path_graph(1)
+        e = nx.eccentricity(G)
+        assert e[0] == 0
+        e = nx.eccentricity(G, v=0)
+        assert e == 0
+        pytest.raises(nx.NetworkXError, nx.eccentricity, G, 1)
+
+        # test against empty graph
+        G = nx.empty_graph()
+        e = nx.eccentricity(G)
+        assert e == {}
+
+    def test_diameter(self):
+        assert nx.diameter(self.G) == 6
+
+    def test_harmonic_diameter(self):
+        assert abs(nx.harmonic_diameter(self.G) - 2.0477815699658715) < 1e-12
+
+    def test_harmonic_diameter_empty(self):
+        assert math.isnan(nx.harmonic_diameter(nx.empty_graph()))
+
+    def test_harmonic_diameter_single_node(self):
+        assert math.isnan(nx.harmonic_diameter(nx.empty_graph(1)))
+
+    def test_harmonic_diameter_discrete(self):
+        assert math.isinf(nx.harmonic_diameter(nx.empty_graph(3)))
+
+    def test_harmonic_diameter_not_strongly_connected(self):
+        DG = nx.DiGraph()
+        DG.add_edge(0, 1)
+        assert nx.harmonic_diameter(DG) == 2
+
+    def test_radius(self):
+        assert nx.radius(self.G) == 4
+
+    def test_periphery(self):
+        assert set(nx.periphery(self.G)) == {1, 4, 13, 16}
+
+    def test_center(self):
+        assert set(nx.center(self.G)) == {6, 7, 10, 11}
+
+    def test_bound_diameter(self):
+        assert nx.diameter(self.G, usebounds=True) == 6
+
+    def test_bound_radius(self):
+        assert nx.radius(self.G, usebounds=True) == 4
+
+    def test_bound_periphery(self):
+        result = {1, 4, 13, 16}
+        assert set(nx.periphery(self.G, usebounds=True)) == result
+
+    def test_bound_center(self):
+        result = {6, 7, 10, 11}
+        assert set(nx.center(self.G, usebounds=True)) == result
+
+    def test_radius_exception(self):
+        G = nx.Graph()
+        G.add_edge(1, 2)
+        G.add_edge(3, 4)
+        pytest.raises(nx.NetworkXError, nx.diameter, G)
+
+    def test_eccentricity_infinite(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.Graph([(1, 2), (3, 4)])
+            e = nx.eccentricity(G)
+
+    def test_eccentricity_undirected_not_connected(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.Graph([(1, 2), (3, 4)])
+            e = nx.eccentricity(G, sp=1)
+
+    def test_eccentricity_directed_weakly_connected(self):
+        with pytest.raises(nx.NetworkXError):
+            DG = nx.DiGraph([(1, 2), (1, 3)])
+            nx.eccentricity(DG)
+
+
+class TestWeightedDistance:
+    def setup_method(self):
+        G = nx.Graph()
+        G.add_edge(0, 1, weight=0.6, cost=0.6, high_cost=6)
+        G.add_edge(0, 2, weight=0.2, cost=0.2, high_cost=2)
+        G.add_edge(2, 3, weight=0.1, cost=0.1, high_cost=1)
+        G.add_edge(2, 4, weight=0.7, cost=0.7, high_cost=7)
+        G.add_edge(2, 5, weight=0.9, cost=0.9, high_cost=9)
+        G.add_edge(1, 5, weight=0.3, cost=0.3, high_cost=3)
+        self.G = G
+        self.weight_fn = lambda v, u, e: 2
+
+    def test_eccentricity_weight_None(self):
+        assert nx.eccentricity(self.G, 1, weight=None) == 3
+        e = nx.eccentricity(self.G, weight=None)
+        assert e[1] == 3
+
+        e = nx.eccentricity(self.G, v=1, weight=None)
+        assert e == 3
+
+        # This behavior changed in version 1.8 (ticket #739)
+        e = nx.eccentricity(self.G, v=[1, 1], weight=None)
+        assert e[1] == 3
+        e = nx.eccentricity(self.G, v=[1, 2], weight=None)
+        assert e[1] == 3
+
+    def test_eccentricity_weight_attr(self):
+        assert nx.eccentricity(self.G, 1, weight="weight") == 1.5
+        e = nx.eccentricity(self.G, weight="weight")
+        assert (
+            e
+            == nx.eccentricity(self.G, weight="cost")
+            != nx.eccentricity(self.G, weight="high_cost")
+        )
+        assert e[1] == 1.5
+
+        e = nx.eccentricity(self.G, v=1, weight="weight")
+        assert e == 1.5
+
+        # This behavior changed in version 1.8 (ticket #739)
+        e = nx.eccentricity(self.G, v=[1, 1], weight="weight")
+        assert e[1] == 1.5
+        e = nx.eccentricity(self.G, v=[1, 2], weight="weight")
+        assert e[1] == 1.5
+
+    def test_eccentricity_weight_fn(self):
+        assert nx.eccentricity(self.G, 1, weight=self.weight_fn) == 6
+        e = nx.eccentricity(self.G, weight=self.weight_fn)
+        assert e[1] == 6
+
+        e = nx.eccentricity(self.G, v=1, weight=self.weight_fn)
+        assert e == 6
+
+        # This behavior changed in version 1.8 (ticket #739)
+        e = nx.eccentricity(self.G, v=[1, 1], weight=self.weight_fn)
+        assert e[1] == 6
+        e = nx.eccentricity(self.G, v=[1, 2], weight=self.weight_fn)
+        assert e[1] == 6
+
+    def test_diameter_weight_None(self):
+        assert nx.diameter(self.G, weight=None) == 3
+
+    def test_diameter_weight_attr(self):
+        assert (
+            nx.diameter(self.G, weight="weight")
+            == nx.diameter(self.G, weight="cost")
+            == 1.6
+            != nx.diameter(self.G, weight="high_cost")
+        )
+
+    def test_diameter_weight_fn(self):
+        assert nx.diameter(self.G, weight=self.weight_fn) == 6
+
+    def test_radius_weight_None(self):
+        assert pytest.approx(nx.radius(self.G, weight=None)) == 2
+
+    def test_radius_weight_attr(self):
+        assert (
+            pytest.approx(nx.radius(self.G, weight="weight"))
+            == pytest.approx(nx.radius(self.G, weight="cost"))
+            == 0.9
+            != nx.radius(self.G, weight="high_cost")
+        )
+
+    def test_radius_weight_fn(self):
+        assert nx.radius(self.G, weight=self.weight_fn) == 4
+
+    def test_periphery_weight_None(self):
+        for v in set(nx.periphery(self.G, weight=None)):
+            assert nx.eccentricity(self.G, v, weight=None) == nx.diameter(
+                self.G, weight=None
+            )
+
+    def test_periphery_weight_attr(self):
+        periphery = set(nx.periphery(self.G, weight="weight"))
+        assert (
+            periphery
+            == set(nx.periphery(self.G, weight="cost"))
+            == set(nx.periphery(self.G, weight="high_cost"))
+        )
+        for v in periphery:
+            assert (
+                nx.eccentricity(self.G, v, weight="high_cost")
+                != nx.eccentricity(self.G, v, weight="weight")
+                == nx.eccentricity(self.G, v, weight="cost")
+                == nx.diameter(self.G, weight="weight")
+                == nx.diameter(self.G, weight="cost")
+                != nx.diameter(self.G, weight="high_cost")
+            )
+            assert nx.eccentricity(self.G, v, weight="high_cost") == nx.diameter(
+                self.G, weight="high_cost"
+            )
+
+    def test_periphery_weight_fn(self):
+        for v in set(nx.periphery(self.G, weight=self.weight_fn)):
+            assert nx.eccentricity(self.G, v, weight=self.weight_fn) == nx.diameter(
+                self.G, weight=self.weight_fn
+            )
+
+    def test_center_weight_None(self):
+        for v in set(nx.center(self.G, weight=None)):
+            assert pytest.approx(nx.eccentricity(self.G, v, weight=None)) == nx.radius(
+                self.G, weight=None
+            )
+
+    def test_center_weight_attr(self):
+        center = set(nx.center(self.G, weight="weight"))
+        assert (
+            center
+            == set(nx.center(self.G, weight="cost"))
+            != set(nx.center(self.G, weight="high_cost"))
+        )
+        for v in center:
+            assert (
+                nx.eccentricity(self.G, v, weight="high_cost")
+                != pytest.approx(nx.eccentricity(self.G, v, weight="weight"))
+                == pytest.approx(nx.eccentricity(self.G, v, weight="cost"))
+                == nx.radius(self.G, weight="weight")
+                == nx.radius(self.G, weight="cost")
+                != nx.radius(self.G, weight="high_cost")
+            )
+            assert nx.eccentricity(self.G, v, weight="high_cost") == nx.radius(
+                self.G, weight="high_cost"
+            )
+
+    def test_center_weight_fn(self):
+        for v in set(nx.center(self.G, weight=self.weight_fn)):
+            assert nx.eccentricity(self.G, v, weight=self.weight_fn) == nx.radius(
+                self.G, weight=self.weight_fn
+            )
+
+    def test_bound_diameter_weight_None(self):
+        assert nx.diameter(self.G, usebounds=True, weight=None) == 3
+
+    def test_bound_diameter_weight_attr(self):
+        assert (
+            nx.diameter(self.G, usebounds=True, weight="high_cost")
+            != nx.diameter(self.G, usebounds=True, weight="weight")
+            == nx.diameter(self.G, usebounds=True, weight="cost")
+            == 1.6
+            != nx.diameter(self.G, usebounds=True, weight="high_cost")
+        )
+        assert nx.diameter(self.G, usebounds=True, weight="high_cost") == nx.diameter(
+            self.G, usebounds=True, weight="high_cost"
+        )
+
+    def test_bound_diameter_weight_fn(self):
+        assert nx.diameter(self.G, usebounds=True, weight=self.weight_fn) == 6
+
+    def test_bound_radius_weight_None(self):
+        assert pytest.approx(nx.radius(self.G, usebounds=True, weight=None)) == 2
+
+    def test_bound_radius_weight_attr(self):
+        assert (
+            nx.radius(self.G, usebounds=True, weight="high_cost")
+            != pytest.approx(nx.radius(self.G, usebounds=True, weight="weight"))
+            == pytest.approx(nx.radius(self.G, usebounds=True, weight="cost"))
+            == 0.9
+            != nx.radius(self.G, usebounds=True, weight="high_cost")
+        )
+        assert nx.radius(self.G, usebounds=True, weight="high_cost") == nx.radius(
+            self.G, usebounds=True, weight="high_cost"
+        )
+
+    def test_bound_radius_weight_fn(self):
+        assert nx.radius(self.G, usebounds=True, weight=self.weight_fn) == 4
+
+    def test_bound_periphery_weight_None(self):
+        result = {1, 3, 4}
+        assert set(nx.periphery(self.G, usebounds=True, weight=None)) == result
+
+    def test_bound_periphery_weight_attr(self):
+        result = {4, 5}
+        assert (
+            set(nx.periphery(self.G, usebounds=True, weight="weight"))
+            == set(nx.periphery(self.G, usebounds=True, weight="cost"))
+            == result
+        )
+
+    def test_bound_periphery_weight_fn(self):
+        result = {1, 3, 4}
+        assert (
+            set(nx.periphery(self.G, usebounds=True, weight=self.weight_fn)) == result
+        )
+
+    def test_bound_center_weight_None(self):
+        result = {0, 2, 5}
+        assert set(nx.center(self.G, usebounds=True, weight=None)) == result
+
+    def test_bound_center_weight_attr(self):
+        result = {0}
+        assert (
+            set(nx.center(self.G, usebounds=True, weight="weight"))
+            == set(nx.center(self.G, usebounds=True, weight="cost"))
+            == result
+        )
+
+    def test_bound_center_weight_fn(self):
+        result = {0, 2, 5}
+        assert set(nx.center(self.G, usebounds=True, weight=self.weight_fn)) == result
+
+
+class TestResistanceDistance:
+    @classmethod
+    def setup_class(cls):
+        global np
+        np = pytest.importorskip("numpy")
+        sp = pytest.importorskip("scipy")
+
+    def setup_method(self):
+        G = nx.Graph()
+        G.add_edge(1, 2, weight=2)
+        G.add_edge(2, 3, weight=4)
+        G.add_edge(3, 4, weight=1)
+        G.add_edge(1, 4, weight=3)
+        self.G = G
+
+    def test_resistance_distance_directed_graph(self):
+        G = nx.DiGraph()
+        with pytest.raises(nx.NetworkXNotImplemented):
+            nx.resistance_distance(G)
+
+    def test_resistance_distance_empty(self):
+        G = nx.Graph()
+        with pytest.raises(nx.NetworkXError):
+            nx.resistance_distance(G)
+
+    def test_resistance_distance_not_connected(self):
+        with pytest.raises(nx.NetworkXError):
+            self.G.add_node(5)
+            nx.resistance_distance(self.G, 1, 5)
+
+    def test_resistance_distance_nodeA_not_in_graph(self):
+        with pytest.raises(nx.NetworkXError):
+            nx.resistance_distance(self.G, 9, 1)
+
+    def test_resistance_distance_nodeB_not_in_graph(self):
+        with pytest.raises(nx.NetworkXError):
+            nx.resistance_distance(self.G, 1, 9)
+
+    def test_resistance_distance(self):
+        rd = nx.resistance_distance(self.G, 1, 3, "weight", True)
+        test_data = 1 / (1 / (2 + 4) + 1 / (1 + 3))
+        assert round(rd, 5) == round(test_data, 5)
+
+    def test_resistance_distance_noinv(self):
+        rd = nx.resistance_distance(self.G, 1, 3, "weight", False)
+        test_data = 1 / (1 / (1 / 2 + 1 / 4) + 1 / (1 / 1 + 1 / 3))
+        assert round(rd, 5) == round(test_data, 5)
+
+    def test_resistance_distance_no_weight(self):
+        rd = nx.resistance_distance(self.G, 1, 3)
+        assert round(rd, 5) == 1
+
+    def test_resistance_distance_neg_weight(self):
+        self.G[2][3]["weight"] = -4
+        rd = nx.resistance_distance(self.G, 1, 3, "weight", True)
+        test_data = 1 / (1 / (2 + -4) + 1 / (1 + 3))
+        assert round(rd, 5) == round(test_data, 5)
+
+    def test_multigraph(self):
+        G = nx.MultiGraph()
+        G.add_edge(1, 2, weight=2)
+        G.add_edge(2, 3, weight=4)
+        G.add_edge(3, 4, weight=1)
+        G.add_edge(1, 4, weight=3)
+        rd = nx.resistance_distance(G, 1, 3, "weight", True)
+        assert np.isclose(rd, 1 / (1 / (2 + 4) + 1 / (1 + 3)))
+
+    def test_resistance_distance_div0(self):
+        with pytest.raises(ZeroDivisionError):
+            self.G[1][2]["weight"] = 0
+            nx.resistance_distance(self.G, 1, 3, "weight")
+
+    def test_resistance_distance_same_node(self):
+        assert nx.resistance_distance(self.G, 1, 1) == 0
+
+    def test_resistance_distance_only_nodeA(self):
+        rd = nx.resistance_distance(self.G, nodeA=1)
+        test_data = {}
+        test_data[1] = 0
+        test_data[2] = 0.75
+        test_data[3] = 1
+        test_data[4] = 0.75
+        assert type(rd) == dict
+        assert sorted(rd.keys()) == sorted(test_data.keys())
+        for key in rd:
+            assert np.isclose(rd[key], test_data[key])
+
+    def test_resistance_distance_only_nodeB(self):
+        rd = nx.resistance_distance(self.G, nodeB=1)
+        test_data = {}
+        test_data[1] = 0
+        test_data[2] = 0.75
+        test_data[3] = 1
+        test_data[4] = 0.75
+        assert type(rd) == dict
+        assert sorted(rd.keys()) == sorted(test_data.keys())
+        for key in rd:
+            assert np.isclose(rd[key], test_data[key])
+
+    def test_resistance_distance_all(self):
+        rd = nx.resistance_distance(self.G)
+        assert type(rd) == dict
+        assert round(rd[1][3], 5) == 1
+
+
+class TestEffectiveGraphResistance:
+    @classmethod
+    def setup_class(cls):
+        global np
+        np = pytest.importorskip("numpy")
+        sp = pytest.importorskip("scipy")
+
+    def setup_method(self):
+        G = nx.Graph()
+        G.add_edge(1, 2, weight=2)
+        G.add_edge(1, 3, weight=1)
+        G.add_edge(2, 3, weight=4)
+        self.G = G
+
+    def test_effective_graph_resistance_directed_graph(self):
+        G = nx.DiGraph()
+        with pytest.raises(nx.NetworkXNotImplemented):
+            nx.effective_graph_resistance(G)
+
+    def test_effective_graph_resistance_empty(self):
+        G = nx.Graph()
+        with pytest.raises(nx.NetworkXError):
+            nx.effective_graph_resistance(G)
+
+    def test_effective_graph_resistance_not_connected(self):
+        G = nx.Graph([(1, 2), (3, 4)])
+        RG = nx.effective_graph_resistance(G)
+        assert np.isinf(RG)
+
+    def test_effective_graph_resistance(self):
+        RG = nx.effective_graph_resistance(self.G, "weight", True)
+        rd12 = 1 / (1 / (1 + 4) + 1 / 2)
+        rd13 = 1 / (1 / (1 + 2) + 1 / 4)
+        rd23 = 1 / (1 / (2 + 4) + 1 / 1)
+        assert np.isclose(RG, rd12 + rd13 + rd23)
+
+    def test_effective_graph_resistance_noinv(self):
+        RG = nx.effective_graph_resistance(self.G, "weight", False)
+        rd12 = 1 / (1 / (1 / 1 + 1 / 4) + 1 / (1 / 2))
+        rd13 = 1 / (1 / (1 / 1 + 1 / 2) + 1 / (1 / 4))
+        rd23 = 1 / (1 / (1 / 2 + 1 / 4) + 1 / (1 / 1))
+        assert np.isclose(RG, rd12 + rd13 + rd23)
+
+    def test_effective_graph_resistance_no_weight(self):
+        RG = nx.effective_graph_resistance(self.G)
+        assert np.isclose(RG, 2)
+
+    def test_effective_graph_resistance_neg_weight(self):
+        self.G[2][3]["weight"] = -4
+        RG = nx.effective_graph_resistance(self.G, "weight", True)
+        rd12 = 1 / (1 / (1 + -4) + 1 / 2)
+        rd13 = 1 / (1 / (1 + 2) + 1 / (-4))
+        rd23 = 1 / (1 / (2 + -4) + 1 / 1)
+        assert np.isclose(RG, rd12 + rd13 + rd23)
+
+    def test_effective_graph_resistance_multigraph(self):
+        G = nx.MultiGraph()
+        G.add_edge(1, 2, weight=2)
+        G.add_edge(1, 3, weight=1)
+        G.add_edge(2, 3, weight=1)
+        G.add_edge(2, 3, weight=3)
+        RG = nx.effective_graph_resistance(G, "weight", True)
+        edge23 = 1 / (1 / 1 + 1 / 3)
+        rd12 = 1 / (1 / (1 + edge23) + 1 / 2)
+        rd13 = 1 / (1 / (1 + 2) + 1 / edge23)
+        rd23 = 1 / (1 / (2 + edge23) + 1 / 1)
+        assert np.isclose(RG, rd12 + rd13 + rd23)
+
+    def test_effective_graph_resistance_div0(self):
+        with pytest.raises(ZeroDivisionError):
+            self.G[1][2]["weight"] = 0
+            nx.effective_graph_resistance(self.G, "weight")
+
+    def test_effective_graph_resistance_complete_graph(self):
+        N = 10
+        G = nx.complete_graph(N)
+        RG = nx.effective_graph_resistance(G)
+        assert np.isclose(RG, N - 1)
+
+    def test_effective_graph_resistance_path_graph(self):
+        N = 10
+        G = nx.path_graph(N)
+        RG = nx.effective_graph_resistance(G)
+        assert np.isclose(RG, (N - 1) * N * (N + 1) // 6)
+
+
+class TestBarycenter:
+    """Test :func:`networkx.algorithms.distance_measures.barycenter`."""
+
+    def barycenter_as_subgraph(self, g, **kwargs):
+        """Return the subgraph induced on the barycenter of g"""
+        b = nx.barycenter(g, **kwargs)
+        assert isinstance(b, list)
+        assert set(b) <= set(g)
+        return g.subgraph(b)
+
+    def test_must_be_connected(self):
+        pytest.raises(nx.NetworkXNoPath, nx.barycenter, nx.empty_graph(5))
+
+    def test_sp_kwarg(self):
+        # Complete graph K_5. Normally it works...
+        K_5 = nx.complete_graph(5)
+        sp = dict(nx.shortest_path_length(K_5))
+        assert nx.barycenter(K_5, sp=sp) == list(K_5)
+
+        # ...but not with the weight argument
+        for u, v, data in K_5.edges.data():
+            data["weight"] = 1
+        pytest.raises(ValueError, nx.barycenter, K_5, sp=sp, weight="weight")
+
+        # ...and a corrupted sp can make it seem like K_5 is disconnected
+        del sp[0][1]
+        pytest.raises(nx.NetworkXNoPath, nx.barycenter, K_5, sp=sp)
+
+    def test_trees(self):
+        """The barycenter of a tree is a single vertex or an edge.
+
+        See [West01]_, p. 78.
+        """
+        prng = Random(0xDEADBEEF)
+        for i in range(50):
+            RT = nx.random_labeled_tree(prng.randint(1, 75), seed=prng)
+            b = self.barycenter_as_subgraph(RT)
+            if len(b) == 2:
+                assert b.size() == 1
+            else:
+                assert len(b) == 1
+                assert b.size() == 0
+
+    def test_this_one_specific_tree(self):
+        """Test the tree pictured at the bottom of [West01]_, p. 78."""
+        g = nx.Graph(
+            {
+                "a": ["b"],
+                "b": ["a", "x"],
+                "x": ["b", "y"],
+                "y": ["x", "z"],
+                "z": ["y", 0, 1, 2, 3, 4],
+                0: ["z"],
+                1: ["z"],
+                2: ["z"],
+                3: ["z"],
+                4: ["z"],
+            }
+        )
+        b = self.barycenter_as_subgraph(g, attr="barycentricity")
+        assert list(b) == ["z"]
+        assert not b.edges
+        expected_barycentricity = {
+            0: 23,
+            1: 23,
+            2: 23,
+            3: 23,
+            4: 23,
+            "a": 35,
+            "b": 27,
+            "x": 21,
+            "y": 17,
+            "z": 15,
+        }
+        for node, barycentricity in expected_barycentricity.items():
+            assert g.nodes[node]["barycentricity"] == barycentricity
+
+        # Doubling weights should do nothing but double the barycentricities
+        for edge in g.edges:
+            g.edges[edge]["weight"] = 2
+        b = self.barycenter_as_subgraph(g, weight="weight", attr="barycentricity2")
+        assert list(b) == ["z"]
+        assert not b.edges
+        for node, barycentricity in expected_barycentricity.items():
+            assert g.nodes[node]["barycentricity2"] == barycentricity * 2
+
+
+class TestKemenyConstant:
+    @classmethod
+    def setup_class(cls):
+        global np
+        np = pytest.importorskip("numpy")
+        sp = pytest.importorskip("scipy")
+
+    def setup_method(self):
+        G = nx.Graph()
+        w12 = 2
+        w13 = 3
+        w23 = 4
+        G.add_edge(1, 2, weight=w12)
+        G.add_edge(1, 3, weight=w13)
+        G.add_edge(2, 3, weight=w23)
+        self.G = G
+
+    def test_kemeny_constant_directed(self):
+        G = nx.DiGraph()
+        G.add_edge(1, 2)
+        G.add_edge(1, 3)
+        G.add_edge(2, 3)
+        with pytest.raises(nx.NetworkXNotImplemented):
+            nx.kemeny_constant(G)
+
+    def test_kemeny_constant_not_connected(self):
+        self.G.add_node(5)
+        with pytest.raises(nx.NetworkXError):
+            nx.kemeny_constant(self.G)
+
+    def test_kemeny_constant_no_nodes(self):
+        G = nx.Graph()
+        with pytest.raises(nx.NetworkXError):
+            nx.kemeny_constant(G)
+
+    def test_kemeny_constant_negative_weight(self):
+        G = nx.Graph()
+        w12 = 2
+        w13 = 3
+        w23 = -10
+        G.add_edge(1, 2, weight=w12)
+        G.add_edge(1, 3, weight=w13)
+        G.add_edge(2, 3, weight=w23)
+        with pytest.raises(nx.NetworkXError):
+            nx.kemeny_constant(G, weight="weight")
+
+    def test_kemeny_constant(self):
+        K = nx.kemeny_constant(self.G, weight="weight")
+        w12 = 2
+        w13 = 3
+        w23 = 4
+        test_data = (
+            3
+            / 2
+            * (w12 + w13)
+            * (w12 + w23)
+            * (w13 + w23)
+            / (
+                w12**2 * (w13 + w23)
+                + w13**2 * (w12 + w23)
+                + w23**2 * (w12 + w13)
+                + 3 * w12 * w13 * w23
+            )
+        )
+        assert np.isclose(K, test_data)
+
+    def test_kemeny_constant_no_weight(self):
+        K = nx.kemeny_constant(self.G)
+        assert np.isclose(K, 4 / 3)
+
+    def test_kemeny_constant_multigraph(self):
+        G = nx.MultiGraph()
+        w12_1 = 2
+        w12_2 = 1
+        w13 = 3
+        w23 = 4
+        G.add_edge(1, 2, weight=w12_1)
+        G.add_edge(1, 2, weight=w12_2)
+        G.add_edge(1, 3, weight=w13)
+        G.add_edge(2, 3, weight=w23)
+        K = nx.kemeny_constant(G, weight="weight")
+        w12 = w12_1 + w12_2
+        test_data = (
+            3
+            / 2
+            * (w12 + w13)
+            * (w12 + w23)
+            * (w13 + w23)
+            / (
+                w12**2 * (w13 + w23)
+                + w13**2 * (w12 + w23)
+                + w23**2 * (w12 + w13)
+                + 3 * w12 * w13 * w23
+            )
+        )
+        assert np.isclose(K, test_data)
+
+    def test_kemeny_constant_weight0(self):
+        G = nx.Graph()
+        w12 = 0
+        w13 = 3
+        w23 = 4
+        G.add_edge(1, 2, weight=w12)
+        G.add_edge(1, 3, weight=w13)
+        G.add_edge(2, 3, weight=w23)
+        K = nx.kemeny_constant(G, weight="weight")
+        test_data = (
+            3
+            / 2
+            * (w12 + w13)
+            * (w12 + w23)
+            * (w13 + w23)
+            / (
+                w12**2 * (w13 + w23)
+                + w13**2 * (w12 + w23)
+                + w23**2 * (w12 + w13)
+                + 3 * w12 * w13 * w23
+            )
+        )
+        assert np.isclose(K, test_data)
+
+    def test_kemeny_constant_selfloop(self):
+        G = nx.Graph()
+        w11 = 1
+        w12 = 2
+        w13 = 3
+        w23 = 4
+        G.add_edge(1, 1, weight=w11)
+        G.add_edge(1, 2, weight=w12)
+        G.add_edge(1, 3, weight=w13)
+        G.add_edge(2, 3, weight=w23)
+        K = nx.kemeny_constant(G, weight="weight")
+        test_data = (
+            (2 * w11 + 3 * w12 + 3 * w13)
+            * (w12 + w23)
+            * (w13 + w23)
+            / (
+                (w12 * w13 + w12 * w23 + w13 * w23)
+                * (w11 + 2 * w12 + 2 * w13 + 2 * w23)
+            )
+        )
+        assert np.isclose(K, test_data)
+
+    def test_kemeny_constant_complete_bipartite_graph(self):
+        # Theorem 1 in https://www.sciencedirect.com/science/article/pii/S0166218X20302912
+        n1 = 5
+        n2 = 4
+        G = nx.complete_bipartite_graph(n1, n2)
+        K = nx.kemeny_constant(G)
+        assert np.isclose(K, n1 + n2 - 3 / 2)
+
+    def test_kemeny_constant_path_graph(self):
+        # Theorem 2 in https://www.sciencedirect.com/science/article/pii/S0166218X20302912
+        n = 10
+        G = nx.path_graph(n)
+        K = nx.kemeny_constant(G)
+        assert np.isclose(K, n**2 / 3 - 2 * n / 3 + 1 / 2)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_regular.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_regular.py
new file mode 100644
index 00000000..545fb6de
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_regular.py
@@ -0,0 +1,85 @@
+import pytest
+
+import networkx as nx
+from networkx import is_strongly_regular
+
+
+@pytest.mark.parametrize(
+    "f", (nx.is_distance_regular, nx.intersection_array, nx.is_strongly_regular)
+)
+@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph))
+def test_raises_on_directed_and_multigraphs(f, graph_constructor):
+    G = graph_constructor([(0, 1), (1, 2)])
+    with pytest.raises(nx.NetworkXNotImplemented):
+        f(G)
+
+
+class TestDistanceRegular:
+    def test_is_distance_regular(self):
+        assert nx.is_distance_regular(nx.icosahedral_graph())
+        assert nx.is_distance_regular(nx.petersen_graph())
+        assert nx.is_distance_regular(nx.cubical_graph())
+        assert nx.is_distance_regular(nx.complete_bipartite_graph(3, 3))
+        assert nx.is_distance_regular(nx.tetrahedral_graph())
+        assert nx.is_distance_regular(nx.dodecahedral_graph())
+        assert nx.is_distance_regular(nx.pappus_graph())
+        assert nx.is_distance_regular(nx.heawood_graph())
+        assert nx.is_distance_regular(nx.cycle_graph(3))
+        # no distance regular
+        assert not nx.is_distance_regular(nx.path_graph(4))
+
+    def test_not_connected(self):
+        G = nx.cycle_graph(4)
+        nx.add_cycle(G, [5, 6, 7])
+        assert not nx.is_distance_regular(G)
+
+    def test_global_parameters(self):
+        b, c = nx.intersection_array(nx.cycle_graph(5))
+        g = nx.global_parameters(b, c)
+        assert list(g) == [(0, 0, 2), (1, 0, 1), (1, 1, 0)]
+        b, c = nx.intersection_array(nx.cycle_graph(3))
+        g = nx.global_parameters(b, c)
+        assert list(g) == [(0, 0, 2), (1, 1, 0)]
+
+    def test_intersection_array(self):
+        b, c = nx.intersection_array(nx.cycle_graph(5))
+        assert b == [2, 1]
+        assert c == [1, 1]
+        b, c = nx.intersection_array(nx.dodecahedral_graph())
+        assert b == [3, 2, 1, 1, 1]
+        assert c == [1, 1, 1, 2, 3]
+        b, c = nx.intersection_array(nx.icosahedral_graph())
+        assert b == [5, 2, 1]
+        assert c == [1, 2, 5]
+
+
+@pytest.mark.parametrize("f", (nx.is_distance_regular, nx.is_strongly_regular))
+def test_empty_graph_raises(f):
+    G = nx.Graph()
+    with pytest.raises(nx.NetworkXPointlessConcept, match="Graph has no nodes"):
+        f(G)
+
+
+class TestStronglyRegular:
+    """Unit tests for the :func:`~networkx.is_strongly_regular`
+    function.
+
+    """
+
+    def test_cycle_graph(self):
+        """Tests that the cycle graph on five vertices is strongly
+        regular.
+
+        """
+        G = nx.cycle_graph(5)
+        assert is_strongly_regular(G)
+
+    def test_petersen_graph(self):
+        """Tests that the Petersen graph is strongly regular."""
+        G = nx.petersen_graph()
+        assert is_strongly_regular(G)
+
+    def test_path_graph(self):
+        """Tests that the path graph is not strongly regular."""
+        G = nx.path_graph(4)
+        assert not is_strongly_regular(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominance.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominance.py
new file mode 100644
index 00000000..9b804c2f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominance.py
@@ -0,0 +1,286 @@
+import pytest
+
+import networkx as nx
+
+
+class TestImmediateDominators:
+    def test_exceptions(self):
+        G = nx.Graph()
+        G.add_node(0)
+        pytest.raises(nx.NetworkXNotImplemented, nx.immediate_dominators, G, 0)
+        G = nx.MultiGraph(G)
+        pytest.raises(nx.NetworkXNotImplemented, nx.immediate_dominators, G, 0)
+        G = nx.DiGraph([[0, 0]])
+        pytest.raises(nx.NetworkXError, nx.immediate_dominators, G, 1)
+
+    def test_singleton(self):
+        G = nx.DiGraph()
+        G.add_node(0)
+        assert nx.immediate_dominators(G, 0) == {0: 0}
+        G.add_edge(0, 0)
+        assert nx.immediate_dominators(G, 0) == {0: 0}
+
+    def test_path(self):
+        n = 5
+        G = nx.path_graph(n, create_using=nx.DiGraph())
+        assert nx.immediate_dominators(G, 0) == {i: max(i - 1, 0) for i in range(n)}
+
+    def test_cycle(self):
+        n = 5
+        G = nx.cycle_graph(n, create_using=nx.DiGraph())
+        assert nx.immediate_dominators(G, 0) == {i: max(i - 1, 0) for i in range(n)}
+
+    def test_unreachable(self):
+        n = 5
+        assert n > 1
+        G = nx.path_graph(n, create_using=nx.DiGraph())
+        assert nx.immediate_dominators(G, n // 2) == {
+            i: max(i - 1, n // 2) for i in range(n // 2, n)
+        }
+
+    def test_irreducible1(self):
+        """
+        Graph taken from figure 2 of "A simple, fast dominance algorithm." (2006).
+        https://hdl.handle.net/1911/96345
+        """
+        edges = [(1, 2), (2, 1), (3, 2), (4, 1), (5, 3), (5, 4)]
+        G = nx.DiGraph(edges)
+        assert nx.immediate_dominators(G, 5) == {i: 5 for i in range(1, 6)}
+
+    def test_irreducible2(self):
+        """
+        Graph taken from figure 4 of "A simple, fast dominance algorithm." (2006).
+        https://hdl.handle.net/1911/96345
+        """
+
+        edges = [(1, 2), (2, 1), (2, 3), (3, 2), (4, 2), (4, 3), (5, 1), (6, 4), (6, 5)]
+        G = nx.DiGraph(edges)
+        result = nx.immediate_dominators(G, 6)
+        assert result == {i: 6 for i in range(1, 7)}
+
+    def test_domrel_png(self):
+        # Graph taken from https://commons.wikipedia.org/wiki/File:Domrel.png
+        edges = [(1, 2), (2, 3), (2, 4), (2, 6), (3, 5), (4, 5), (5, 2)]
+        G = nx.DiGraph(edges)
+        result = nx.immediate_dominators(G, 1)
+        assert result == {1: 1, 2: 1, 3: 2, 4: 2, 5: 2, 6: 2}
+        # Test postdominance.
+        result = nx.immediate_dominators(G.reverse(copy=False), 6)
+        assert result == {1: 2, 2: 6, 3: 5, 4: 5, 5: 2, 6: 6}
+
+    def test_boost_example(self):
+        # Graph taken from Figure 1 of
+        # http://www.boost.org/doc/libs/1_56_0/libs/graph/doc/lengauer_tarjan_dominator.htm
+        edges = [(0, 1), (1, 2), (1, 3), (2, 7), (3, 4), (4, 5), (4, 6), (5, 7), (6, 4)]
+        G = nx.DiGraph(edges)
+        result = nx.immediate_dominators(G, 0)
+        assert result == {0: 0, 1: 0, 2: 1, 3: 1, 4: 3, 5: 4, 6: 4, 7: 1}
+        # Test postdominance.
+        result = nx.immediate_dominators(G.reverse(copy=False), 7)
+        assert result == {0: 1, 1: 7, 2: 7, 3: 4, 4: 5, 5: 7, 6: 4, 7: 7}
+
+
+class TestDominanceFrontiers:
+    def test_exceptions(self):
+        G = nx.Graph()
+        G.add_node(0)
+        pytest.raises(nx.NetworkXNotImplemented, nx.dominance_frontiers, G, 0)
+        G = nx.MultiGraph(G)
+        pytest.raises(nx.NetworkXNotImplemented, nx.dominance_frontiers, G, 0)
+        G = nx.DiGraph([[0, 0]])
+        pytest.raises(nx.NetworkXError, nx.dominance_frontiers, G, 1)
+
+    def test_singleton(self):
+        G = nx.DiGraph()
+        G.add_node(0)
+        assert nx.dominance_frontiers(G, 0) == {0: set()}
+        G.add_edge(0, 0)
+        assert nx.dominance_frontiers(G, 0) == {0: set()}
+
+    def test_path(self):
+        n = 5
+        G = nx.path_graph(n, create_using=nx.DiGraph())
+        assert nx.dominance_frontiers(G, 0) == {i: set() for i in range(n)}
+
+    def test_cycle(self):
+        n = 5
+        G = nx.cycle_graph(n, create_using=nx.DiGraph())
+        assert nx.dominance_frontiers(G, 0) == {i: set() for i in range(n)}
+
+    def test_unreachable(self):
+        n = 5
+        assert n > 1
+        G = nx.path_graph(n, create_using=nx.DiGraph())
+        assert nx.dominance_frontiers(G, n // 2) == {i: set() for i in range(n // 2, n)}
+
+    def test_irreducible1(self):
+        """
+        Graph taken from figure 2 of "A simple, fast dominance algorithm." (2006).
+        https://hdl.handle.net/1911/96345
+        """
+        edges = [(1, 2), (2, 1), (3, 2), (4, 1), (5, 3), (5, 4)]
+        G = nx.DiGraph(edges)
+        assert dict(nx.dominance_frontiers(G, 5).items()) == {
+            1: {2},
+            2: {1},
+            3: {2},
+            4: {1},
+            5: set(),
+        }
+
+    def test_irreducible2(self):
+        """
+        Graph taken from figure 4 of "A simple, fast dominance algorithm." (2006).
+        https://hdl.handle.net/1911/96345
+        """
+        edges = [(1, 2), (2, 1), (2, 3), (3, 2), (4, 2), (4, 3), (5, 1), (6, 4), (6, 5)]
+        G = nx.DiGraph(edges)
+        assert nx.dominance_frontiers(G, 6) == {
+            1: {2},
+            2: {1, 3},
+            3: {2},
+            4: {2, 3},
+            5: {1},
+            6: set(),
+        }
+
+    def test_domrel_png(self):
+        # Graph taken from https://commons.wikipedia.org/wiki/File:Domrel.png
+        edges = [(1, 2), (2, 3), (2, 4), (2, 6), (3, 5), (4, 5), (5, 2)]
+        G = nx.DiGraph(edges)
+        assert nx.dominance_frontiers(G, 1) == {
+            1: set(),
+            2: {2},
+            3: {5},
+            4: {5},
+            5: {2},
+            6: set(),
+        }
+        # Test postdominance.
+        result = nx.dominance_frontiers(G.reverse(copy=False), 6)
+        assert result == {1: set(), 2: {2}, 3: {2}, 4: {2}, 5: {2}, 6: set()}
+
+    def test_boost_example(self):
+        # Graph taken from Figure 1 of
+        # http://www.boost.org/doc/libs/1_56_0/libs/graph/doc/lengauer_tarjan_dominator.htm
+        edges = [(0, 1), (1, 2), (1, 3), (2, 7), (3, 4), (4, 5), (4, 6), (5, 7), (6, 4)]
+        G = nx.DiGraph(edges)
+        assert nx.dominance_frontiers(G, 0) == {
+            0: set(),
+            1: set(),
+            2: {7},
+            3: {7},
+            4: {4, 7},
+            5: {7},
+            6: {4},
+            7: set(),
+        }
+        # Test postdominance.
+        result = nx.dominance_frontiers(G.reverse(copy=False), 7)
+        expected = {
+            0: set(),
+            1: set(),
+            2: {1},
+            3: {1},
+            4: {1, 4},
+            5: {1},
+            6: {4},
+            7: set(),
+        }
+        assert result == expected
+
+    def test_discard_issue(self):
+        # https://github.com/networkx/networkx/issues/2071
+        g = nx.DiGraph()
+        g.add_edges_from(
+            [
+                ("b0", "b1"),
+                ("b1", "b2"),
+                ("b2", "b3"),
+                ("b3", "b1"),
+                ("b1", "b5"),
+                ("b5", "b6"),
+                ("b5", "b8"),
+                ("b6", "b7"),
+                ("b8", "b7"),
+                ("b7", "b3"),
+                ("b3", "b4"),
+            ]
+        )
+        df = nx.dominance_frontiers(g, "b0")
+        assert df == {
+            "b4": set(),
+            "b5": {"b3"},
+            "b6": {"b7"},
+            "b7": {"b3"},
+            "b0": set(),
+            "b1": {"b1"},
+            "b2": {"b3"},
+            "b3": {"b1"},
+            "b8": {"b7"},
+        }
+
+    def test_loop(self):
+        g = nx.DiGraph()
+        g.add_edges_from([("a", "b"), ("b", "c"), ("b", "a")])
+        df = nx.dominance_frontiers(g, "a")
+        assert df == {"a": set(), "b": set(), "c": set()}
+
+    def test_missing_immediate_doms(self):
+        # see https://github.com/networkx/networkx/issues/2070
+        g = nx.DiGraph()
+        edges = [
+            ("entry_1", "b1"),
+            ("b1", "b2"),
+            ("b2", "b3"),
+            ("b3", "exit"),
+            ("entry_2", "b3"),
+        ]
+
+        # entry_1
+        #   |
+        #   b1
+        #   |
+        #   b2  entry_2
+        #    |  /
+        #    b3
+        #    |
+        #   exit
+
+        g.add_edges_from(edges)
+        # formerly raised KeyError on entry_2 when parsing b3
+        # because entry_2 does not have immediate doms (no path)
+        nx.dominance_frontiers(g, "entry_1")
+
+    def test_loops_larger(self):
+        # from
+        # http://ecee.colorado.edu/~waite/Darmstadt/motion.html
+        g = nx.DiGraph()
+        edges = [
+            ("entry", "exit"),
+            ("entry", "1"),
+            ("1", "2"),
+            ("2", "3"),
+            ("3", "4"),
+            ("4", "5"),
+            ("5", "6"),
+            ("6", "exit"),
+            ("6", "2"),
+            ("5", "3"),
+            ("4", "4"),
+        ]
+
+        g.add_edges_from(edges)
+        df = nx.dominance_frontiers(g, "entry")
+        answer = {
+            "entry": set(),
+            "1": {"exit"},
+            "2": {"exit", "2"},
+            "3": {"exit", "3", "2"},
+            "4": {"exit", "4", "3", "2"},
+            "5": {"exit", "3", "2"},
+            "6": {"exit", "2"},
+            "exit": set(),
+        }
+        for n in df:
+            assert set(df[n]) == set(answer[n])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominating.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominating.py
new file mode 100644
index 00000000..b945c738
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominating.py
@@ -0,0 +1,46 @@
+import pytest
+
+import networkx as nx
+
+
+def test_dominating_set():
+    G = nx.gnp_random_graph(100, 0.1)
+    D = nx.dominating_set(G)
+    assert nx.is_dominating_set(G, D)
+    D = nx.dominating_set(G, start_with=0)
+    assert nx.is_dominating_set(G, D)
+
+
+def test_complete():
+    """In complete graphs each node is a dominating set.
+    Thus the dominating set has to be of cardinality 1.
+    """
+    K4 = nx.complete_graph(4)
+    assert len(nx.dominating_set(K4)) == 1
+    K5 = nx.complete_graph(5)
+    assert len(nx.dominating_set(K5)) == 1
+
+
+def test_raise_dominating_set():
+    with pytest.raises(nx.NetworkXError):
+        G = nx.path_graph(4)
+        D = nx.dominating_set(G, start_with=10)
+
+
+def test_is_dominating_set():
+    G = nx.path_graph(4)
+    d = {1, 3}
+    assert nx.is_dominating_set(G, d)
+    d = {0, 2}
+    assert nx.is_dominating_set(G, d)
+    d = {1}
+    assert not nx.is_dominating_set(G, d)
+
+
+def test_wikipedia_is_dominating_set():
+    """Example from https://en.wikipedia.org/wiki/Dominating_set"""
+    G = nx.cycle_graph(4)
+    G.add_edges_from([(0, 4), (1, 4), (2, 5)])
+    assert nx.is_dominating_set(G, {4, 3, 5})
+    assert nx.is_dominating_set(G, {0, 2})
+    assert nx.is_dominating_set(G, {1, 2})
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_efficiency.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_efficiency.py
new file mode 100644
index 00000000..9a2e7d04
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_efficiency.py
@@ -0,0 +1,58 @@
+"""Unit tests for the :mod:`networkx.algorithms.efficiency` module."""
+
+import networkx as nx
+
+
+class TestEfficiency:
+    def setup_method(self):
+        # G1 is a disconnected graph
+        self.G1 = nx.Graph()
+        self.G1.add_nodes_from([1, 2, 3])
+        # G2 is a cycle graph
+        self.G2 = nx.cycle_graph(4)
+        # G3 is the triangle graph with one additional edge
+        self.G3 = nx.lollipop_graph(3, 1)
+
+    def test_efficiency_disconnected_nodes(self):
+        """
+        When nodes are disconnected, efficiency is 0
+        """
+        assert nx.efficiency(self.G1, 1, 2) == 0
+
+    def test_local_efficiency_disconnected_graph(self):
+        """
+        In a disconnected graph the efficiency is 0
+        """
+        assert nx.local_efficiency(self.G1) == 0
+
+    def test_efficiency(self):
+        assert nx.efficiency(self.G2, 0, 1) == 1
+        assert nx.efficiency(self.G2, 0, 2) == 1 / 2
+
+    def test_global_efficiency(self):
+        assert nx.global_efficiency(self.G2) == 5 / 6
+
+    def test_global_efficiency_complete_graph(self):
+        """
+        Tests that the average global efficiency of the complete graph is one.
+        """
+        for n in range(2, 10):
+            G = nx.complete_graph(n)
+            assert nx.global_efficiency(G) == 1
+
+    def test_local_efficiency_complete_graph(self):
+        """
+        Test that the local efficiency for a complete graph with at least 3
+        nodes should be one. For a graph with only 2 nodes, the induced
+        subgraph has no edges.
+        """
+        for n in range(3, 10):
+            G = nx.complete_graph(n)
+            assert nx.local_efficiency(G) == 1
+
+    def test_using_ego_graph(self):
+        """
+        Test that the ego graph is used when computing local efficiency.
+        For more information, see GitHub issue #2710.
+        """
+        assert nx.local_efficiency(self.G3) == 7 / 12
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_euler.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_euler.py
new file mode 100644
index 00000000..b5871f09
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_euler.py
@@ -0,0 +1,314 @@
+import collections
+
+import pytest
+
+import networkx as nx
+
+
+@pytest.mark.parametrize("f", (nx.is_eulerian, nx.is_semieulerian))
+def test_empty_graph_raises(f):
+    G = nx.Graph()
+    with pytest.raises(nx.NetworkXPointlessConcept, match="Connectivity is undefined"):
+        f(G)
+
+
+class TestIsEulerian:
+    def test_is_eulerian(self):
+        assert nx.is_eulerian(nx.complete_graph(5))
+        assert nx.is_eulerian(nx.complete_graph(7))
+        assert nx.is_eulerian(nx.hypercube_graph(4))
+        assert nx.is_eulerian(nx.hypercube_graph(6))
+
+        assert not nx.is_eulerian(nx.complete_graph(4))
+        assert not nx.is_eulerian(nx.complete_graph(6))
+        assert not nx.is_eulerian(nx.hypercube_graph(3))
+        assert not nx.is_eulerian(nx.hypercube_graph(5))
+
+        assert not nx.is_eulerian(nx.petersen_graph())
+        assert not nx.is_eulerian(nx.path_graph(4))
+
+    def test_is_eulerian2(self):
+        # not connected
+        G = nx.Graph()
+        G.add_nodes_from([1, 2, 3])
+        assert not nx.is_eulerian(G)
+        # not strongly connected
+        G = nx.DiGraph()
+        G.add_nodes_from([1, 2, 3])
+        assert not nx.is_eulerian(G)
+        G = nx.MultiDiGraph()
+        G.add_edge(1, 2)
+        G.add_edge(2, 3)
+        G.add_edge(2, 3)
+        G.add_edge(3, 1)
+        assert not nx.is_eulerian(G)
+
+
+class TestEulerianCircuit:
+    def test_eulerian_circuit_cycle(self):
+        G = nx.cycle_graph(4)
+
+        edges = list(nx.eulerian_circuit(G, source=0))
+        nodes = [u for u, v in edges]
+        assert nodes == [0, 3, 2, 1]
+        assert edges == [(0, 3), (3, 2), (2, 1), (1, 0)]
+
+        edges = list(nx.eulerian_circuit(G, source=1))
+        nodes = [u for u, v in edges]
+        assert nodes == [1, 2, 3, 0]
+        assert edges == [(1, 2), (2, 3), (3, 0), (0, 1)]
+
+        G = nx.complete_graph(3)
+
+        edges = list(nx.eulerian_circuit(G, source=0))
+        nodes = [u for u, v in edges]
+        assert nodes == [0, 2, 1]
+        assert edges == [(0, 2), (2, 1), (1, 0)]
+
+        edges = list(nx.eulerian_circuit(G, source=1))
+        nodes = [u for u, v in edges]
+        assert nodes == [1, 2, 0]
+        assert edges == [(1, 2), (2, 0), (0, 1)]
+
+    def test_eulerian_circuit_digraph(self):
+        G = nx.DiGraph()
+        nx.add_cycle(G, [0, 1, 2, 3])
+
+        edges = list(nx.eulerian_circuit(G, source=0))
+        nodes = [u for u, v in edges]
+        assert nodes == [0, 1, 2, 3]
+        assert edges == [(0, 1), (1, 2), (2, 3), (3, 0)]
+
+        edges = list(nx.eulerian_circuit(G, source=1))
+        nodes = [u for u, v in edges]
+        assert nodes == [1, 2, 3, 0]
+        assert edges == [(1, 2), (2, 3), (3, 0), (0, 1)]
+
+    def test_multigraph(self):
+        G = nx.MultiGraph()
+        nx.add_cycle(G, [0, 1, 2, 3])
+        G.add_edge(1, 2)
+        G.add_edge(1, 2)
+        edges = list(nx.eulerian_circuit(G, source=0))
+        nodes = [u for u, v in edges]
+        assert nodes == [0, 3, 2, 1, 2, 1]
+        assert edges == [(0, 3), (3, 2), (2, 1), (1, 2), (2, 1), (1, 0)]
+
+    def test_multigraph_with_keys(self):
+        G = nx.MultiGraph()
+        nx.add_cycle(G, [0, 1, 2, 3])
+        G.add_edge(1, 2)
+        G.add_edge(1, 2)
+        edges = list(nx.eulerian_circuit(G, source=0, keys=True))
+        nodes = [u for u, v, k in edges]
+        assert nodes == [0, 3, 2, 1, 2, 1]
+        assert edges[:2] == [(0, 3, 0), (3, 2, 0)]
+        assert collections.Counter(edges[2:5]) == collections.Counter(
+            [(2, 1, 0), (1, 2, 1), (2, 1, 2)]
+        )
+        assert edges[5:] == [(1, 0, 0)]
+
+    def test_not_eulerian(self):
+        with pytest.raises(nx.NetworkXError):
+            f = list(nx.eulerian_circuit(nx.complete_graph(4)))
+
+
+class TestIsSemiEulerian:
+    def test_is_semieulerian(self):
+        # Test graphs with Eulerian paths but no cycles return True.
+        assert nx.is_semieulerian(nx.path_graph(4))
+        G = nx.path_graph(6, create_using=nx.DiGraph)
+        assert nx.is_semieulerian(G)
+
+        # Test graphs with Eulerian cycles return False.
+        assert not nx.is_semieulerian(nx.complete_graph(5))
+        assert not nx.is_semieulerian(nx.complete_graph(7))
+        assert not nx.is_semieulerian(nx.hypercube_graph(4))
+        assert not nx.is_semieulerian(nx.hypercube_graph(6))
+
+
+class TestHasEulerianPath:
+    def test_has_eulerian_path_cyclic(self):
+        # Test graphs with Eulerian cycles return True.
+        assert nx.has_eulerian_path(nx.complete_graph(5))
+        assert nx.has_eulerian_path(nx.complete_graph(7))
+        assert nx.has_eulerian_path(nx.hypercube_graph(4))
+        assert nx.has_eulerian_path(nx.hypercube_graph(6))
+
+    def test_has_eulerian_path_non_cyclic(self):
+        # Test graphs with Eulerian paths but no cycles return True.
+        assert nx.has_eulerian_path(nx.path_graph(4))
+        G = nx.path_graph(6, create_using=nx.DiGraph)
+        assert nx.has_eulerian_path(G)
+
+    def test_has_eulerian_path_directed_graph(self):
+        # Test directed graphs and returns False
+        G = nx.DiGraph()
+        G.add_edges_from([(0, 1), (1, 2), (0, 2)])
+        assert not nx.has_eulerian_path(G)
+
+        # Test directed graphs without isolated node returns True
+        G = nx.DiGraph()
+        G.add_edges_from([(0, 1), (1, 2), (2, 0)])
+        assert nx.has_eulerian_path(G)
+
+        # Test directed graphs with isolated node returns False
+        G.add_node(3)
+        assert not nx.has_eulerian_path(G)
+
+    @pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph()))
+    def test_has_eulerian_path_not_weakly_connected(self, G):
+        G.add_edges_from([(0, 1), (2, 3), (3, 2)])
+        assert not nx.has_eulerian_path(G)
+
+    @pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph()))
+    def test_has_eulerian_path_unbalancedins_more_than_one(self, G):
+        G.add_edges_from([(0, 1), (2, 3)])
+        assert not nx.has_eulerian_path(G)
+
+
+class TestFindPathStart:
+    def testfind_path_start(self):
+        find_path_start = nx.algorithms.euler._find_path_start
+        # Test digraphs return correct starting node.
+        G = nx.path_graph(6, create_using=nx.DiGraph)
+        assert find_path_start(G) == 0
+        edges = [(0, 1), (1, 2), (2, 0), (4, 0)]
+        assert find_path_start(nx.DiGraph(edges)) == 4
+
+        # Test graph with no Eulerian path return None.
+        edges = [(0, 1), (1, 2), (2, 3), (2, 4)]
+        assert find_path_start(nx.DiGraph(edges)) is None
+
+
+class TestEulerianPath:
+    def test_eulerian_path(self):
+        x = [(4, 0), (0, 1), (1, 2), (2, 0)]
+        for e1, e2 in zip(x, nx.eulerian_path(nx.DiGraph(x))):
+            assert e1 == e2
+
+    def test_eulerian_path_straight_link(self):
+        G = nx.DiGraph()
+        result = [(1, 2), (2, 3), (3, 4), (4, 5)]
+        G.add_edges_from(result)
+        assert result == list(nx.eulerian_path(G))
+        assert result == list(nx.eulerian_path(G, source=1))
+        with pytest.raises(nx.NetworkXError):
+            list(nx.eulerian_path(G, source=3))
+        with pytest.raises(nx.NetworkXError):
+            list(nx.eulerian_path(G, source=4))
+        with pytest.raises(nx.NetworkXError):
+            list(nx.eulerian_path(G, source=5))
+
+    def test_eulerian_path_multigraph(self):
+        G = nx.MultiDiGraph()
+        result = [(2, 1), (1, 2), (2, 1), (1, 2), (2, 3), (3, 4), (4, 3)]
+        G.add_edges_from(result)
+        assert result == list(nx.eulerian_path(G))
+        assert result == list(nx.eulerian_path(G, source=2))
+        with pytest.raises(nx.NetworkXError):
+            list(nx.eulerian_path(G, source=3))
+        with pytest.raises(nx.NetworkXError):
+            list(nx.eulerian_path(G, source=4))
+
+    def test_eulerian_path_eulerian_circuit(self):
+        G = nx.DiGraph()
+        result = [(1, 2), (2, 3), (3, 4), (4, 1)]
+        result2 = [(2, 3), (3, 4), (4, 1), (1, 2)]
+        result3 = [(3, 4), (4, 1), (1, 2), (2, 3)]
+        G.add_edges_from(result)
+        assert result == list(nx.eulerian_path(G))
+        assert result == list(nx.eulerian_path(G, source=1))
+        assert result2 == list(nx.eulerian_path(G, source=2))
+        assert result3 == list(nx.eulerian_path(G, source=3))
+
+    def test_eulerian_path_undirected(self):
+        G = nx.Graph()
+        result = [(1, 2), (2, 3), (3, 4), (4, 5)]
+        result2 = [(5, 4), (4, 3), (3, 2), (2, 1)]
+        G.add_edges_from(result)
+        assert list(nx.eulerian_path(G)) in (result, result2)
+        assert result == list(nx.eulerian_path(G, source=1))
+        assert result2 == list(nx.eulerian_path(G, source=5))
+        with pytest.raises(nx.NetworkXError):
+            list(nx.eulerian_path(G, source=3))
+        with pytest.raises(nx.NetworkXError):
+            list(nx.eulerian_path(G, source=2))
+
+    def test_eulerian_path_multigraph_undirected(self):
+        G = nx.MultiGraph()
+        result = [(2, 1), (1, 2), (2, 1), (1, 2), (2, 3), (3, 4)]
+        G.add_edges_from(result)
+        assert result == list(nx.eulerian_path(G))
+        assert result == list(nx.eulerian_path(G, source=2))
+        with pytest.raises(nx.NetworkXError):
+            list(nx.eulerian_path(G, source=3))
+        with pytest.raises(nx.NetworkXError):
+            list(nx.eulerian_path(G, source=1))
+
+    @pytest.mark.parametrize(
+        ("graph_type", "result"),
+        (
+            (nx.MultiGraph, [(0, 1, 0), (1, 0, 1)]),
+            (nx.MultiDiGraph, [(0, 1, 0), (1, 0, 0)]),
+        ),
+    )
+    def test_eulerian_with_keys(self, graph_type, result):
+        G = graph_type([(0, 1), (1, 0)])
+        answer = nx.eulerian_path(G, keys=True)
+        assert list(answer) == result
+
+
+class TestEulerize:
+    def test_disconnected(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.from_edgelist([(0, 1), (2, 3)])
+            nx.eulerize(G)
+
+    def test_null_graph(self):
+        with pytest.raises(nx.NetworkXPointlessConcept):
+            nx.eulerize(nx.Graph())
+
+    def test_null_multigraph(self):
+        with pytest.raises(nx.NetworkXPointlessConcept):
+            nx.eulerize(nx.MultiGraph())
+
+    def test_on_empty_graph(self):
+        with pytest.raises(nx.NetworkXError):
+            nx.eulerize(nx.empty_graph(3))
+
+    def test_on_eulerian(self):
+        G = nx.cycle_graph(3)
+        H = nx.eulerize(G)
+        assert nx.is_isomorphic(G, H)
+
+    def test_on_eulerian_multigraph(self):
+        G = nx.MultiGraph(nx.cycle_graph(3))
+        G.add_edge(0, 1)
+        H = nx.eulerize(G)
+        assert nx.is_eulerian(H)
+
+    def test_on_complete_graph(self):
+        G = nx.complete_graph(4)
+        assert nx.is_eulerian(nx.eulerize(G))
+        assert nx.is_eulerian(nx.eulerize(nx.MultiGraph(G)))
+
+    def test_on_non_eulerian_graph(self):
+        G = nx.cycle_graph(18)
+        G.add_edge(0, 18)
+        G.add_edge(18, 19)
+        G.add_edge(17, 19)
+        G.add_edge(4, 20)
+        G.add_edge(20, 21)
+        G.add_edge(21, 22)
+        G.add_edge(22, 23)
+        G.add_edge(23, 24)
+        G.add_edge(24, 25)
+        G.add_edge(25, 26)
+        G.add_edge(26, 27)
+        G.add_edge(27, 28)
+        G.add_edge(28, 13)
+        assert not nx.is_eulerian(G)
+        G = nx.eulerize(G)
+        assert nx.is_eulerian(G)
+        assert nx.number_of_edges(G) == 39
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graph_hashing.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graph_hashing.py
new file mode 100644
index 00000000..0828069d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graph_hashing.py
@@ -0,0 +1,686 @@
+import pytest
+
+import networkx as nx
+from networkx.generators import directed
+
+# Unit tests for the :func:`~networkx.weisfeiler_lehman_graph_hash` function
+
+
+def test_empty_graph_hash():
+    """
+    empty graphs should give hashes regardless of other params
+    """
+    G1 = nx.empty_graph()
+    G2 = nx.empty_graph()
+
+    h1 = nx.weisfeiler_lehman_graph_hash(G1)
+    h2 = nx.weisfeiler_lehman_graph_hash(G2)
+    h3 = nx.weisfeiler_lehman_graph_hash(G2, edge_attr="edge_attr1")
+    h4 = nx.weisfeiler_lehman_graph_hash(G2, node_attr="node_attr1")
+    h5 = nx.weisfeiler_lehman_graph_hash(
+        G2, edge_attr="edge_attr1", node_attr="node_attr1"
+    )
+    h6 = nx.weisfeiler_lehman_graph_hash(G2, iterations=10)
+
+    assert h1 == h2
+    assert h1 == h3
+    assert h1 == h4
+    assert h1 == h5
+    assert h1 == h6
+
+
+def test_directed():
+    """
+    A directed graph with no bi-directional edges should yield different a graph hash
+    to the same graph taken as undirected if there are no hash collisions.
+    """
+    r = 10
+    for i in range(r):
+        G_directed = nx.gn_graph(10 + r, seed=100 + i)
+        G_undirected = nx.to_undirected(G_directed)
+
+        h_directed = nx.weisfeiler_lehman_graph_hash(G_directed)
+        h_undirected = nx.weisfeiler_lehman_graph_hash(G_undirected)
+
+        assert h_directed != h_undirected
+
+
+def test_reversed():
+    """
+    A directed graph with no bi-directional edges should yield different a graph hash
+    to the same graph taken with edge directions reversed if there are no hash collisions.
+    Here we test a cycle graph which is the minimal counterexample
+    """
+    G = nx.cycle_graph(5, create_using=nx.DiGraph)
+    nx.set_node_attributes(G, {n: str(n) for n in G.nodes()}, name="label")
+
+    G_reversed = G.reverse()
+
+    h = nx.weisfeiler_lehman_graph_hash(G, node_attr="label")
+    h_reversed = nx.weisfeiler_lehman_graph_hash(G_reversed, node_attr="label")
+
+    assert h != h_reversed
+
+
+def test_isomorphic():
+    """
+    graph hashes should be invariant to node-relabeling (when the output is reindexed
+    by the same mapping)
+    """
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(1, r + 1):
+        G1 = nx.erdos_renyi_graph(n, p * i, seed=200 + i)
+        G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
+
+        g1_hash = nx.weisfeiler_lehman_graph_hash(G1)
+        g2_hash = nx.weisfeiler_lehman_graph_hash(G2)
+
+        assert g1_hash == g2_hash
+
+
+def test_isomorphic_edge_attr():
+    """
+    Isomorphic graphs with differing edge attributes should yield different graph
+    hashes if the 'edge_attr' argument is supplied and populated in the graph,
+    and there are no hash collisions.
+    The output should still be invariant to node-relabeling
+    """
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(1, r + 1):
+        G1 = nx.erdos_renyi_graph(n, p * i, seed=300 + i)
+
+        for a, b in G1.edges:
+            G1[a][b]["edge_attr1"] = f"{a}-{b}-1"
+            G1[a][b]["edge_attr2"] = f"{a}-{b}-2"
+
+        g1_hash_with_edge_attr1 = nx.weisfeiler_lehman_graph_hash(
+            G1, edge_attr="edge_attr1"
+        )
+        g1_hash_with_edge_attr2 = nx.weisfeiler_lehman_graph_hash(
+            G1, edge_attr="edge_attr2"
+        )
+        g1_hash_no_edge_attr = nx.weisfeiler_lehman_graph_hash(G1, edge_attr=None)
+
+        assert g1_hash_with_edge_attr1 != g1_hash_no_edge_attr
+        assert g1_hash_with_edge_attr2 != g1_hash_no_edge_attr
+        assert g1_hash_with_edge_attr1 != g1_hash_with_edge_attr2
+
+        G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
+
+        g2_hash_with_edge_attr1 = nx.weisfeiler_lehman_graph_hash(
+            G2, edge_attr="edge_attr1"
+        )
+        g2_hash_with_edge_attr2 = nx.weisfeiler_lehman_graph_hash(
+            G2, edge_attr="edge_attr2"
+        )
+
+        assert g1_hash_with_edge_attr1 == g2_hash_with_edge_attr1
+        assert g1_hash_with_edge_attr2 == g2_hash_with_edge_attr2
+
+
+def test_missing_edge_attr():
+    """
+    If the 'edge_attr' argument is supplied but is missing from an edge in the graph,
+    we should raise a KeyError
+    """
+    G = nx.Graph()
+    G.add_edges_from([(1, 2, {"edge_attr1": "a"}), (1, 3, {})])
+    pytest.raises(KeyError, nx.weisfeiler_lehman_graph_hash, G, edge_attr="edge_attr1")
+
+
+def test_isomorphic_node_attr():
+    """
+    Isomorphic graphs with differing node attributes should yield different graph
+    hashes if the 'node_attr' argument is supplied and populated in the graph, and
+    there are no hash collisions.
+    The output should still be invariant to node-relabeling
+    """
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(1, r + 1):
+        G1 = nx.erdos_renyi_graph(n, p * i, seed=400 + i)
+
+        for u in G1.nodes():
+            G1.nodes[u]["node_attr1"] = f"{u}-1"
+            G1.nodes[u]["node_attr2"] = f"{u}-2"
+
+        g1_hash_with_node_attr1 = nx.weisfeiler_lehman_graph_hash(
+            G1, node_attr="node_attr1"
+        )
+        g1_hash_with_node_attr2 = nx.weisfeiler_lehman_graph_hash(
+            G1, node_attr="node_attr2"
+        )
+        g1_hash_no_node_attr = nx.weisfeiler_lehman_graph_hash(G1, node_attr=None)
+
+        assert g1_hash_with_node_attr1 != g1_hash_no_node_attr
+        assert g1_hash_with_node_attr2 != g1_hash_no_node_attr
+        assert g1_hash_with_node_attr1 != g1_hash_with_node_attr2
+
+        G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
+
+        g2_hash_with_node_attr1 = nx.weisfeiler_lehman_graph_hash(
+            G2, node_attr="node_attr1"
+        )
+        g2_hash_with_node_attr2 = nx.weisfeiler_lehman_graph_hash(
+            G2, node_attr="node_attr2"
+        )
+
+        assert g1_hash_with_node_attr1 == g2_hash_with_node_attr1
+        assert g1_hash_with_node_attr2 == g2_hash_with_node_attr2
+
+
+def test_missing_node_attr():
+    """
+    If the 'node_attr' argument is supplied but is missing from a node in the graph,
+    we should raise a KeyError
+    """
+    G = nx.Graph()
+    G.add_nodes_from([(1, {"node_attr1": "a"}), (2, {})])
+    G.add_edges_from([(1, 2), (2, 3), (3, 1), (1, 4)])
+    pytest.raises(KeyError, nx.weisfeiler_lehman_graph_hash, G, node_attr="node_attr1")
+
+
+def test_isomorphic_edge_attr_and_node_attr():
+    """
+    Isomorphic graphs with differing node attributes should yield different graph
+    hashes if the 'node_attr' and 'edge_attr' argument is supplied and populated in
+    the graph, and there are no hash collisions.
+    The output should still be invariant to node-relabeling
+    """
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(1, r + 1):
+        G1 = nx.erdos_renyi_graph(n, p * i, seed=500 + i)
+
+        for u in G1.nodes():
+            G1.nodes[u]["node_attr1"] = f"{u}-1"
+            G1.nodes[u]["node_attr2"] = f"{u}-2"
+
+        for a, b in G1.edges:
+            G1[a][b]["edge_attr1"] = f"{a}-{b}-1"
+            G1[a][b]["edge_attr2"] = f"{a}-{b}-2"
+
+        g1_hash_edge1_node1 = nx.weisfeiler_lehman_graph_hash(
+            G1, edge_attr="edge_attr1", node_attr="node_attr1"
+        )
+        g1_hash_edge2_node2 = nx.weisfeiler_lehman_graph_hash(
+            G1, edge_attr="edge_attr2", node_attr="node_attr2"
+        )
+        g1_hash_edge1_node2 = nx.weisfeiler_lehman_graph_hash(
+            G1, edge_attr="edge_attr1", node_attr="node_attr2"
+        )
+        g1_hash_no_attr = nx.weisfeiler_lehman_graph_hash(G1)
+
+        assert g1_hash_edge1_node1 != g1_hash_no_attr
+        assert g1_hash_edge2_node2 != g1_hash_no_attr
+        assert g1_hash_edge1_node1 != g1_hash_edge2_node2
+        assert g1_hash_edge1_node2 != g1_hash_edge2_node2
+        assert g1_hash_edge1_node2 != g1_hash_edge1_node1
+
+        G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
+
+        g2_hash_edge1_node1 = nx.weisfeiler_lehman_graph_hash(
+            G2, edge_attr="edge_attr1", node_attr="node_attr1"
+        )
+        g2_hash_edge2_node2 = nx.weisfeiler_lehman_graph_hash(
+            G2, edge_attr="edge_attr2", node_attr="node_attr2"
+        )
+
+        assert g1_hash_edge1_node1 == g2_hash_edge1_node1
+        assert g1_hash_edge2_node2 == g2_hash_edge2_node2
+
+
+def test_digest_size():
+    """
+    The hash string lengths should be as expected for a variety of graphs and
+    digest sizes
+    """
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(1, r + 1):
+        G = nx.erdos_renyi_graph(n, p * i, seed=1000 + i)
+
+        h16 = nx.weisfeiler_lehman_graph_hash(G)
+        h32 = nx.weisfeiler_lehman_graph_hash(G, digest_size=32)
+
+        assert h16 != h32
+        assert len(h16) == 16 * 2
+        assert len(h32) == 32 * 2
+
+
+# Unit tests for the :func:`~networkx.weisfeiler_lehman_hash_subgraphs` function
+
+
+def is_subiteration(a, b):
+    """
+    returns True if that each hash sequence in 'a' is a prefix for
+    the corresponding sequence indexed by the same node in 'b'.
+    """
+    return all(b[node][: len(hashes)] == hashes for node, hashes in a.items())
+
+
+def hexdigest_sizes_correct(a, digest_size):
+    """
+    returns True if all hex digest sizes are the expected length in a node:subgraph-hashes
+    dictionary. Hex digest string length == 2 * bytes digest length since each pair of hex
+    digits encodes 1 byte (https://docs.python.org/3/library/hashlib.html)
+    """
+    hexdigest_size = digest_size * 2
+    list_digest_sizes_correct = lambda l: all(len(x) == hexdigest_size for x in l)
+    return all(list_digest_sizes_correct(hashes) for hashes in a.values())
+
+
+def test_empty_graph_subgraph_hash():
+    """ "
+    empty graphs should give empty dict subgraph hashes regardless of other params
+    """
+    G = nx.empty_graph()
+
+    subgraph_hashes1 = nx.weisfeiler_lehman_subgraph_hashes(G)
+    subgraph_hashes2 = nx.weisfeiler_lehman_subgraph_hashes(G, edge_attr="edge_attr")
+    subgraph_hashes3 = nx.weisfeiler_lehman_subgraph_hashes(G, node_attr="edge_attr")
+    subgraph_hashes4 = nx.weisfeiler_lehman_subgraph_hashes(G, iterations=2)
+    subgraph_hashes5 = nx.weisfeiler_lehman_subgraph_hashes(G, digest_size=64)
+
+    assert subgraph_hashes1 == {}
+    assert subgraph_hashes2 == {}
+    assert subgraph_hashes3 == {}
+    assert subgraph_hashes4 == {}
+    assert subgraph_hashes5 == {}
+
+
+def test_directed_subgraph_hash():
+    """
+    A directed graph with no bi-directional edges should yield different subgraph hashes
+    to the same graph taken as undirected, if all hashes don't collide.
+    """
+    r = 10
+    for i in range(r):
+        G_directed = nx.gn_graph(10 + r, seed=100 + i)
+        G_undirected = nx.to_undirected(G_directed)
+
+        directed_subgraph_hashes = nx.weisfeiler_lehman_subgraph_hashes(G_directed)
+        undirected_subgraph_hashes = nx.weisfeiler_lehman_subgraph_hashes(G_undirected)
+
+        assert directed_subgraph_hashes != undirected_subgraph_hashes
+
+
+def test_reversed_subgraph_hash():
+    """
+    A directed graph with no bi-directional edges should yield different subgraph hashes
+    to the same graph taken with edge directions reversed if there are no hash collisions.
+    Here we test a cycle graph which is the minimal counterexample
+    """
+    G = nx.cycle_graph(5, create_using=nx.DiGraph)
+    nx.set_node_attributes(G, {n: str(n) for n in G.nodes()}, name="label")
+
+    G_reversed = G.reverse()
+
+    h = nx.weisfeiler_lehman_subgraph_hashes(G, node_attr="label")
+    h_reversed = nx.weisfeiler_lehman_subgraph_hashes(G_reversed, node_attr="label")
+
+    assert h != h_reversed
+
+
+def test_isomorphic_subgraph_hash():
+    """
+    the subgraph hashes should be invariant to node-relabeling when the output is reindexed
+    by the same mapping and all hashes don't collide.
+    """
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(1, r + 1):
+        G1 = nx.erdos_renyi_graph(n, p * i, seed=200 + i)
+        G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
+
+        g1_subgraph_hashes = nx.weisfeiler_lehman_subgraph_hashes(G1)
+        g2_subgraph_hashes = nx.weisfeiler_lehman_subgraph_hashes(G2)
+
+        assert g1_subgraph_hashes == {-1 * k: v for k, v in g2_subgraph_hashes.items()}
+
+
+def test_isomorphic_edge_attr_subgraph_hash():
+    """
+    Isomorphic graphs with differing edge attributes should yield different subgraph
+    hashes if the 'edge_attr' argument is supplied and populated in the graph, and
+    all hashes don't collide.
+    The output should still be invariant to node-relabeling
+    """
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(1, r + 1):
+        G1 = nx.erdos_renyi_graph(n, p * i, seed=300 + i)
+
+        for a, b in G1.edges:
+            G1[a][b]["edge_attr1"] = f"{a}-{b}-1"
+            G1[a][b]["edge_attr2"] = f"{a}-{b}-2"
+
+        g1_hash_with_edge_attr1 = nx.weisfeiler_lehman_subgraph_hashes(
+            G1, edge_attr="edge_attr1"
+        )
+        g1_hash_with_edge_attr2 = nx.weisfeiler_lehman_subgraph_hashes(
+            G1, edge_attr="edge_attr2"
+        )
+        g1_hash_no_edge_attr = nx.weisfeiler_lehman_subgraph_hashes(G1, edge_attr=None)
+
+        assert g1_hash_with_edge_attr1 != g1_hash_no_edge_attr
+        assert g1_hash_with_edge_attr2 != g1_hash_no_edge_attr
+        assert g1_hash_with_edge_attr1 != g1_hash_with_edge_attr2
+
+        G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
+
+        g2_hash_with_edge_attr1 = nx.weisfeiler_lehman_subgraph_hashes(
+            G2, edge_attr="edge_attr1"
+        )
+        g2_hash_with_edge_attr2 = nx.weisfeiler_lehman_subgraph_hashes(
+            G2, edge_attr="edge_attr2"
+        )
+
+        assert g1_hash_with_edge_attr1 == {
+            -1 * k: v for k, v in g2_hash_with_edge_attr1.items()
+        }
+        assert g1_hash_with_edge_attr2 == {
+            -1 * k: v for k, v in g2_hash_with_edge_attr2.items()
+        }
+
+
+def test_missing_edge_attr_subgraph_hash():
+    """
+    If the 'edge_attr' argument is supplied but is missing from an edge in the graph,
+    we should raise a KeyError
+    """
+    G = nx.Graph()
+    G.add_edges_from([(1, 2, {"edge_attr1": "a"}), (1, 3, {})])
+    pytest.raises(
+        KeyError, nx.weisfeiler_lehman_subgraph_hashes, G, edge_attr="edge_attr1"
+    )
+
+
+def test_isomorphic_node_attr_subgraph_hash():
+    """
+    Isomorphic graphs with differing node attributes should yield different subgraph
+    hashes if the 'node_attr' argument is supplied and populated in the graph, and
+    all hashes don't collide.
+    The output should still be invariant to node-relabeling
+    """
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(1, r + 1):
+        G1 = nx.erdos_renyi_graph(n, p * i, seed=400 + i)
+
+        for u in G1.nodes():
+            G1.nodes[u]["node_attr1"] = f"{u}-1"
+            G1.nodes[u]["node_attr2"] = f"{u}-2"
+
+        g1_hash_with_node_attr1 = nx.weisfeiler_lehman_subgraph_hashes(
+            G1, node_attr="node_attr1"
+        )
+        g1_hash_with_node_attr2 = nx.weisfeiler_lehman_subgraph_hashes(
+            G1, node_attr="node_attr2"
+        )
+        g1_hash_no_node_attr = nx.weisfeiler_lehman_subgraph_hashes(G1, node_attr=None)
+
+        assert g1_hash_with_node_attr1 != g1_hash_no_node_attr
+        assert g1_hash_with_node_attr2 != g1_hash_no_node_attr
+        assert g1_hash_with_node_attr1 != g1_hash_with_node_attr2
+
+        G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
+
+        g2_hash_with_node_attr1 = nx.weisfeiler_lehman_subgraph_hashes(
+            G2, node_attr="node_attr1"
+        )
+        g2_hash_with_node_attr2 = nx.weisfeiler_lehman_subgraph_hashes(
+            G2, node_attr="node_attr2"
+        )
+
+        assert g1_hash_with_node_attr1 == {
+            -1 * k: v for k, v in g2_hash_with_node_attr1.items()
+        }
+        assert g1_hash_with_node_attr2 == {
+            -1 * k: v for k, v in g2_hash_with_node_attr2.items()
+        }
+
+
+def test_missing_node_attr_subgraph_hash():
+    """
+    If the 'node_attr' argument is supplied but is missing from a node in the graph,
+    we should raise a KeyError
+    """
+    G = nx.Graph()
+    G.add_nodes_from([(1, {"node_attr1": "a"}), (2, {})])
+    G.add_edges_from([(1, 2), (2, 3), (3, 1), (1, 4)])
+    pytest.raises(
+        KeyError, nx.weisfeiler_lehman_subgraph_hashes, G, node_attr="node_attr1"
+    )
+
+
+def test_isomorphic_edge_attr_and_node_attr_subgraph_hash():
+    """
+    Isomorphic graphs with differing node attributes should yield different subgraph
+    hashes if the 'node_attr' and 'edge_attr' argument is supplied and populated in
+    the graph, and all hashes don't collide
+    The output should still be invariant to node-relabeling
+    """
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(1, r + 1):
+        G1 = nx.erdos_renyi_graph(n, p * i, seed=500 + i)
+
+        for u in G1.nodes():
+            G1.nodes[u]["node_attr1"] = f"{u}-1"
+            G1.nodes[u]["node_attr2"] = f"{u}-2"
+
+        for a, b in G1.edges:
+            G1[a][b]["edge_attr1"] = f"{a}-{b}-1"
+            G1[a][b]["edge_attr2"] = f"{a}-{b}-2"
+
+        g1_hash_edge1_node1 = nx.weisfeiler_lehman_subgraph_hashes(
+            G1, edge_attr="edge_attr1", node_attr="node_attr1"
+        )
+        g1_hash_edge2_node2 = nx.weisfeiler_lehman_subgraph_hashes(
+            G1, edge_attr="edge_attr2", node_attr="node_attr2"
+        )
+        g1_hash_edge1_node2 = nx.weisfeiler_lehman_subgraph_hashes(
+            G1, edge_attr="edge_attr1", node_attr="node_attr2"
+        )
+        g1_hash_no_attr = nx.weisfeiler_lehman_subgraph_hashes(G1)
+
+        assert g1_hash_edge1_node1 != g1_hash_no_attr
+        assert g1_hash_edge2_node2 != g1_hash_no_attr
+        assert g1_hash_edge1_node1 != g1_hash_edge2_node2
+        assert g1_hash_edge1_node2 != g1_hash_edge2_node2
+        assert g1_hash_edge1_node2 != g1_hash_edge1_node1
+
+        G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
+
+        g2_hash_edge1_node1 = nx.weisfeiler_lehman_subgraph_hashes(
+            G2, edge_attr="edge_attr1", node_attr="node_attr1"
+        )
+        g2_hash_edge2_node2 = nx.weisfeiler_lehman_subgraph_hashes(
+            G2, edge_attr="edge_attr2", node_attr="node_attr2"
+        )
+
+        assert g1_hash_edge1_node1 == {
+            -1 * k: v for k, v in g2_hash_edge1_node1.items()
+        }
+        assert g1_hash_edge2_node2 == {
+            -1 * k: v for k, v in g2_hash_edge2_node2.items()
+        }
+
+
+def test_iteration_depth():
+    """
+    All nodes should have the correct number of subgraph hashes in the output when
+    using degree as initial node labels
+    Subsequent iteration depths for the same graph should be additive for each node
+    """
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(1, r + 1):
+        G = nx.erdos_renyi_graph(n, p * i, seed=600 + i)
+
+        depth3 = nx.weisfeiler_lehman_subgraph_hashes(G, iterations=3)
+        depth4 = nx.weisfeiler_lehman_subgraph_hashes(G, iterations=4)
+        depth5 = nx.weisfeiler_lehman_subgraph_hashes(G, iterations=5)
+
+        assert all(len(hashes) == 3 for hashes in depth3.values())
+        assert all(len(hashes) == 4 for hashes in depth4.values())
+        assert all(len(hashes) == 5 for hashes in depth5.values())
+
+        assert is_subiteration(depth3, depth4)
+        assert is_subiteration(depth4, depth5)
+        assert is_subiteration(depth3, depth5)
+
+
+def test_iteration_depth_edge_attr():
+    """
+    All nodes should have the correct number of subgraph hashes in the output when
+    setting initial node labels empty and using an edge attribute when aggregating
+    neighborhoods.
+    Subsequent iteration depths for the same graph should be additive for each node
+    """
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(1, r + 1):
+        G = nx.erdos_renyi_graph(n, p * i, seed=700 + i)
+
+        for a, b in G.edges:
+            G[a][b]["edge_attr1"] = f"{a}-{b}-1"
+
+        depth3 = nx.weisfeiler_lehman_subgraph_hashes(
+            G, edge_attr="edge_attr1", iterations=3
+        )
+        depth4 = nx.weisfeiler_lehman_subgraph_hashes(
+            G, edge_attr="edge_attr1", iterations=4
+        )
+        depth5 = nx.weisfeiler_lehman_subgraph_hashes(
+            G, edge_attr="edge_attr1", iterations=5
+        )
+
+        assert all(len(hashes) == 3 for hashes in depth3.values())
+        assert all(len(hashes) == 4 for hashes in depth4.values())
+        assert all(len(hashes) == 5 for hashes in depth5.values())
+
+        assert is_subiteration(depth3, depth4)
+        assert is_subiteration(depth4, depth5)
+        assert is_subiteration(depth3, depth5)
+
+
+def test_iteration_depth_node_attr():
+    """
+    All nodes should have the correct number of subgraph hashes in the output when
+    setting initial node labels to an attribute.
+    Subsequent iteration depths for the same graph should be additive for each node
+    """
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(1, r + 1):
+        G = nx.erdos_renyi_graph(n, p * i, seed=800 + i)
+
+        for u in G.nodes():
+            G.nodes[u]["node_attr1"] = f"{u}-1"
+
+        depth3 = nx.weisfeiler_lehman_subgraph_hashes(
+            G, node_attr="node_attr1", iterations=3
+        )
+        depth4 = nx.weisfeiler_lehman_subgraph_hashes(
+            G, node_attr="node_attr1", iterations=4
+        )
+        depth5 = nx.weisfeiler_lehman_subgraph_hashes(
+            G, node_attr="node_attr1", iterations=5
+        )
+
+        assert all(len(hashes) == 3 for hashes in depth3.values())
+        assert all(len(hashes) == 4 for hashes in depth4.values())
+        assert all(len(hashes) == 5 for hashes in depth5.values())
+
+        assert is_subiteration(depth3, depth4)
+        assert is_subiteration(depth4, depth5)
+        assert is_subiteration(depth3, depth5)
+
+
+def test_iteration_depth_node_edge_attr():
+    """
+    All nodes should have the correct number of subgraph hashes in the output when
+    setting initial node labels to an attribute and also using an edge attribute when
+    aggregating neighborhoods.
+    Subsequent iteration depths for the same graph should be additive for each node
+    """
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(1, r + 1):
+        G = nx.erdos_renyi_graph(n, p * i, seed=900 + i)
+
+        for u in G.nodes():
+            G.nodes[u]["node_attr1"] = f"{u}-1"
+
+        for a, b in G.edges:
+            G[a][b]["edge_attr1"] = f"{a}-{b}-1"
+
+        depth3 = nx.weisfeiler_lehman_subgraph_hashes(
+            G, edge_attr="edge_attr1", node_attr="node_attr1", iterations=3
+        )
+        depth4 = nx.weisfeiler_lehman_subgraph_hashes(
+            G, edge_attr="edge_attr1", node_attr="node_attr1", iterations=4
+        )
+        depth5 = nx.weisfeiler_lehman_subgraph_hashes(
+            G, edge_attr="edge_attr1", node_attr="node_attr1", iterations=5
+        )
+
+        assert all(len(hashes) == 3 for hashes in depth3.values())
+        assert all(len(hashes) == 4 for hashes in depth4.values())
+        assert all(len(hashes) == 5 for hashes in depth5.values())
+
+        assert is_subiteration(depth3, depth4)
+        assert is_subiteration(depth4, depth5)
+        assert is_subiteration(depth3, depth5)
+
+
+def test_digest_size_subgraph_hash():
+    """
+    The hash string lengths should be as expected for a variety of graphs and
+    digest sizes
+    """
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(1, r + 1):
+        G = nx.erdos_renyi_graph(n, p * i, seed=1000 + i)
+
+        digest_size16_hashes = nx.weisfeiler_lehman_subgraph_hashes(G)
+        digest_size32_hashes = nx.weisfeiler_lehman_subgraph_hashes(G, digest_size=32)
+
+        assert digest_size16_hashes != digest_size32_hashes
+
+        assert hexdigest_sizes_correct(digest_size16_hashes, 16)
+        assert hexdigest_sizes_correct(digest_size32_hashes, 32)
+
+
+def test_initial_node_labels_subgraph_hash():
+    """
+    Including the hashed initial label prepends an extra hash to the lists
+    """
+    G = nx.path_graph(5)
+    nx.set_node_attributes(G, {i: int(0 < i < 4) for i in G}, "label")
+    # initial node labels:
+    # 0--1--1--1--0
+
+    without_initial_label = nx.weisfeiler_lehman_subgraph_hashes(G, node_attr="label")
+    assert all(len(v) == 3 for v in without_initial_label.values())
+    # 3 different 1 hop nhds
+    assert len({v[0] for v in without_initial_label.values()}) == 3
+
+    with_initial_label = nx.weisfeiler_lehman_subgraph_hashes(
+        G, node_attr="label", include_initial_labels=True
+    )
+    assert all(len(v) == 4 for v in with_initial_label.values())
+    # 2 different initial labels
+    assert len({v[0] for v in with_initial_label.values()}) == 2
+
+    # check hashes match otherwise
+    for u in G:
+        for a, b in zip(
+            with_initial_label[u][1:], without_initial_label[u], strict=True
+        ):
+            assert a == b
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graphical.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graphical.py
new file mode 100644
index 00000000..99f766f7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graphical.py
@@ -0,0 +1,163 @@
+import pytest
+
+import networkx as nx
+
+
+def test_valid_degree_sequence1():
+    n = 100
+    p = 0.3
+    for i in range(10):
+        G = nx.erdos_renyi_graph(n, p)
+        deg = (d for n, d in G.degree())
+        assert nx.is_graphical(deg, method="eg")
+        assert nx.is_graphical(deg, method="hh")
+
+
+def test_valid_degree_sequence2():
+    n = 100
+    for i in range(10):
+        G = nx.barabasi_albert_graph(n, 1)
+        deg = (d for n, d in G.degree())
+        assert nx.is_graphical(deg, method="eg")
+        assert nx.is_graphical(deg, method="hh")
+
+
+def test_string_input():
+    pytest.raises(nx.NetworkXException, nx.is_graphical, [], "foo")
+    pytest.raises(nx.NetworkXException, nx.is_graphical, ["red"], "hh")
+    pytest.raises(nx.NetworkXException, nx.is_graphical, ["red"], "eg")
+
+
+def test_non_integer_input():
+    pytest.raises(nx.NetworkXException, nx.is_graphical, [72.5], "eg")
+    pytest.raises(nx.NetworkXException, nx.is_graphical, [72.5], "hh")
+
+
+def test_negative_input():
+    assert not nx.is_graphical([-1], "hh")
+    assert not nx.is_graphical([-1], "eg")
+
+
+class TestAtlas:
+    @classmethod
+    def setup_class(cls):
+        global atlas
+        from networkx.generators import atlas
+
+        cls.GAG = atlas.graph_atlas_g()
+
+    def test_atlas(self):
+        for graph in self.GAG:
+            deg = (d for n, d in graph.degree())
+            assert nx.is_graphical(deg, method="eg")
+            assert nx.is_graphical(deg, method="hh")
+
+
+def test_small_graph_true():
+    z = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]
+    assert nx.is_graphical(z, method="hh")
+    assert nx.is_graphical(z, method="eg")
+    z = [10, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2]
+    assert nx.is_graphical(z, method="hh")
+    assert nx.is_graphical(z, method="eg")
+    z = [1, 1, 1, 1, 1, 2, 2, 2, 3, 4]
+    assert nx.is_graphical(z, method="hh")
+    assert nx.is_graphical(z, method="eg")
+
+
+def test_small_graph_false():
+    z = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]
+    assert not nx.is_graphical(z, method="hh")
+    assert not nx.is_graphical(z, method="eg")
+    z = [6, 5, 4, 4, 2, 1, 1, 1]
+    assert not nx.is_graphical(z, method="hh")
+    assert not nx.is_graphical(z, method="eg")
+    z = [1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4]
+    assert not nx.is_graphical(z, method="hh")
+    assert not nx.is_graphical(z, method="eg")
+
+
+def test_directed_degree_sequence():
+    # Test a range of valid directed degree sequences
+    n, r = 100, 10
+    p = 1.0 / r
+    for i in range(r):
+        G = nx.erdos_renyi_graph(n, p * (i + 1), None, True)
+        din = (d for n, d in G.in_degree())
+        dout = (d for n, d in G.out_degree())
+        assert nx.is_digraphical(din, dout)
+
+
+def test_small_directed_sequences():
+    dout = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]
+    din = [3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 1]
+    assert nx.is_digraphical(din, dout)
+    # Test nongraphical directed sequence
+    dout = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]
+    din = [103, 102, 102, 102, 102, 102, 102, 102, 102, 102]
+    assert not nx.is_digraphical(din, dout)
+    # Test digraphical small sequence
+    dout = [1, 1, 1, 1, 1, 2, 2, 2, 3, 4]
+    din = [2, 2, 2, 2, 2, 2, 2, 2, 1, 1]
+    assert nx.is_digraphical(din, dout)
+    # Test nonmatching sum
+    din = [2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1]
+    assert not nx.is_digraphical(din, dout)
+    # Test for negative integer in sequence
+    din = [2, 2, 2, -2, 2, 2, 2, 2, 1, 1, 4]
+    assert not nx.is_digraphical(din, dout)
+    # Test for noninteger
+    din = dout = [1, 1, 1.1, 1]
+    assert not nx.is_digraphical(din, dout)
+    din = dout = [1, 1, "rer", 1]
+    assert not nx.is_digraphical(din, dout)
+
+
+def test_multi_sequence():
+    # Test nongraphical multi sequence
+    seq = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1]
+    assert not nx.is_multigraphical(seq)
+    # Test small graphical multi sequence
+    seq = [6, 5, 4, 4, 2, 1, 1, 1]
+    assert nx.is_multigraphical(seq)
+    # Test for negative integer in sequence
+    seq = [6, 5, 4, -4, 2, 1, 1, 1]
+    assert not nx.is_multigraphical(seq)
+    # Test for sequence with odd sum
+    seq = [1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4]
+    assert not nx.is_multigraphical(seq)
+    # Test for noninteger
+    seq = [1, 1, 1.1, 1]
+    assert not nx.is_multigraphical(seq)
+    seq = [1, 1, "rer", 1]
+    assert not nx.is_multigraphical(seq)
+
+
+def test_pseudo_sequence():
+    # Test small valid pseudo sequence
+    seq = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1]
+    assert nx.is_pseudographical(seq)
+    # Test for sequence with odd sum
+    seq = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]
+    assert not nx.is_pseudographical(seq)
+    # Test for negative integer in sequence
+    seq = [1000, 3, 3, 3, 3, 2, 2, -2, 1, 1]
+    assert not nx.is_pseudographical(seq)
+    # Test for noninteger
+    seq = [1, 1, 1.1, 1]
+    assert not nx.is_pseudographical(seq)
+    seq = [1, 1, "rer", 1]
+    assert not nx.is_pseudographical(seq)
+
+
+def test_numpy_degree_sequence():
+    np = pytest.importorskip("numpy")
+    ds = np.array([1, 2, 2, 2, 1], dtype=np.int64)
+    assert nx.is_graphical(ds, "eg")
+    assert nx.is_graphical(ds, "hh")
+    ds = np.array([1, 2, 2, 2, 1], dtype=np.float64)
+    assert nx.is_graphical(ds, "eg")
+    assert nx.is_graphical(ds, "hh")
+    ds = np.array([1.1, 2, 2, 2, 1], dtype=np.float64)
+    pytest.raises(nx.NetworkXException, nx.is_graphical, ds, "eg")
+    pytest.raises(nx.NetworkXException, nx.is_graphical, ds, "hh")
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hierarchy.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hierarchy.py
new file mode 100644
index 00000000..eaa6a67b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hierarchy.py
@@ -0,0 +1,46 @@
+import pytest
+
+import networkx as nx
+
+
+def test_hierarchy_undirected():
+    G = nx.cycle_graph(5)
+    pytest.raises(nx.NetworkXError, nx.flow_hierarchy, G)
+
+
+def test_hierarchy_cycle():
+    G = nx.cycle_graph(5, create_using=nx.DiGraph())
+    assert nx.flow_hierarchy(G) == 0.0
+
+
+def test_hierarchy_tree():
+    G = nx.full_rary_tree(2, 16, create_using=nx.DiGraph())
+    assert nx.flow_hierarchy(G) == 1.0
+
+
+def test_hierarchy_1():
+    G = nx.DiGraph()
+    G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 1), (3, 4), (0, 4)])
+    assert nx.flow_hierarchy(G) == 0.5
+
+
+def test_hierarchy_weight():
+    G = nx.DiGraph()
+    G.add_edges_from(
+        [
+            (0, 1, {"weight": 0.3}),
+            (1, 2, {"weight": 0.1}),
+            (2, 3, {"weight": 0.1}),
+            (3, 1, {"weight": 0.1}),
+            (3, 4, {"weight": 0.3}),
+            (0, 4, {"weight": 0.3}),
+        ]
+    )
+    assert nx.flow_hierarchy(G, weight="weight") == 0.75
+
+
+@pytest.mark.parametrize("n", (0, 1, 3))
+def test_hierarchy_empty_graph(n):
+    G = nx.empty_graph(n, create_using=nx.DiGraph)
+    with pytest.raises(nx.NetworkXError, match=".*not applicable to empty graphs"):
+        nx.flow_hierarchy(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hybrid.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hybrid.py
new file mode 100644
index 00000000..6af00164
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hybrid.py
@@ -0,0 +1,24 @@
+import networkx as nx
+
+
+def test_2d_grid_graph():
+    # FC article claims 2d grid graph of size n is (3,3)-connected
+    # and (5,9)-connected, but I don't think it is (5,9)-connected
+    G = nx.grid_2d_graph(8, 8, periodic=True)
+    assert nx.is_kl_connected(G, 3, 3)
+    assert not nx.is_kl_connected(G, 5, 9)
+    (H, graphOK) = nx.kl_connected_subgraph(G, 5, 9, same_as_graph=True)
+    assert not graphOK
+
+
+def test_small_graph():
+    G = nx.Graph()
+    G.add_edge(1, 2)
+    G.add_edge(1, 3)
+    G.add_edge(2, 3)
+    assert nx.is_kl_connected(G, 2, 2)
+    H = nx.kl_connected_subgraph(G, 2, 2)
+    (H, graphOK) = nx.kl_connected_subgraph(
+        G, 2, 2, low_memory=True, same_as_graph=True
+    )
+    assert graphOK
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_isolate.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_isolate.py
new file mode 100644
index 00000000..d29b306d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_isolate.py
@@ -0,0 +1,26 @@
+"""Unit tests for the :mod:`networkx.algorithms.isolates` module."""
+
+import networkx as nx
+
+
+def test_is_isolate():
+    G = nx.Graph()
+    G.add_edge(0, 1)
+    G.add_node(2)
+    assert not nx.is_isolate(G, 0)
+    assert not nx.is_isolate(G, 1)
+    assert nx.is_isolate(G, 2)
+
+
+def test_isolates():
+    G = nx.Graph()
+    G.add_edge(0, 1)
+    G.add_nodes_from([2, 3])
+    assert sorted(nx.isolates(G)) == [2, 3]
+
+
+def test_number_of_isolates():
+    G = nx.Graph()
+    G.add_edge(0, 1)
+    G.add_nodes_from([2, 3])
+    assert nx.number_of_isolates(G) == 2
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_link_prediction.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_link_prediction.py
new file mode 100644
index 00000000..0878496b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_link_prediction.py
@@ -0,0 +1,586 @@
+import math
+from functools import partial
+
+import pytest
+
+import networkx as nx
+
+
+def _test_func(G, ebunch, expected, predict_func, **kwargs):
+    result = predict_func(G, ebunch, **kwargs)
+    exp_dict = {tuple(sorted([u, v])): score for u, v, score in expected}
+    res_dict = {tuple(sorted([u, v])): score for u, v, score in result}
+
+    assert len(exp_dict) == len(res_dict)
+    for p in exp_dict:
+        assert exp_dict[p] == pytest.approx(res_dict[p], abs=1e-7)
+
+
+class TestResourceAllocationIndex:
+    @classmethod
+    def setup_class(cls):
+        cls.func = staticmethod(nx.resource_allocation_index)
+        cls.test = partial(_test_func, predict_func=cls.func)
+
+    def test_K5(self):
+        G = nx.complete_graph(5)
+        self.test(G, [(0, 1)], [(0, 1, 0.75)])
+
+    def test_P3(self):
+        G = nx.path_graph(3)
+        self.test(G, [(0, 2)], [(0, 2, 0.5)])
+
+    def test_S4(self):
+        G = nx.star_graph(4)
+        self.test(G, [(1, 2)], [(1, 2, 0.25)])
+
+    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
+    def test_notimplemented(self, graph_type):
+        assert pytest.raises(
+            nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)]
+        )
+
+    def test_node_not_found(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])
+
+    def test_no_common_neighbor(self):
+        G = nx.Graph()
+        G.add_nodes_from([0, 1])
+        self.test(G, [(0, 1)], [(0, 1, 0)])
+
+    def test_equal_nodes(self):
+        G = nx.complete_graph(4)
+        self.test(G, [(0, 0)], [(0, 0, 1)])
+
+    def test_all_nonexistent_edges(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        self.test(G, None, [(0, 3, 0.5), (1, 2, 0.5), (1, 3, 0)])
+
+
+class TestJaccardCoefficient:
+    @classmethod
+    def setup_class(cls):
+        cls.func = staticmethod(nx.jaccard_coefficient)
+        cls.test = partial(_test_func, predict_func=cls.func)
+
+    def test_K5(self):
+        G = nx.complete_graph(5)
+        self.test(G, [(0, 1)], [(0, 1, 0.6)])
+
+    def test_P4(self):
+        G = nx.path_graph(4)
+        self.test(G, [(0, 2)], [(0, 2, 0.5)])
+
+    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
+    def test_notimplemented(self, graph_type):
+        assert pytest.raises(
+            nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)]
+        )
+
+    def test_node_not_found(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])
+
+    def test_no_common_neighbor(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (2, 3)])
+        self.test(G, [(0, 2)], [(0, 2, 0)])
+
+    def test_isolated_nodes(self):
+        G = nx.Graph()
+        G.add_nodes_from([0, 1])
+        self.test(G, [(0, 1)], [(0, 1, 0)])
+
+    def test_all_nonexistent_edges(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        self.test(G, None, [(0, 3, 0.5), (1, 2, 0.5), (1, 3, 0)])
+
+
+class TestAdamicAdarIndex:
+    @classmethod
+    def setup_class(cls):
+        cls.func = staticmethod(nx.adamic_adar_index)
+        cls.test = partial(_test_func, predict_func=cls.func)
+
+    def test_K5(self):
+        G = nx.complete_graph(5)
+        self.test(G, [(0, 1)], [(0, 1, 3 / math.log(4))])
+
+    def test_P3(self):
+        G = nx.path_graph(3)
+        self.test(G, [(0, 2)], [(0, 2, 1 / math.log(2))])
+
+    def test_S4(self):
+        G = nx.star_graph(4)
+        self.test(G, [(1, 2)], [(1, 2, 1 / math.log(4))])
+
+    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
+    def test_notimplemented(self, graph_type):
+        assert pytest.raises(
+            nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)]
+        )
+
+    def test_node_not_found(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])
+
+    def test_no_common_neighbor(self):
+        G = nx.Graph()
+        G.add_nodes_from([0, 1])
+        self.test(G, [(0, 1)], [(0, 1, 0)])
+
+    def test_equal_nodes(self):
+        G = nx.complete_graph(4)
+        self.test(G, [(0, 0)], [(0, 0, 3 / math.log(3))])
+
+    def test_all_nonexistent_edges(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        self.test(
+            G, None, [(0, 3, 1 / math.log(2)), (1, 2, 1 / math.log(2)), (1, 3, 0)]
+        )
+
+
+class TestCommonNeighborCentrality:
+    @classmethod
+    def setup_class(cls):
+        cls.func = staticmethod(nx.common_neighbor_centrality)
+        cls.test = partial(_test_func, predict_func=cls.func)
+
+    def test_K5(self):
+        G = nx.complete_graph(5)
+        self.test(G, [(0, 1)], [(0, 1, 3.0)], alpha=1)
+        self.test(G, [(0, 1)], [(0, 1, 5.0)], alpha=0)
+
+    def test_P3(self):
+        G = nx.path_graph(3)
+        self.test(G, [(0, 2)], [(0, 2, 1.25)], alpha=0.5)
+
+    def test_S4(self):
+        G = nx.star_graph(4)
+        self.test(G, [(1, 2)], [(1, 2, 1.75)], alpha=0.5)
+
+    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
+    def test_notimplemented(self, graph_type):
+        assert pytest.raises(
+            nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)]
+        )
+
+    def test_node_u_not_found(self):
+        G = nx.Graph()
+        G.add_edges_from([(1, 3), (2, 3)])
+        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 1)])
+
+    def test_node_v_not_found(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])
+
+    def test_no_common_neighbor(self):
+        G = nx.Graph()
+        G.add_nodes_from([0, 1])
+        self.test(G, [(0, 1)], [(0, 1, 0)])
+
+    def test_equal_nodes(self):
+        G = nx.complete_graph(4)
+        assert pytest.raises(nx.NetworkXAlgorithmError, self.test, G, [(0, 0)], [])
+
+    def test_equal_nodes_with_alpha_one_raises_error(self):
+        G = nx.complete_graph(4)
+        assert pytest.raises(
+            nx.NetworkXAlgorithmError, self.test, G, [(0, 0)], [], alpha=1.0
+        )
+
+    def test_all_nonexistent_edges(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        self.test(G, None, [(0, 3, 1.5), (1, 2, 1.5), (1, 3, 2 / 3)], alpha=0.5)
+
+
+class TestPreferentialAttachment:
+    @classmethod
+    def setup_class(cls):
+        cls.func = staticmethod(nx.preferential_attachment)
+        cls.test = partial(_test_func, predict_func=cls.func)
+
+    def test_K5(self):
+        G = nx.complete_graph(5)
+        self.test(G, [(0, 1)], [(0, 1, 16)])
+
+    def test_P3(self):
+        G = nx.path_graph(3)
+        self.test(G, [(0, 1)], [(0, 1, 2)])
+
+    def test_S4(self):
+        G = nx.star_graph(4)
+        self.test(G, [(0, 2)], [(0, 2, 4)])
+
+    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
+    def test_notimplemented(self, graph_type):
+        assert pytest.raises(
+            nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)]
+        )
+
+    def test_node_not_found(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])
+
+    def test_zero_degrees(self):
+        G = nx.Graph()
+        G.add_nodes_from([0, 1])
+        self.test(G, [(0, 1)], [(0, 1, 0)])
+
+    def test_all_nonexistent_edges(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        self.test(G, None, [(0, 3, 2), (1, 2, 2), (1, 3, 1)])
+
+
+class TestCNSoundarajanHopcroft:
+    @classmethod
+    def setup_class(cls):
+        cls.func = staticmethod(nx.cn_soundarajan_hopcroft)
+        cls.test = partial(_test_func, predict_func=cls.func, community="community")
+
+    def test_K5(self):
+        G = nx.complete_graph(5)
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 0
+        G.nodes[4]["community"] = 1
+        self.test(G, [(0, 1)], [(0, 1, 5)])
+
+    def test_P3(self):
+        G = nx.path_graph(3)
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 1
+        G.nodes[2]["community"] = 0
+        self.test(G, [(0, 2)], [(0, 2, 1)])
+
+    def test_S4(self):
+        G = nx.star_graph(4)
+        G.nodes[0]["community"] = 1
+        G.nodes[1]["community"] = 1
+        G.nodes[2]["community"] = 1
+        G.nodes[3]["community"] = 0
+        G.nodes[4]["community"] = 0
+        self.test(G, [(1, 2)], [(1, 2, 2)])
+
+    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
+    def test_notimplemented(self, graph_type):
+        G = graph_type([(0, 1), (1, 2)])
+        G.add_nodes_from([0, 1, 2], community=0)
+        assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
+
+    def test_node_not_found(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 1
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 0
+        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])
+
+    def test_no_common_neighbor(self):
+        G = nx.Graph()
+        G.add_nodes_from([0, 1])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        self.test(G, [(0, 1)], [(0, 1, 0)])
+
+    def test_equal_nodes(self):
+        G = nx.complete_graph(3)
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        G.nodes[2]["community"] = 0
+        self.test(G, [(0, 0)], [(0, 0, 4)])
+
+    def test_different_community(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 1
+        self.test(G, [(0, 3)], [(0, 3, 2)])
+
+    def test_no_community_information(self):
+        G = nx.complete_graph(5)
+        assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)]))
+
+    def test_insufficient_community_information(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        G.nodes[3]["community"] = 0
+        assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)]))
+
+    def test_sufficient_community_information(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)])
+        G.nodes[1]["community"] = 0
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 0
+        G.nodes[4]["community"] = 0
+        self.test(G, [(1, 4)], [(1, 4, 4)])
+
+    def test_custom_community_attribute_name(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+        G.nodes[0]["cmty"] = 0
+        G.nodes[1]["cmty"] = 0
+        G.nodes[2]["cmty"] = 0
+        G.nodes[3]["cmty"] = 1
+        self.test(G, [(0, 3)], [(0, 3, 2)], community="cmty")
+
+    def test_all_nonexistent_edges(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 1
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 0
+        self.test(G, None, [(0, 3, 2), (1, 2, 1), (1, 3, 0)])
+
+
+class TestRAIndexSoundarajanHopcroft:
+    @classmethod
+    def setup_class(cls):
+        cls.func = staticmethod(nx.ra_index_soundarajan_hopcroft)
+        cls.test = partial(_test_func, predict_func=cls.func, community="community")
+
+    def test_K5(self):
+        G = nx.complete_graph(5)
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 0
+        G.nodes[4]["community"] = 1
+        self.test(G, [(0, 1)], [(0, 1, 0.5)])
+
+    def test_P3(self):
+        G = nx.path_graph(3)
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 1
+        G.nodes[2]["community"] = 0
+        self.test(G, [(0, 2)], [(0, 2, 0)])
+
+    def test_S4(self):
+        G = nx.star_graph(4)
+        G.nodes[0]["community"] = 1
+        G.nodes[1]["community"] = 1
+        G.nodes[2]["community"] = 1
+        G.nodes[3]["community"] = 0
+        G.nodes[4]["community"] = 0
+        self.test(G, [(1, 2)], [(1, 2, 0.25)])
+
+    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
+    def test_notimplemented(self, graph_type):
+        G = graph_type([(0, 1), (1, 2)])
+        G.add_nodes_from([0, 1, 2], community=0)
+        assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
+
+    def test_node_not_found(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 1
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 0
+        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])
+
+    def test_no_common_neighbor(self):
+        G = nx.Graph()
+        G.add_nodes_from([0, 1])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        self.test(G, [(0, 1)], [(0, 1, 0)])
+
+    def test_equal_nodes(self):
+        G = nx.complete_graph(3)
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        G.nodes[2]["community"] = 0
+        self.test(G, [(0, 0)], [(0, 0, 1)])
+
+    def test_different_community(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 1
+        self.test(G, [(0, 3)], [(0, 3, 0)])
+
+    def test_no_community_information(self):
+        G = nx.complete_graph(5)
+        assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)]))
+
+    def test_insufficient_community_information(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        G.nodes[3]["community"] = 0
+        assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)]))
+
+    def test_sufficient_community_information(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)])
+        G.nodes[1]["community"] = 0
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 0
+        G.nodes[4]["community"] = 0
+        self.test(G, [(1, 4)], [(1, 4, 1)])
+
+    def test_custom_community_attribute_name(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+        G.nodes[0]["cmty"] = 0
+        G.nodes[1]["cmty"] = 0
+        G.nodes[2]["cmty"] = 0
+        G.nodes[3]["cmty"] = 1
+        self.test(G, [(0, 3)], [(0, 3, 0)], community="cmty")
+
+    def test_all_nonexistent_edges(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 1
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 0
+        self.test(G, None, [(0, 3, 0.5), (1, 2, 0), (1, 3, 0)])
+
+
+class TestWithinInterCluster:
+    @classmethod
+    def setup_class(cls):
+        cls.delta = 0.001
+        cls.func = staticmethod(nx.within_inter_cluster)
+        cls.test = partial(
+            _test_func, predict_func=cls.func, delta=cls.delta, community="community"
+        )
+
+    def test_K5(self):
+        G = nx.complete_graph(5)
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 0
+        G.nodes[4]["community"] = 1
+        self.test(G, [(0, 1)], [(0, 1, 2 / (1 + self.delta))])
+
+    def test_P3(self):
+        G = nx.path_graph(3)
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 1
+        G.nodes[2]["community"] = 0
+        self.test(G, [(0, 2)], [(0, 2, 0)])
+
+    def test_S4(self):
+        G = nx.star_graph(4)
+        G.nodes[0]["community"] = 1
+        G.nodes[1]["community"] = 1
+        G.nodes[2]["community"] = 1
+        G.nodes[3]["community"] = 0
+        G.nodes[4]["community"] = 0
+        self.test(G, [(1, 2)], [(1, 2, 1 / self.delta)])
+
+    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
+    def test_notimplemented(self, graph_type):
+        G = graph_type([(0, 1), (1, 2)])
+        G.add_nodes_from([0, 1, 2], community=0)
+        assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
+
+    def test_node_not_found(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 1
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 0
+        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])
+
+    def test_no_common_neighbor(self):
+        G = nx.Graph()
+        G.add_nodes_from([0, 1])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        self.test(G, [(0, 1)], [(0, 1, 0)])
+
+    def test_equal_nodes(self):
+        G = nx.complete_graph(3)
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        G.nodes[2]["community"] = 0
+        self.test(G, [(0, 0)], [(0, 0, 2 / self.delta)])
+
+    def test_different_community(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 1
+        self.test(G, [(0, 3)], [(0, 3, 0)])
+
+    def test_no_inter_cluster_common_neighbor(self):
+        G = nx.complete_graph(4)
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 0
+        self.test(G, [(0, 3)], [(0, 3, 2 / self.delta)])
+
+    def test_no_community_information(self):
+        G = nx.complete_graph(5)
+        assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)]))
+
+    def test_insufficient_community_information(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 0
+        G.nodes[3]["community"] = 0
+        assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)]))
+
+    def test_sufficient_community_information(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)])
+        G.nodes[1]["community"] = 0
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 0
+        G.nodes[4]["community"] = 0
+        self.test(G, [(1, 4)], [(1, 4, 2 / self.delta)])
+
+    def test_invalid_delta(self):
+        G = nx.complete_graph(3)
+        G.add_nodes_from([0, 1, 2], community=0)
+        assert pytest.raises(nx.NetworkXAlgorithmError, self.func, G, [(0, 1)], 0)
+        assert pytest.raises(nx.NetworkXAlgorithmError, self.func, G, [(0, 1)], -0.5)
+
+    def test_custom_community_attribute_name(self):
+        G = nx.complete_graph(4)
+        G.nodes[0]["cmty"] = 0
+        G.nodes[1]["cmty"] = 0
+        G.nodes[2]["cmty"] = 0
+        G.nodes[3]["cmty"] = 0
+        self.test(G, [(0, 3)], [(0, 3, 2 / self.delta)], community="cmty")
+
+    def test_all_nonexistent_edges(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+        G.nodes[0]["community"] = 0
+        G.nodes[1]["community"] = 1
+        G.nodes[2]["community"] = 0
+        G.nodes[3]["community"] = 0
+        self.test(G, None, [(0, 3, 1 / self.delta), (1, 2, 0), (1, 3, 0)])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_lowest_common_ancestors.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_lowest_common_ancestors.py
new file mode 100644
index 00000000..66d75220
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_lowest_common_ancestors.py
@@ -0,0 +1,427 @@
+from itertools import chain, combinations, product
+
+import pytest
+
+import networkx as nx
+
+tree_all_pairs_lca = nx.tree_all_pairs_lowest_common_ancestor
+all_pairs_lca = nx.all_pairs_lowest_common_ancestor
+
+
+def get_pair(dictionary, n1, n2):
+    if (n1, n2) in dictionary:
+        return dictionary[n1, n2]
+    else:
+        return dictionary[n2, n1]
+
+
+class TestTreeLCA:
+    @classmethod
+    def setup_class(cls):
+        cls.DG = nx.DiGraph()
+        edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)]
+        cls.DG.add_edges_from(edges)
+        cls.ans = dict(tree_all_pairs_lca(cls.DG, 0))
+        gold = {(n, n): n for n in cls.DG}
+        gold.update({(0, i): 0 for i in range(1, 7)})
+        gold.update(
+            {
+                (1, 2): 0,
+                (1, 3): 1,
+                (1, 4): 1,
+                (1, 5): 0,
+                (1, 6): 0,
+                (2, 3): 0,
+                (2, 4): 0,
+                (2, 5): 2,
+                (2, 6): 2,
+                (3, 4): 1,
+                (3, 5): 0,
+                (3, 6): 0,
+                (4, 5): 0,
+                (4, 6): 0,
+                (5, 6): 2,
+            }
+        )
+
+        cls.gold = gold
+
+    @staticmethod
+    def assert_has_same_pairs(d1, d2):
+        for a, b in ((min(pair), max(pair)) for pair in chain(d1, d2)):
+            assert get_pair(d1, a, b) == get_pair(d2, a, b)
+
+    def test_tree_all_pairs_lca_default_root(self):
+        assert dict(tree_all_pairs_lca(self.DG)) == self.ans
+
+    def test_tree_all_pairs_lca_return_subset(self):
+        test_pairs = [(0, 1), (0, 1), (1, 0)]
+        ans = dict(tree_all_pairs_lca(self.DG, 0, test_pairs))
+        assert (0, 1) in ans and (1, 0) in ans
+        assert len(ans) == 2
+
+    def test_tree_all_pairs_lca(self):
+        all_pairs = chain(combinations(self.DG, 2), ((node, node) for node in self.DG))
+
+        ans = dict(tree_all_pairs_lca(self.DG, 0, all_pairs))
+        self.assert_has_same_pairs(ans, self.ans)
+
+    def test_tree_all_pairs_gold_example(self):
+        ans = dict(tree_all_pairs_lca(self.DG))
+        self.assert_has_same_pairs(self.gold, ans)
+
+    def test_tree_all_pairs_lca_invalid_input(self):
+        empty_digraph = tree_all_pairs_lca(nx.DiGraph())
+        pytest.raises(nx.NetworkXPointlessConcept, list, empty_digraph)
+
+        bad_pairs_digraph = tree_all_pairs_lca(self.DG, pairs=[(-1, -2)])
+        pytest.raises(nx.NodeNotFound, list, bad_pairs_digraph)
+
+    def test_tree_all_pairs_lca_subtrees(self):
+        ans = dict(tree_all_pairs_lca(self.DG, 1))
+        gold = {
+            pair: lca
+            for (pair, lca) in self.gold.items()
+            if all(n in (1, 3, 4) for n in pair)
+        }
+        self.assert_has_same_pairs(gold, ans)
+
+    def test_tree_all_pairs_lca_disconnected_nodes(self):
+        G = nx.DiGraph()
+        G.add_node(1)
+        assert {(1, 1): 1} == dict(tree_all_pairs_lca(G))
+
+        G.add_node(0)
+        assert {(1, 1): 1} == dict(tree_all_pairs_lca(G, 1))
+        assert {(0, 0): 0} == dict(tree_all_pairs_lca(G, 0))
+
+        pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G))
+
+    def test_tree_all_pairs_lca_error_if_input_not_tree(self):
+        # Cycle
+        G = nx.DiGraph([(1, 2), (2, 1)])
+        pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G))
+        # DAG
+        G = nx.DiGraph([(0, 2), (1, 2)])
+        pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G))
+
+    def test_tree_all_pairs_lca_generator(self):
+        pairs = iter([(0, 1), (0, 1), (1, 0)])
+        some_pairs = dict(tree_all_pairs_lca(self.DG, 0, pairs))
+        assert (0, 1) in some_pairs and (1, 0) in some_pairs
+        assert len(some_pairs) == 2
+
+    def test_tree_all_pairs_lca_nonexisting_pairs_exception(self):
+        lca = tree_all_pairs_lca(self.DG, 0, [(-1, -1)])
+        pytest.raises(nx.NodeNotFound, list, lca)
+        # check if node is None
+        lca = tree_all_pairs_lca(self.DG, None, [(-1, -1)])
+        pytest.raises(nx.NodeNotFound, list, lca)
+
+    def test_tree_all_pairs_lca_routine_bails_on_DAGs(self):
+        G = nx.DiGraph([(3, 4), (5, 4)])
+        pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G))
+
+    def test_tree_all_pairs_lca_not_implemented(self):
+        NNI = nx.NetworkXNotImplemented
+        G = nx.Graph([(0, 1)])
+        with pytest.raises(NNI):
+            next(tree_all_pairs_lca(G))
+        with pytest.raises(NNI):
+            next(all_pairs_lca(G))
+        pytest.raises(NNI, nx.lowest_common_ancestor, G, 0, 1)
+        G = nx.MultiGraph([(0, 1)])
+        with pytest.raises(NNI):
+            next(tree_all_pairs_lca(G))
+        with pytest.raises(NNI):
+            next(all_pairs_lca(G))
+        pytest.raises(NNI, nx.lowest_common_ancestor, G, 0, 1)
+
+    def test_tree_all_pairs_lca_trees_without_LCAs(self):
+        G = nx.DiGraph()
+        G.add_node(3)
+        ans = list(tree_all_pairs_lca(G))
+        assert ans == [((3, 3), 3)]
+
+
+class TestMultiTreeLCA(TestTreeLCA):
+    @classmethod
+    def setup_class(cls):
+        cls.DG = nx.MultiDiGraph()
+        edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)]
+        cls.DG.add_edges_from(edges)
+        cls.ans = dict(tree_all_pairs_lca(cls.DG, 0))
+        # add multiedges
+        cls.DG.add_edges_from(edges)
+
+        gold = {(n, n): n for n in cls.DG}
+        gold.update({(0, i): 0 for i in range(1, 7)})
+        gold.update(
+            {
+                (1, 2): 0,
+                (1, 3): 1,
+                (1, 4): 1,
+                (1, 5): 0,
+                (1, 6): 0,
+                (2, 3): 0,
+                (2, 4): 0,
+                (2, 5): 2,
+                (2, 6): 2,
+                (3, 4): 1,
+                (3, 5): 0,
+                (3, 6): 0,
+                (4, 5): 0,
+                (4, 6): 0,
+                (5, 6): 2,
+            }
+        )
+
+        cls.gold = gold
+
+
+class TestDAGLCA:
+    @classmethod
+    def setup_class(cls):
+        cls.DG = nx.DiGraph()
+        nx.add_path(cls.DG, (0, 1, 2, 3))
+        nx.add_path(cls.DG, (0, 4, 3))
+        nx.add_path(cls.DG, (0, 5, 6, 8, 3))
+        nx.add_path(cls.DG, (5, 7, 8))
+        cls.DG.add_edge(6, 2)
+        cls.DG.add_edge(7, 2)
+
+        cls.root_distance = nx.shortest_path_length(cls.DG, source=0)
+
+        cls.gold = {
+            (1, 1): 1,
+            (1, 2): 1,
+            (1, 3): 1,
+            (1, 4): 0,
+            (1, 5): 0,
+            (1, 6): 0,
+            (1, 7): 0,
+            (1, 8): 0,
+            (2, 2): 2,
+            (2, 3): 2,
+            (2, 4): 0,
+            (2, 5): 5,
+            (2, 6): 6,
+            (2, 7): 7,
+            (2, 8): 7,
+            (3, 3): 3,
+            (3, 4): 4,
+            (3, 5): 5,
+            (3, 6): 6,
+            (3, 7): 7,
+            (3, 8): 8,
+            (4, 4): 4,
+            (4, 5): 0,
+            (4, 6): 0,
+            (4, 7): 0,
+            (4, 8): 0,
+            (5, 5): 5,
+            (5, 6): 5,
+            (5, 7): 5,
+            (5, 8): 5,
+            (6, 6): 6,
+            (6, 7): 5,
+            (6, 8): 6,
+            (7, 7): 7,
+            (7, 8): 7,
+            (8, 8): 8,
+        }
+        cls.gold.update(((0, n), 0) for n in cls.DG)
+
+    def assert_lca_dicts_same(self, d1, d2, G=None):
+        """Checks if d1 and d2 contain the same pairs and
+        have a node at the same distance from root for each.
+        If G is None use self.DG."""
+        if G is None:
+            G = self.DG
+            root_distance = self.root_distance
+        else:
+            roots = [n for n, deg in G.in_degree if deg == 0]
+            assert len(roots) == 1
+            root_distance = nx.shortest_path_length(G, source=roots[0])
+
+        for a, b in ((min(pair), max(pair)) for pair in chain(d1, d2)):
+            assert (
+                root_distance[get_pair(d1, a, b)] == root_distance[get_pair(d2, a, b)]
+            )
+
+    def test_all_pairs_lca_gold_example(self):
+        self.assert_lca_dicts_same(dict(all_pairs_lca(self.DG)), self.gold)
+
+    def test_all_pairs_lca_all_pairs_given(self):
+        all_pairs = list(product(self.DG.nodes(), self.DG.nodes()))
+        ans = all_pairs_lca(self.DG, pairs=all_pairs)
+        self.assert_lca_dicts_same(dict(ans), self.gold)
+
+    def test_all_pairs_lca_generator(self):
+        all_pairs = product(self.DG.nodes(), self.DG.nodes())
+        ans = all_pairs_lca(self.DG, pairs=all_pairs)
+        self.assert_lca_dicts_same(dict(ans), self.gold)
+
+    def test_all_pairs_lca_input_graph_with_two_roots(self):
+        G = self.DG.copy()
+        G.add_edge(9, 10)
+        G.add_edge(9, 4)
+        gold = self.gold.copy()
+        gold[9, 9] = 9
+        gold[9, 10] = 9
+        gold[9, 4] = 9
+        gold[9, 3] = 9
+        gold[10, 4] = 9
+        gold[10, 3] = 9
+        gold[10, 10] = 10
+
+        testing = dict(all_pairs_lca(G))
+
+        G.add_edge(-1, 9)
+        G.add_edge(-1, 0)
+        self.assert_lca_dicts_same(testing, gold, G)
+
+    def test_all_pairs_lca_nonexisting_pairs_exception(self):
+        pytest.raises(nx.NodeNotFound, all_pairs_lca, self.DG, [(-1, -1)])
+
+    def test_all_pairs_lca_pairs_without_lca(self):
+        G = self.DG.copy()
+        G.add_node(-1)
+        gen = all_pairs_lca(G, [(-1, -1), (-1, 0)])
+        assert dict(gen) == {(-1, -1): -1}
+
+    def test_all_pairs_lca_null_graph(self):
+        pytest.raises(nx.NetworkXPointlessConcept, all_pairs_lca, nx.DiGraph())
+
+    def test_all_pairs_lca_non_dags(self):
+        pytest.raises(nx.NetworkXError, all_pairs_lca, nx.DiGraph([(3, 4), (4, 3)]))
+
+    def test_all_pairs_lca_nonempty_graph_without_lca(self):
+        G = nx.DiGraph()
+        G.add_node(3)
+        ans = list(all_pairs_lca(G))
+        assert ans == [((3, 3), 3)]
+
+    def test_all_pairs_lca_bug_gh4942(self):
+        G = nx.DiGraph([(0, 2), (1, 2), (2, 3)])
+        ans = list(all_pairs_lca(G))
+        assert len(ans) == 9
+
+    def test_all_pairs_lca_default_kwarg(self):
+        G = nx.DiGraph([(0, 1), (2, 1)])
+        sentinel = object()
+        assert nx.lowest_common_ancestor(G, 0, 2, default=sentinel) is sentinel
+
+    def test_all_pairs_lca_identity(self):
+        G = nx.DiGraph()
+        G.add_node(3)
+        assert nx.lowest_common_ancestor(G, 3, 3) == 3
+
+    def test_all_pairs_lca_issue_4574(self):
+        G = nx.DiGraph()
+        G.add_nodes_from(range(17))
+        G.add_edges_from(
+            [
+                (2, 0),
+                (1, 2),
+                (3, 2),
+                (5, 2),
+                (8, 2),
+                (11, 2),
+                (4, 5),
+                (6, 5),
+                (7, 8),
+                (10, 8),
+                (13, 11),
+                (14, 11),
+                (15, 11),
+                (9, 10),
+                (12, 13),
+                (16, 15),
+            ]
+        )
+
+        assert nx.lowest_common_ancestor(G, 7, 9) == None
+
+    def test_all_pairs_lca_one_pair_gh4942(self):
+        G = nx.DiGraph()
+        # Note: order edge addition is critical to the test
+        G.add_edge(0, 1)
+        G.add_edge(2, 0)
+        G.add_edge(2, 3)
+        G.add_edge(4, 0)
+        G.add_edge(5, 2)
+
+        assert nx.lowest_common_ancestor(G, 1, 3) == 2
+
+
+class TestMultiDiGraph_DAGLCA(TestDAGLCA):
+    @classmethod
+    def setup_class(cls):
+        cls.DG = nx.MultiDiGraph()
+        nx.add_path(cls.DG, (0, 1, 2, 3))
+        # add multiedges
+        nx.add_path(cls.DG, (0, 1, 2, 3))
+        nx.add_path(cls.DG, (0, 4, 3))
+        nx.add_path(cls.DG, (0, 5, 6, 8, 3))
+        nx.add_path(cls.DG, (5, 7, 8))
+        cls.DG.add_edge(6, 2)
+        cls.DG.add_edge(7, 2)
+
+        cls.root_distance = nx.shortest_path_length(cls.DG, source=0)
+
+        cls.gold = {
+            (1, 1): 1,
+            (1, 2): 1,
+            (1, 3): 1,
+            (1, 4): 0,
+            (1, 5): 0,
+            (1, 6): 0,
+            (1, 7): 0,
+            (1, 8): 0,
+            (2, 2): 2,
+            (2, 3): 2,
+            (2, 4): 0,
+            (2, 5): 5,
+            (2, 6): 6,
+            (2, 7): 7,
+            (2, 8): 7,
+            (3, 3): 3,
+            (3, 4): 4,
+            (3, 5): 5,
+            (3, 6): 6,
+            (3, 7): 7,
+            (3, 8): 8,
+            (4, 4): 4,
+            (4, 5): 0,
+            (4, 6): 0,
+            (4, 7): 0,
+            (4, 8): 0,
+            (5, 5): 5,
+            (5, 6): 5,
+            (5, 7): 5,
+            (5, 8): 5,
+            (6, 6): 6,
+            (6, 7): 5,
+            (6, 8): 6,
+            (7, 7): 7,
+            (7, 8): 7,
+            (8, 8): 8,
+        }
+        cls.gold.update(((0, n), 0) for n in cls.DG)
+
+
+def test_all_pairs_lca_self_ancestors():
+    """Self-ancestors should always be the node itself, i.e. lca of (0, 0) is 0.
+    See gh-4458."""
+    # DAG for test - note order of node/edge addition is relevant
+    G = nx.DiGraph()
+    G.add_nodes_from(range(5))
+    G.add_edges_from([(1, 0), (2, 0), (3, 2), (4, 1), (4, 3)])
+
+    ap_lca = nx.all_pairs_lowest_common_ancestor
+    assert all(u == v == a for (u, v), a in ap_lca(G) if u == v)
+    MG = nx.MultiDiGraph(G)
+    assert all(u == v == a for (u, v), a in ap_lca(MG) if u == v)
+    MG.add_edges_from([(1, 0), (2, 0)])
+    assert all(u == v == a for (u, v), a in ap_lca(MG) if u == v)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_matching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_matching.py
new file mode 100644
index 00000000..37853e38
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_matching.py
@@ -0,0 +1,605 @@
+import math
+from itertools import permutations
+
+from pytest import raises
+
+import networkx as nx
+from networkx.algorithms.matching import matching_dict_to_set
+from networkx.utils import edges_equal
+
+
+class TestMaxWeightMatching:
+    """Unit tests for the
+    :func:`~networkx.algorithms.matching.max_weight_matching` function.
+
+    """
+
+    def test_trivial1(self):
+        """Empty graph"""
+        G = nx.Graph()
+        assert nx.max_weight_matching(G) == set()
+        assert nx.min_weight_matching(G) == set()
+
+    def test_selfloop(self):
+        G = nx.Graph()
+        G.add_edge(0, 0, weight=100)
+        assert nx.max_weight_matching(G) == set()
+        assert nx.min_weight_matching(G) == set()
+
+    def test_single_edge(self):
+        G = nx.Graph()
+        G.add_edge(0, 1)
+        assert edges_equal(
+            nx.max_weight_matching(G), matching_dict_to_set({0: 1, 1: 0})
+        )
+        assert edges_equal(
+            nx.min_weight_matching(G), matching_dict_to_set({0: 1, 1: 0})
+        )
+
+    def test_two_path(self):
+        G = nx.Graph()
+        G.add_edge("one", "two", weight=10)
+        G.add_edge("two", "three", weight=11)
+        assert edges_equal(
+            nx.max_weight_matching(G),
+            matching_dict_to_set({"three": "two", "two": "three"}),
+        )
+        assert edges_equal(
+            nx.min_weight_matching(G),
+            matching_dict_to_set({"one": "two", "two": "one"}),
+        )
+
+    def test_path(self):
+        G = nx.Graph()
+        G.add_edge(1, 2, weight=5)
+        G.add_edge(2, 3, weight=11)
+        G.add_edge(3, 4, weight=5)
+        assert edges_equal(
+            nx.max_weight_matching(G), matching_dict_to_set({2: 3, 3: 2})
+        )
+        assert edges_equal(
+            nx.max_weight_matching(G, 1), matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3})
+        )
+        assert edges_equal(
+            nx.min_weight_matching(G), matching_dict_to_set({1: 2, 3: 4})
+        )
+        assert edges_equal(
+            nx.min_weight_matching(G, 1), matching_dict_to_set({1: 2, 3: 4})
+        )
+
+    def test_square(self):
+        G = nx.Graph()
+        G.add_edge(1, 4, weight=2)
+        G.add_edge(2, 3, weight=2)
+        G.add_edge(1, 2, weight=1)
+        G.add_edge(3, 4, weight=4)
+        assert edges_equal(
+            nx.max_weight_matching(G), matching_dict_to_set({1: 2, 3: 4})
+        )
+        assert edges_equal(
+            nx.min_weight_matching(G), matching_dict_to_set({1: 4, 2: 3})
+        )
+
+    def test_edge_attribute_name(self):
+        G = nx.Graph()
+        G.add_edge("one", "two", weight=10, abcd=11)
+        G.add_edge("two", "three", weight=11, abcd=10)
+        assert edges_equal(
+            nx.max_weight_matching(G, weight="abcd"),
+            matching_dict_to_set({"one": "two", "two": "one"}),
+        )
+        assert edges_equal(
+            nx.min_weight_matching(G, weight="abcd"),
+            matching_dict_to_set({"three": "two"}),
+        )
+
+    def test_floating_point_weights(self):
+        G = nx.Graph()
+        G.add_edge(1, 2, weight=math.pi)
+        G.add_edge(2, 3, weight=math.exp(1))
+        G.add_edge(1, 3, weight=3.0)
+        G.add_edge(1, 4, weight=math.sqrt(2.0))
+        assert edges_equal(
+            nx.max_weight_matching(G), matching_dict_to_set({1: 4, 2: 3, 3: 2, 4: 1})
+        )
+        assert edges_equal(
+            nx.min_weight_matching(G), matching_dict_to_set({1: 4, 2: 3, 3: 2, 4: 1})
+        )
+
+    def test_negative_weights(self):
+        G = nx.Graph()
+        G.add_edge(1, 2, weight=2)
+        G.add_edge(1, 3, weight=-2)
+        G.add_edge(2, 3, weight=1)
+        G.add_edge(2, 4, weight=-1)
+        G.add_edge(3, 4, weight=-6)
+        assert edges_equal(
+            nx.max_weight_matching(G), matching_dict_to_set({1: 2, 2: 1})
+        )
+        assert edges_equal(
+            nx.max_weight_matching(G, maxcardinality=True),
+            matching_dict_to_set({1: 3, 2: 4, 3: 1, 4: 2}),
+        )
+        assert edges_equal(
+            nx.min_weight_matching(G), matching_dict_to_set({1: 2, 3: 4})
+        )
+
+    def test_s_blossom(self):
+        """Create S-blossom and use it for augmentation:"""
+        G = nx.Graph()
+        G.add_weighted_edges_from([(1, 2, 8), (1, 3, 9), (2, 3, 10), (3, 4, 7)])
+        answer = matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3})
+        assert edges_equal(nx.max_weight_matching(G), answer)
+        assert edges_equal(nx.min_weight_matching(G), answer)
+
+        G.add_weighted_edges_from([(1, 6, 5), (4, 5, 6)])
+        answer = matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1})
+        assert edges_equal(nx.max_weight_matching(G), answer)
+        assert edges_equal(nx.min_weight_matching(G), answer)
+
+    def test_s_t_blossom(self):
+        """Create S-blossom, relabel as T-blossom, use for augmentation:"""
+        G = nx.Graph()
+        G.add_weighted_edges_from(
+            [(1, 2, 9), (1, 3, 8), (2, 3, 10), (1, 4, 5), (4, 5, 4), (1, 6, 3)]
+        )
+        answer = matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1})
+        assert edges_equal(nx.max_weight_matching(G), answer)
+        assert edges_equal(nx.min_weight_matching(G), answer)
+
+        G.add_edge(4, 5, weight=3)
+        G.add_edge(1, 6, weight=4)
+        assert edges_equal(nx.max_weight_matching(G), answer)
+        assert edges_equal(nx.min_weight_matching(G), answer)
+
+        G.remove_edge(1, 6)
+        G.add_edge(3, 6, weight=4)
+        answer = matching_dict_to_set({1: 2, 2: 1, 3: 6, 4: 5, 5: 4, 6: 3})
+        assert edges_equal(nx.max_weight_matching(G), answer)
+        assert edges_equal(nx.min_weight_matching(G), answer)
+
+    def test_nested_s_blossom(self):
+        """Create nested S-blossom, use for augmentation:"""
+
+        G = nx.Graph()
+        G.add_weighted_edges_from(
+            [
+                (1, 2, 9),
+                (1, 3, 9),
+                (2, 3, 10),
+                (2, 4, 8),
+                (3, 5, 8),
+                (4, 5, 10),
+                (5, 6, 6),
+            ]
+        )
+        dict_format = {1: 3, 2: 4, 3: 1, 4: 2, 5: 6, 6: 5}
+        expected = {frozenset(e) for e in matching_dict_to_set(dict_format)}
+        answer = {frozenset(e) for e in nx.max_weight_matching(G)}
+        assert answer == expected
+        answer = {frozenset(e) for e in nx.min_weight_matching(G)}
+        assert answer == expected
+
+    def test_nested_s_blossom_relabel(self):
+        """Create S-blossom, relabel as S, include in nested S-blossom:"""
+        G = nx.Graph()
+        G.add_weighted_edges_from(
+            [
+                (1, 2, 10),
+                (1, 7, 10),
+                (2, 3, 12),
+                (3, 4, 20),
+                (3, 5, 20),
+                (4, 5, 25),
+                (5, 6, 10),
+                (6, 7, 10),
+                (7, 8, 8),
+            ]
+        )
+        answer = matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3, 5: 6, 6: 5, 7: 8, 8: 7})
+        assert edges_equal(nx.max_weight_matching(G), answer)
+        assert edges_equal(nx.min_weight_matching(G), answer)
+
+    def test_nested_s_blossom_expand(self):
+        """Create nested S-blossom, augment, expand recursively:"""
+        G = nx.Graph()
+        G.add_weighted_edges_from(
+            [
+                (1, 2, 8),
+                (1, 3, 8),
+                (2, 3, 10),
+                (2, 4, 12),
+                (3, 5, 12),
+                (4, 5, 14),
+                (4, 6, 12),
+                (5, 7, 12),
+                (6, 7, 14),
+                (7, 8, 12),
+            ]
+        )
+        answer = matching_dict_to_set({1: 2, 2: 1, 3: 5, 4: 6, 5: 3, 6: 4, 7: 8, 8: 7})
+        assert edges_equal(nx.max_weight_matching(G), answer)
+        assert edges_equal(nx.min_weight_matching(G), answer)
+
+    def test_s_blossom_relabel_expand(self):
+        """Create S-blossom, relabel as T, expand:"""
+        G = nx.Graph()
+        G.add_weighted_edges_from(
+            [
+                (1, 2, 23),
+                (1, 5, 22),
+                (1, 6, 15),
+                (2, 3, 25),
+                (3, 4, 22),
+                (4, 5, 25),
+                (4, 8, 14),
+                (5, 7, 13),
+            ]
+        )
+        answer = matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4})
+        assert edges_equal(nx.max_weight_matching(G), answer)
+        assert edges_equal(nx.min_weight_matching(G), answer)
+
+    def test_nested_s_blossom_relabel_expand(self):
+        """Create nested S-blossom, relabel as T, expand:"""
+        G = nx.Graph()
+        G.add_weighted_edges_from(
+            [
+                (1, 2, 19),
+                (1, 3, 20),
+                (1, 8, 8),
+                (2, 3, 25),
+                (2, 4, 18),
+                (3, 5, 18),
+                (4, 5, 13),
+                (4, 7, 7),
+                (5, 6, 7),
+            ]
+        )
+        answer = matching_dict_to_set({1: 8, 2: 3, 3: 2, 4: 7, 5: 6, 6: 5, 7: 4, 8: 1})
+        assert edges_equal(nx.max_weight_matching(G), answer)
+        assert edges_equal(nx.min_weight_matching(G), answer)
+
+    def test_nasty_blossom1(self):
+        """Create blossom, relabel as T in more than one way, expand,
+        augment:
+        """
+        G = nx.Graph()
+        G.add_weighted_edges_from(
+            [
+                (1, 2, 45),
+                (1, 5, 45),
+                (2, 3, 50),
+                (3, 4, 45),
+                (4, 5, 50),
+                (1, 6, 30),
+                (3, 9, 35),
+                (4, 8, 35),
+                (5, 7, 26),
+                (9, 10, 5),
+            ]
+        )
+        ansdict = {1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4, 9: 10, 10: 9}
+        answer = matching_dict_to_set(ansdict)
+        assert edges_equal(nx.max_weight_matching(G), answer)
+        assert edges_equal(nx.min_weight_matching(G), answer)
+
+    def test_nasty_blossom2(self):
+        """Again but slightly different:"""
+        G = nx.Graph()
+        G.add_weighted_edges_from(
+            [
+                (1, 2, 45),
+                (1, 5, 45),
+                (2, 3, 50),
+                (3, 4, 45),
+                (4, 5, 50),
+                (1, 6, 30),
+                (3, 9, 35),
+                (4, 8, 26),
+                (5, 7, 40),
+                (9, 10, 5),
+            ]
+        )
+        ans = {1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4, 9: 10, 10: 9}
+        answer = matching_dict_to_set(ans)
+        assert edges_equal(nx.max_weight_matching(G), answer)
+        assert edges_equal(nx.min_weight_matching(G), answer)
+
+    def test_nasty_blossom_least_slack(self):
+        """Create blossom, relabel as T, expand such that a new
+        least-slack S-to-free dge is produced, augment:
+        """
+        G = nx.Graph()
+        G.add_weighted_edges_from(
+            [
+                (1, 2, 45),
+                (1, 5, 45),
+                (2, 3, 50),
+                (3, 4, 45),
+                (4, 5, 50),
+                (1, 6, 30),
+                (3, 9, 35),
+                (4, 8, 28),
+                (5, 7, 26),
+                (9, 10, 5),
+            ]
+        )
+        ans = {1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4, 9: 10, 10: 9}
+        answer = matching_dict_to_set(ans)
+        assert edges_equal(nx.max_weight_matching(G), answer)
+        assert edges_equal(nx.min_weight_matching(G), answer)
+
+    def test_nasty_blossom_augmenting(self):
+        """Create nested blossom, relabel as T in more than one way"""
+        # expand outer blossom such that inner blossom ends up on an
+        # augmenting path:
+        G = nx.Graph()
+        G.add_weighted_edges_from(
+            [
+                (1, 2, 45),
+                (1, 7, 45),
+                (2, 3, 50),
+                (3, 4, 45),
+                (4, 5, 95),
+                (4, 6, 94),
+                (5, 6, 94),
+                (6, 7, 50),
+                (1, 8, 30),
+                (3, 11, 35),
+                (5, 9, 36),
+                (7, 10, 26),
+                (11, 12, 5),
+            ]
+        )
+        ans = {
+            1: 8,
+            2: 3,
+            3: 2,
+            4: 6,
+            5: 9,
+            6: 4,
+            7: 10,
+            8: 1,
+            9: 5,
+            10: 7,
+            11: 12,
+            12: 11,
+        }
+        answer = matching_dict_to_set(ans)
+        assert edges_equal(nx.max_weight_matching(G), answer)
+        assert edges_equal(nx.min_weight_matching(G), answer)
+
+    def test_nasty_blossom_expand_recursively(self):
+        """Create nested S-blossom, relabel as S, expand recursively:"""
+        G = nx.Graph()
+        G.add_weighted_edges_from(
+            [
+                (1, 2, 40),
+                (1, 3, 40),
+                (2, 3, 60),
+                (2, 4, 55),
+                (3, 5, 55),
+                (4, 5, 50),
+                (1, 8, 15),
+                (5, 7, 30),
+                (7, 6, 10),
+                (8, 10, 10),
+                (4, 9, 30),
+            ]
+        )
+        ans = {1: 2, 2: 1, 3: 5, 4: 9, 5: 3, 6: 7, 7: 6, 8: 10, 9: 4, 10: 8}
+        answer = matching_dict_to_set(ans)
+        assert edges_equal(nx.max_weight_matching(G), answer)
+        assert edges_equal(nx.min_weight_matching(G), answer)
+
+    def test_wrong_graph_type(self):
+        error = nx.NetworkXNotImplemented
+        raises(error, nx.max_weight_matching, nx.MultiGraph())
+        raises(error, nx.max_weight_matching, nx.MultiDiGraph())
+        raises(error, nx.max_weight_matching, nx.DiGraph())
+        raises(error, nx.min_weight_matching, nx.DiGraph())
+
+
+class TestIsMatching:
+    """Unit tests for the
+    :func:`~networkx.algorithms.matching.is_matching` function.
+
+    """
+
+    def test_dict(self):
+        G = nx.path_graph(4)
+        assert nx.is_matching(G, {0: 1, 1: 0, 2: 3, 3: 2})
+
+    def test_empty_matching(self):
+        G = nx.path_graph(4)
+        assert nx.is_matching(G, set())
+
+    def test_single_edge(self):
+        G = nx.path_graph(4)
+        assert nx.is_matching(G, {(1, 2)})
+
+    def test_edge_order(self):
+        G = nx.path_graph(4)
+        assert nx.is_matching(G, {(0, 1), (2, 3)})
+        assert nx.is_matching(G, {(1, 0), (2, 3)})
+        assert nx.is_matching(G, {(0, 1), (3, 2)})
+        assert nx.is_matching(G, {(1, 0), (3, 2)})
+
+    def test_valid_matching(self):
+        G = nx.path_graph(4)
+        assert nx.is_matching(G, {(0, 1), (2, 3)})
+
+    def test_invalid_input(self):
+        error = nx.NetworkXError
+        G = nx.path_graph(4)
+        # edge to node not in G
+        raises(error, nx.is_matching, G, {(0, 5), (2, 3)})
+        # edge not a 2-tuple
+        raises(error, nx.is_matching, G, {(0, 1, 2), (2, 3)})
+        raises(error, nx.is_matching, G, {(0,), (2, 3)})
+
+    def test_selfloops(self):
+        error = nx.NetworkXError
+        G = nx.path_graph(4)
+        # selfloop for node not in G
+        raises(error, nx.is_matching, G, {(5, 5), (2, 3)})
+        # selfloop edge not in G
+        assert not nx.is_matching(G, {(0, 0), (1, 2), (2, 3)})
+        # selfloop edge in G
+        G.add_edge(0, 0)
+        assert not nx.is_matching(G, {(0, 0), (1, 2)})
+
+    def test_invalid_matching(self):
+        G = nx.path_graph(4)
+        assert not nx.is_matching(G, {(0, 1), (1, 2), (2, 3)})
+
+    def test_invalid_edge(self):
+        G = nx.path_graph(4)
+        assert not nx.is_matching(G, {(0, 3), (1, 2)})
+        raises(nx.NetworkXError, nx.is_matching, G, {(0, 55)})
+
+        G = nx.DiGraph(G.edges)
+        assert nx.is_matching(G, {(0, 1)})
+        assert not nx.is_matching(G, {(1, 0)})
+
+
+class TestIsMaximalMatching:
+    """Unit tests for the
+    :func:`~networkx.algorithms.matching.is_maximal_matching` function.
+
+    """
+
+    def test_dict(self):
+        G = nx.path_graph(4)
+        assert nx.is_maximal_matching(G, {0: 1, 1: 0, 2: 3, 3: 2})
+
+    def test_invalid_input(self):
+        error = nx.NetworkXError
+        G = nx.path_graph(4)
+        # edge to node not in G
+        raises(error, nx.is_maximal_matching, G, {(0, 5)})
+        raises(error, nx.is_maximal_matching, G, {(5, 0)})
+        # edge not a 2-tuple
+        raises(error, nx.is_maximal_matching, G, {(0, 1, 2), (2, 3)})
+        raises(error, nx.is_maximal_matching, G, {(0,), (2, 3)})
+
+    def test_valid(self):
+        G = nx.path_graph(4)
+        assert nx.is_maximal_matching(G, {(0, 1), (2, 3)})
+
+    def test_not_matching(self):
+        G = nx.path_graph(4)
+        assert not nx.is_maximal_matching(G, {(0, 1), (1, 2), (2, 3)})
+        assert not nx.is_maximal_matching(G, {(0, 3)})
+        G.add_edge(0, 0)
+        assert not nx.is_maximal_matching(G, {(0, 0)})
+
+    def test_not_maximal(self):
+        G = nx.path_graph(4)
+        assert not nx.is_maximal_matching(G, {(0, 1)})
+
+
+class TestIsPerfectMatching:
+    """Unit tests for the
+    :func:`~networkx.algorithms.matching.is_perfect_matching` function.
+
+    """
+
+    def test_dict(self):
+        G = nx.path_graph(4)
+        assert nx.is_perfect_matching(G, {0: 1, 1: 0, 2: 3, 3: 2})
+
+    def test_valid(self):
+        G = nx.path_graph(4)
+        assert nx.is_perfect_matching(G, {(0, 1), (2, 3)})
+
+    def test_valid_not_path(self):
+        G = nx.cycle_graph(4)
+        G.add_edge(0, 4)
+        G.add_edge(1, 4)
+        G.add_edge(5, 2)
+
+        assert nx.is_perfect_matching(G, {(1, 4), (0, 3), (5, 2)})
+
+    def test_invalid_input(self):
+        error = nx.NetworkXError
+        G = nx.path_graph(4)
+        # edge to node not in G
+        raises(error, nx.is_perfect_matching, G, {(0, 5)})
+        raises(error, nx.is_perfect_matching, G, {(5, 0)})
+        # edge not a 2-tuple
+        raises(error, nx.is_perfect_matching, G, {(0, 1, 2), (2, 3)})
+        raises(error, nx.is_perfect_matching, G, {(0,), (2, 3)})
+
+    def test_selfloops(self):
+        error = nx.NetworkXError
+        G = nx.path_graph(4)
+        # selfloop for node not in G
+        raises(error, nx.is_perfect_matching, G, {(5, 5), (2, 3)})
+        # selfloop edge not in G
+        assert not nx.is_perfect_matching(G, {(0, 0), (1, 2), (2, 3)})
+        # selfloop edge in G
+        G.add_edge(0, 0)
+        assert not nx.is_perfect_matching(G, {(0, 0), (1, 2)})
+
+    def test_not_matching(self):
+        G = nx.path_graph(4)
+        assert not nx.is_perfect_matching(G, {(0, 3)})
+        assert not nx.is_perfect_matching(G, {(0, 1), (1, 2), (2, 3)})
+
+    def test_maximal_but_not_perfect(self):
+        G = nx.cycle_graph(4)
+        G.add_edge(0, 4)
+        G.add_edge(1, 4)
+
+        assert not nx.is_perfect_matching(G, {(1, 4), (0, 3)})
+
+
+class TestMaximalMatching:
+    """Unit tests for the
+    :func:`~networkx.algorithms.matching.maximal_matching`.
+
+    """
+
+    def test_valid_matching(self):
+        edges = [(1, 2), (1, 5), (2, 3), (2, 5), (3, 4), (3, 6), (5, 6)]
+        G = nx.Graph(edges)
+        matching = nx.maximal_matching(G)
+        assert nx.is_maximal_matching(G, matching)
+
+    def test_single_edge_matching(self):
+        # In the star graph, any maximal matching has just one edge.
+        G = nx.star_graph(5)
+        matching = nx.maximal_matching(G)
+        assert 1 == len(matching)
+        assert nx.is_maximal_matching(G, matching)
+
+    def test_self_loops(self):
+        # Create the path graph with two self-loops.
+        G = nx.path_graph(3)
+        G.add_edges_from([(0, 0), (1, 1)])
+        matching = nx.maximal_matching(G)
+        assert len(matching) == 1
+        # The matching should never include self-loops.
+        assert not any(u == v for u, v in matching)
+        assert nx.is_maximal_matching(G, matching)
+
+    def test_ordering(self):
+        """Tests that a maximal matching is computed correctly
+        regardless of the order in which nodes are added to the graph.
+
+        """
+        for nodes in permutations(range(3)):
+            G = nx.Graph()
+            G.add_nodes_from(nodes)
+            G.add_edges_from([(0, 1), (0, 2)])
+            matching = nx.maximal_matching(G)
+            assert len(matching) == 1
+            assert nx.is_maximal_matching(G, matching)
+
+    def test_wrong_graph_type(self):
+        error = nx.NetworkXNotImplemented
+        raises(error, nx.maximal_matching, nx.MultiGraph())
+        raises(error, nx.maximal_matching, nx.MultiDiGraph())
+        raises(error, nx.maximal_matching, nx.DiGraph())
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_max_weight_clique.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_max_weight_clique.py
new file mode 100644
index 00000000..6cd8584e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_max_weight_clique.py
@@ -0,0 +1,179 @@
+"""Maximum weight clique test suite."""
+
+import pytest
+
+import networkx as nx
+
+
+class TestMaximumWeightClique:
+    def test_basic_cases(self):
+        def check_basic_case(graph_func, expected_weight, weight_accessor):
+            graph = graph_func()
+            clique, weight = nx.algorithms.max_weight_clique(graph, weight_accessor)
+            assert verify_clique(
+                graph, clique, weight, expected_weight, weight_accessor
+            )
+
+        for graph_func, (expected_weight, expected_size) in TEST_CASES.items():
+            check_basic_case(graph_func, expected_weight, "weight")
+            check_basic_case(graph_func, expected_size, None)
+
+    def test_key_error(self):
+        graph = two_node_graph()
+        with pytest.raises(KeyError):
+            nx.algorithms.max_weight_clique(graph, "nonexistent-key")
+
+    def test_error_on_non_integer_weight(self):
+        graph = two_node_graph()
+        graph.nodes[2]["weight"] = 1.5
+        with pytest.raises(ValueError):
+            nx.algorithms.max_weight_clique(graph)
+
+    def test_unaffected_by_self_loops(self):
+        graph = two_node_graph()
+        graph.add_edge(1, 1)
+        graph.add_edge(2, 2)
+        clique, weight = nx.algorithms.max_weight_clique(graph, "weight")
+        assert verify_clique(graph, clique, weight, 30, "weight")
+        graph = three_node_independent_set()
+        graph.add_edge(1, 1)
+        clique, weight = nx.algorithms.max_weight_clique(graph, "weight")
+        assert verify_clique(graph, clique, weight, 20, "weight")
+
+    def test_30_node_prob(self):
+        G = nx.Graph()
+        G.add_nodes_from(range(1, 31))
+        for i in range(1, 31):
+            G.nodes[i]["weight"] = i + 1
+        # fmt: off
+        G.add_edges_from(
+            [
+                (1, 12), (1, 13), (1, 15), (1, 16), (1, 18), (1, 19), (1, 20),
+                (1, 23), (1, 26), (1, 28), (1, 29), (1, 30), (2, 3), (2, 4),
+                (2, 5), (2, 8), (2, 9), (2, 10), (2, 14), (2, 17), (2, 18),
+                (2, 21), (2, 22), (2, 23), (2, 27), (3, 9), (3, 15), (3, 21),
+                (3, 22), (3, 23), (3, 24), (3, 27), (3, 28), (3, 29), (4, 5),
+                (4, 6), (4, 8), (4, 21), (4, 22), (4, 23), (4, 26), (4, 28),
+                (4, 30), (5, 6), (5, 8), (5, 9), (5, 13), (5, 14), (5, 15),
+                (5, 16), (5, 20), (5, 21), (5, 22), (5, 25), (5, 28), (5, 29),
+                (6, 7), (6, 8), (6, 13), (6, 17), (6, 18), (6, 19), (6, 24),
+                (6, 26), (6, 27), (6, 28), (6, 29), (7, 12), (7, 14), (7, 15),
+                (7, 16), (7, 17), (7, 20), (7, 25), (7, 27), (7, 29), (7, 30),
+                (8, 10), (8, 15), (8, 16), (8, 18), (8, 20), (8, 22), (8, 24),
+                (8, 26), (8, 27), (8, 28), (8, 30), (9, 11), (9, 12), (9, 13),
+                (9, 14), (9, 15), (9, 16), (9, 19), (9, 20), (9, 21), (9, 24),
+                (9, 30), (10, 12), (10, 15), (10, 18), (10, 19), (10, 20),
+                (10, 22), (10, 23), (10, 24), (10, 26), (10, 27), (10, 29),
+                (10, 30), (11, 13), (11, 15), (11, 16), (11, 17), (11, 18),
+                (11, 19), (11, 20), (11, 22), (11, 29), (11, 30), (12, 14),
+                (12, 17), (12, 18), (12, 19), (12, 20), (12, 21), (12, 23),
+                (12, 25), (12, 26), (12, 30), (13, 20), (13, 22), (13, 23),
+                (13, 24), (13, 30), (14, 16), (14, 20), (14, 21), (14, 22),
+                (14, 23), (14, 25), (14, 26), (14, 27), (14, 29), (14, 30),
+                (15, 17), (15, 18), (15, 20), (15, 21), (15, 26), (15, 27),
+                (15, 28), (16, 17), (16, 18), (16, 19), (16, 20), (16, 21),
+                (16, 29), (16, 30), (17, 18), (17, 21), (17, 22), (17, 25),
+                (17, 27), (17, 28), (17, 30), (18, 19), (18, 20), (18, 21),
+                (18, 22), (18, 23), (18, 24), (19, 20), (19, 22), (19, 23),
+                (19, 24), (19, 25), (19, 27), (19, 30), (20, 21), (20, 23),
+                (20, 24), (20, 26), (20, 28), (20, 29), (21, 23), (21, 26),
+                (21, 27), (21, 29), (22, 24), (22, 25), (22, 26), (22, 29),
+                (23, 25), (23, 30), (24, 25), (24, 26), (25, 27), (25, 29),
+                (26, 27), (26, 28), (26, 30), (28, 29), (29, 30),
+            ]
+        )
+        # fmt: on
+        clique, weight = nx.algorithms.max_weight_clique(G)
+        assert verify_clique(G, clique, weight, 111, "weight")
+
+
+#  ############################  Utility functions ############################
+def verify_clique(
+    graph, clique, reported_clique_weight, expected_clique_weight, weight_accessor
+):
+    for node1 in clique:
+        for node2 in clique:
+            if node1 == node2:
+                continue
+            if not graph.has_edge(node1, node2):
+                return False
+
+    if weight_accessor is None:
+        clique_weight = len(clique)
+    else:
+        clique_weight = sum(graph.nodes[v]["weight"] for v in clique)
+
+    if clique_weight != expected_clique_weight:
+        return False
+    if clique_weight != reported_clique_weight:
+        return False
+
+    return True
+
+
+#  ############################  Graph Generation ############################
+
+
+def empty_graph():
+    return nx.Graph()
+
+
+def one_node_graph():
+    graph = nx.Graph()
+    graph.add_nodes_from([1])
+    graph.nodes[1]["weight"] = 10
+    return graph
+
+
+def two_node_graph():
+    graph = nx.Graph()
+    graph.add_nodes_from([1, 2])
+    graph.add_edges_from([(1, 2)])
+    graph.nodes[1]["weight"] = 10
+    graph.nodes[2]["weight"] = 20
+    return graph
+
+
+def three_node_clique():
+    graph = nx.Graph()
+    graph.add_nodes_from([1, 2, 3])
+    graph.add_edges_from([(1, 2), (1, 3), (2, 3)])
+    graph.nodes[1]["weight"] = 10
+    graph.nodes[2]["weight"] = 20
+    graph.nodes[3]["weight"] = 5
+    return graph
+
+
+def three_node_independent_set():
+    graph = nx.Graph()
+    graph.add_nodes_from([1, 2, 3])
+    graph.nodes[1]["weight"] = 10
+    graph.nodes[2]["weight"] = 20
+    graph.nodes[3]["weight"] = 5
+    return graph
+
+
+def disconnected():
+    graph = nx.Graph()
+    graph.add_edges_from([(1, 2), (2, 3), (4, 5), (5, 6)])
+    graph.nodes[1]["weight"] = 10
+    graph.nodes[2]["weight"] = 20
+    graph.nodes[3]["weight"] = 5
+    graph.nodes[4]["weight"] = 100
+    graph.nodes[5]["weight"] = 200
+    graph.nodes[6]["weight"] = 50
+    return graph
+
+
+# --------------------------------------------------------------------------
+# Basic tests for all strategies
+# For each basic graph function, specify expected weight of max weight clique
+# and expected size of maximum clique
+TEST_CASES = {
+    empty_graph: (0, 0),
+    one_node_graph: (10, 1),
+    two_node_graph: (30, 2),
+    three_node_clique: (35, 3),
+    three_node_independent_set: (20, 1),
+    disconnected: (300, 2),
+}
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_mis.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_mis.py
new file mode 100644
index 00000000..02be02d4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_mis.py
@@ -0,0 +1,62 @@
+"""
+Tests for maximal (not maximum) independent sets.
+
+"""
+
+import random
+
+import pytest
+
+import networkx as nx
+
+
+def test_random_seed():
+    G = nx.empty_graph(5)
+    assert nx.maximal_independent_set(G, seed=1) == [1, 0, 3, 2, 4]
+
+
+@pytest.mark.parametrize("graph", [nx.complete_graph(5), nx.complete_graph(55)])
+def test_K5(graph):
+    """Maximal independent set for complete graphs"""
+    assert all(nx.maximal_independent_set(graph, [n]) == [n] for n in graph)
+
+
+def test_exceptions():
+    """Bad input should raise exception."""
+    G = nx.florentine_families_graph()
+    pytest.raises(nx.NetworkXUnfeasible, nx.maximal_independent_set, G, ["Smith"])
+    pytest.raises(
+        nx.NetworkXUnfeasible, nx.maximal_independent_set, G, ["Salviati", "Pazzi"]
+    )
+    # MaximalIndependentSet is not implemented for directed graphs
+    pytest.raises(nx.NetworkXNotImplemented, nx.maximal_independent_set, nx.DiGraph(G))
+
+
+def test_florentine_family():
+    G = nx.florentine_families_graph()
+    indep = nx.maximal_independent_set(G, ["Medici", "Bischeri"])
+    assert set(indep) == {
+        "Medici",
+        "Bischeri",
+        "Castellani",
+        "Pazzi",
+        "Ginori",
+        "Lamberteschi",
+    }
+
+
+def test_bipartite():
+    G = nx.complete_bipartite_graph(12, 34)
+    indep = nx.maximal_independent_set(G, [4, 5, 9, 10])
+    assert sorted(indep) == list(range(12))
+
+
+def test_random_graphs():
+    """Generate 5 random graphs of different types and sizes and
+    make sure that all sets are independent and maximal."""
+    for i in range(0, 50, 10):
+        G = nx.erdos_renyi_graph(i * 10 + 1, random.random())
+        IS = nx.maximal_independent_set(G)
+        assert G.subgraph(IS).number_of_edges() == 0
+        nbrs_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS))
+        assert all(v in nbrs_of_MIS for v in set(G.nodes()).difference(IS))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_moral.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_moral.py
new file mode 100644
index 00000000..fc98c972
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_moral.py
@@ -0,0 +1,15 @@
+import networkx as nx
+from networkx.algorithms.moral import moral_graph
+
+
+def test_get_moral_graph():
+    graph = nx.DiGraph()
+    graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7])
+    graph.add_edges_from([(1, 2), (3, 2), (4, 1), (4, 5), (6, 5), (7, 5)])
+    H = moral_graph(graph)
+    assert not H.is_directed()
+    assert H.has_edge(1, 3)
+    assert H.has_edge(4, 6)
+    assert H.has_edge(6, 7)
+    assert H.has_edge(4, 7)
+    assert not H.has_edge(1, 5)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_node_classification.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_node_classification.py
new file mode 100644
index 00000000..2e1fc79d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_node_classification.py
@@ -0,0 +1,140 @@
+import pytest
+
+pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+import networkx as nx
+from networkx.algorithms import node_classification
+
+
+class TestHarmonicFunction:
+    def test_path_graph(self):
+        G = nx.path_graph(4)
+        label_name = "label"
+        G.nodes[0][label_name] = "A"
+        G.nodes[3][label_name] = "B"
+        predicted = node_classification.harmonic_function(G, label_name=label_name)
+        assert predicted[0] == "A"
+        assert predicted[1] == "A"
+        assert predicted[2] == "B"
+        assert predicted[3] == "B"
+
+    def test_no_labels(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.path_graph(4)
+            node_classification.harmonic_function(G)
+
+    def test_no_nodes(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.Graph()
+            node_classification.harmonic_function(G)
+
+    def test_no_edges(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.Graph()
+            G.add_node(1)
+            G.add_node(2)
+            node_classification.harmonic_function(G)
+
+    def test_digraph(self):
+        with pytest.raises(nx.NetworkXNotImplemented):
+            G = nx.DiGraph()
+            G.add_edge(0, 1)
+            G.add_edge(1, 2)
+            G.add_edge(2, 3)
+            label_name = "label"
+            G.nodes[0][label_name] = "A"
+            G.nodes[3][label_name] = "B"
+            node_classification.harmonic_function(G)
+
+    def test_one_labeled_node(self):
+        G = nx.path_graph(4)
+        label_name = "label"
+        G.nodes[0][label_name] = "A"
+        predicted = node_classification.harmonic_function(G, label_name=label_name)
+        assert predicted[0] == "A"
+        assert predicted[1] == "A"
+        assert predicted[2] == "A"
+        assert predicted[3] == "A"
+
+    def test_nodes_all_labeled(self):
+        G = nx.karate_club_graph()
+        label_name = "club"
+        predicted = node_classification.harmonic_function(G, label_name=label_name)
+        for i in range(len(G)):
+            assert predicted[i] == G.nodes[i][label_name]
+
+    def test_labeled_nodes_are_not_changed(self):
+        G = nx.karate_club_graph()
+        label_name = "club"
+        label_removed = {0, 1, 2, 3, 4, 5, 6, 7}
+        for i in label_removed:
+            del G.nodes[i][label_name]
+        predicted = node_classification.harmonic_function(G, label_name=label_name)
+        label_not_removed = set(range(len(G))) - label_removed
+        for i in label_not_removed:
+            assert predicted[i] == G.nodes[i][label_name]
+
+
+class TestLocalAndGlobalConsistency:
+    def test_path_graph(self):
+        G = nx.path_graph(4)
+        label_name = "label"
+        G.nodes[0][label_name] = "A"
+        G.nodes[3][label_name] = "B"
+        predicted = node_classification.local_and_global_consistency(
+            G, label_name=label_name
+        )
+        assert predicted[0] == "A"
+        assert predicted[1] == "A"
+        assert predicted[2] == "B"
+        assert predicted[3] == "B"
+
+    def test_no_labels(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.path_graph(4)
+            node_classification.local_and_global_consistency(G)
+
+    def test_no_nodes(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.Graph()
+            node_classification.local_and_global_consistency(G)
+
+    def test_no_edges(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.Graph()
+            G.add_node(1)
+            G.add_node(2)
+            node_classification.local_and_global_consistency(G)
+
+    def test_digraph(self):
+        with pytest.raises(nx.NetworkXNotImplemented):
+            G = nx.DiGraph()
+            G.add_edge(0, 1)
+            G.add_edge(1, 2)
+            G.add_edge(2, 3)
+            label_name = "label"
+            G.nodes[0][label_name] = "A"
+            G.nodes[3][label_name] = "B"
+            node_classification.harmonic_function(G)
+
+    def test_one_labeled_node(self):
+        G = nx.path_graph(4)
+        label_name = "label"
+        G.nodes[0][label_name] = "A"
+        predicted = node_classification.local_and_global_consistency(
+            G, label_name=label_name
+        )
+        assert predicted[0] == "A"
+        assert predicted[1] == "A"
+        assert predicted[2] == "A"
+        assert predicted[3] == "A"
+
+    def test_nodes_all_labeled(self):
+        G = nx.karate_club_graph()
+        label_name = "club"
+        predicted = node_classification.local_and_global_consistency(
+            G, alpha=0, label_name=label_name
+        )
+        for i in range(len(G)):
+            assert predicted[i] == G.nodes[i][label_name]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_non_randomness.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_non_randomness.py
new file mode 100644
index 00000000..2f495be2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_non_randomness.py
@@ -0,0 +1,42 @@
+import pytest
+
+import networkx as nx
+
+np = pytest.importorskip("numpy")
+
+
+@pytest.mark.parametrize(
+    "k, weight, expected",
+    [
+        (None, None, 7.21),  # infers 3 communities
+        (2, None, 11.7),
+        (None, "weight", 25.45),
+        (2, "weight", 38.8),
+    ],
+)
+def test_non_randomness(k, weight, expected):
+    G = nx.karate_club_graph()
+    np.testing.assert_almost_equal(
+        nx.non_randomness(G, k, weight)[0], expected, decimal=2
+    )
+
+
+def test_non_connected():
+    G = nx.Graph([(1, 2)])
+    G.add_node(3)
+    with pytest.raises(nx.NetworkXException, match="Non connected"):
+        nx.non_randomness(G)
+
+
+def test_self_loops():
+    G = nx.Graph()
+    G.add_edge(1, 2)
+    G.add_edge(1, 1)
+    with pytest.raises(nx.NetworkXError, match="Graph must not contain self-loops"):
+        nx.non_randomness(G)
+
+
+def test_empty_graph():
+    G = nx.empty_graph(1)
+    with pytest.raises(nx.NetworkXError, match=".*not applicable to empty graphs"):
+        nx.non_randomness(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planar_drawing.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planar_drawing.py
new file mode 100644
index 00000000..a5de0e03
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planar_drawing.py
@@ -0,0 +1,274 @@
+import math
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms.planar_drawing import triangulate_embedding
+
+
+def test_graph1():
+    embedding_data = {0: [1, 2, 3], 1: [2, 0], 2: [3, 0, 1], 3: [2, 0]}
+    check_embedding_data(embedding_data)
+
+
+def test_graph2():
+    embedding_data = {
+        0: [8, 6],
+        1: [2, 6, 9],
+        2: [8, 1, 7, 9, 6, 4],
+        3: [9],
+        4: [2],
+        5: [6, 8],
+        6: [9, 1, 0, 5, 2],
+        7: [9, 2],
+        8: [0, 2, 5],
+        9: [1, 6, 2, 7, 3],
+    }
+    check_embedding_data(embedding_data)
+
+
+def test_circle_graph():
+    embedding_data = {
+        0: [1, 9],
+        1: [0, 2],
+        2: [1, 3],
+        3: [2, 4],
+        4: [3, 5],
+        5: [4, 6],
+        6: [5, 7],
+        7: [6, 8],
+        8: [7, 9],
+        9: [8, 0],
+    }
+    check_embedding_data(embedding_data)
+
+
+def test_grid_graph():
+    embedding_data = {
+        (0, 1): [(0, 0), (1, 1), (0, 2)],
+        (1, 2): [(1, 1), (2, 2), (0, 2)],
+        (0, 0): [(0, 1), (1, 0)],
+        (2, 1): [(2, 0), (2, 2), (1, 1)],
+        (1, 1): [(2, 1), (1, 2), (0, 1), (1, 0)],
+        (2, 0): [(1, 0), (2, 1)],
+        (2, 2): [(1, 2), (2, 1)],
+        (1, 0): [(0, 0), (2, 0), (1, 1)],
+        (0, 2): [(1, 2), (0, 1)],
+    }
+    check_embedding_data(embedding_data)
+
+
+def test_one_node_graph():
+    embedding_data = {0: []}
+    check_embedding_data(embedding_data)
+
+
+def test_two_node_graph():
+    embedding_data = {0: [1], 1: [0]}
+    check_embedding_data(embedding_data)
+
+
+def test_three_node_graph():
+    embedding_data = {0: [1, 2], 1: [0, 2], 2: [0, 1]}
+    check_embedding_data(embedding_data)
+
+
+def test_multiple_component_graph1():
+    embedding_data = {0: [], 1: []}
+    check_embedding_data(embedding_data)
+
+
+def test_multiple_component_graph2():
+    embedding_data = {0: [1, 2], 1: [0, 2], 2: [0, 1], 3: [4, 5], 4: [3, 5], 5: [3, 4]}
+    check_embedding_data(embedding_data)
+
+
+def test_invalid_half_edge():
+    with pytest.raises(nx.NetworkXException):
+        embedding_data = {1: [2, 3, 4], 2: [1, 3, 4], 3: [1, 2, 4], 4: [1, 2, 3]}
+        embedding = nx.PlanarEmbedding()
+        embedding.set_data(embedding_data)
+        nx.combinatorial_embedding_to_pos(embedding)
+
+
+def test_triangulate_embedding1():
+    embedding = nx.PlanarEmbedding()
+    embedding.add_node(1)
+    expected_embedding = {1: []}
+    check_triangulation(embedding, expected_embedding)
+
+
+def test_triangulate_embedding2():
+    embedding = nx.PlanarEmbedding()
+    embedding.connect_components(1, 2)
+    expected_embedding = {1: [2], 2: [1]}
+    check_triangulation(embedding, expected_embedding)
+
+
+def check_triangulation(embedding, expected_embedding):
+    res_embedding, _ = triangulate_embedding(embedding, True)
+    assert (
+        res_embedding.get_data() == expected_embedding
+    ), "Expected embedding incorrect"
+    res_embedding, _ = triangulate_embedding(embedding, False)
+    assert (
+        res_embedding.get_data() == expected_embedding
+    ), "Expected embedding incorrect"
+
+
+def check_embedding_data(embedding_data):
+    """Checks that the planar embedding of the input is correct"""
+    embedding = nx.PlanarEmbedding()
+    embedding.set_data(embedding_data)
+    pos_fully = nx.combinatorial_embedding_to_pos(embedding, False)
+    msg = "Planar drawing does not conform to the embedding (fully triangulation)"
+    assert planar_drawing_conforms_to_embedding(embedding, pos_fully), msg
+    check_edge_intersections(embedding, pos_fully)
+    pos_internally = nx.combinatorial_embedding_to_pos(embedding, True)
+    msg = "Planar drawing does not conform to the embedding (internal triangulation)"
+    assert planar_drawing_conforms_to_embedding(embedding, pos_internally), msg
+    check_edge_intersections(embedding, pos_internally)
+
+
+def is_close(a, b, rel_tol=1e-09, abs_tol=0.0):
+    # Check if float numbers are basically equal, for python >=3.5 there is
+    # function for that in the standard library
+    return abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)
+
+
+def point_in_between(a, b, p):
+    # checks if p is on the line between a and b
+    x1, y1 = a
+    x2, y2 = b
+    px, py = p
+    dist_1_2 = math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)
+    dist_1_p = math.sqrt((x1 - px) ** 2 + (y1 - py) ** 2)
+    dist_2_p = math.sqrt((x2 - px) ** 2 + (y2 - py) ** 2)
+    return is_close(dist_1_p + dist_2_p, dist_1_2)
+
+
+def check_edge_intersections(G, pos):
+    """Check all edges in G for intersections.
+
+    Raises an exception if an intersection is found.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+    pos : dict
+        Maps every node to a tuple (x, y) representing its position
+
+    """
+    for a, b in G.edges():
+        for c, d in G.edges():
+            # Check if end points are different
+            if a != c and b != d and b != c and a != d:
+                x1, y1 = pos[a]
+                x2, y2 = pos[b]
+                x3, y3 = pos[c]
+                x4, y4 = pos[d]
+                determinant = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4)
+                if determinant != 0:  # the lines are not parallel
+                    # calculate intersection point, see:
+                    # https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection
+                    px = (x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (
+                        x3 * y4 - y3 * x4
+                    ) / determinant
+                    py = (x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (
+                        x3 * y4 - y3 * x4
+                    ) / determinant
+
+                    # Check if intersection lies between the points
+                    if point_in_between(pos[a], pos[b], (px, py)) and point_in_between(
+                        pos[c], pos[d], (px, py)
+                    ):
+                        msg = f"There is an intersection at {px},{py}"
+                        raise nx.NetworkXException(msg)
+
+                #  Check overlap
+                msg = "A node lies on a edge connecting two other nodes"
+                if (
+                    point_in_between(pos[a], pos[b], pos[c])
+                    or point_in_between(pos[a], pos[b], pos[d])
+                    or point_in_between(pos[c], pos[d], pos[a])
+                    or point_in_between(pos[c], pos[d], pos[b])
+                ):
+                    raise nx.NetworkXException(msg)
+    # No edge intersection found
+
+
+class Vector:
+    """Compare vectors by their angle without loss of precision
+
+    All vectors in direction [0, 1] are the smallest.
+    The vectors grow in clockwise direction.
+    """
+
+    __slots__ = ["x", "y", "node", "quadrant"]
+
+    def __init__(self, x, y, node):
+        self.x = x
+        self.y = y
+        self.node = node
+        if self.x >= 0 and self.y > 0:
+            self.quadrant = 1
+        elif self.x > 0 and self.y <= 0:
+            self.quadrant = 2
+        elif self.x <= 0 and self.y < 0:
+            self.quadrant = 3
+        else:
+            self.quadrant = 4
+
+    def __eq__(self, other):
+        return self.quadrant == other.quadrant and self.x * other.y == self.y * other.x
+
+    def __lt__(self, other):
+        if self.quadrant < other.quadrant:
+            return True
+        elif self.quadrant > other.quadrant:
+            return False
+        else:
+            return self.x * other.y < self.y * other.x
+
+    def __ne__(self, other):
+        return self != other
+
+    def __le__(self, other):
+        return not other < self
+
+    def __gt__(self, other):
+        return other < self
+
+    def __ge__(self, other):
+        return not self < other
+
+
+def planar_drawing_conforms_to_embedding(embedding, pos):
+    """Checks if pos conforms to the planar embedding
+
+    Returns true iff the neighbors are actually oriented in the orientation
+    specified of the embedding
+    """
+    for v in embedding:
+        nbr_vectors = []
+        v_pos = pos[v]
+        for nbr in embedding[v]:
+            new_vector = Vector(pos[nbr][0] - v_pos[0], pos[nbr][1] - v_pos[1], nbr)
+            nbr_vectors.append(new_vector)
+        # Sort neighbors according to their phi angle
+        nbr_vectors.sort()
+        for idx, nbr_vector in enumerate(nbr_vectors):
+            cw_vector = nbr_vectors[(idx + 1) % len(nbr_vectors)]
+            ccw_vector = nbr_vectors[idx - 1]
+            if (
+                embedding[v][nbr_vector.node]["cw"] != cw_vector.node
+                or embedding[v][nbr_vector.node]["ccw"] != ccw_vector.node
+            ):
+                return False
+            if cw_vector.node != nbr_vector.node and cw_vector == nbr_vector:
+                # Lines overlap
+                return False
+            if ccw_vector.node != nbr_vector.node and ccw_vector == nbr_vector:
+                # Lines overlap
+                return False
+    return True
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planarity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planarity.py
new file mode 100644
index 00000000..99bcff41
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planarity.py
@@ -0,0 +1,535 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms.planarity import (
+    check_planarity_recursive,
+    get_counterexample,
+    get_counterexample_recursive,
+)
+
+
+class TestLRPlanarity:
+    """Nose Unit tests for the :mod:`networkx.algorithms.planarity` module.
+
+    Tests three things:
+    1. Check that the result is correct
+        (returns planar if and only if the graph is actually planar)
+    2. In case a counter example is returned: Check if it is correct
+    3. In case an embedding is returned: Check if its actually an embedding
+    """
+
+    @staticmethod
+    def check_graph(G, is_planar=None):
+        """Raises an exception if the lr_planarity check returns a wrong result
+
+        Parameters
+        ----------
+        G : NetworkX graph
+        is_planar : bool
+            The expected result of the planarity check.
+            If set to None only counter example or embedding are verified.
+
+        """
+
+        # obtain results of planarity check
+        is_planar_lr, result = nx.check_planarity(G, True)
+        is_planar_lr_rec, result_rec = check_planarity_recursive(G, True)
+
+        if is_planar is not None:
+            # set a message for the assert
+            if is_planar:
+                msg = "Wrong planarity check result. Should be planar."
+            else:
+                msg = "Wrong planarity check result. Should be non-planar."
+
+            # check if the result is as expected
+            assert is_planar == is_planar_lr, msg
+            assert is_planar == is_planar_lr_rec, msg
+
+        if is_planar_lr:
+            # check embedding
+            check_embedding(G, result)
+            check_embedding(G, result_rec)
+        else:
+            # check counter example
+            check_counterexample(G, result)
+            check_counterexample(G, result_rec)
+
+    def test_simple_planar_graph(self):
+        e = [
+            (1, 2),
+            (2, 3),
+            (3, 4),
+            (4, 6),
+            (6, 7),
+            (7, 1),
+            (1, 5),
+            (5, 2),
+            (2, 4),
+            (4, 5),
+            (5, 7),
+        ]
+        self.check_graph(nx.Graph(e), is_planar=True)
+
+    def test_planar_with_selfloop(self):
+        e = [
+            (1, 1),
+            (2, 2),
+            (3, 3),
+            (4, 4),
+            (5, 5),
+            (1, 2),
+            (1, 3),
+            (1, 5),
+            (2, 5),
+            (2, 4),
+            (3, 4),
+            (3, 5),
+            (4, 5),
+        ]
+        self.check_graph(nx.Graph(e), is_planar=True)
+
+    def test_k3_3(self):
+        self.check_graph(nx.complete_bipartite_graph(3, 3), is_planar=False)
+
+    def test_k5(self):
+        self.check_graph(nx.complete_graph(5), is_planar=False)
+
+    def test_multiple_components_planar(self):
+        e = [(1, 2), (2, 3), (3, 1), (4, 5), (5, 6), (6, 4)]
+        self.check_graph(nx.Graph(e), is_planar=True)
+
+    def test_multiple_components_non_planar(self):
+        G = nx.complete_graph(5)
+        # add another planar component to the non planar component
+        # G stays non planar
+        G.add_edges_from([(6, 7), (7, 8), (8, 6)])
+        self.check_graph(G, is_planar=False)
+
+    def test_non_planar_with_selfloop(self):
+        G = nx.complete_graph(5)
+        # add self loops
+        for i in range(5):
+            G.add_edge(i, i)
+        self.check_graph(G, is_planar=False)
+
+    def test_non_planar1(self):
+        # tests a graph that has no subgraph directly isomorph to K5 or K3_3
+        e = [
+            (1, 5),
+            (1, 6),
+            (1, 7),
+            (2, 6),
+            (2, 3),
+            (3, 5),
+            (3, 7),
+            (4, 5),
+            (4, 6),
+            (4, 7),
+        ]
+        self.check_graph(nx.Graph(e), is_planar=False)
+
+    def test_loop(self):
+        # test a graph with a selfloop
+        e = [(1, 2), (2, 2)]
+        G = nx.Graph(e)
+        self.check_graph(G, is_planar=True)
+
+    def test_comp(self):
+        # test multiple component graph
+        e = [(1, 2), (3, 4)]
+        G = nx.Graph(e)
+        G.remove_edge(1, 2)
+        self.check_graph(G, is_planar=True)
+
+    def test_goldner_harary(self):
+        # test goldner-harary graph (a maximal planar graph)
+        e = [
+            (1, 2),
+            (1, 3),
+            (1, 4),
+            (1, 5),
+            (1, 7),
+            (1, 8),
+            (1, 10),
+            (1, 11),
+            (2, 3),
+            (2, 4),
+            (2, 6),
+            (2, 7),
+            (2, 9),
+            (2, 10),
+            (2, 11),
+            (3, 4),
+            (4, 5),
+            (4, 6),
+            (4, 7),
+            (5, 7),
+            (6, 7),
+            (7, 8),
+            (7, 9),
+            (7, 10),
+            (8, 10),
+            (9, 10),
+            (10, 11),
+        ]
+        G = nx.Graph(e)
+        self.check_graph(G, is_planar=True)
+
+    def test_planar_multigraph(self):
+        G = nx.MultiGraph([(1, 2), (1, 2), (1, 2), (1, 2), (2, 3), (3, 1)])
+        self.check_graph(G, is_planar=True)
+
+    def test_non_planar_multigraph(self):
+        G = nx.MultiGraph(nx.complete_graph(5))
+        G.add_edges_from([(1, 2)] * 5)
+        self.check_graph(G, is_planar=False)
+
+    def test_planar_digraph(self):
+        G = nx.DiGraph([(1, 2), (2, 3), (2, 4), (4, 1), (4, 2), (1, 4), (3, 2)])
+        self.check_graph(G, is_planar=True)
+
+    def test_non_planar_digraph(self):
+        G = nx.DiGraph(nx.complete_graph(5))
+        G.remove_edge(1, 2)
+        G.remove_edge(4, 1)
+        self.check_graph(G, is_planar=False)
+
+    def test_single_component(self):
+        # Test a graph with only a single node
+        G = nx.Graph()
+        G.add_node(1)
+        self.check_graph(G, is_planar=True)
+
+    def test_graph1(self):
+        G = nx.Graph(
+            [
+                (3, 10),
+                (2, 13),
+                (1, 13),
+                (7, 11),
+                (0, 8),
+                (8, 13),
+                (0, 2),
+                (0, 7),
+                (0, 10),
+                (1, 7),
+            ]
+        )
+        self.check_graph(G, is_planar=True)
+
+    def test_graph2(self):
+        G = nx.Graph(
+            [
+                (1, 2),
+                (4, 13),
+                (0, 13),
+                (4, 5),
+                (7, 10),
+                (1, 7),
+                (0, 3),
+                (2, 6),
+                (5, 6),
+                (7, 13),
+                (4, 8),
+                (0, 8),
+                (0, 9),
+                (2, 13),
+                (6, 7),
+                (3, 6),
+                (2, 8),
+            ]
+        )
+        self.check_graph(G, is_planar=False)
+
+    def test_graph3(self):
+        G = nx.Graph(
+            [
+                (0, 7),
+                (3, 11),
+                (3, 4),
+                (8, 9),
+                (4, 11),
+                (1, 7),
+                (1, 13),
+                (1, 11),
+                (3, 5),
+                (5, 7),
+                (1, 3),
+                (0, 4),
+                (5, 11),
+                (5, 13),
+            ]
+        )
+        self.check_graph(G, is_planar=False)
+
+    def test_counterexample_planar(self):
+        with pytest.raises(nx.NetworkXException):
+            # Try to get a counterexample of a planar graph
+            G = nx.Graph()
+            G.add_node(1)
+            get_counterexample(G)
+
+    def test_counterexample_planar_recursive(self):
+        with pytest.raises(nx.NetworkXException):
+            # Try to get a counterexample of a planar graph
+            G = nx.Graph()
+            G.add_node(1)
+            get_counterexample_recursive(G)
+
    def test_edge_removal_from_planar_embedding(self):
        # PlanarEmbedding.check_structure() must succeed after edge removal
        edges = ((0, 1), (1, 2), (2, 3), (3, 4), (4, 0), (0, 2), (0, 3))
        G = nx.Graph(edges)
        cert, P = nx.check_planarity(G)
        assert cert is True
        # Removing a chord must leave a structurally valid embedding.
        P.remove_edge(0, 2)
        self.check_graph(P, is_planar=True)
        # Re-insert an edge as a matched pair of half-edges; the ccw/cw
        # reference node 2 anchors the rotation order at both endpoints.
        P.add_half_edge_ccw(1, 3, 2)
        P.add_half_edge_cw(3, 1, 2)
        self.check_graph(P, is_planar=True)
        # Bulk removal goes through remove_edge() and must stay consistent.
        P.remove_edges_from(((0, 3), (1, 3)))
        self.check_graph(P, is_planar=True)
+
+
def check_embedding(G, embedding):
    """Raise if *embedding* is not a valid combinatorial embedding of *G*.

    Parameters
    ----------
    G : NetworkX graph
    embedding : nx.PlanarEmbedding
        Candidate combinatorial embedding to validate against ``G``.

    Notes
    -----
    Checks the following things:
        - The type of the embedding is correct
        - The nodes and edges match the original graph
        - Every half edge has its matching opposite half edge
        - No intersections of edges (checked by Euler's formula)
    """
    if not isinstance(embedding, nx.PlanarEmbedding):
        raise nx.NetworkXException("Bad embedding. Not of type nx.PlanarEmbedding")

    # Internal consistency (matching half-edges, Euler's formula, ...).
    embedding.check_structure()

    # Node sets must agree exactly.
    assert set(embedding.nodes) == set(
        G.nodes
    ), "Bad embedding. Nodes don't match the original graph."

    # Every non-loop edge of G must appear as both of its half-edges.
    expected_half_edges = {
        half_edge
        for u, v in G.edges
        if u != v
        for half_edge in ((u, v), (v, u))
    }
    assert expected_half_edges == set(
        embedding.edges
    ), "Bad embedding. Edges don't match the original graph."
+
+
def check_counterexample(G, sub_graph):
    """Raises an exception if the counterexample is wrong.

    Parameters
    ----------
    G : NetworkX graph
    sub_graph : NetworkX graph
        A subgraph of ``G`` claimed to contain a subdivision of K5 or
        K3,3 (a Kuratowski counterexample to planarity).

    Notes
    -----
    Self loops are discarded, every degree-2 node is contracted away
    (replacing a u-x-v path by the edge u-v), and the remainder must be
    isomorphic to K5 or K3,3, otherwise an exception is raised.
    """
    # 1. Create the sub graph
    sub_graph = nx.Graph(sub_graph)

    # 2. Remove self loops
    for u in sub_graph:
        if sub_graph.has_edge(u, u):
            sub_graph.remove_edge(u, u)

    # keep track of nodes we might need to contract
    contract = list(sub_graph)

    # 3. Contract Edges
    # Worklist: contracting a node can drop a neighbor to degree 2,
    # so neighbors are re-queued after each contraction.
    while len(contract) > 0:
        contract_node = contract.pop()
        if contract_node not in sub_graph:
            # Node was already contracted
            continue
        degree = sub_graph.degree[contract_node]
        # Check if we can remove the node
        if degree == 2:
            # Get the two neighbors
            neighbors = iter(sub_graph[contract_node])
            u = next(neighbors)
            v = next(neighbors)
            # Save nodes for later
            contract.append(u)
            contract.append(v)
            # Contract edge
            sub_graph.remove_node(contract_node)
            sub_graph.add_edge(u, v)

    # 4. Check for isomorphism with K5 or K3_3 graphs
    if len(sub_graph) == 5:
        if not nx.is_isomorphic(nx.complete_graph(5), sub_graph):
            raise nx.NetworkXException("Bad counter example.")
    elif len(sub_graph) == 6:
        if not nx.is_isomorphic(nx.complete_bipartite_graph(3, 3), sub_graph):
            raise nx.NetworkXException("Bad counter example.")
    else:
        raise nx.NetworkXException("Bad counter example.")
+
+
class TestPlanarEmbeddingClass:
    """Unit tests for the ``nx.PlanarEmbedding`` data structure."""

    def test_add_half_edge(self):
        """Reference-node validation and resulting cw/ccw ordering."""
        embedding = nx.PlanarEmbedding()
        embedding.add_half_edge(0, 1)
        # cw/ccw references must be existing neighbors of the start node.
        with pytest.raises(
            nx.NetworkXException, match="Invalid clockwise reference node."
        ):
            embedding.add_half_edge(0, 2, cw=3)
        with pytest.raises(
            nx.NetworkXException, match="Invalid counterclockwise reference node."
        ):
            embedding.add_half_edge(0, 2, ccw=3)
        with pytest.raises(
            nx.NetworkXException, match="Only one of cw/ccw can be specified."
        ):
            embedding.add_half_edge(0, 2, cw=1, ccw=1)
        # Once a node has outgoing half-edges, a reference is mandatory.
        with pytest.raises(
            nx.NetworkXException,
            match=(
                r"Node already has out-half-edge\(s\), either"
                " cw or ccw reference node required."
            ),
        ):
            embedding.add_half_edge(0, 2)
        # these should work
        embedding.add_half_edge(0, 2, cw=1)
        embedding.add_half_edge(0, 3, ccw=1)
        assert sorted(embedding.edges(data=True)) == [
            (0, 1, {"ccw": 2, "cw": 3}),
            (0, 2, {"cw": 1, "ccw": 3}),
            (0, 3, {"cw": 2, "ccw": 1}),
        ]

    def test_get_data(self):
        """get_data() lists each node's neighbors in clockwise order."""
        embedding = self.get_star_embedding(4)
        data = embedding.get_data()
        data_cmp = {0: [3, 2, 1], 1: [0], 2: [0], 3: [0]}
        assert data == data_cmp

    def test_edge_removal(self):
        """Removing edges keeps the rotation systems of all nodes valid."""
        embedding = nx.PlanarEmbedding()
        embedding.set_data(
            {
                1: [2, 5, 7],
                2: [1, 3, 4, 5],
                3: [2, 4],
                4: [3, 6, 5, 2],
                5: [7, 1, 2, 4],
                6: [4, 7],
                7: [6, 1, 5],
            }
        )
        # remove_edges_from() calls remove_edge(), so both are tested here
        embedding.remove_edges_from(((5, 4), (1, 5)))
        embedding.check_structure()
        embedding_expected = nx.PlanarEmbedding()
        embedding_expected.set_data(
            {
                1: [2, 7],
                2: [1, 3, 4, 5],
                3: [2, 4],
                4: [3, 6, 2],
                5: [7, 2],
                6: [4, 7],
                7: [6, 1, 5],
            }
        )
        assert nx.utils.graphs_equal(embedding, embedding_expected)

    def test_missing_edge_orientation(self):
        """An edge without cw/ccw attributes fails the structure check."""
        embedding = nx.PlanarEmbedding({1: {2: {}}, 2: {1: {}}})
        with pytest.raises(nx.NetworkXException):
            # Invalid structure because the orientation of the edge was not set
            embedding.check_structure()

    def test_invalid_edge_orientation(self):
        """Inconsistent cw/ccw pointers fail the structure check."""
        embedding = nx.PlanarEmbedding(
            {
                1: {2: {"cw": 2, "ccw": 2}},
                2: {1: {"cw": 1, "ccw": 1}},
                1: {3: {}},
                3: {1: {}},
            }
        )
        with pytest.raises(nx.NetworkXException):
            embedding.check_structure()

    def test_missing_half_edge(self):
        """A half-edge without its opposite twin is rejected."""
        embedding = nx.PlanarEmbedding()
        embedding.add_half_edge(1, 2)
        with pytest.raises(nx.NetworkXException):
            # Invalid structure because other half edge is missing
            embedding.check_structure()

    def test_not_fulfilling_euler_formula(self):
        """An 'embedding' of K5 violates Euler's formula and is rejected."""
        embedding = nx.PlanarEmbedding()
        for i in range(5):
            ref = None
            for j in range(5):
                if i != j:
                    embedding.add_half_edge(i, j, cw=ref)
                    ref = j
        with pytest.raises(nx.NetworkXException):
            embedding.check_structure()

    def test_missing_reference(self):
        """A ccw reference that is not a neighbor raises immediately."""
        embedding = nx.PlanarEmbedding()
        with pytest.raises(nx.NetworkXException, match="Invalid reference node."):
            embedding.add_half_edge(1, 2, ccw=3)

    def test_connect_components(self):
        """connect_components works on an empty embedding (smoke test)."""
        embedding = nx.PlanarEmbedding()
        embedding.connect_components(1, 2)

    def test_successful_face_traversal(self):
        """Traversing the single face of one edge visits both endpoints."""
        embedding = nx.PlanarEmbedding()
        embedding.add_half_edge(1, 2)
        embedding.add_half_edge(2, 1)
        face = embedding.traverse_face(1, 2)
        assert face == [1, 2]

    def test_unsuccessful_face_traversal(self):
        """Traversal raises when cw/ccw point at a missing node."""
        embedding = nx.PlanarEmbedding(
            {1: {2: {"cw": 3, "ccw": 2}}, 2: {1: {"cw": 3, "ccw": 1}}}
        )
        with pytest.raises(nx.NetworkXException):
            embedding.traverse_face(1, 2)

    def test_forbidden_methods(self):
        """Plain edge-insertion methods are disabled on PlanarEmbedding."""
        embedding = nx.PlanarEmbedding()
        embedding.add_node(42)  # no exception
        embedding.add_nodes_from([(23, 24)])  # no exception
        with pytest.raises(NotImplementedError):
            embedding.add_edge(1, 3)
        with pytest.raises(NotImplementedError):
            embedding.add_edges_from([(0, 2), (1, 4)])
        with pytest.raises(NotImplementedError):
            embedding.add_weighted_edges_from([(0, 2, 350), (1, 4, 125)])

    @staticmethod
    def get_star_embedding(n):
        # Build an embedding of a star: node 0 connected to 1..n-1,
        # inserting each spoke clockwise after the previous one.
        embedding = nx.PlanarEmbedding()
        ref = None
        for i in range(1, n):
            embedding.add_half_edge(0, i, cw=ref)
            ref = i
            embedding.add_half_edge(i, 0)
        return embedding
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_polynomials.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_polynomials.py
new file mode 100644
index 00000000..a81d6a69
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_polynomials.py
@@ -0,0 +1,57 @@
+"""Unit tests for the :mod:`networkx.algorithms.polynomials` module."""
+
+import pytest
+
+import networkx as nx
+
+sympy = pytest.importorskip("sympy")
+
+
# Mapping of input graphs to a string representation of their tutte polynomials
_test_tutte_graphs = {
    nx.complete_graph(1): "1",
    nx.complete_graph(4): "x**3 + 3*x**2 + 4*x*y + 2*x + y**3 + 3*y**2 + 2*y",
    nx.cycle_graph(5): "x**4 + x**3 + x**2 + x + y",
    nx.diamond_graph(): "x**3 + 2*x**2 + 2*x*y + x + y**2 + y",
}

# Mapping of input graphs to a string representation of their chromatic
# polynomials
_test_chromatic_graphs = {
    nx.complete_graph(1): "x",
    nx.complete_graph(4): "x**4 - 6*x**3 + 11*x**2 - 6*x",
    nx.cycle_graph(5): "x**5 - 5*x**4 + 10*x**3 - 10*x**2 + 4*x",
    nx.diamond_graph(): "x**4 - 5*x**3 + 8*x**2 - 4*x",
    nx.path_graph(5): "x**5 - 4*x**4 + 6*x**3 - 4*x**2 + x",
}
+
+
@pytest.mark.parametrize(("G", "expected"), _test_tutte_graphs.items())
def test_tutte_polynomial(G, expected):
    """Tutte polynomial of known graphs matches the precomputed form."""
    polynomial = nx.tutte_polynomial(G)
    assert polynomial.equals(expected)
+
+
@pytest.mark.parametrize("G", _test_tutte_graphs.keys())
def test_tutte_polynomial_disjoint(G):
    """Tutte polynomial factors into the Tutte polynomials of its components.
    Verify this property with the disjoint union of two copies of the input graph.
    """
    t = nx.tutte_polynomial(G)
    doubled = nx.disjoint_union(G, G)
    assert sympy.simplify(t * t).equals(nx.tutte_polynomial(doubled))
+
+
@pytest.mark.parametrize(("G", "expected"), _test_chromatic_graphs.items())
def test_chromatic_polynomial(G, expected):
    """Chromatic polynomial of known graphs matches the precomputed form."""
    polynomial = nx.chromatic_polynomial(G)
    assert polynomial.equals(expected)
+
+
@pytest.mark.parametrize("G", _test_chromatic_graphs.keys())
def test_chromatic_polynomial_disjoint(G):
    """Chromatic polynomial factors into the Chromatic polynomials of its
    components. Verify this property with the disjoint union of two copies of
    the input graph.
    """
    x = nx.chromatic_polynomial(G)
    doubled = nx.disjoint_union(G, G)
    assert sympy.simplify(x * x).equals(nx.chromatic_polynomial(doubled))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_reciprocity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_reciprocity.py
new file mode 100644
index 00000000..e713bc43
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_reciprocity.py
@@ -0,0 +1,37 @@
+import pytest
+
+import networkx as nx
+
+
class TestReciprocity:
    """Tests for directed-graph reciprocity measures."""

    def test_reciprocity_digraph(self):
        """Whole-graph reciprocity of a mutual dyad is 1."""
        DG = nx.DiGraph([(1, 2), (2, 1)])
        assert nx.reciprocity(DG) == 1.0

    def test_overall_reciprocity_empty_graph(self):
        """overall_reciprocity is undefined on an empty digraph and raises."""
        DG = nx.DiGraph()
        with pytest.raises(nx.NetworkXError):
            nx.overall_reciprocity(DG)

    def test_reciprocity_graph_nodes(self):
        """Per-node reciprocity for a list of nodes returns a mapping."""
        DG = nx.DiGraph([(1, 2), (2, 3), (3, 2)])
        assert nx.reciprocity(DG, [1, 2]) == {1: 0.0, 2: 2 / 3}

    def test_reciprocity_graph_node(self):
        """Reciprocity of a single node is returned as a scalar."""
        DG = nx.DiGraph([(1, 2), (2, 3), (3, 2)])
        assert nx.reciprocity(DG, 2) == 2 / 3

    def test_reciprocity_graph_isolated_nodes(self):
        """Reciprocity of an isolated node is undefined and raises."""
        DG = nx.DiGraph([(1, 2)])
        DG.add_node(4)
        with pytest.raises(nx.NetworkXError):
            nx.reciprocity(DG, 4)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_regular.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_regular.py
new file mode 100644
index 00000000..a8b4c3a3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_regular.py
@@ -0,0 +1,92 @@
+import pytest
+
+import networkx
+import networkx as nx
+import networkx.algorithms.regular as reg
+import networkx.generators as gen
+
+
class TestKFactor:
    """Tests for ``networkx.algorithms.regular.k_factor``."""

    @staticmethod
    def _assert_k_factor(g, k):
        # A valid k-factor is a subgraph of g in which every node has degree k.
        factor = reg.k_factor(g, k)
        for u, v in factor.edges():
            assert g.has_edge(u, v)
        for _, degree in factor.degree():
            assert degree == k

    def test_k_factor_trivial(self):
        """A 4-cycle is its own 2-factor."""
        g = gen.cycle_graph(4)
        f = reg.k_factor(g, 2)
        assert g.edges == f.edges

    def test_k_factor1(self):
        """A 4x4 grid graph has a 2-factor."""
        self._assert_k_factor(gen.grid_2d_graph(4, 4), 2)

    def test_k_factor2(self):
        """K6 has a 3-factor."""
        self._assert_k_factor(gen.complete_graph(6), 3)

    def test_k_factor3(self):
        """No 3-factor exists in a 4x4 grid graph."""
        with pytest.raises(nx.NetworkXUnfeasible):
            reg.k_factor(gen.grid_2d_graph(4, 4), 3)

    def test_k_factor4(self):
        # Perfect matching doesn't exist for 4,4 hexagonal lattice graph
        g = gen.lattice.hexagonal_lattice_graph(4, 4)
        with pytest.raises(nx.NetworkXUnfeasible):
            reg.k_factor(g, 2)

    def test_k_factor5(self):
        # small k to exercise SmallKGadget
        self._assert_k_factor(gen.complete_graph(6), 2)
+
+
class TestIsRegular:
    """Tests for ``regular.is_regular`` on graphs and digraphs."""

    def test_is_regular1(self):
        """Every node of a cycle has degree 2."""
        assert reg.is_regular(gen.cycle_graph(4))

    def test_is_regular2(self):
        """Complete graphs are (n-1)-regular."""
        assert reg.is_regular(gen.complete_graph(5))

    def test_is_regular3(self):
        """A lollipop graph mixes degrees, so it is not regular."""
        assert not reg.is_regular(gen.lollipop_graph(5, 5))

    def test_is_regular4(self):
        """A directed 3-cycle has in- and out-degree 1 everywhere."""
        g = nx.DiGraph()
        g.add_edges_from([(0, 1), (1, 2), (2, 0)])
        assert reg.is_regular(g)
+
+
def test_is_regular_empty_graph_raises():
    """is_regular is a pointless concept on the null graph and raises."""
    with pytest.raises(nx.NetworkXPointlessConcept, match="Graph has no nodes"):
        nx.is_regular(nx.Graph())
+
+
class TestIsKRegular:
    """Tests for ``regular.is_k_regular``."""

    def test_is_k_regular1(self):
        """A 4-cycle is 2-regular and not 3-regular."""
        g = gen.cycle_graph(4)
        assert reg.is_k_regular(g, 2)
        assert not reg.is_k_regular(g, 3)

    def test_is_k_regular2(self):
        """K5 is exactly 4-regular."""
        g = gen.complete_graph(5)
        assert reg.is_k_regular(g, 4)
        assert not reg.is_k_regular(g, 3)
        assert not reg.is_k_regular(g, 6)

    def test_is_k_regular3(self):
        """A lollipop graph is not k-regular for these k."""
        g = gen.lollipop_graph(5, 5)
        assert not reg.is_k_regular(g, 5)
        assert not reg.is_k_regular(g, 6)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_richclub.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_richclub.py
new file mode 100644
index 00000000..1bdb6684
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_richclub.py
@@ -0,0 +1,149 @@
+import pytest
+
+import networkx as nx
+
+
def test_richclub():
    """Unnormalized rich-club coefficients of a small hand-checked graph."""
    G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
    rc = nx.richclub.rich_club_coefficient(G, normalized=False)
    assert rc == {0: 12.0 / 30, 1: 8.0 / 12}

    # Single-degree lookup agrees with the full mapping.
    assert nx.richclub.rich_club_coefficient(G, normalized=False)[0] == 12.0 / 30.0
+
+
def test_richclub_seed():
    """Normalization with a fixed seed is deterministic."""
    G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
    assert nx.richclub.rich_club_coefficient(G, Q=2, seed=1) == {0: 1.0, 1: 1.0}
+
+
def test_richclub_normalized():
    """Normalized coefficients on a graph that equals its own null model."""
    G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
    assert nx.richclub.rich_club_coefficient(G, Q=2, seed=42) == {0: 1.0, 1: 1.0}
+
+
def test_richclub2():
    """Unnormalized coefficients on a large balanced binary tree."""
    T = nx.balanced_tree(2, 10)
    rc = nx.richclub.rich_club_coefficient(T, normalized=False)
    expected = {
        0: 4092 / (2047 * 2046.0),
        1: 2044.0 / (1023 * 1022),
        2: 2040.0 / (1022 * 1021),
    }
    assert rc == expected
+
+
def test_richclub3():
    """Edge case: the karate club graph with its full degree range."""
    rc = nx.rich_club_coefficient(nx.karate_club_graph(), normalized=False)
    expected = {
        0: 156.0 / 1122,
        1: 154.0 / 1056,
        2: 110.0 / 462,
        3: 78.0 / 240,
        4: 44.0 / 90,
        5: 22.0 / 42,
        6: 10.0 / 20,
        7: 10.0 / 20,
        8: 10.0 / 20,
        9: 6.0 / 12,
        10: 2.0 / 6,
        11: 2.0 / 6,
        12: 0.0,
        13: 0.0,
        14: 0.0,
        15: 0.0,
    }
    assert rc == expected
+
+
def test_richclub4():
    """Coefficients vanish once only low-degree nodes remain."""
    edges = [(0, 1), (0, 2), (0, 3), (0, 4), (4, 5), (5, 9), (6, 9), (7, 9), (8, 9)]
    rc = nx.rich_club_coefficient(nx.Graph(edges), normalized=False)
    assert rc == {0: 18 / 90.0, 1: 6 / 12.0, 2: 0.0, 3: 0.0}
+
+
def test_richclub_exception():
    """Directed graphs are not supported."""
    with pytest.raises(nx.NetworkXNotImplemented):
        nx.rich_club_coefficient(nx.DiGraph())
+
+
def test_rich_club_exception2():
    """Multigraphs are not supported."""
    with pytest.raises(nx.NetworkXNotImplemented):
        nx.rich_club_coefficient(nx.MultiGraph())
+
+
def test_rich_club_selfloop():
    """Graphs with self loops are rejected with a descriptive error."""
    G = nx.Graph([(1, 1), (1, 2)])  # (1, 1) is a self loop
    with pytest.raises(
        Exception,
        match="rich_club_coefficient is not implemented for " "graphs with self loops.",
    ):
        nx.rich_club_coefficient(G)
+
+
def test_rich_club_leq_3_nodes_unnormalized():
    """Unnormalized coefficients for graphs on at most three nodes."""
    # Edgeless graphs with 0 to 3 nodes yield an empty mapping.
    G = nx.Graph()
    assert nx.rich_club_coefficient(G, normalized=False) == {}
    for node in range(3):
        G.add_node(node)
        assert nx.rich_club_coefficient(G, normalized=False) == {}

    # 2 nodes, single edge
    G = nx.Graph([(0, 1)])
    assert nx.rich_club_coefficient(G, normalized=False) == {0: 1}

    # 3 nodes, single edge
    G = nx.Graph()
    G.add_nodes_from([0, 1, 2])
    G.add_edge(0, 1)
    assert nx.rich_club_coefficient(G, normalized=False) == {0: 1}

    # 3 nodes, 2 edges (path)
    G.add_edge(1, 2)
    assert nx.rich_club_coefficient(G, normalized=False) == {0: 2 / 3}

    # 3 nodes, 3 edges (triangle)
    G.add_edge(0, 2)
    assert nx.rich_club_coefficient(G, normalized=False) == {0: 1, 1: 1}
+
+
def test_rich_club_leq_3_nodes_normalized():
    """Normalization requires at least four nodes; 0-3 nodes must raise."""
    G = nx.Graph()
    with pytest.raises(
        nx.exception.NetworkXError,
        match="Graph has fewer than four nodes",
    ):
        nx.rich_club_coefficient(G, normalized=True)

    for node in range(3):
        G.add_node(node)
        with pytest.raises(
            nx.exception.NetworkXError,
            match="Graph has fewer than four nodes",
        ):
            nx.rich_club_coefficient(G, normalized=True)
+
+
+# def test_richclub2_normalized():
+#    T = nx.balanced_tree(2,10)
+#    rcNorm = nx.richclub.rich_club_coefficient(T,Q=2)
+#    assert_true(rcNorm[0] ==1.0 and rcNorm[1] < 0.9 and rcNorm[2] < 0.9)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_similarity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_similarity.py
new file mode 100644
index 00000000..3836ccfe
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_similarity.py
@@ -0,0 +1,946 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms.similarity import (
+    graph_edit_distance,
+    optimal_edit_paths,
+    optimize_graph_edit_distance,
+)
+from networkx.generators.classic import (
+    circular_ladder_graph,
+    cycle_graph,
+    path_graph,
+    wheel_graph,
+)
+
+
def nmatch(n1, n2):
    """Node matcher for GED tests: attribute dicts must compare equal."""
    return n1 == n2
+
+
def ematch(e1, e2):
    """Edge matcher for GED tests: attribute dicts must compare equal."""
    return e1 == e2
+
+
def getCanonical():
    """Build the small labeled reference graph used by the GED tests."""
    G = nx.Graph()
    for node in "ABCD":
        G.add_node(node, label=node)
    for u, v in (("A", "B"), ("B", "C"), ("B", "D")):
        G.add_edge(u, v, label=f"{u.lower()}-{v.lower()}")
    return G
+
+
+class TestSimilarity:
    @classmethod
    def setup_class(cls):
        # Inject numpy into the module namespace (tests below reference
        # ``np``) and skip the whole class when numpy/scipy are missing.
        global np
        np = pytest.importorskip("numpy")
        pytest.importorskip("scipy")
+
+    def test_graph_edit_distance_roots_and_timeout(self):
+        G0 = nx.star_graph(5)
+        G1 = G0.copy()
+        pytest.raises(ValueError, graph_edit_distance, G0, G1, roots=[2])
+        pytest.raises(ValueError, graph_edit_distance, G0, G1, roots=[2, 3, 4])
+        pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(9, 3))
+        pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(3, 9))
+        pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(9, 9))
+        assert graph_edit_distance(G0, G1, roots=(1, 2)) == 0
+        assert graph_edit_distance(G0, G1, roots=(0, 1)) == 8
+        assert graph_edit_distance(G0, G1, roots=(1, 2), timeout=5) == 0
+        assert graph_edit_distance(G0, G1, roots=(0, 1), timeout=5) == 8
+        assert graph_edit_distance(G0, G1, roots=(0, 1), timeout=0.0001) is None
+        # test raise on 0 timeout
+        pytest.raises(nx.NetworkXError, graph_edit_distance, G0, G1, timeout=0)
+
+    def test_graph_edit_distance(self):
+        G0 = nx.Graph()
+        G1 = path_graph(6)
+        G2 = cycle_graph(6)
+        G3 = wheel_graph(7)
+
+        assert graph_edit_distance(G0, G0) == 0
+        assert graph_edit_distance(G0, G1) == 11
+        assert graph_edit_distance(G1, G0) == 11
+        assert graph_edit_distance(G0, G2) == 12
+        assert graph_edit_distance(G2, G0) == 12
+        assert graph_edit_distance(G0, G3) == 19
+        assert graph_edit_distance(G3, G0) == 19
+
+        assert graph_edit_distance(G1, G1) == 0
+        assert graph_edit_distance(G1, G2) == 1
+        assert graph_edit_distance(G2, G1) == 1
+        assert graph_edit_distance(G1, G3) == 8
+        assert graph_edit_distance(G3, G1) == 8
+
+        assert graph_edit_distance(G2, G2) == 0
+        assert graph_edit_distance(G2, G3) == 7
+        assert graph_edit_distance(G3, G2) == 7
+
+        assert graph_edit_distance(G3, G3) == 0
+
+    def test_graph_edit_distance_node_match(self):
+        G1 = cycle_graph(5)
+        G2 = cycle_graph(5)
+        for n, attr in G1.nodes.items():
+            attr["color"] = "red" if n % 2 == 0 else "blue"
+        for n, attr in G2.nodes.items():
+            attr["color"] = "red" if n % 2 == 1 else "blue"
+        assert graph_edit_distance(G1, G2) == 0
+        assert (
+            graph_edit_distance(
+                G1, G2, node_match=lambda n1, n2: n1["color"] == n2["color"]
+            )
+            == 1
+        )
+
+    def test_graph_edit_distance_edge_match(self):
+        G1 = path_graph(6)
+        G2 = path_graph(6)
+        for e, attr in G1.edges.items():
+            attr["color"] = "red" if min(e) % 2 == 0 else "blue"
+        for e, attr in G2.edges.items():
+            attr["color"] = "red" if min(e) // 3 == 0 else "blue"
+        assert graph_edit_distance(G1, G2) == 0
+        assert (
+            graph_edit_distance(
+                G1, G2, edge_match=lambda e1, e2: e1["color"] == e2["color"]
+            )
+            == 2
+        )
+
+    def test_graph_edit_distance_node_cost(self):
+        G1 = path_graph(6)
+        G2 = path_graph(6)
+        for n, attr in G1.nodes.items():
+            attr["color"] = "red" if n % 2 == 0 else "blue"
+        for n, attr in G2.nodes.items():
+            attr["color"] = "red" if n % 2 == 1 else "blue"
+
+        def node_subst_cost(uattr, vattr):
+            if uattr["color"] == vattr["color"]:
+                return 1
+            else:
+                return 10
+
+        def node_del_cost(attr):
+            if attr["color"] == "blue":
+                return 20
+            else:
+                return 50
+
+        def node_ins_cost(attr):
+            if attr["color"] == "blue":
+                return 40
+            else:
+                return 100
+
+        assert (
+            graph_edit_distance(
+                G1,
+                G2,
+                node_subst_cost=node_subst_cost,
+                node_del_cost=node_del_cost,
+                node_ins_cost=node_ins_cost,
+            )
+            == 6
+        )
+
+    def test_graph_edit_distance_edge_cost(self):
+        G1 = path_graph(6)
+        G2 = path_graph(6)
+        for e, attr in G1.edges.items():
+            attr["color"] = "red" if min(e) % 2 == 0 else "blue"
+        for e, attr in G2.edges.items():
+            attr["color"] = "red" if min(e) // 3 == 0 else "blue"
+
+        def edge_subst_cost(gattr, hattr):
+            if gattr["color"] == hattr["color"]:
+                return 0.01
+            else:
+                return 0.1
+
+        def edge_del_cost(attr):
+            if attr["color"] == "blue":
+                return 0.2
+            else:
+                return 0.5
+
+        def edge_ins_cost(attr):
+            if attr["color"] == "blue":
+                return 0.4
+            else:
+                return 1.0
+
+        assert (
+            graph_edit_distance(
+                G1,
+                G2,
+                edge_subst_cost=edge_subst_cost,
+                edge_del_cost=edge_del_cost,
+                edge_ins_cost=edge_ins_cost,
+            )
+            == 0.23
+        )
+
+    def test_graph_edit_distance_upper_bound(self):
+        G1 = circular_ladder_graph(2)
+        G2 = circular_ladder_graph(6)
+        assert graph_edit_distance(G1, G2, upper_bound=5) is None
+        assert graph_edit_distance(G1, G2, upper_bound=24) == 22
+        assert graph_edit_distance(G1, G2) == 22
+
    def test_optimal_edit_paths(self):
        """All six optimal edit paths between P3 and C3 are enumerated."""
        G1 = path_graph(3)
        G2 = cycle_graph(3)
        paths, cost = optimal_edit_paths(G1, G2)
        # Turning a 3-path into a triangle needs exactly one edge insertion,
        # and any of the 3! node mappings achieves it.
        assert cost == 1
        assert len(paths) == 6

        def canonical(vertex_path, edge_path):
            # Order-independent representation of one edit path so that
            # path sets can be compared regardless of enumeration order.
            return (
                tuple(sorted(vertex_path)),
                tuple(sorted(edge_path, key=lambda x: (None in x, x))),
            )

        expected_paths = [
            (
                [(0, 0), (1, 1), (2, 2)],
                [((0, 1), (0, 1)), ((1, 2), (1, 2)), (None, (0, 2))],
            ),
            (
                [(0, 0), (1, 2), (2, 1)],
                [((0, 1), (0, 2)), ((1, 2), (1, 2)), (None, (0, 1))],
            ),
            (
                [(0, 1), (1, 0), (2, 2)],
                [((0, 1), (0, 1)), ((1, 2), (0, 2)), (None, (1, 2))],
            ),
            (
                [(0, 1), (1, 2), (2, 0)],
                [((0, 1), (1, 2)), ((1, 2), (0, 2)), (None, (0, 1))],
            ),
            (
                [(0, 2), (1, 0), (2, 1)],
                [((0, 1), (0, 2)), ((1, 2), (0, 1)), (None, (1, 2))],
            ),
            (
                [(0, 2), (1, 1), (2, 0)],
                [((0, 1), (1, 2)), ((1, 2), (0, 1)), (None, (0, 2))],
            ),
        ]
        assert {canonical(*p) for p in paths} == {canonical(*p) for p in expected_paths}
+
+    def test_optimize_graph_edit_distance(self):
+        G1 = circular_ladder_graph(2)
+        G2 = circular_ladder_graph(6)
+        bestcost = 1000
+        for cost in optimize_graph_edit_distance(G1, G2):
+            assert cost < bestcost
+            bestcost = cost
+        assert bestcost == 22
+
+    # def test_graph_edit_distance_bigger(self):
+    #     G1 = circular_ladder_graph(12)
+    #     G2 = circular_ladder_graph(16)
+    #     assert_equal(graph_edit_distance(G1, G2), 22)
+
+    def test_selfloops(self):
+        G0 = nx.Graph()
+        G1 = nx.Graph()
+        G1.add_edges_from((("A", "A"), ("A", "B")))
+        G2 = nx.Graph()
+        G2.add_edges_from((("A", "B"), ("B", "B")))
+        G3 = nx.Graph()
+        G3.add_edges_from((("A", "A"), ("A", "B"), ("B", "B")))
+
+        assert graph_edit_distance(G0, G0) == 0
+        assert graph_edit_distance(G0, G1) == 4
+        assert graph_edit_distance(G1, G0) == 4
+        assert graph_edit_distance(G0, G2) == 4
+        assert graph_edit_distance(G2, G0) == 4
+        assert graph_edit_distance(G0, G3) == 5
+        assert graph_edit_distance(G3, G0) == 5
+
+        assert graph_edit_distance(G1, G1) == 0
+        assert graph_edit_distance(G1, G2) == 0
+        assert graph_edit_distance(G2, G1) == 0
+        assert graph_edit_distance(G1, G3) == 1
+        assert graph_edit_distance(G3, G1) == 1
+
+        assert graph_edit_distance(G2, G2) == 0
+        assert graph_edit_distance(G2, G3) == 1
+        assert graph_edit_distance(G3, G2) == 1
+
+        assert graph_edit_distance(G3, G3) == 0
+
+    def test_digraph(self):
+        G0 = nx.DiGraph()
+        G1 = nx.DiGraph()
+        G1.add_edges_from((("A", "B"), ("B", "C"), ("C", "D"), ("D", "A")))
+        G2 = nx.DiGraph()
+        G2.add_edges_from((("A", "B"), ("B", "C"), ("C", "D"), ("A", "D")))
+        G3 = nx.DiGraph()
+        G3.add_edges_from((("A", "B"), ("A", "C"), ("B", "D"), ("C", "D")))
+
+        assert graph_edit_distance(G0, G0) == 0
+        assert graph_edit_distance(G0, G1) == 8
+        assert graph_edit_distance(G1, G0) == 8
+        assert graph_edit_distance(G0, G2) == 8
+        assert graph_edit_distance(G2, G0) == 8
+        assert graph_edit_distance(G0, G3) == 8
+        assert graph_edit_distance(G3, G0) == 8
+
+        assert graph_edit_distance(G1, G1) == 0
+        assert graph_edit_distance(G1, G2) == 2
+        assert graph_edit_distance(G2, G1) == 2
+        assert graph_edit_distance(G1, G3) == 4
+        assert graph_edit_distance(G3, G1) == 4
+
+        assert graph_edit_distance(G2, G2) == 0
+        assert graph_edit_distance(G2, G3) == 2
+        assert graph_edit_distance(G3, G2) == 2
+
+        assert graph_edit_distance(G3, G3) == 0
+
+    def test_multigraph(self):
+        G0 = nx.MultiGraph()
+        G1 = nx.MultiGraph()
+        G1.add_edges_from((("A", "B"), ("B", "C"), ("A", "C")))
+        G2 = nx.MultiGraph()
+        G2.add_edges_from((("A", "B"), ("B", "C"), ("B", "C"), ("A", "C")))
+        G3 = nx.MultiGraph()
+        G3.add_edges_from((("A", "B"), ("B", "C"), ("A", "C"), ("A", "C"), ("A", "C")))
+
+        assert graph_edit_distance(G0, G0) == 0
+        assert graph_edit_distance(G0, G1) == 6
+        assert graph_edit_distance(G1, G0) == 6
+        assert graph_edit_distance(G0, G2) == 7
+        assert graph_edit_distance(G2, G0) == 7
+        assert graph_edit_distance(G0, G3) == 8
+        assert graph_edit_distance(G3, G0) == 8
+
+        assert graph_edit_distance(G1, G1) == 0
+        assert graph_edit_distance(G1, G2) == 1
+        assert graph_edit_distance(G2, G1) == 1
+        assert graph_edit_distance(G1, G3) == 2
+        assert graph_edit_distance(G3, G1) == 2
+
+        assert graph_edit_distance(G2, G2) == 0
+        assert graph_edit_distance(G2, G3) == 1
+        assert graph_edit_distance(G3, G2) == 1
+
+        assert graph_edit_distance(G3, G3) == 0
+
+    def test_multidigraph(self):
+        G1 = nx.MultiDiGraph()
+        G1.add_edges_from(
+            (
+                ("hardware", "kernel"),
+                ("kernel", "hardware"),
+                ("kernel", "userspace"),
+                ("userspace", "kernel"),
+            )
+        )
+        G2 = nx.MultiDiGraph()
+        G2.add_edges_from(
+            (
+                ("winter", "spring"),
+                ("spring", "summer"),
+                ("summer", "autumn"),
+                ("autumn", "winter"),
+            )
+        )
+
+        assert graph_edit_distance(G1, G2) == 5
+        assert graph_edit_distance(G2, G1) == 5
+
+    # by https://github.com/jfbeaumont
+    def testCopy(self):
+        G = nx.Graph()
+        G.add_node("A", label="A")
+        G.add_node("B", label="B")
+        G.add_edge("A", "B", label="a-b")
+        assert (
+            graph_edit_distance(G, G.copy(), node_match=nmatch, edge_match=ematch) == 0
+        )
+
+    def testSame(self):
+        G1 = nx.Graph()
+        G1.add_node("A", label="A")
+        G1.add_node("B", label="B")
+        G1.add_edge("A", "B", label="a-b")
+        G2 = nx.Graph()
+        G2.add_node("A", label="A")
+        G2.add_node("B", label="B")
+        G2.add_edge("A", "B", label="a-b")
+        assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 0
+
+    def testOneEdgeLabelDiff(self):
+        G1 = nx.Graph()
+        G1.add_node("A", label="A")
+        G1.add_node("B", label="B")
+        G1.add_edge("A", "B", label="a-b")
+        G2 = nx.Graph()
+        G2.add_node("A", label="A")
+        G2.add_node("B", label="B")
+        G2.add_edge("A", "B", label="bad")
+        assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1
+
+    def testOneNodeLabelDiff(self):
+        G1 = nx.Graph()
+        G1.add_node("A", label="A")
+        G1.add_node("B", label="B")
+        G1.add_edge("A", "B", label="a-b")
+        G2 = nx.Graph()
+        G2.add_node("A", label="Z")
+        G2.add_node("B", label="B")
+        G2.add_edge("A", "B", label="a-b")
+        assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1
+
+    def testOneExtraNode(self):
+        G1 = nx.Graph()
+        G1.add_node("A", label="A")
+        G1.add_node("B", label="B")
+        G1.add_edge("A", "B", label="a-b")
+        G2 = nx.Graph()
+        G2.add_node("A", label="A")
+        G2.add_node("B", label="B")
+        G2.add_edge("A", "B", label="a-b")
+        G2.add_node("C", label="C")
+        assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1
+
+    def testOneExtraEdge(self):
+        G1 = nx.Graph()
+        G1.add_node("A", label="A")
+        G1.add_node("B", label="B")
+        G1.add_node("C", label="C")
+        G1.add_node("C", label="C")
+        G1.add_edge("A", "B", label="a-b")
+        G2 = nx.Graph()
+        G2.add_node("A", label="A")
+        G2.add_node("B", label="B")
+        G2.add_node("C", label="C")
+        G2.add_edge("A", "B", label="a-b")
+        G2.add_edge("A", "C", label="a-c")
+        assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1
+
+    def testOneExtraNodeAndEdge(self):
+        G1 = nx.Graph()
+        G1.add_node("A", label="A")
+        G1.add_node("B", label="B")
+        G1.add_edge("A", "B", label="a-b")
+        G2 = nx.Graph()
+        G2.add_node("A", label="A")
+        G2.add_node("B", label="B")
+        G2.add_node("C", label="C")
+        G2.add_edge("A", "B", label="a-b")
+        G2.add_edge("A", "C", label="a-c")
+        assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2
+
+    def testGraph1(self):
+        G1 = getCanonical()
+        G2 = nx.Graph()
+        G2.add_node("A", label="A")
+        G2.add_node("B", label="B")
+        G2.add_node("D", label="D")
+        G2.add_node("E", label="E")
+        G2.add_edge("A", "B", label="a-b")
+        G2.add_edge("B", "D", label="b-d")
+        G2.add_edge("D", "E", label="d-e")
+        assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 3
+
+    def testGraph2(self):
+        G1 = getCanonical()
+        G2 = nx.Graph()
+        G2.add_node("A", label="A")
+        G2.add_node("B", label="B")
+        G2.add_node("C", label="C")
+        G2.add_node("D", label="D")
+        G2.add_node("E", label="E")
+        G2.add_edge("A", "B", label="a-b")
+        G2.add_edge("B", "C", label="b-c")
+        G2.add_edge("C", "D", label="c-d")
+        G2.add_edge("C", "E", label="c-e")
+        assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 4
+
+    def testGraph3(self):
+        G1 = getCanonical()
+        G2 = nx.Graph()
+        G2.add_node("A", label="A")
+        G2.add_node("B", label="B")
+        G2.add_node("C", label="C")
+        G2.add_node("D", label="D")
+        G2.add_node("E", label="E")
+        G2.add_node("F", label="F")
+        G2.add_node("G", label="G")
+        G2.add_edge("A", "C", label="a-c")
+        G2.add_edge("A", "D", label="a-d")
+        G2.add_edge("D", "E", label="d-e")
+        G2.add_edge("D", "F", label="d-f")
+        G2.add_edge("D", "G", label="d-g")
+        G2.add_edge("E", "B", label="e-b")
+        assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 12
+
+    def testGraph4(self):
+        G1 = getCanonical()
+        G2 = nx.Graph()
+        G2.add_node("A", label="A")
+        G2.add_node("B", label="B")
+        G2.add_node("C", label="C")
+        G2.add_node("D", label="D")
+        G2.add_edge("A", "B", label="a-b")
+        G2.add_edge("B", "C", label="b-c")
+        G2.add_edge("C", "D", label="c-d")
+        assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2
+
+    def testGraph4_a(self):
+        G1 = getCanonical()
+        G2 = nx.Graph()
+        G2.add_node("A", label="A")
+        G2.add_node("B", label="B")
+        G2.add_node("C", label="C")
+        G2.add_node("D", label="D")
+        G2.add_edge("A", "B", label="a-b")
+        G2.add_edge("B", "C", label="b-c")
+        G2.add_edge("A", "D", label="a-d")
+        assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2
+
+    def testGraph4_b(self):
+        G1 = getCanonical()
+        G2 = nx.Graph()
+        G2.add_node("A", label="A")
+        G2.add_node("B", label="B")
+        G2.add_node("C", label="C")
+        G2.add_node("D", label="D")
+        G2.add_edge("A", "B", label="a-b")
+        G2.add_edge("B", "C", label="b-c")
+        G2.add_edge("B", "D", label="bad")
+        assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1
+
+    # note: nx.simrank_similarity_numpy not included because returns np.array
+    simrank_algs = [
+        nx.simrank_similarity,
+        nx.algorithms.similarity._simrank_similarity_python,
+    ]
+
+    @pytest.mark.parametrize("simrank_similarity", simrank_algs)
+    def test_simrank_no_source_no_target(self, simrank_similarity):
+        G = nx.cycle_graph(5)
+        expected = {
+            0: {
+                0: 1,
+                1: 0.3951219505902448,
+                2: 0.5707317069281646,
+                3: 0.5707317069281646,
+                4: 0.3951219505902449,
+            },
+            1: {
+                0: 0.3951219505902448,
+                1: 1,
+                2: 0.3951219505902449,
+                3: 0.5707317069281646,
+                4: 0.5707317069281646,
+            },
+            2: {
+                0: 0.5707317069281646,
+                1: 0.3951219505902449,
+                2: 1,
+                3: 0.3951219505902449,
+                4: 0.5707317069281646,
+            },
+            3: {
+                0: 0.5707317069281646,
+                1: 0.5707317069281646,
+                2: 0.3951219505902449,
+                3: 1,
+                4: 0.3951219505902449,
+            },
+            4: {
+                0: 0.3951219505902449,
+                1: 0.5707317069281646,
+                2: 0.5707317069281646,
+                3: 0.3951219505902449,
+                4: 1,
+            },
+        }
+        actual = simrank_similarity(G)
+        for k, v in expected.items():
+            assert v == pytest.approx(actual[k], abs=1e-2)
+
+        # For a DiGraph test, use the first graph from the paper cited in
+        # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126
+        G = nx.DiGraph()
+        G.add_node(0, label="Univ")
+        G.add_node(1, label="ProfA")
+        G.add_node(2, label="ProfB")
+        G.add_node(3, label="StudentA")
+        G.add_node(4, label="StudentB")
+        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)])
+
+        expected = {
+            0: {0: 1, 1: 0.0, 2: 0.1323363991265798, 3: 0.0, 4: 0.03387811817640443},
+            1: {0: 0.0, 1: 1, 2: 0.4135512472705618, 3: 0.0, 4: 0.10586911930126384},
+            2: {
+                0: 0.1323363991265798,
+                1: 0.4135512472705618,
+                2: 1,
+                3: 0.04234764772050554,
+                4: 0.08822426608438655,
+            },
+            3: {0: 0.0, 1: 0.0, 2: 0.04234764772050554, 3: 1, 4: 0.3308409978164495},
+            4: {
+                0: 0.03387811817640443,
+                1: 0.10586911930126384,
+                2: 0.08822426608438655,
+                3: 0.3308409978164495,
+                4: 1,
+            },
+        }
+        # Use the importance_factor from the paper to get the same numbers.
+        actual = simrank_similarity(G, importance_factor=0.8)
+        for k, v in expected.items():
+            assert v == pytest.approx(actual[k], abs=1e-2)
+
+    @pytest.mark.parametrize("simrank_similarity", simrank_algs)
+    def test_simrank_source_no_target(self, simrank_similarity):
+        G = nx.cycle_graph(5)
+        expected = {
+            0: 1,
+            1: 0.3951219505902448,
+            2: 0.5707317069281646,
+            3: 0.5707317069281646,
+            4: 0.3951219505902449,
+        }
+        actual = simrank_similarity(G, source=0)
+        assert expected == pytest.approx(actual, abs=1e-2)
+
+        # For a DiGraph test, use the first graph from the paper cited in
+        # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126
+        G = nx.DiGraph()
+        G.add_node(0, label="Univ")
+        G.add_node(1, label="ProfA")
+        G.add_node(2, label="ProfB")
+        G.add_node(3, label="StudentA")
+        G.add_node(4, label="StudentB")
+        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)])
+
+        expected = {0: 1, 1: 0.0, 2: 0.1323363991265798, 3: 0.0, 4: 0.03387811817640443}
+        # Use the importance_factor from the paper to get the same numbers.
+        actual = simrank_similarity(G, importance_factor=0.8, source=0)
+        assert expected == pytest.approx(actual, abs=1e-2)
+
+    @pytest.mark.parametrize("simrank_similarity", simrank_algs)
+    def test_simrank_noninteger_nodes(self, simrank_similarity):
+        G = nx.cycle_graph(5)
+        G = nx.relabel_nodes(G, dict(enumerate("abcde")))
+        expected = {
+            "a": 1,
+            "b": 0.3951219505902448,
+            "c": 0.5707317069281646,
+            "d": 0.5707317069281646,
+            "e": 0.3951219505902449,
+        }
+        actual = simrank_similarity(G, source="a")
+        assert expected == pytest.approx(actual, abs=1e-2)
+
+        # For a DiGraph test, use the first graph from the paper cited in
+        # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126
+        G = nx.DiGraph()
+        G.add_node(0, label="Univ")
+        G.add_node(1, label="ProfA")
+        G.add_node(2, label="ProfB")
+        G.add_node(3, label="StudentA")
+        G.add_node(4, label="StudentB")
+        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)])
+        node_labels = dict(enumerate(nx.get_node_attributes(G, "label").values()))
+        G = nx.relabel_nodes(G, node_labels)
+
+        expected = {
+            "Univ": 1,
+            "ProfA": 0.0,
+            "ProfB": 0.1323363991265798,
+            "StudentA": 0.0,
+            "StudentB": 0.03387811817640443,
+        }
+        # Use the importance_factor from the paper to get the same numbers.
+        actual = simrank_similarity(G, importance_factor=0.8, source="Univ")
+        assert expected == pytest.approx(actual, abs=1e-2)
+
+    @pytest.mark.parametrize("simrank_similarity", simrank_algs)
+    def test_simrank_source_and_target(self, simrank_similarity):
+        G = nx.cycle_graph(5)
+        expected = 1
+        actual = simrank_similarity(G, source=0, target=0)
+        assert expected == pytest.approx(actual, abs=1e-2)
+
+        # For a DiGraph test, use the first graph from the paper cited in
+        # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126
+        G = nx.DiGraph()
+        G.add_node(0, label="Univ")
+        G.add_node(1, label="ProfA")
+        G.add_node(2, label="ProfB")
+        G.add_node(3, label="StudentA")
+        G.add_node(4, label="StudentB")
+        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)])
+
+        expected = 0.1323363991265798
+        # Use the importance_factor from the paper to get the same numbers.
+        # Use the pair (0,2) because (0,0) and (0,1) have trivial results.
+        actual = simrank_similarity(G, importance_factor=0.8, source=0, target=2)
+        assert expected == pytest.approx(actual, abs=1e-5)
+
+    @pytest.mark.parametrize("alg", simrank_algs)
+    def test_simrank_max_iterations(self, alg):
+        G = nx.cycle_graph(5)
+        pytest.raises(nx.ExceededMaxIterations, alg, G, max_iterations=10)
+
+    def test_simrank_source_not_found(self):
+        G = nx.cycle_graph(5)
+        with pytest.raises(nx.NodeNotFound, match="Source node 10 not in G"):
+            nx.simrank_similarity(G, source=10)
+
+    def test_simrank_target_not_found(self):
+        G = nx.cycle_graph(5)
+        with pytest.raises(nx.NodeNotFound, match="Target node 10 not in G"):
+            nx.simrank_similarity(G, target=10)
+
+    def test_simrank_between_versions(self):
+        G = nx.cycle_graph(5)
+        # _python tolerance 1e-4
+        expected_python_tol4 = {
+            0: 1,
+            1: 0.394512499239852,
+            2: 0.5703550452791322,
+            3: 0.5703550452791323,
+            4: 0.394512499239852,
+        }
+        # _numpy tolerance 1e-4
+        expected_numpy_tol4 = {
+            0: 1.0,
+            1: 0.3947180735764555,
+            2: 0.570482097206368,
+            3: 0.570482097206368,
+            4: 0.3947180735764555,
+        }
+        actual = nx.simrank_similarity(G, source=0)
+        assert expected_numpy_tol4 == pytest.approx(actual, abs=1e-7)
+        # versions differ at 1e-4 level but equal at 1e-3
+        assert expected_python_tol4 != pytest.approx(actual, abs=1e-4)
+        assert expected_python_tol4 == pytest.approx(actual, abs=1e-3)
+
+        actual = nx.similarity._simrank_similarity_python(G, source=0)
+        assert expected_python_tol4 == pytest.approx(actual, abs=1e-7)
+        # versions differ at 1e-4 level but equal at 1e-3
+        assert expected_numpy_tol4 != pytest.approx(actual, abs=1e-4)
+        assert expected_numpy_tol4 == pytest.approx(actual, abs=1e-3)
+
+    def test_simrank_numpy_no_source_no_target(self):
+        G = nx.cycle_graph(5)
+        expected = np.array(
+            [
+                [
+                    1.0,
+                    0.3947180735764555,
+                    0.570482097206368,
+                    0.570482097206368,
+                    0.3947180735764555,
+                ],
+                [
+                    0.3947180735764555,
+                    1.0,
+                    0.3947180735764555,
+                    0.570482097206368,
+                    0.570482097206368,
+                ],
+                [
+                    0.570482097206368,
+                    0.3947180735764555,
+                    1.0,
+                    0.3947180735764555,
+                    0.570482097206368,
+                ],
+                [
+                    0.570482097206368,
+                    0.570482097206368,
+                    0.3947180735764555,
+                    1.0,
+                    0.3947180735764555,
+                ],
+                [
+                    0.3947180735764555,
+                    0.570482097206368,
+                    0.570482097206368,
+                    0.3947180735764555,
+                    1.0,
+                ],
+            ]
+        )
+        actual = nx.similarity._simrank_similarity_numpy(G)
+        np.testing.assert_allclose(expected, actual, atol=1e-7)
+
+    def test_simrank_numpy_source_no_target(self):
+        G = nx.cycle_graph(5)
+        expected = np.array(
+            [
+                1.0,
+                0.3947180735764555,
+                0.570482097206368,
+                0.570482097206368,
+                0.3947180735764555,
+            ]
+        )
+        actual = nx.similarity._simrank_similarity_numpy(G, source=0)
+        np.testing.assert_allclose(expected, actual, atol=1e-7)
+
+    def test_simrank_numpy_source_and_target(self):
+        G = nx.cycle_graph(5)
+        expected = 1.0
+        actual = nx.similarity._simrank_similarity_numpy(G, source=0, target=0)
+        np.testing.assert_allclose(expected, actual, atol=1e-7)
+
+    def test_panther_similarity_unweighted(self):
+        np.random.seed(42)
+
+        G = nx.Graph()
+        G.add_edge(0, 1)
+        G.add_edge(0, 2)
+        G.add_edge(0, 3)
+        G.add_edge(1, 2)
+        G.add_edge(2, 4)
+        expected = {3: 0.5, 2: 0.5, 1: 0.5, 4: 0.125}
+        sim = nx.panther_similarity(G, 0, path_length=2)
+        assert sim == expected
+
+    def test_panther_similarity_weighted(self):
+        np.random.seed(42)
+
+        G = nx.Graph()
+        G.add_edge("v1", "v2", w=5)
+        G.add_edge("v1", "v3", w=1)
+        G.add_edge("v1", "v4", w=2)
+        G.add_edge("v2", "v3", w=0.1)
+        G.add_edge("v3", "v5", w=1)
+        expected = {"v3": 0.75, "v4": 0.5, "v2": 0.5, "v5": 0.25}
+        sim = nx.panther_similarity(G, "v1", path_length=2, weight="w")
+        assert sim == expected
+
+    def test_panther_similarity_source_not_found(self):
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (0, 2), (0, 3), (1, 2), (2, 4)])
+        with pytest.raises(nx.NodeNotFound, match="Source node 10 not in G"):
+            nx.panther_similarity(G, source=10)
+
+    def test_panther_similarity_isolated(self):
+        G = nx.Graph()
+        G.add_nodes_from(range(5))
+        with pytest.raises(
+            nx.NetworkXUnfeasible,
+            match="Panther similarity is not defined for the isolated source node 1.",
+        ):
+            nx.panther_similarity(G, source=1)
+
+    def test_generate_random_paths_unweighted(self):
+        index_map = {}
+        num_paths = 10
+        path_length = 2
+        G = nx.Graph()
+        G.add_edge(0, 1)
+        G.add_edge(0, 2)
+        G.add_edge(0, 3)
+        G.add_edge(1, 2)
+        G.add_edge(2, 4)
+        paths = nx.generate_random_paths(
+            G, num_paths, path_length=path_length, index_map=index_map, seed=42
+        )
+        expected_paths = [
+            [3, 0, 3],
+            [4, 2, 1],
+            [2, 1, 0],
+            [2, 0, 3],
+            [3, 0, 1],
+            [3, 0, 1],
+            [4, 2, 0],
+            [2, 1, 0],
+            [3, 0, 2],
+            [2, 1, 2],
+        ]
+        expected_map = {
+            0: {0, 2, 3, 4, 5, 6, 7, 8},
+            1: {1, 2, 4, 5, 7, 9},
+            2: {1, 2, 3, 6, 7, 8, 9},
+            3: {0, 3, 4, 5, 8},
+            4: {1, 6},
+        }
+
+        assert expected_paths == list(paths)
+        assert expected_map == index_map
+
+    def test_generate_random_paths_weighted(self):
+        np.random.seed(42)
+
+        index_map = {}
+        num_paths = 10
+        path_length = 6
+        G = nx.Graph()
+        G.add_edge("a", "b", weight=0.6)
+        G.add_edge("a", "c", weight=0.2)
+        G.add_edge("c", "d", weight=0.1)
+        G.add_edge("c", "e", weight=0.7)
+        G.add_edge("c", "f", weight=0.9)
+        G.add_edge("a", "d", weight=0.3)
+        paths = nx.generate_random_paths(
+            G, num_paths, path_length=path_length, index_map=index_map
+        )
+
+        expected_paths = [
+            ["d", "c", "f", "c", "d", "a", "b"],
+            ["e", "c", "f", "c", "f", "c", "e"],
+            ["d", "a", "b", "a", "b", "a", "c"],
+            ["b", "a", "d", "a", "b", "a", "b"],
+            ["d", "a", "b", "a", "b", "a", "d"],
+            ["d", "a", "b", "a", "b", "a", "c"],
+            ["d", "a", "b", "a", "b", "a", "b"],
+            ["f", "c", "f", "c", "f", "c", "e"],
+            ["d", "a", "d", "a", "b", "a", "b"],
+            ["e", "c", "f", "c", "e", "c", "d"],
+        ]
+        expected_map = {
+            "d": {0, 2, 3, 4, 5, 6, 8, 9},
+            "c": {0, 1, 2, 5, 7, 9},
+            "f": {0, 1, 9, 7},
+            "a": {0, 2, 3, 4, 5, 6, 8},
+            "b": {0, 2, 3, 4, 5, 6, 8},
+            "e": {1, 9, 7},
+        }
+
+        assert expected_paths == list(paths)
+        assert expected_map == index_map
+
+    def test_symmetry_with_custom_matching(self):
+        print("G2 is edge (a,b) and G3 is edge (a,a)")
+        print("but node order for G2 is (a,b) while for G3 it is (b,a)")
+
+        a, b = "A", "B"
+        G2 = nx.Graph()
+        G2.add_nodes_from((a, b))
+        G2.add_edges_from([(a, b)])
+        G3 = nx.Graph()
+        G3.add_nodes_from((b, a))
+        G3.add_edges_from([(a, a)])
+        for G in (G2, G3):
+            for n in G:
+                G.nodes[n]["attr"] = n
+            for e in G.edges:
+                G.edges[e]["attr"] = e
+        match = lambda x, y: x == y
+
+        print("Starting G2 to G3 GED calculation")
+        assert nx.graph_edit_distance(G2, G3, node_match=match, edge_match=match) == 1
+
+        print("Starting G3 to G2 GED calculation")
+        assert nx.graph_edit_distance(G3, G2, node_match=match, edge_match=match) == 1
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_simple_paths.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_simple_paths.py
new file mode 100644
index 00000000..7855bbad
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_simple_paths.py
@@ -0,0 +1,803 @@
+import random
+
+import pytest
+
+import networkx as nx
+from networkx import convert_node_labels_to_integers as cnlti
+from networkx.algorithms.simple_paths import (
+    _bidirectional_dijkstra,
+    _bidirectional_shortest_path,
+)
+from networkx.utils import arbitrary_element, pairwise
+
+
+class TestIsSimplePath:
+    """Unit tests for the
+    :func:`networkx.algorithms.simple_paths.is_simple_path` function.
+
+    """
+
+    def test_empty_list(self):
+        """Tests that the empty list is not a valid path, since there
+        should be a one-to-one correspondence between paths as lists of
+        nodes and paths as lists of edges.
+
+        """
+        G = nx.trivial_graph()
+        assert not nx.is_simple_path(G, [])
+
+    def test_trivial_path(self):
+        """Tests that the trivial path, a path of length one, is
+        considered a simple path in a graph.
+
+        """
+        G = nx.trivial_graph()
+        assert nx.is_simple_path(G, [0])
+
+    def test_trivial_nonpath(self):
+        """Tests that a list whose sole element is an object not in the
+        graph is not considered a simple path.
+
+        """
+        G = nx.trivial_graph()
+        assert not nx.is_simple_path(G, ["not a node"])
+
+    def test_simple_path(self):
+        G = nx.path_graph(2)
+        assert nx.is_simple_path(G, [0, 1])
+
+    def test_non_simple_path(self):
+        G = nx.path_graph(2)
+        assert not nx.is_simple_path(G, [0, 1, 0])
+
+    def test_cycle(self):
+        G = nx.cycle_graph(3)
+        assert not nx.is_simple_path(G, [0, 1, 2, 0])
+
+    def test_missing_node(self):
+        G = nx.path_graph(2)
+        assert not nx.is_simple_path(G, [0, 2])
+
+    def test_missing_starting_node(self):
+        G = nx.path_graph(2)
+        assert not nx.is_simple_path(G, [2, 0])
+
+    def test_directed_path(self):
+        G = nx.DiGraph([(0, 1), (1, 2)])
+        assert nx.is_simple_path(G, [0, 1, 2])
+
+    def test_directed_non_path(self):
+        G = nx.DiGraph([(0, 1), (1, 2)])
+        assert not nx.is_simple_path(G, [2, 1, 0])
+
+    def test_directed_cycle(self):
+        G = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
+        assert not nx.is_simple_path(G, [0, 1, 2, 0])
+
+    def test_multigraph(self):
+        G = nx.MultiGraph([(0, 1), (0, 1)])
+        assert nx.is_simple_path(G, [0, 1])
+
+    def test_multidigraph(self):
+        G = nx.MultiDiGraph([(0, 1), (0, 1), (1, 0), (1, 0)])
+        assert nx.is_simple_path(G, [0, 1])
+
+
+# Tests for all_simple_paths
+def test_all_simple_paths():
+    G = nx.path_graph(4)
+    paths = nx.all_simple_paths(G, 0, 3)
+    assert {tuple(p) for p in paths} == {(0, 1, 2, 3)}
+
+
+def test_all_simple_paths_with_two_targets_emits_two_paths():
+    G = nx.path_graph(4)
+    G.add_edge(2, 4)
+    paths = nx.all_simple_paths(G, 0, [3, 4])
+    assert {tuple(p) for p in paths} == {(0, 1, 2, 3), (0, 1, 2, 4)}
+
+
+def test_digraph_all_simple_paths_with_two_targets_emits_two_paths():
+    G = nx.path_graph(4, create_using=nx.DiGraph())
+    G.add_edge(2, 4)
+    paths = nx.all_simple_paths(G, 0, [3, 4])
+    assert {tuple(p) for p in paths} == {(0, 1, 2, 3), (0, 1, 2, 4)}
+
+
+def test_all_simple_paths_with_two_targets_cutoff():
+    G = nx.path_graph(4)
+    G.add_edge(2, 4)
+    paths = nx.all_simple_paths(G, 0, [3, 4], cutoff=3)
+    assert {tuple(p) for p in paths} == {(0, 1, 2, 3), (0, 1, 2, 4)}
+
+
+def test_digraph_all_simple_paths_with_two_targets_cutoff():
+    G = nx.path_graph(4, create_using=nx.DiGraph())
+    G.add_edge(2, 4)
+    paths = nx.all_simple_paths(G, 0, [3, 4], cutoff=3)
+    assert {tuple(p) for p in paths} == {(0, 1, 2, 3), (0, 1, 2, 4)}
+
+
+def test_all_simple_paths_with_two_targets_in_line_emits_two_paths():
+    G = nx.path_graph(4)
+    paths = nx.all_simple_paths(G, 0, [2, 3])
+    assert {tuple(p) for p in paths} == {(0, 1, 2), (0, 1, 2, 3)}
+
+
+def test_all_simple_paths_ignores_cycle():
+    G = nx.cycle_graph(3, create_using=nx.DiGraph())
+    G.add_edge(1, 3)
+    paths = nx.all_simple_paths(G, 0, 3)
+    assert {tuple(p) for p in paths} == {(0, 1, 3)}
+
+
+def test_all_simple_paths_with_two_targets_inside_cycle_emits_two_paths():
+    G = nx.cycle_graph(3, create_using=nx.DiGraph())
+    G.add_edge(1, 3)
+    paths = nx.all_simple_paths(G, 0, [2, 3])
+    assert {tuple(p) for p in paths} == {(0, 1, 2), (0, 1, 3)}
+
+
+def test_all_simple_paths_source_target():
+    G = nx.path_graph(4)
+    assert list(nx.all_simple_paths(G, 1, 1)) == [[1]]
+
+
def test_all_simple_paths_cutoff():
    G = nx.complete_graph(4)
    assert set(map(tuple, nx.all_simple_paths(G, 0, 1, cutoff=1))) == {(0, 1)}
    assert set(map(tuple, nx.all_simple_paths(G, 0, 1, cutoff=2))) == {
        (0, 1),
        (0, 2, 1),
        (0, 3, 1),
    }


def test_all_simple_paths_on_non_trivial_graph():
    """you may need to draw this graph to make sure it is reasonable"""
    G = nx.path_graph(5, create_using=nx.DiGraph())
    G.add_edges_from([(0, 5), (1, 5), (1, 3), (5, 4), (4, 2), (4, 3)])
    expected = {
        (1, 2),
        (1, 3, 4, 2),
        (1, 5, 4, 2),
        (1, 3),
        (1, 2, 3),
        (1, 5, 4, 3),
        (1, 5, 4, 2, 3),
    }
    assert set(map(tuple, nx.all_simple_paths(G, 1, [2, 3]))) == expected
    # A cutoff of 3 edges drops only the single length-4 path.
    assert set(map(tuple, nx.all_simple_paths(G, 1, [2, 3], cutoff=3))) == (
        expected - {(1, 5, 4, 2, 3)}
    )
    assert set(map(tuple, nx.all_simple_paths(G, 1, [2, 3], cutoff=2))) == {
        (1, 2),
        (1, 3),
        (1, 2, 3),
    }


def test_all_simple_paths_multigraph():
    G = nx.MultiGraph([(1, 2), (1, 2)])
    assert list(nx.all_simple_paths(G, 1, 1)) == [[1]]
    nx.add_path(G, [3, 1, 10, 2])
    found = list(nx.all_simple_paths(G, 1, 2))
    # The two parallel (1, 2) edges each contribute a path.
    assert len(found) == 3
    assert set(map(tuple, found)) == {(1, 2), (1, 2), (1, 10, 2)}


def test_all_simple_paths_multigraph_with_cutoff():
    G = nx.MultiGraph([(1, 2), (1, 2), (1, 10), (10, 2)])
    found = list(nx.all_simple_paths(G, 1, 2, cutoff=1))
    assert len(found) == 2
    assert set(map(tuple, found)) == {(1, 2), (1, 2)}

    # See GitHub issue #6732.
    G = nx.MultiGraph([(0, 1), (0, 2)])
    assert list(nx.all_simple_paths(G, 0, {1, 2}, cutoff=1)) == [[0, 1], [0, 2]]


def test_all_simple_paths_directed():
    G = nx.DiGraph()
    nx.add_path(G, [1, 2, 3])
    nx.add_path(G, [3, 2, 1])
    # Only the forward chain reaches 3 without revisiting a node.
    assert set(map(tuple, nx.all_simple_paths(G, 1, 3))) == {(1, 2, 3)}


def test_all_simple_paths_empty():
    G = nx.path_graph(4)
    assert list(nx.all_simple_paths(G, 0, 3, cutoff=2)) == []


def test_all_simple_paths_corner_cases():
    assert list(nx.all_simple_paths(nx.empty_graph(2), 0, 0)) == [[0]]
    assert list(nx.all_simple_paths(nx.empty_graph(2), 0, 1)) == []
    assert list(nx.all_simple_paths(nx.path_graph(9), 0, 8, 0)) == []


def test_all_simple_paths_source_in_targets():
    # See GitHub issue #6690.
    G = nx.path_graph(3)
    assert list(nx.all_simple_paths(G, 0, {0, 1, 2})) == [[0], [0, 1], [0, 1, 2]]
+
+
def hamiltonian_path(G, source):
    """Yield the hamiltonian paths of ``G`` that start at ``source``.

    A hamiltonian path visits every node exactly once.  Candidate simple
    paths from ``source`` to each of its neighbors are filtered down to
    those covering all ``len(G)`` nodes.

    Note: only paths ending at a *neighbor* of ``source`` are considered,
    which is sufficient for the complete graphs this helper is tested on.
    """
    # Fix: use the caller-supplied source.  Previously this line read
    # ``source = arbitrary_element(G)``, silently discarding the parameter.
    neighbors = set(G[source]) - {source}
    n = len(G)
    for target in neighbors:
        for path in nx.all_simple_paths(G, source, target):
            if len(path) == n:
                yield path
+
+
def test_hamiltonian_path():
    from itertools import permutations

    G = nx.complete_graph(4)
    found = sorted(list(p) for p in hamiltonian_path(G, 0))
    # Every ordering of the remaining three nodes is a hamiltonian path.
    expected = sorted([0, *p] for p in permutations([1, 2, 3], 3))
    assert found == expected


def test_cutoff_zero():
    # A zero cutoff admits no paths at all.
    G = nx.complete_graph(4)
    assert [list(p) for p in nx.all_simple_paths(G, 0, 3, cutoff=0)] == []
    assert [
        list(p) for p in nx.all_simple_paths(nx.MultiGraph(G), 0, 3, cutoff=0)
    ] == []


def test_source_missing():
    G = nx.Graph()
    nx.add_path(G, [1, 2, 3])
    with pytest.raises(nx.NodeNotFound):
        list(nx.all_simple_paths(nx.MultiGraph(G), 0, 3))


def test_target_missing():
    G = nx.Graph()
    nx.add_path(G, [1, 2, 3])
    with pytest.raises(nx.NodeNotFound):
        list(nx.all_simple_paths(nx.MultiGraph(G), 1, 4))
+
+
+# Tests for all_simple_edge_paths
def test_all_simple_edge_paths():
    G = nx.path_graph(4)
    assert set(map(tuple, nx.all_simple_edge_paths(G, 0, 3))) == {
        ((0, 1), (1, 2), (2, 3))
    }


def test_all_simple_edge_paths_empty_path():
    # A source == target query yields a single empty edge list.
    assert list(nx.all_simple_edge_paths(nx.empty_graph(1), 0, 0)) == [[]]


def test_all_simple_edge_paths_with_two_targets_emits_two_paths():
    G = nx.path_graph(4)
    G.add_edge(2, 4)
    assert set(map(tuple, nx.all_simple_edge_paths(G, 0, [3, 4]))) == {
        ((0, 1), (1, 2), (2, 3)),
        ((0, 1), (1, 2), (2, 4)),
    }


def test_digraph_all_simple_edge_paths_with_two_targets_emits_two_paths():
    G = nx.path_graph(4, create_using=nx.DiGraph())
    G.add_edge(2, 4)
    assert set(map(tuple, nx.all_simple_edge_paths(G, 0, [3, 4]))) == {
        ((0, 1), (1, 2), (2, 3)),
        ((0, 1), (1, 2), (2, 4)),
    }


def test_all_simple_edge_paths_with_two_targets_cutoff():
    G = nx.path_graph(4)
    G.add_edge(2, 4)
    assert set(map(tuple, nx.all_simple_edge_paths(G, 0, [3, 4], cutoff=3))) == {
        ((0, 1), (1, 2), (2, 3)),
        ((0, 1), (1, 2), (2, 4)),
    }


def test_digraph_all_simple_edge_paths_with_two_targets_cutoff():
    G = nx.path_graph(4, create_using=nx.DiGraph())
    G.add_edge(2, 4)
    assert set(map(tuple, nx.all_simple_edge_paths(G, 0, [3, 4], cutoff=3))) == {
        ((0, 1), (1, 2), (2, 3)),
        ((0, 1), (1, 2), (2, 4)),
    }


def test_all_simple_edge_paths_with_two_targets_in_line_emits_two_paths():
    G = nx.path_graph(4)
    assert set(map(tuple, nx.all_simple_edge_paths(G, 0, [2, 3]))) == {
        ((0, 1), (1, 2)),
        ((0, 1), (1, 2), (2, 3)),
    }


def test_all_simple_edge_paths_ignores_cycle():
    G = nx.cycle_graph(3, create_using=nx.DiGraph())
    G.add_edge(1, 3)
    assert set(map(tuple, nx.all_simple_edge_paths(G, 0, 3))) == {((0, 1), (1, 3))}


def test_all_simple_edge_paths_with_two_targets_inside_cycle_emits_two_paths():
    G = nx.cycle_graph(3, create_using=nx.DiGraph())
    G.add_edge(1, 3)
    assert set(map(tuple, nx.all_simple_edge_paths(G, 0, [2, 3]))) == {
        ((0, 1), (1, 2)),
        ((0, 1), (1, 3)),
    }


def test_all_simple_edge_paths_source_target():
    G = nx.path_graph(4)
    assert list(nx.all_simple_edge_paths(G, 1, 1)) == [[]]
+
+
def test_all_simple_edge_paths_cutoff():
    G = nx.complete_graph(4)
    assert set(map(tuple, nx.all_simple_edge_paths(G, 0, 1, cutoff=1))) == {((0, 1),)}
    assert set(map(tuple, nx.all_simple_edge_paths(G, 0, 1, cutoff=2))) == {
        ((0, 1),),
        ((0, 2), (2, 1)),
        ((0, 3), (3, 1)),
    }


def test_all_simple_edge_paths_on_non_trivial_graph():
    """you may need to draw this graph to make sure it is reasonable"""
    G = nx.path_graph(5, create_using=nx.DiGraph())
    G.add_edges_from([(0, 5), (1, 5), (1, 3), (5, 4), (4, 2), (4, 3)])
    expected = {
        ((1, 2),),
        ((1, 3), (3, 4), (4, 2)),
        ((1, 5), (5, 4), (4, 2)),
        ((1, 3),),
        ((1, 2), (2, 3)),
        ((1, 5), (5, 4), (4, 3)),
        ((1, 5), (5, 4), (4, 2), (2, 3)),
    }
    assert set(map(tuple, nx.all_simple_edge_paths(G, 1, [2, 3]))) == expected
    # A cutoff of 3 edges drops only the single 4-edge path.
    assert set(map(tuple, nx.all_simple_edge_paths(G, 1, [2, 3], cutoff=3))) == (
        expected - {((1, 5), (5, 4), (4, 2), (2, 3))}
    )
    assert set(map(tuple, nx.all_simple_edge_paths(G, 1, [2, 3], cutoff=2))) == {
        ((1, 2),),
        ((1, 3),),
        ((1, 2), (2, 3)),
    }


def test_all_simple_edge_paths_multigraph():
    G = nx.MultiGraph([(1, 2), (1, 2)])
    assert list(nx.all_simple_edge_paths(G, 1, 1)) == [[]]
    nx.add_path(G, [3, 1, 10, 2])
    found = list(nx.all_simple_edge_paths(G, 1, 2))
    assert len(found) == 3
    # Edge keys distinguish the two parallel (1, 2) edges.
    assert set(map(tuple, found)) == {
        ((1, 2, 0),),
        ((1, 2, 1),),
        ((1, 10, 0), (10, 2, 0)),
    }


def test_all_simple_edge_paths_multigraph_with_cutoff():
    G = nx.MultiGraph([(1, 2), (1, 2), (1, 10), (10, 2)])
    found = list(nx.all_simple_edge_paths(G, 1, 2, cutoff=1))
    assert len(found) == 2
    assert set(map(tuple, found)) == {((1, 2, 0),), ((1, 2, 1),)}


def test_all_simple_edge_paths_directed():
    G = nx.DiGraph()
    nx.add_path(G, [1, 2, 3])
    nx.add_path(G, [3, 2, 1])
    assert set(map(tuple, nx.all_simple_edge_paths(G, 1, 3))) == {((1, 2), (2, 3))}


def test_all_simple_edge_paths_empty():
    G = nx.path_graph(4)
    assert list(nx.all_simple_edge_paths(G, 0, 3, cutoff=2)) == []


def test_all_simple_edge_paths_corner_cases():
    assert list(nx.all_simple_edge_paths(nx.empty_graph(2), 0, 0)) == [[]]
    assert list(nx.all_simple_edge_paths(nx.empty_graph(2), 0, 1)) == []
    assert list(nx.all_simple_edge_paths(nx.path_graph(9), 0, 8, 0)) == []


def test_all_simple_edge_paths_ignores_self_loop():
    G = nx.Graph([(0, 0), (0, 1), (1, 1), (1, 2)])
    assert list(nx.all_simple_edge_paths(G, 0, 2)) == [[(0, 1), (1, 2)]]
+
+
def hamiltonian_edge_path(G, source):
    """Yield the hamiltonian edge paths of ``G`` that start at ``source``.

    An edge path covering all nodes of ``G`` has exactly ``len(G) - 1``
    edges.  Candidate edge paths from ``source`` to each of its neighbors
    are filtered down to those of that length.

    Note: only paths ending at a *neighbor* of ``source`` are considered,
    which is sufficient for the complete graphs this helper is tested on.
    """
    # Fix: use the caller-supplied source.  Previously this line read
    # ``source = arbitrary_element(G)``, silently discarding the parameter.
    neighbors = set(G[source]) - {source}
    n = len(G)
    for target in neighbors:
        for path in nx.all_simple_edge_paths(G, source, target):
            if len(path) == n - 1:
                yield path
+
+
def test_hamiltonian__edge_path():
    from itertools import permutations

    G = nx.complete_graph(4)
    found = sorted(hamiltonian_edge_path(G, 0))
    # Each ordering of the other three nodes, expressed as edge pairs.
    expected = sorted(list(pairwise([0, *p])) for p in permutations([1, 2, 3], 3))
    assert expected == found


def test_edge_cutoff_zero():
    # A zero cutoff admits no edge paths at all.
    G = nx.complete_graph(4)
    assert [list(p) for p in nx.all_simple_edge_paths(G, 0, 3, cutoff=0)] == []
    assert [
        list(p) for p in nx.all_simple_edge_paths(nx.MultiGraph(G), 0, 3, cutoff=0)
    ] == []


def test_edge_source_missing():
    G = nx.Graph()
    nx.add_path(G, [1, 2, 3])
    with pytest.raises(nx.NodeNotFound):
        list(nx.all_simple_edge_paths(nx.MultiGraph(G), 0, 3))


def test_edge_target_missing():
    G = nx.Graph()
    nx.add_path(G, [1, 2, 3])
    with pytest.raises(nx.NodeNotFound):
        list(nx.all_simple_edge_paths(nx.MultiGraph(G), 1, 4))
+
+
+# Tests for shortest_simple_paths
def test_shortest_simple_paths():
    G = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted")
    paths = nx.shortest_simple_paths(G, 1, 12)
    assert next(paths) == [1, 2, 3, 4, 8, 12]
    assert next(paths) == [1, 5, 6, 7, 8, 12]
    # Paths arrive in nondecreasing length order and cover every simple path.
    all_lengths = sorted(len(p) for p in nx.all_simple_paths(G, 1, 12))
    assert [len(p) for p in nx.shortest_simple_paths(G, 1, 12)] == all_lengths


def test_shortest_simple_paths_singleton_path():
    assert list(nx.shortest_simple_paths(nx.empty_graph(3), 0, 0)) == [[0]]


def test_shortest_simple_paths_directed():
    G = nx.cycle_graph(7, create_using=nx.DiGraph())
    assert list(nx.shortest_simple_paths(G, 0, 3)) == [[0, 1, 2, 3]]


def test_shortest_simple_paths_directed_with_weight_function():
    # NOTE(review): despite the name, the graph here is undirected — this
    # mirrors test_shortest_simple_paths with a constant weight function.
    def cost(u, v, x):
        return 1

    G = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted")
    paths = nx.shortest_simple_paths(G, 1, 12)
    assert next(paths) == [1, 2, 3, 4, 8, 12]
    assert next(paths) == [1, 5, 6, 7, 8, 12]
    weighted_lengths = [
        len(p) for p in nx.shortest_simple_paths(G, 1, 12, weight=cost)
    ]
    assert weighted_lengths == sorted(len(p) for p in nx.all_simple_paths(G, 1, 12))


def test_shortest_simple_paths_with_weight_function():
    def cost(u, v, x):
        return 1

    G = nx.cycle_graph(7, create_using=nx.DiGraph())
    assert list(nx.shortest_simple_paths(G, 0, 3, weight=cost)) == [[0, 1, 2, 3]]


def test_shortest_simple_paths_with_none_weight_function():
    def cost(u, v, x):
        # Returning None hides an edge; only ring steps (|u - v| of 1 or 4)
        # of the complete graph remain usable.
        delta = abs(u - v)
        return 1 if (delta == 1 or delta == 4) else None

    G = nx.complete_graph(5)
    assert list(nx.shortest_simple_paths(G, 0, 2, weight=cost)) == [
        [0, 1, 2],
        [0, 4, 3, 2],
    ]
+
+
def test_Greg_Bernstein():
    g1 = nx.Graph()
    g1.add_nodes_from(["N0", "N1", "N2", "N3", "N4"])
    # (u, v, weight, capacity, name) per link.
    links = [
        ("N4", "N1", 10.0, 50, "L5"),
        ("N4", "N0", 7.0, 40, "L4"),
        ("N0", "N1", 10.0, 45, "L1"),
        ("N3", "N0", 10.0, 50, "L0"),
        ("N2", "N3", 12.0, 30, "L2"),
        ("N1", "N2", 15.0, 42, "L3"),
    ]
    for u, v, w, cap, name in links:
        g1.add_edge(u, v, weight=w, capacity=cap, name=name)
    solution = [["N1", "N0", "N3"], ["N1", "N2", "N3"], ["N1", "N4", "N0", "N3"]]
    assert list(nx.shortest_simple_paths(g1, "N1", "N3", weight="weight")) == solution
+
+
def test_weighted_shortest_simple_path():
    G = nx.complete_graph(5)
    nx.set_edge_attributes(
        G, {e: random.randint(1, 100) for e in G.edges()}, "weight"
    )

    def path_cost(path):
        return sum(G.adj[u][v]["weight"] for u, v in zip(path, path[1:]))

    # Costs must be nondecreasing as paths are yielded.
    previous = 0
    for path in nx.shortest_simple_paths(G, 0, 3, weight="weight"):
        current = path_cost(path)
        assert previous <= current
        previous = current


def test_directed_weighted_shortest_simple_path():
    G = nx.complete_graph(5).to_directed()
    nx.set_edge_attributes(
        G, {e: random.randint(1, 100) for e in G.edges()}, "weight"
    )

    def path_cost(path):
        return sum(G.adj[u][v]["weight"] for u, v in zip(path, path[1:]))

    # Costs must be nondecreasing as paths are yielded.
    previous = 0
    for path in nx.shortest_simple_paths(G, 0, 3, weight="weight"):
        current = path_cost(path)
        assert previous <= current
        previous = current
+
+
def test_weighted_shortest_simple_path_issue2427():
    # Direct edge cheaper than the detour: it must come first.
    G = nx.Graph()
    G.add_weighted_edges_from(
        [("IN", "OUT", 2), ("IN", "A", 1), ("IN", "B", 2), ("B", "OUT", 2)]
    )
    assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [
        ["IN", "OUT"],
        ["IN", "B", "OUT"],
    ]
    # Direct edge more expensive: the detour must come first.
    G = nx.Graph()
    G.add_weighted_edges_from(
        [("IN", "OUT", 10), ("IN", "A", 1), ("IN", "B", 1), ("B", "OUT", 1)]
    )
    assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [
        ["IN", "B", "OUT"],
        ["IN", "OUT"],
    ]


def test_directed_weighted_shortest_simple_path_issue2427():
    # Same scenario as above, on directed graphs.
    G = nx.DiGraph()
    G.add_weighted_edges_from(
        [("IN", "OUT", 2), ("IN", "A", 1), ("IN", "B", 2), ("B", "OUT", 2)]
    )
    assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [
        ["IN", "OUT"],
        ["IN", "B", "OUT"],
    ]
    G = nx.DiGraph()
    G.add_weighted_edges_from(
        [("IN", "OUT", 10), ("IN", "A", 1), ("IN", "B", 1), ("B", "OUT", 1)]
    )
    assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [
        ["IN", "B", "OUT"],
        ["IN", "OUT"],
    ]
+
+
def test_weight_name():
    G = nx.cycle_graph(7)
    nx.set_edge_attributes(G, 1, "weight")
    nx.set_edge_attributes(G, 1, "foo")
    # Make edge (1, 2) expensive under the "foo" attribute only, so the
    # long way around the cycle wins when weight="foo".
    G.adj[1][2]["foo"] = 7
    assert list(nx.shortest_simple_paths(G, 0, 3, weight="foo")) == [
        [0, 6, 5, 4, 3],
        [0, 1, 2, 3],
    ]


def test_ssp_source_missing():
    G = nx.Graph()
    nx.add_path(G, [1, 2, 3])
    with pytest.raises(nx.NodeNotFound):
        list(nx.shortest_simple_paths(G, 0, 3))


def test_ssp_target_missing():
    G = nx.Graph()
    nx.add_path(G, [1, 2, 3])
    with pytest.raises(nx.NodeNotFound):
        list(nx.shortest_simple_paths(G, 1, 4))


def test_ssp_multigraph():
    G = nx.MultiGraph()
    nx.add_path(G, [1, 2, 3])
    with pytest.raises(nx.NetworkXNotImplemented):
        list(nx.shortest_simple_paths(G, 1, 4))


def test_ssp_source_missing2():
    # NOTE(review): the name is historical — this exercises two disconnected
    # components (no path between them), not a missing source node.
    G = nx.Graph()
    nx.add_path(G, [0, 1, 2])
    nx.add_path(G, [3, 4, 5])
    with pytest.raises(nx.NetworkXNoPath):
        list(nx.shortest_simple_paths(G, 0, 3))
+
+
def test_bidirectional_shortest_path_restricted_cycle():
    cycle = nx.cycle_graph(7)
    _, path = _bidirectional_shortest_path(cycle, 0, 3)
    assert path == [0, 1, 2, 3]
    # Blocking node 1 forces the search the long way around the ring.
    _, path = _bidirectional_shortest_path(cycle, 0, 3, ignore_nodes=[1])
    assert path == [0, 6, 5, 4, 3]


def test_bidirectional_shortest_path_restricted_wheel():
    wheel = nx.wheel_graph(6)
    _, path = _bidirectional_shortest_path(wheel, 1, 3)
    assert path in ([1, 0, 3], [1, 2, 3])
    _, path = _bidirectional_shortest_path(wheel, 1, 3, ignore_nodes=[0])
    assert path == [1, 2, 3]
    _, path = _bidirectional_shortest_path(wheel, 1, 3, ignore_nodes=[0, 2])
    assert path == [1, 5, 4, 3]
    _, path = _bidirectional_shortest_path(
        wheel, 1, 3, ignore_edges=[(1, 0), (5, 0), (2, 3)]
    )
    assert path in ([1, 2, 0, 3], [1, 5, 4, 3])


def test_bidirectional_shortest_path_restricted_directed_cycle():
    directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph())
    _, path = _bidirectional_shortest_path(directed_cycle, 0, 3)
    assert path == [0, 1, 2, 3]
    # Removing node 1 disconnects 0 from 3 in the directed ring.
    with pytest.raises(nx.NetworkXNoPath):
        _bidirectional_shortest_path(directed_cycle, 0, 3, ignore_nodes=[1])
    # Ignoring the reverse arc (2, 1) is harmless...
    _, path = _bidirectional_shortest_path(directed_cycle, 0, 3, ignore_edges=[(2, 1)])
    assert path == [0, 1, 2, 3]
    # ...but ignoring the forward arc (1, 2) cuts the only route.
    with pytest.raises(nx.NetworkXNoPath):
        _bidirectional_shortest_path(directed_cycle, 0, 3, ignore_edges=[(1, 2)])


def test_bidirectional_shortest_path_ignore():
    G = nx.Graph()
    nx.add_path(G, [1, 2])
    nx.add_path(G, [1, 3])
    nx.add_path(G, [1, 4])
    # Ignoring either endpoint makes the query unsatisfiable.
    for blocked in ([1], [2]):
        with pytest.raises(nx.NetworkXNoPath):
            _bidirectional_shortest_path(G, 1, 2, ignore_nodes=blocked)
    G = nx.Graph()
    nx.add_path(G, [1, 3])
    nx.add_path(G, [1, 4])
    nx.add_path(G, [3, 2])
    with pytest.raises(nx.NetworkXNoPath):
        _bidirectional_shortest_path(G, 1, 2, ignore_nodes=[1, 2])
+
+
def validate_path(G, s, t, soln_len, path):
    """Check that `path` runs from `s` to `t` with total weight `soln_len`."""
    assert path[0] == s
    assert path[-1] == t
    # Missing weight attributes count as 1, matching unweighted edges.
    total = sum(G[u][v].get("weight", 1) for u, v in zip(path, path[1:]))
    assert soln_len == total


def validate_length_path(G, s, t, soln_len, length, path):
    """Check both the reported `length` and the path itself."""
    assert soln_len == length
    validate_path(G, s, t, length, path)
+
+
def test_bidirectional_dijkstra_restricted():
    # Classic weighted digraph fixture; optimum s->v is s-x-u-v with cost 9.
    XG = nx.DiGraph()
    XG.add_weighted_edges_from(
        [
            ("s", "u", 10),
            ("s", "x", 5),
            ("u", "v", 1),
            ("u", "x", 2),
            ("v", "y", 1),
            ("x", "u", 3),
            ("x", "v", 5),
            ("x", "y", 2),
            ("y", "s", 7),
            ("y", "v", 6),
        ]
    )

    # Weighted 6-cycle with one expensive edge (1, 2).
    XG3 = nx.Graph()
    XG3.add_weighted_edges_from(
        [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]]
    )
    validate_length_path(XG, "s", "v", 9, *_bidirectional_dijkstra(XG, "s", "v"))
    # Excluding node "u" forces s-x-v, cost 10.
    validate_length_path(
        XG, "s", "v", 10, *_bidirectional_dijkstra(XG, "s", "v", ignore_nodes=["u"])
    )
    # Excluding edge (s, x) forces s-u-v, cost 11.
    validate_length_path(
        XG,
        "s",
        "v",
        11,
        *_bidirectional_dijkstra(XG, "s", "v", ignore_edges=[("s", "x")]),
    )
    # Both restrictions together disconnect s from v.
    pytest.raises(
        nx.NetworkXNoPath,
        _bidirectional_dijkstra,
        XG,
        "s",
        "v",
        ignore_nodes=["u"],
        ignore_edges=[("s", "x")],
    )
    # On the cycle: 0-1-2-3 costs 15; each restriction forces the cost-16 detour.
    validate_length_path(XG3, 0, 3, 15, *_bidirectional_dijkstra(XG3, 0, 3))
    validate_length_path(
        XG3, 0, 3, 16, *_bidirectional_dijkstra(XG3, 0, 3, ignore_nodes=[1])
    )
    validate_length_path(
        XG3, 0, 3, 16, *_bidirectional_dijkstra(XG3, 0, 3, ignore_edges=[(2, 3)])
    )
    # Blocking node 1 AND edge (5, 4) severs both directions around the cycle.
    pytest.raises(
        nx.NetworkXNoPath,
        _bidirectional_dijkstra,
        XG3,
        0,
        3,
        ignore_nodes=[1],
        ignore_edges=[(5, 4)],
    )
+
+
def test_bidirectional_dijkstra_no_path():
    # Two disjoint components: no route from 1 to 6 exists.
    G = nx.Graph()
    nx.add_path(G, [1, 2, 3])
    nx.add_path(G, [4, 5, 6])
    with pytest.raises(nx.NetworkXNoPath):
        _bidirectional_dijkstra(G, 1, 6)


def test_bidirectional_dijkstra_ignore():
    G = nx.Graph()
    nx.add_path(G, [1, 2, 10])
    nx.add_path(G, [1, 3, 10])
    # Ignoring either endpoint (or both) removes every 1 -> 2 path.
    for blocked in ([1], [2], [1, 2]):
        with pytest.raises(nx.NetworkXNoPath):
            _bidirectional_dijkstra(G, 1, 2, ignore_nodes=blocked)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smallworld.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smallworld.py
new file mode 100644
index 00000000..d115dd99
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smallworld.py
@@ -0,0 +1,78 @@
+import pytest
+
+pytest.importorskip("numpy")
+
+import random
+
+import networkx as nx
+from networkx import lattice_reference, omega, random_reference, sigma
+
rng = 42  # fixed seed so the Watts-Strogatz fixtures are reproducible


def test_random_reference():
    G = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng)
    Gr = random_reference(G, niter=1, seed=rng)
    # Random rewiring should reduce clustering.
    assert nx.average_clustering(G) > nx.average_clustering(Gr)

    with pytest.raises(nx.NetworkXError):
        next(random_reference(nx.Graph()))
    with pytest.raises(nx.NetworkXNotImplemented):
        next(random_reference(nx.DiGraph()))

    # Smoke test: a disconnected graph is accepted.
    H = nx.Graph(((0, 1), (2, 3)))
    random_reference(H, niter=1, seed=rng)


def test_lattice_reference():
    G = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng)
    Gl = lattice_reference(G, niter=1, seed=rng)
    # Latticization should lengthen average shortest paths.
    assert nx.average_shortest_path_length(Gl) > nx.average_shortest_path_length(G)

    with pytest.raises(nx.NetworkXError):
        lattice_reference(nx.Graph())
    with pytest.raises(nx.NetworkXNotImplemented):
        lattice_reference(nx.DiGraph())

    # Smoke test: a disconnected graph is accepted.
    H = nx.Graph(((0, 1), (2, 3)))
    lattice_reference(H, niter=1)
+
+
def test_sigma():
    Gs = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng)
    Gr = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng)
    sigma_small = sigma(Gs, niter=1, nrand=2, seed=rng)
    sigma_random = sigma(Gr, niter=1, nrand=2, seed=rng)
    # The small-world graph must score higher than the random one.
    assert sigma_random < sigma_small


def test_omega():
    Gl = nx.connected_watts_strogatz_graph(50, 6, 0, seed=rng)
    Gr = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng)
    Gs = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng)
    omega_lattice = omega(Gl, niter=1, nrand=1, seed=rng)
    omega_random = omega(Gr, niter=1, nrand=1, seed=rng)
    omega_small = omega(Gs, niter=1, nrand=1, seed=rng)
    # lattice < small-world < random.
    assert omega_lattice < omega_small < omega_random

    # Test that omega lies within the [-1, 1] bounds
    omega_barbell = nx.omega(nx.barbell_graph(5, 1))
    omega_karate = nx.omega(nx.karate_club_graph(), nrand=2)

    all_values = (omega_lattice, omega_random, omega_small, omega_barbell, omega_karate)
    for value in all_values:
        assert -1 <= value <= 1
+
+
@pytest.mark.parametrize("f", (nx.random_reference, nx.lattice_reference))
def test_graph_no_edges(f):
    # NB: "fewer that" reproduces the library's error message verbatim.
    G = nx.Graph()
    G.add_nodes_from(range(4))
    with pytest.raises(nx.NetworkXError, match="Graph has fewer that 2 edges"):
        f(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smetric.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smetric.py
new file mode 100644
index 00000000..528dbc8d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smetric.py
@@ -0,0 +1,8 @@
+import pytest
+
+import networkx as nx
+
+
def test_smetric():
    # s-metric = sum of deg(u) * deg(v) over all edges: 6 + 3 + 6 + 4 = 19.
    G = nx.Graph()
    G.add_edges_from([(1, 2), (2, 3), (2, 4), (1, 4)])
    assert nx.s_metric(G) == 19.0
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_sparsifiers.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_sparsifiers.py
new file mode 100644
index 00000000..e8604e61
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_sparsifiers.py
@@ -0,0 +1,138 @@
+"""Unit tests for the sparsifier computation functions."""
+
+import pytest
+
+import networkx as nx
+from networkx.utils import py_random_state
+
+_seed = 2
+
+
def _test_spanner(G, spanner, stretch, weight=None):
    """Test whether a spanner is valid.

    Checks that `spanner` is a subgraph of `G` on the same node set (with
    matching edge weights, when `weight` is given) and that every
    spanner distance is within `stretch` times the original distance.

    Parameters
    ----------
    G : NetworkX graph
        The original graph for which the spanner was constructed.

    spanner : NetworkX graph
        The spanner to be tested.

    stretch : float
        The proclaimed stretch of the spanner.

    weight : object
        The edge attribute to use as distance.
    """
    # Identical node sets.
    assert set(G.nodes()) == set(spanner.nodes())

    # Every spanner edge exists in G, with the same weight if weighted.
    for u, v in spanner.edges():
        assert G.has_edge(u, v)
        if weight:
            assert spanner[u][v][weight] == G[u][v][weight]

    # Stretch: for each reachable pair, the spanner distance is bounded.
    original_length = dict(nx.shortest_path_length(G, weight=weight))
    spanner_length = dict(nx.shortest_path_length(spanner, weight=weight))
    for u, lengths in original_length.items():
        for v, dist in lengths.items():
            assert spanner_length[u][v] <= stretch * dist
+
+
@py_random_state(1)
def _assign_random_weights(G, seed=None):
    """Give every edge of `G` a uniform random "weight" attribute in [0, 1).

    Parameters
    ----------

    G : NetworkX graph
        The graph whose edges receive random weights.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    """
    for u, v in G.edges():
        G.edges[u, v]["weight"] = seed.random()
+
+
def _build_and_check_spanner(G, stretch, weight=None):
    """Construct a spanner with the module seed and validate it."""
    spanner = nx.spanner(G, stretch, weight=weight, seed=_seed)
    _test_spanner(G, spanner, stretch, weight=weight)


def test_spanner_trivial():
    """A stretch-1 spanner of a complete graph must keep every edge."""
    G = nx.complete_graph(20)
    spanner = nx.spanner(G, 1, seed=_seed)
    for u, v in G.edges:
        assert spanner.has_edge(u, v)


def test_spanner_unweighted_complete_graph():
    """Spanner construction on a complete unweighted graph."""
    G = nx.complete_graph(20)
    _build_and_check_spanner(G, 4)
    _build_and_check_spanner(G, 10)


def test_spanner_weighted_complete_graph():
    """Spanner construction on a complete weighted graph."""
    G = nx.complete_graph(20)
    _assign_random_weights(G, seed=_seed)
    _build_and_check_spanner(G, 4, weight="weight")
    _build_and_check_spanner(G, 10, weight="weight")


def test_spanner_unweighted_gnp_graph():
    """Spanner construction on an unweighted G(n, p) random graph."""
    G = nx.gnp_random_graph(20, 0.4, seed=_seed)
    _build_and_check_spanner(G, 4)
    _build_and_check_spanner(G, 10)


def test_spanner_weighted_gnp_graph():
    """Spanner construction on a weighted G(n, p) random graph."""
    G = nx.gnp_random_graph(20, 0.4, seed=_seed)
    _assign_random_weights(G, seed=_seed)
    _build_and_check_spanner(G, 4, weight="weight")
    _build_and_check_spanner(G, 10, weight="weight")


def test_spanner_unweighted_disconnected_graph():
    """Spanner construction on a disconnected graph."""
    G = nx.disjoint_union(nx.complete_graph(10), nx.complete_graph(10))
    _build_and_check_spanner(G, 4)
    _build_and_check_spanner(G, 10)


def test_spanner_invalid_stretch():
    """A stretch below 1 must be rejected with ValueError."""
    with pytest.raises(ValueError):
        nx.spanner(nx.empty_graph(), 0)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_structuralholes.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_structuralholes.py
new file mode 100644
index 00000000..1e5952b2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_structuralholes.py
@@ -0,0 +1,137 @@
+"""Unit tests for the :mod:`networkx.algorithms.structuralholes` module."""
+
+import math
+
+import pytest
+
+import networkx as nx
+from networkx.classes.tests import dispatch_interface
+
+
class TestStructuralHoles:
    """Unit tests for computing measures of structural holes.

    The expected values for these functions were originally computed using the
    proprietary software `UCINET`_ and the free software `IGraph`_ , and then
    computed by hand to make sure that the results are correct.

    .. _UCINET: https://sites.google.com/site/ucinetsoftware/home
    .. _IGraph: http://igraph.org/

    """

    def setup_method(self):
        # Small directed fixture: a 3-node digraph with per-edge weights.
        self.D = nx.DiGraph()
        self.D.add_edges_from([(0, 1), (0, 2), (1, 0), (2, 1)])
        self.D_weights = {(0, 1): 2, (0, 2): 2, (1, 0): 1, (2, 1): 1}
        # Example from http://www.analytictech.com/connections/v20(1)/holes.htm
        self.G = nx.Graph()
        self.G.add_edges_from(
            [
                ("A", "B"),
                ("A", "F"),
                ("A", "G"),
                ("A", "E"),
                ("E", "G"),
                ("F", "G"),
                ("B", "G"),
                ("B", "D"),
                ("D", "G"),
                ("G", "C"),
            ]
        )
        self.G_weights = {
            ("A", "B"): 2,
            ("A", "F"): 3,
            ("A", "G"): 5,
            ("A", "E"): 2,
            ("E", "G"): 8,
            ("F", "G"): 3,
            ("B", "G"): 4,
            ("B", "D"): 1,
            ("D", "G"): 3,
            ("G", "C"): 10,
        }

    def test_constraint_directed(self):
        constraint = nx.constraint(self.D)
        assert constraint[0] == pytest.approx(1.003, abs=1e-3)
        assert constraint[1] == pytest.approx(1.003, abs=1e-3)
        assert constraint[2] == pytest.approx(1.389, abs=1e-3)

    def test_effective_size_directed(self):
        effective_size = nx.effective_size(self.D)
        assert effective_size[0] == pytest.approx(1.167, abs=1e-3)
        assert effective_size[1] == pytest.approx(1.167, abs=1e-3)
        assert effective_size[2] == pytest.approx(1, abs=1e-3)

    def test_constraint_weighted_directed(self):
        D = self.D.copy()
        nx.set_edge_attributes(D, self.D_weights, "weight")
        constraint = nx.constraint(D, weight="weight")
        assert constraint[0] == pytest.approx(0.840, abs=1e-3)
        assert constraint[1] == pytest.approx(1.143, abs=1e-3)
        assert constraint[2] == pytest.approx(1.378, abs=1e-3)

    def test_effective_size_weighted_directed(self):
        D = self.D.copy()
        nx.set_edge_attributes(D, self.D_weights, "weight")
        effective_size = nx.effective_size(D, weight="weight")
        assert effective_size[0] == pytest.approx(1.567, abs=1e-3)
        assert effective_size[1] == pytest.approx(1.083, abs=1e-3)
        assert effective_size[2] == pytest.approx(1, abs=1e-3)

    def test_constraint_undirected(self):
        constraint = nx.constraint(self.G)
        assert constraint["G"] == pytest.approx(0.400, abs=1e-3)
        assert constraint["A"] == pytest.approx(0.595, abs=1e-3)
        assert constraint["C"] == pytest.approx(1, abs=1e-3)

    def test_effective_size_undirected_borgatti(self):
        # No weight argument: exercises the Borgatti simplified formula.
        effective_size = nx.effective_size(self.G)
        assert effective_size["G"] == pytest.approx(4.67, abs=1e-2)
        assert effective_size["A"] == pytest.approx(2.50, abs=1e-2)
        assert effective_size["C"] == pytest.approx(1, abs=1e-2)

    def test_effective_size_undirected(self):
        # Unit weights through the general formula must match the
        # unweighted Borgatti results above.
        G = self.G.copy()
        nx.set_edge_attributes(G, 1, "weight")
        effective_size = nx.effective_size(G, weight="weight")
        assert effective_size["G"] == pytest.approx(4.67, abs=1e-2)
        assert effective_size["A"] == pytest.approx(2.50, abs=1e-2)
        assert effective_size["C"] == pytest.approx(1, abs=1e-2)

    def test_constraint_weighted_undirected(self):
        G = self.G.copy()
        nx.set_edge_attributes(G, self.G_weights, "weight")
        constraint = nx.constraint(G, weight="weight")
        assert constraint["G"] == pytest.approx(0.299, abs=1e-3)
        assert constraint["A"] == pytest.approx(0.795, abs=1e-3)
        assert constraint["C"] == pytest.approx(1, abs=1e-3)

    def test_effective_size_weighted_undirected(self):
        G = self.G.copy()
        nx.set_edge_attributes(G, self.G_weights, "weight")
        effective_size = nx.effective_size(G, weight="weight")
        assert effective_size["G"] == pytest.approx(5.47, abs=1e-2)
        assert effective_size["A"] == pytest.approx(2.47, abs=1e-2)
        assert effective_size["C"] == pytest.approx(1, abs=1e-2)

    def test_constraint_isolated(self):
        # An isolated node has undefined constraint (NaN).
        G = self.G.copy()
        G.add_node(1)
        constraint = nx.constraint(G)
        assert math.isnan(constraint[1])

    def test_effective_size_isolated(self):
        # An isolated node has undefined effective size (NaN), weighted case.
        G = self.G.copy()
        G.add_node(1)
        nx.set_edge_attributes(G, self.G_weights, "weight")
        effective_size = nx.effective_size(G, weight="weight")
        assert math.isnan(effective_size[1])

    def test_effective_size_borgatti_isolated(self):
        # Same, via the unweighted Borgatti formula.
        G = self.G.copy()
        G.add_node(1)
        effective_size = nx.effective_size(G)
        assert math.isnan(effective_size[1])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_summarization.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_summarization.py
new file mode 100644
index 00000000..c3bf82fa
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_summarization.py
@@ -0,0 +1,642 @@
+"""
+Unit tests for dedensification and graph summarization
+"""
+
+import pytest
+
+import networkx as nx
+
+
+class TestDirectedDedensification:
+    def build_original_graph(self):
+        original_matrix = [
+            ("1", "BC"),
+            ("2", "ABC"),
+            ("3", ["A", "B", "6"]),
+            ("4", "ABC"),
+            ("5", "AB"),
+            ("6", ["5"]),
+            ("A", ["6"]),
+        ]
+        graph = nx.DiGraph()
+        for source, targets in original_matrix:
+            for target in targets:
+                graph.add_edge(source, target)
+        return graph
+
+    def build_compressed_graph(self):
+        compressed_matrix = [
+            ("1", "BC"),
+            ("2", ["ABC"]),
+            ("3", ["A", "B", "6"]),
+            ("4", ["ABC"]),
+            ("5", "AB"),
+            ("6", ["5"]),
+            ("A", ["6"]),
+            ("ABC", "ABC"),
+        ]
+        compressed_graph = nx.DiGraph()
+        for source, targets in compressed_matrix:
+            for target in targets:
+                compressed_graph.add_edge(source, target)
+        return compressed_graph
+
+    def test_empty(self):
+        """
+        Verify that an empty directed graph results in no compressor nodes
+        """
+        G = nx.DiGraph()
+        compressed_graph, c_nodes = nx.dedensify(G, threshold=2)
+        assert c_nodes == set()
+
+    @staticmethod
+    def densify(G, compressor_nodes, copy=True):
+        """
+        Reconstructs the original graph from a dedensified, directed graph
+
+        Parameters
+        ----------
+        G: dedensified graph
+           A networkx graph
+        compressor_nodes: iterable
+           Iterable of compressor nodes in the dedensified graph
+        inplace: bool, optional (default: False)
+           Indicates if densification should be done inplace
+
+        Returns
+        -------
+        G: graph
+           A densified networkx graph
+        """
+        if copy:
+            G = G.copy()
+        for compressor_node in compressor_nodes:
+            all_neighbors = set(nx.all_neighbors(G, compressor_node))
+            out_neighbors = set(G.neighbors(compressor_node))
+            for out_neighbor in out_neighbors:
+                G.remove_edge(compressor_node, out_neighbor)
+            in_neighbors = all_neighbors - out_neighbors
+            for in_neighbor in in_neighbors:
+                G.remove_edge(in_neighbor, compressor_node)
+                for out_neighbor in out_neighbors:
+                    G.add_edge(in_neighbor, out_neighbor)
+            G.remove_node(compressor_node)
+        return G
+
+    def setup_method(self):
+        self.c_nodes = ("ABC",)
+
+    def test_dedensify_edges(self):
+        """
+        Verifies that dedensify produced the correct edges to/from compressor
+        nodes in a directed graph
+        """
+        G = self.build_original_graph()
+        compressed_G = self.build_compressed_graph()
+        compressed_graph, c_nodes = nx.dedensify(G, threshold=2)
+        for s, t in compressed_graph.edges():
+            o_s = "".join(sorted(s))
+            o_t = "".join(sorted(t))
+            compressed_graph_exists = compressed_graph.has_edge(s, t)
+            verified_compressed_exists = compressed_G.has_edge(o_s, o_t)
+            assert compressed_graph_exists == verified_compressed_exists
+        assert len(c_nodes) == len(self.c_nodes)
+
+    def test_dedensify_edge_count(self):
+        """
+        Verifies that dedensify produced the correct number of compressor nodes
+        in a directed graph
+        """
+        G = self.build_original_graph()
+        original_edge_count = len(G.edges())
+        c_G, c_nodes = nx.dedensify(G, threshold=2)
+        compressed_edge_count = len(c_G.edges())
+        assert compressed_edge_count <= original_edge_count
+        compressed_G = self.build_compressed_graph()
+        assert compressed_edge_count == len(compressed_G.edges())
+
+    def test_densify_edges(self):
+        """
+        Verifies that densification produces the correct edges from the
+        original directed graph
+        """
+        compressed_G = self.build_compressed_graph()
+        original_graph = self.densify(compressed_G, self.c_nodes, copy=True)
+        G = self.build_original_graph()
+        for s, t in G.edges():
+            assert G.has_edge(s, t) == original_graph.has_edge(s, t)
+
+    def test_densify_edge_count(self):
+        """
+        Verifies that densification produces the correct number of edges in the
+        original directed graph
+        """
+        compressed_G = self.build_compressed_graph()
+        compressed_edge_count = len(compressed_G.edges())
+        original_graph = self.densify(compressed_G, self.c_nodes)
+        original_edge_count = len(original_graph.edges())
+        assert compressed_edge_count <= original_edge_count
+        G = self.build_original_graph()
+        assert original_edge_count == len(G.edges())
+
+
+class TestUnDirectedDedensification:
+    def build_original_graph(self):
+        """
+        Builds graph shown in the original research paper
+        """
+        original_matrix = [
+            ("1", "CB"),
+            ("2", "ABC"),
+            ("3", ["A", "B", "6"]),
+            ("4", "ABC"),
+            ("5", "AB"),
+            ("6", ["5"]),
+            ("A", ["6"]),
+        ]
+        graph = nx.Graph()
+        for source, targets in original_matrix:
+            for target in targets:
+                graph.add_edge(source, target)
+        return graph
+
+    def test_empty(self):
+        """
+        Verify that an empty undirected graph results in no compressor nodes
+        """
+        G = nx.Graph()
+        compressed_G, c_nodes = nx.dedensify(G, threshold=2)
+        assert c_nodes == set()
+
+    def setup_method(self):
+        self.c_nodes = ("6AB", "ABC")
+
+    def build_compressed_graph(self):
+        compressed_matrix = [
+            ("1", ["B", "C"]),
+            ("2", ["ABC"]),
+            ("3", ["6AB"]),
+            ("4", ["ABC"]),
+            ("5", ["6AB"]),
+            ("6", ["6AB", "A"]),
+            ("A", ["6AB", "ABC"]),
+            ("B", ["ABC", "6AB"]),
+            ("C", ["ABC"]),
+        ]
+        compressed_graph = nx.Graph()
+        for source, targets in compressed_matrix:
+            for target in targets:
+                compressed_graph.add_edge(source, target)
+        return compressed_graph
+
+    def test_dedensify_edges(self):
+        """
+        Verifies that dedensify produced correct compressor nodes and the
+        correct edges to/from the compressor nodes in an undirected graph
+        """
+        G = self.build_original_graph()
+        c_G, c_nodes = nx.dedensify(G, threshold=2)
+        v_compressed_G = self.build_compressed_graph()
+        for s, t in c_G.edges():
+            o_s = "".join(sorted(s))
+            o_t = "".join(sorted(t))
+            has_compressed_edge = c_G.has_edge(s, t)
+            verified_has_compressed_edge = v_compressed_G.has_edge(o_s, o_t)
+            assert has_compressed_edge == verified_has_compressed_edge
+        assert len(c_nodes) == len(self.c_nodes)
+
+    def test_dedensify_edge_count(self):
+        """
+        Verifies that dedensify produced the correct number of edges in an
+        undirected graph
+        """
+        G = self.build_original_graph()
+        c_G, c_nodes = nx.dedensify(G, threshold=2, copy=True)
+        compressed_edge_count = len(c_G.edges())
+        verified_original_edge_count = len(G.edges())
+        assert compressed_edge_count <= verified_original_edge_count
+        verified_compressed_G = self.build_compressed_graph()
+        verified_compressed_edge_count = len(verified_compressed_G.edges())
+        assert compressed_edge_count == verified_compressed_edge_count
+
+
+@pytest.mark.parametrize(
+    "graph_type", [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph]
+)
+def test_summarization_empty(graph_type):
+    G = graph_type()
+    summary_graph = nx.snap_aggregation(G, node_attributes=("color",))
+    assert nx.is_isomorphic(summary_graph, G)
+
+
+class AbstractSNAP:
+    node_attributes = ("color",)
+
+    def build_original_graph(self):
+        pass
+
+    def build_summary_graph(self):
+        pass
+
+    def test_summary_graph(self):
+        original_graph = self.build_original_graph()
+        summary_graph = self.build_summary_graph()
+
+        relationship_attributes = ("type",)
+        generated_summary_graph = nx.snap_aggregation(
+            original_graph, self.node_attributes, relationship_attributes
+        )
+        relabeled_summary_graph = self.deterministic_labels(generated_summary_graph)
+        assert nx.is_isomorphic(summary_graph, relabeled_summary_graph)
+
+    def deterministic_labels(self, G):
+        node_labels = list(G.nodes)
+        node_labels = sorted(node_labels, key=lambda n: sorted(G.nodes[n]["group"])[0])
+        node_labels.sort()
+
+        label_mapping = {}
+        for index, node in enumerate(node_labels):
+            label = f"Supernode-{index}"
+            label_mapping[node] = label
+
+        return nx.relabel_nodes(G, label_mapping)
+
+
+class TestSNAPNoEdgeTypes(AbstractSNAP):
+    relationship_attributes = ()
+
+    def test_summary_graph(self):
+        original_graph = self.build_original_graph()
+        summary_graph = self.build_summary_graph()
+
+        relationship_attributes = ("type",)
+        generated_summary_graph = nx.snap_aggregation(
+            original_graph, self.node_attributes
+        )
+        relabeled_summary_graph = self.deterministic_labels(generated_summary_graph)
+        assert nx.is_isomorphic(summary_graph, relabeled_summary_graph)
+
+    def build_original_graph(self):
+        nodes = {
+            "A": {"color": "Red"},
+            "B": {"color": "Red"},
+            "C": {"color": "Red"},
+            "D": {"color": "Red"},
+            "E": {"color": "Blue"},
+            "F": {"color": "Blue"},
+            "G": {"color": "Blue"},
+            "H": {"color": "Blue"},
+            "I": {"color": "Yellow"},
+            "J": {"color": "Yellow"},
+            "K": {"color": "Yellow"},
+            "L": {"color": "Yellow"},
+        }
+        edges = [
+            ("A", "B"),
+            ("A", "C"),
+            ("A", "E"),
+            ("A", "I"),
+            ("B", "D"),
+            ("B", "J"),
+            ("B", "F"),
+            ("C", "G"),
+            ("D", "H"),
+            ("I", "J"),
+            ("J", "K"),
+            ("I", "L"),
+        ]
+        G = nx.Graph()
+        for node in nodes:
+            attributes = nodes[node]
+            G.add_node(node, **attributes)
+
+        for source, target in edges:
+            G.add_edge(source, target)
+
+        return G
+
+    def build_summary_graph(self):
+        nodes = {
+            "Supernode-0": {"color": "Red"},
+            "Supernode-1": {"color": "Red"},
+            "Supernode-2": {"color": "Blue"},
+            "Supernode-3": {"color": "Blue"},
+            "Supernode-4": {"color": "Yellow"},
+            "Supernode-5": {"color": "Yellow"},
+        }
+        edges = [
+            ("Supernode-0", "Supernode-0"),
+            ("Supernode-0", "Supernode-1"),
+            ("Supernode-0", "Supernode-2"),
+            ("Supernode-0", "Supernode-4"),
+            ("Supernode-1", "Supernode-3"),
+            ("Supernode-4", "Supernode-4"),
+            ("Supernode-4", "Supernode-5"),
+        ]
+        G = nx.Graph()
+        for node in nodes:
+            attributes = nodes[node]
+            G.add_node(node, **attributes)
+
+        for source, target in edges:
+            G.add_edge(source, target)
+
+        supernodes = {
+            "Supernode-0": {"A", "B"},
+            "Supernode-1": {"C", "D"},
+            "Supernode-2": {"E", "F"},
+            "Supernode-3": {"G", "H"},
+            "Supernode-4": {"I", "J"},
+            "Supernode-5": {"K", "L"},
+        }
+        nx.set_node_attributes(G, supernodes, "group")
+        return G
+
+
+class TestSNAPUndirected(AbstractSNAP):
+    def build_original_graph(self):
+        nodes = {
+            "A": {"color": "Red"},
+            "B": {"color": "Red"},
+            "C": {"color": "Red"},
+            "D": {"color": "Red"},
+            "E": {"color": "Blue"},
+            "F": {"color": "Blue"},
+            "G": {"color": "Blue"},
+            "H": {"color": "Blue"},
+            "I": {"color": "Yellow"},
+            "J": {"color": "Yellow"},
+            "K": {"color": "Yellow"},
+            "L": {"color": "Yellow"},
+        }
+        edges = [
+            ("A", "B", "Strong"),
+            ("A", "C", "Weak"),
+            ("A", "E", "Strong"),
+            ("A", "I", "Weak"),
+            ("B", "D", "Weak"),
+            ("B", "J", "Weak"),
+            ("B", "F", "Strong"),
+            ("C", "G", "Weak"),
+            ("D", "H", "Weak"),
+            ("I", "J", "Strong"),
+            ("J", "K", "Strong"),
+            ("I", "L", "Strong"),
+        ]
+        G = nx.Graph()
+        for node in nodes:
+            attributes = nodes[node]
+            G.add_node(node, **attributes)
+
+        for source, target, type in edges:
+            G.add_edge(source, target, type=type)
+
+        return G
+
+    def build_summary_graph(self):
+        nodes = {
+            "Supernode-0": {"color": "Red"},
+            "Supernode-1": {"color": "Red"},
+            "Supernode-2": {"color": "Blue"},
+            "Supernode-3": {"color": "Blue"},
+            "Supernode-4": {"color": "Yellow"},
+            "Supernode-5": {"color": "Yellow"},
+        }
+        edges = [
+            ("Supernode-0", "Supernode-0", "Strong"),
+            ("Supernode-0", "Supernode-1", "Weak"),
+            ("Supernode-0", "Supernode-2", "Strong"),
+            ("Supernode-0", "Supernode-4", "Weak"),
+            ("Supernode-1", "Supernode-3", "Weak"),
+            ("Supernode-4", "Supernode-4", "Strong"),
+            ("Supernode-4", "Supernode-5", "Strong"),
+        ]
+        G = nx.Graph()
+        for node in nodes:
+            attributes = nodes[node]
+            G.add_node(node, **attributes)
+
+        for source, target, type in edges:
+            G.add_edge(source, target, types=[{"type": type}])
+
+        supernodes = {
+            "Supernode-0": {"A", "B"},
+            "Supernode-1": {"C", "D"},
+            "Supernode-2": {"E", "F"},
+            "Supernode-3": {"G", "H"},
+            "Supernode-4": {"I", "J"},
+            "Supernode-5": {"K", "L"},
+        }
+        nx.set_node_attributes(G, supernodes, "group")
+        return G
+
+
+class TestSNAPDirected(AbstractSNAP):
+    def build_original_graph(self):
+        nodes = {
+            "A": {"color": "Red"},
+            "B": {"color": "Red"},
+            "C": {"color": "Green"},
+            "D": {"color": "Green"},
+            "E": {"color": "Blue"},
+            "F": {"color": "Blue"},
+            "G": {"color": "Yellow"},
+            "H": {"color": "Yellow"},
+        }
+        edges = [
+            ("A", "C", "Strong"),
+            ("A", "E", "Strong"),
+            ("A", "F", "Weak"),
+            ("B", "D", "Strong"),
+            ("B", "E", "Weak"),
+            ("B", "F", "Strong"),
+            ("C", "G", "Strong"),
+            ("C", "F", "Strong"),
+            ("D", "E", "Strong"),
+            ("D", "H", "Strong"),
+            ("G", "E", "Strong"),
+            ("H", "F", "Strong"),
+        ]
+        G = nx.DiGraph()
+        for node in nodes:
+            attributes = nodes[node]
+            G.add_node(node, **attributes)
+
+        for source, target, type in edges:
+            G.add_edge(source, target, type=type)
+
+        return G
+
+    def build_summary_graph(self):
+        nodes = {
+            "Supernode-0": {"color": "Red"},
+            "Supernode-1": {"color": "Green"},
+            "Supernode-2": {"color": "Blue"},
+            "Supernode-3": {"color": "Yellow"},
+        }
+        edges = [
+            ("Supernode-0", "Supernode-1", [{"type": "Strong"}]),
+            ("Supernode-0", "Supernode-2", [{"type": "Weak"}, {"type": "Strong"}]),
+            ("Supernode-1", "Supernode-2", [{"type": "Strong"}]),
+            ("Supernode-1", "Supernode-3", [{"type": "Strong"}]),
+            ("Supernode-3", "Supernode-2", [{"type": "Strong"}]),
+        ]
+        G = nx.DiGraph()
+        for node in nodes:
+            attributes = nodes[node]
+            G.add_node(node, **attributes)
+
+        for source, target, types in edges:
+            G.add_edge(source, target, types=types)
+
+        supernodes = {
+            "Supernode-0": {"A", "B"},
+            "Supernode-1": {"C", "D"},
+            "Supernode-2": {"E", "F"},
+            "Supernode-3": {"G", "H"},
+            "Supernode-4": {"I", "J"},
+            "Supernode-5": {"K", "L"},
+        }
+        nx.set_node_attributes(G, supernodes, "group")
+        return G
+
+
+class TestSNAPUndirectedMulti(AbstractSNAP):
+    def build_original_graph(self):
+        nodes = {
+            "A": {"color": "Red"},
+            "B": {"color": "Red"},
+            "C": {"color": "Red"},
+            "D": {"color": "Blue"},
+            "E": {"color": "Blue"},
+            "F": {"color": "Blue"},
+            "G": {"color": "Yellow"},
+            "H": {"color": "Yellow"},
+            "I": {"color": "Yellow"},
+        }
+        edges = [
+            ("A", "D", ["Weak", "Strong"]),
+            ("B", "E", ["Weak", "Strong"]),
+            ("D", "I", ["Strong"]),
+            ("E", "H", ["Strong"]),
+            ("F", "G", ["Weak"]),
+            ("I", "G", ["Weak", "Strong"]),
+            ("I", "H", ["Weak", "Strong"]),
+            ("G", "H", ["Weak", "Strong"]),
+        ]
+        G = nx.MultiGraph()
+        for node in nodes:
+            attributes = nodes[node]
+            G.add_node(node, **attributes)
+
+        for source, target, types in edges:
+            for type in types:
+                G.add_edge(source, target, type=type)
+
+        return G
+
+    def build_summary_graph(self):
+        nodes = {
+            "Supernode-0": {"color": "Red"},
+            "Supernode-1": {"color": "Blue"},
+            "Supernode-2": {"color": "Yellow"},
+            "Supernode-3": {"color": "Blue"},
+            "Supernode-4": {"color": "Yellow"},
+            "Supernode-5": {"color": "Red"},
+        }
+        edges = [
+            ("Supernode-1", "Supernode-2", [{"type": "Weak"}]),
+            ("Supernode-2", "Supernode-4", [{"type": "Weak"}, {"type": "Strong"}]),
+            ("Supernode-3", "Supernode-4", [{"type": "Strong"}]),
+            ("Supernode-3", "Supernode-5", [{"type": "Weak"}, {"type": "Strong"}]),
+            ("Supernode-4", "Supernode-4", [{"type": "Weak"}, {"type": "Strong"}]),
+        ]
+        G = nx.MultiGraph()
+        for node in nodes:
+            attributes = nodes[node]
+            G.add_node(node, **attributes)
+
+        for source, target, types in edges:
+            for type in types:
+                G.add_edge(source, target, type=type)
+
+        supernodes = {
+            "Supernode-0": {"A", "B"},
+            "Supernode-1": {"C", "D"},
+            "Supernode-2": {"E", "F"},
+            "Supernode-3": {"G", "H"},
+            "Supernode-4": {"I", "J"},
+            "Supernode-5": {"K", "L"},
+        }
+        nx.set_node_attributes(G, supernodes, "group")
+        return G
+
+
+class TestSNAPDirectedMulti(AbstractSNAP):
+    def build_original_graph(self):
+        nodes = {
+            "A": {"color": "Red"},
+            "B": {"color": "Red"},
+            "C": {"color": "Green"},
+            "D": {"color": "Green"},
+            "E": {"color": "Blue"},
+            "F": {"color": "Blue"},
+            "G": {"color": "Yellow"},
+            "H": {"color": "Yellow"},
+        }
+        edges = [
+            ("A", "C", ["Weak", "Strong"]),
+            ("A", "E", ["Strong"]),
+            ("A", "F", ["Weak"]),
+            ("B", "D", ["Weak", "Strong"]),
+            ("B", "E", ["Weak"]),
+            ("B", "F", ["Strong"]),
+            ("C", "G", ["Weak", "Strong"]),
+            ("C", "F", ["Strong"]),
+            ("D", "E", ["Strong"]),
+            ("D", "H", ["Weak", "Strong"]),
+            ("G", "E", ["Strong"]),
+            ("H", "F", ["Strong"]),
+        ]
+        G = nx.MultiDiGraph()
+        for node in nodes:
+            attributes = nodes[node]
+            G.add_node(node, **attributes)
+
+        for source, target, types in edges:
+            for type in types:
+                G.add_edge(source, target, type=type)
+
+        return G
+
+    def build_summary_graph(self):
+        nodes = {
+            "Supernode-0": {"color": "Red"},
+            "Supernode-1": {"color": "Blue"},
+            "Supernode-2": {"color": "Yellow"},
+            "Supernode-3": {"color": "Blue"},
+        }
+        edges = [
+            ("Supernode-0", "Supernode-1", ["Weak", "Strong"]),
+            ("Supernode-0", "Supernode-2", ["Weak", "Strong"]),
+            ("Supernode-1", "Supernode-2", ["Strong"]),
+            ("Supernode-1", "Supernode-3", ["Weak", "Strong"]),
+            ("Supernode-3", "Supernode-2", ["Strong"]),
+        ]
+        G = nx.MultiDiGraph()
+        for node in nodes:
+            attributes = nodes[node]
+            G.add_node(node, **attributes)
+
+        for source, target, types in edges:
+            for type in types:
+                G.add_edge(source, target, type=type)
+
+        supernodes = {
+            "Supernode-0": {"A", "B"},
+            "Supernode-1": {"C", "D"},
+            "Supernode-2": {"E", "F"},
+            "Supernode-3": {"G", "H"},
+        }
+        nx.set_node_attributes(G, supernodes, "group")
+        return G
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_swap.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_swap.py
new file mode 100644
index 00000000..e765bd5e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_swap.py
@@ -0,0 +1,179 @@
+import pytest
+
+import networkx as nx
+
+cycle = nx.cycle_graph(5, create_using=nx.DiGraph)
+tree = nx.DiGraph()
+tree.add_edges_from(nx.random_labeled_tree(10, seed=42).edges)
+path = nx.path_graph(5, create_using=nx.DiGraph)
+binomial = nx.binomial_tree(3, create_using=nx.DiGraph)
+HH = nx.directed_havel_hakimi_graph([1, 2, 1, 2, 2, 2], [3, 1, 0, 1, 2, 3])
+balanced_tree = nx.balanced_tree(2, 3, create_using=nx.DiGraph)
+
+
+@pytest.mark.parametrize("G", [path, binomial, HH, cycle, tree, balanced_tree])
+def test_directed_edge_swap(G):
+    in_degree = set(G.in_degree)
+    out_degree = set(G.out_degree)
+    edges = set(G.edges)
+    nx.directed_edge_swap(G, nswap=1, max_tries=100, seed=1)
+    assert in_degree == set(G.in_degree)
+    assert out_degree == set(G.out_degree)
+    assert edges != set(G.edges)
+    assert 3 == sum(e not in edges for e in G.edges)
+
+
+def test_directed_edge_swap_undo_previous_swap():
+    G = nx.DiGraph(nx.path_graph(4).edges)  # only 1 swap possible
+    edges = set(G.edges)
+    nx.directed_edge_swap(G, nswap=2, max_tries=100)
+    assert edges == set(G.edges)
+
+    nx.directed_edge_swap(G, nswap=1, max_tries=100, seed=1)
+    assert {(0, 2), (1, 3), (2, 1)} == set(G.edges)
+    nx.directed_edge_swap(G, nswap=1, max_tries=100, seed=1)
+    assert edges == set(G.edges)
+
+
+def test_edge_cases_directed_edge_swap():
+    # Tests cases when swaps are impossible, either too few edges exist, or self loops/cycles are unavoidable
+    # TODO: Rewrite function to explicitly check for impossible swaps and raise error
+    e = (
+        "Maximum number of swap attempts \\(11\\) exceeded "
+        "before desired swaps achieved \\(\\d\\)."
+    )
+    graph = nx.DiGraph([(0, 0), (0, 1), (1, 0), (2, 3), (3, 2)])
+    with pytest.raises(nx.NetworkXAlgorithmError, match=e):
+        nx.directed_edge_swap(graph, nswap=1, max_tries=10, seed=1)
+
+
+def test_double_edge_swap():
+    graph = nx.barabasi_albert_graph(200, 1)
+    degrees = sorted(d for n, d in graph.degree())
+    G = nx.double_edge_swap(graph, 40)
+    assert degrees == sorted(d for n, d in graph.degree())
+
+
+def test_double_edge_swap_seed():
+    graph = nx.barabasi_albert_graph(200, 1)
+    degrees = sorted(d for n, d in graph.degree())
+    G = nx.double_edge_swap(graph, 40, seed=1)
+    assert degrees == sorted(d for n, d in graph.degree())
+
+
+def test_connected_double_edge_swap():
+    graph = nx.barabasi_albert_graph(200, 1)
+    degrees = sorted(d for n, d in graph.degree())
+    G = nx.connected_double_edge_swap(graph, 40, seed=1)
+    assert nx.is_connected(graph)
+    assert degrees == sorted(d for n, d in graph.degree())
+
+
+def test_connected_double_edge_swap_low_window_threshold():
+    graph = nx.barabasi_albert_graph(200, 1)
+    degrees = sorted(d for n, d in graph.degree())
+    G = nx.connected_double_edge_swap(graph, 40, _window_threshold=0, seed=1)
+    assert nx.is_connected(graph)
+    assert degrees == sorted(d for n, d in graph.degree())
+
+
+def test_connected_double_edge_swap_star():
+    # Testing ui==xi in connected_double_edge_swap
+    graph = nx.star_graph(40)
+    degrees = sorted(d for n, d in graph.degree())
+    G = nx.connected_double_edge_swap(graph, 1, seed=4)
+    assert nx.is_connected(graph)
+    assert degrees == sorted(d for n, d in graph.degree())
+
+
+def test_connected_double_edge_swap_star_low_window_threshold():
+    # Testing ui==xi in connected_double_edge_swap with low window threshold
+    graph = nx.star_graph(40)
+    degrees = sorted(d for n, d in graph.degree())
+    G = nx.connected_double_edge_swap(graph, 1, _window_threshold=0, seed=4)
+    assert nx.is_connected(graph)
+    assert degrees == sorted(d for n, d in graph.degree())
+
+
+def test_directed_edge_swap_small():
+    with pytest.raises(nx.NetworkXError):
+        G = nx.directed_edge_swap(nx.path_graph(3, create_using=nx.DiGraph))
+
+
+def test_directed_edge_swap_tries():
+    with pytest.raises(nx.NetworkXError):
+        G = nx.directed_edge_swap(
+            nx.path_graph(3, create_using=nx.DiGraph), nswap=1, max_tries=0
+        )
+
+
+def test_directed_exception_undirected():
+    graph = nx.Graph([(0, 1), (2, 3)])
+    with pytest.raises(nx.NetworkXNotImplemented):
+        G = nx.directed_edge_swap(graph)
+
+
+def test_directed_edge_max_tries():
+    with pytest.raises(nx.NetworkXAlgorithmError):
+        G = nx.directed_edge_swap(
+            nx.complete_graph(4, nx.DiGraph()), nswap=1, max_tries=5
+        )
+
+
+def test_double_edge_swap_small():
+    with pytest.raises(nx.NetworkXError):
+        G = nx.double_edge_swap(nx.path_graph(3))
+
+
+def test_double_edge_swap_tries():
+    with pytest.raises(nx.NetworkXError):
+        G = nx.double_edge_swap(nx.path_graph(10), nswap=1, max_tries=0)
+
+
+def test_double_edge_directed():
+    graph = nx.DiGraph([(0, 1), (2, 3)])
+    with pytest.raises(nx.NetworkXError, match="not defined for directed graphs."):
+        G = nx.double_edge_swap(graph)
+
+
+def test_double_edge_max_tries():
+    with pytest.raises(nx.NetworkXAlgorithmError):
+        G = nx.double_edge_swap(nx.complete_graph(4), nswap=1, max_tries=5)
+
+
+def test_connected_double_edge_swap_small():
+    with pytest.raises(nx.NetworkXError):
+        G = nx.connected_double_edge_swap(nx.path_graph(3))
+
+
+def test_connected_double_edge_swap_not_connected():
+    with pytest.raises(nx.NetworkXError):
+        G = nx.path_graph(3)
+        nx.add_path(G, [10, 11, 12])
+        G = nx.connected_double_edge_swap(G)
+
+
+def test_degree_seq_c4():
+    G = nx.cycle_graph(4)
+    degrees = sorted(d for n, d in G.degree())
+    G = nx.double_edge_swap(G, 1, 100)
+    assert degrees == sorted(d for n, d in G.degree())
+
+
+def test_fewer_than_4_nodes():
+    G = nx.DiGraph()
+    G.add_nodes_from([0, 1, 2])
+    with pytest.raises(nx.NetworkXError, match=".*fewer than four nodes."):
+        nx.directed_edge_swap(G)
+
+
+def test_less_than_3_edges():
+    G = nx.DiGraph([(0, 1), (1, 2)])
+    G.add_nodes_from([3, 4])
+    with pytest.raises(nx.NetworkXError, match=".*fewer than 3 edges"):
+        nx.directed_edge_swap(G)
+
+    G = nx.Graph()
+    G.add_nodes_from([0, 1, 2, 3])
+    with pytest.raises(nx.NetworkXError, match=".*fewer than 2 edges"):
+        nx.double_edge_swap(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_threshold.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_threshold.py
new file mode 100644
index 00000000..07aad44b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_threshold.py
@@ -0,0 +1,269 @@
+"""
+Threshold Graphs
+================
+"""
+
+import pytest
+
+import networkx as nx
+import networkx.algorithms.threshold as nxt
+from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic
+
+cnlti = nx.convert_node_labels_to_integers
+
+
+class TestGeneratorThreshold:
+    def test_threshold_sequence_graph_test(self):
+        G = nx.star_graph(10)
+        assert nxt.is_threshold_graph(G)
+        assert nxt.is_threshold_sequence([d for n, d in G.degree()])
+
+        G = nx.complete_graph(10)
+        assert nxt.is_threshold_graph(G)
+        assert nxt.is_threshold_sequence([d for n, d in G.degree()])
+
+        deg = [3, 2, 2, 1, 1, 1]
+        assert not nxt.is_threshold_sequence(deg)
+
+        deg = [3, 2, 2, 1]
+        assert nxt.is_threshold_sequence(deg)
+
+        G = nx.generators.havel_hakimi_graph(deg)
+        assert nxt.is_threshold_graph(G)
+
+    def test_creation_sequences(self):
+        deg = [3, 2, 2, 1]
+        G = nx.generators.havel_hakimi_graph(deg)
+
+        with pytest.raises(ValueError):
+            nxt.creation_sequence(deg, with_labels=True, compact=True)
+
+        cs0 = nxt.creation_sequence(deg)
+        H0 = nxt.threshold_graph(cs0)
+        assert "".join(cs0) == "ddid"
+
+        cs1 = nxt.creation_sequence(deg, with_labels=True)
+        H1 = nxt.threshold_graph(cs1)
+        assert cs1 == [(1, "d"), (2, "d"), (3, "i"), (0, "d")]
+
+        cs2 = nxt.creation_sequence(deg, compact=True)
+        H2 = nxt.threshold_graph(cs2)
+        assert cs2 == [2, 1, 1]
+        assert "".join(nxt.uncompact(cs2)) == "ddid"
+        assert graph_could_be_isomorphic(H0, G)
+        assert graph_could_be_isomorphic(H0, H1)
+        assert graph_could_be_isomorphic(H0, H2)
+
+    def test_make_compact(self):
+        assert nxt.make_compact(["d", "d", "d", "i", "d", "d"]) == [3, 1, 2]
+        assert nxt.make_compact([3, 1, 2]) == [3, 1, 2]
+        assert pytest.raises(TypeError, nxt.make_compact, [3.0, 1.0, 2.0])
+
+    def test_uncompact(self):
+        assert nxt.uncompact([3, 1, 2]) == ["d", "d", "d", "i", "d", "d"]
+        assert nxt.uncompact(["d", "d", "i", "d"]) == ["d", "d", "i", "d"]
+        assert nxt.uncompact(
+            nxt.uncompact([(1, "d"), (2, "d"), (3, "i"), (0, "d")])
+        ) == nxt.uncompact([(1, "d"), (2, "d"), (3, "i"), (0, "d")])
+        assert pytest.raises(TypeError, nxt.uncompact, [3.0, 1.0, 2.0])
+
+    def test_creation_sequence_to_weights(self):
+        assert nxt.creation_sequence_to_weights([3, 1, 2]) == [
+            0.5,
+            0.5,
+            0.5,
+            0.25,
+            0.75,
+            0.75,
+        ]
+        assert pytest.raises(
+            TypeError, nxt.creation_sequence_to_weights, [3.0, 1.0, 2.0]
+        )
+
+    def test_weights_to_creation_sequence(self):
+        deg = [3, 2, 2, 1]
+        with pytest.raises(ValueError):
+            nxt.weights_to_creation_sequence(deg, with_labels=True, compact=True)
+        assert nxt.weights_to_creation_sequence(deg, with_labels=True) == [
+            (3, "d"),
+            (1, "d"),
+            (2, "d"),
+            (0, "d"),
+        ]
+        assert nxt.weights_to_creation_sequence(deg, compact=True) == [4]
+
+    def test_find_alternating_4_cycle(self):
+        G = nx.Graph()
+        G.add_edge(1, 2)
+        assert not nxt.find_alternating_4_cycle(G)
+
+    def test_shortest_path(self):
+        deg = [3, 2, 2, 1]
+        G = nx.generators.havel_hakimi_graph(deg)
+        cs1 = nxt.creation_sequence(deg, with_labels=True)
+        for n, m in [(3, 0), (0, 3), (0, 2), (0, 1), (1, 3), (3, 1), (1, 2), (2, 3)]:
+            assert nxt.shortest_path(cs1, n, m) == nx.shortest_path(G, n, m)
+
+        spl = nxt.shortest_path_length(cs1, 3)
+        spl2 = nxt.shortest_path_length([t for v, t in cs1], 2)
+        assert spl == spl2
+
+        spld = {}
+        for j, pl in enumerate(spl):
+            n = cs1[j][0]
+            spld[n] = pl
+        assert spld == nx.single_source_shortest_path_length(G, 3)
+
+        assert nxt.shortest_path(["d", "d", "d", "i", "d", "d"], 1, 2) == [1, 2]
+        assert nxt.shortest_path([3, 1, 2], 1, 2) == [1, 2]
+        assert pytest.raises(TypeError, nxt.shortest_path, [3.0, 1.0, 2.0], 1, 2)
+        assert pytest.raises(ValueError, nxt.shortest_path, [3, 1, 2], "a", 2)
+        assert pytest.raises(ValueError, nxt.shortest_path, [3, 1, 2], 1, "b")
+        assert nxt.shortest_path([3, 1, 2], 1, 1) == [1]
+
+    def test_shortest_path_length(self):
+        assert nxt.shortest_path_length([3, 1, 2], 1) == [1, 0, 1, 2, 1, 1]
+        assert nxt.shortest_path_length(["d", "d", "d", "i", "d", "d"], 1) == [
+            1,
+            0,
+            1,
+            2,
+            1,
+            1,
+        ]
+        assert nxt.shortest_path_length(("d", "d", "d", "i", "d", "d"), 1) == [
+            1,
+            0,
+            1,
+            2,
+            1,
+            1,
+        ]
+        assert pytest.raises(TypeError, nxt.shortest_path, [3.0, 1.0, 2.0], 1)
+
+    def test_random_threshold_sequence(self):
+        assert len(nxt.random_threshold_sequence(10, 0.5)) == 10
+        assert nxt.random_threshold_sequence(10, 0.5, seed=42) == [
+            "d",
+            "i",
+            "d",
+            "d",
+            "d",
+            "i",
+            "i",
+            "i",
+            "d",
+            "d",
+        ]
+        assert pytest.raises(ValueError, nxt.random_threshold_sequence, 10, 1.5)
+
+    def test_right_d_threshold_sequence(self):
+        assert nxt.right_d_threshold_sequence(3, 2) == ["d", "i", "d"]
+        assert pytest.raises(ValueError, nxt.right_d_threshold_sequence, 2, 3)
+
+    def test_left_d_threshold_sequence(self):
+        assert nxt.left_d_threshold_sequence(3, 2) == ["d", "i", "d"]
+        assert pytest.raises(ValueError, nxt.left_d_threshold_sequence, 2, 3)
+
+    def test_weights_thresholds(self):
+        wseq = [3, 4, 3, 3, 5, 6, 5, 4, 5, 6]
+        cs = nxt.weights_to_creation_sequence(wseq, threshold=10)
+        wseq = nxt.creation_sequence_to_weights(cs)
+        cs2 = nxt.weights_to_creation_sequence(wseq)
+        assert cs == cs2
+
+        wseq = nxt.creation_sequence_to_weights(nxt.uncompact([3, 1, 2, 3, 3, 2, 3]))
+        assert wseq == [
+            s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7]
+        ]
+
+        wseq = nxt.creation_sequence_to_weights([3, 1, 2, 3, 3, 2, 3])
+        assert wseq == [
+            s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7]
+        ]
+
+        wseq = nxt.creation_sequence_to_weights(list(enumerate("ddidiiidididi")))
+        assert wseq == [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]]
+
+        wseq = nxt.creation_sequence_to_weights("ddidiiidididi")
+        assert wseq == [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]]
+
+        wseq = nxt.creation_sequence_to_weights("ddidiiidididid")
+        ws = [s / 12 for s in [6, 6, 5, 7, 4, 4, 4, 8, 3, 9, 2, 10, 1, 11]]
+        assert sum(abs(c - d) for c, d in zip(wseq, ws)) < 1e-14
+
+    def test_finding_routines(self):
+        G = nx.Graph({1: [2], 2: [3], 3: [4], 4: [5], 5: [6]})
+        G.add_edge(2, 4)
+        G.add_edge(2, 5)
+        G.add_edge(2, 7)
+        G.add_edge(3, 6)
+        G.add_edge(4, 6)
+
+        # Alternating 4 cycle
+        assert nxt.find_alternating_4_cycle(G) == [1, 2, 3, 6]
+
+        # Threshold graph
+        TG = nxt.find_threshold_graph(G)
+        assert nxt.is_threshold_graph(TG)
+        assert sorted(TG.nodes()) == [1, 2, 3, 4, 5, 7]
+
+        cs = nxt.creation_sequence(dict(TG.degree()), with_labels=True)
+        assert nxt.find_creation_sequence(G) == cs
+
+    def test_fast_versions_properties_threshold_graphs(self):
+        cs = "ddiiddid"
+        G = nxt.threshold_graph(cs)
+        assert nxt.density("ddiiddid") == nx.density(G)
+        assert sorted(nxt.degree_sequence(cs)) == sorted(d for n, d in G.degree())
+
+        ts = nxt.triangle_sequence(cs)
+        assert ts == list(nx.triangles(G).values())
+        assert sum(ts) // 3 == nxt.triangles(cs)
+
+        c1 = nxt.cluster_sequence(cs)
+        c2 = list(nx.clustering(G).values())
+        assert sum(abs(c - d) for c, d in zip(c1, c2)) == pytest.approx(0, abs=1e-7)
+
+        b1 = nx.betweenness_centrality(G).values()
+        b2 = nxt.betweenness_sequence(cs)
+        assert sum(abs(c - d) for c, d in zip(b1, b2)) < 1e-7
+
+        assert nxt.eigenvalues(cs) == [0, 1, 3, 3, 5, 7, 7, 8]
+
+        # Degree Correlation
+        assert abs(nxt.degree_correlation(cs) + 0.593038821954) < 1e-12
+        assert nxt.degree_correlation("diiiddi") == -0.8
+        assert nxt.degree_correlation("did") == -1.0
+        assert nxt.degree_correlation("ddd") == 1.0
+        assert nxt.eigenvalues("dddiii") == [0, 0, 0, 0, 3, 3]
+        assert nxt.eigenvalues("dddiiid") == [0, 1, 1, 1, 4, 4, 7]
+
+    def test_tg_creation_routines(self):
+        s = nxt.left_d_threshold_sequence(5, 7)
+        s = nxt.right_d_threshold_sequence(5, 7)
+        s1 = nxt.swap_d(s, 1.0, 1.0)
+        s1 = nxt.swap_d(s, 1.0, 1.0, seed=1)
+
+    def test_eigenvectors(self):
+        np = pytest.importorskip("numpy")
+        eigenval = np.linalg.eigvals
+        pytest.importorskip("scipy")
+
+        cs = "ddiiddid"
+        G = nxt.threshold_graph(cs)
+        (tgeval, tgevec) = nxt.eigenvectors(cs)
+        np.testing.assert_allclose([np.dot(lv, lv) for lv in tgevec], 1.0, rtol=1e-9)
+        lapl = nx.laplacian_matrix(G)
+
+    def test_create_using(self):
+        cs = "ddiiddid"
+        G = nxt.threshold_graph(cs)
+        assert pytest.raises(
+            nx.exception.NetworkXError,
+            nxt.threshold_graph,
+            cs,
+            create_using=nx.DiGraph(),
+        )
+        MG = nxt.threshold_graph(cs, create_using=nx.MultiGraph())
+        assert sorted(MG.edges()) == sorted(G.edges())
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_time_dependent.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_time_dependent.py
new file mode 100644
index 00000000..1e256f4b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_time_dependent.py
@@ -0,0 +1,431 @@
+"""Unit testing for time dependent algorithms."""
+
+from datetime import datetime, timedelta
+
+import pytest
+
+import networkx as nx
+
+_delta = timedelta(days=5 * 365)
+
+
+class TestCdIndex:
+    """Unit testing for the cd index function."""
+
+    def test_common_graph(self):
+        G = nx.DiGraph()
+        G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+        G.add_edge(4, 2)
+        G.add_edge(4, 0)
+        G.add_edge(4, 1)
+        G.add_edge(4, 3)
+        G.add_edge(5, 2)
+        G.add_edge(6, 2)
+        G.add_edge(6, 4)
+        G.add_edge(7, 4)
+        G.add_edge(8, 4)
+        G.add_edge(9, 4)
+        G.add_edge(9, 1)
+        G.add_edge(9, 3)
+        G.add_edge(10, 4)
+
+        node_attrs = {
+            0: {"time": datetime(1992, 1, 1)},
+            1: {"time": datetime(1992, 1, 1)},
+            2: {"time": datetime(1993, 1, 1)},
+            3: {"time": datetime(1993, 1, 1)},
+            4: {"time": datetime(1995, 1, 1)},
+            5: {"time": datetime(1997, 1, 1)},
+            6: {"time": datetime(1998, 1, 1)},
+            7: {"time": datetime(1999, 1, 1)},
+            8: {"time": datetime(1999, 1, 1)},
+            9: {"time": datetime(1998, 1, 1)},
+            10: {"time": datetime(1997, 4, 1)},
+        }
+
+        nx.set_node_attributes(G, node_attrs)
+
+        assert nx.cd_index(G, 4, time_delta=_delta) == 0.17
+
+    def test_common_graph_with_given_attributes(self):
+        G = nx.DiGraph()
+        G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+        G.add_edge(4, 2)
+        G.add_edge(4, 0)
+        G.add_edge(4, 1)
+        G.add_edge(4, 3)
+        G.add_edge(5, 2)
+        G.add_edge(6, 2)
+        G.add_edge(6, 4)
+        G.add_edge(7, 4)
+        G.add_edge(8, 4)
+        G.add_edge(9, 4)
+        G.add_edge(9, 1)
+        G.add_edge(9, 3)
+        G.add_edge(10, 4)
+
+        node_attrs = {
+            0: {"date": datetime(1992, 1, 1)},
+            1: {"date": datetime(1992, 1, 1)},
+            2: {"date": datetime(1993, 1, 1)},
+            3: {"date": datetime(1993, 1, 1)},
+            4: {"date": datetime(1995, 1, 1)},
+            5: {"date": datetime(1997, 1, 1)},
+            6: {"date": datetime(1998, 1, 1)},
+            7: {"date": datetime(1999, 1, 1)},
+            8: {"date": datetime(1999, 1, 1)},
+            9: {"date": datetime(1998, 1, 1)},
+            10: {"date": datetime(1997, 4, 1)},
+        }
+
+        nx.set_node_attributes(G, node_attrs)
+
+        assert nx.cd_index(G, 4, time_delta=_delta, time="date") == 0.17
+
+    def test_common_graph_with_int_attributes(self):
+        G = nx.DiGraph()
+        G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+        G.add_edge(4, 2)
+        G.add_edge(4, 0)
+        G.add_edge(4, 1)
+        G.add_edge(4, 3)
+        G.add_edge(5, 2)
+        G.add_edge(6, 2)
+        G.add_edge(6, 4)
+        G.add_edge(7, 4)
+        G.add_edge(8, 4)
+        G.add_edge(9, 4)
+        G.add_edge(9, 1)
+        G.add_edge(9, 3)
+        G.add_edge(10, 4)
+
+        node_attrs = {
+            0: {"time": 20},
+            1: {"time": 20},
+            2: {"time": 30},
+            3: {"time": 30},
+            4: {"time": 50},
+            5: {"time": 70},
+            6: {"time": 80},
+            7: {"time": 90},
+            8: {"time": 90},
+            9: {"time": 80},
+            10: {"time": 74},
+        }
+
+        nx.set_node_attributes(G, node_attrs)
+
+        assert nx.cd_index(G, 4, time_delta=50) == 0.17
+
+    def test_common_graph_with_float_attributes(self):
+        G = nx.DiGraph()
+        G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+        G.add_edge(4, 2)
+        G.add_edge(4, 0)
+        G.add_edge(4, 1)
+        G.add_edge(4, 3)
+        G.add_edge(5, 2)
+        G.add_edge(6, 2)
+        G.add_edge(6, 4)
+        G.add_edge(7, 4)
+        G.add_edge(8, 4)
+        G.add_edge(9, 4)
+        G.add_edge(9, 1)
+        G.add_edge(9, 3)
+        G.add_edge(10, 4)
+
+        node_attrs = {
+            0: {"time": 20.2},
+            1: {"time": 20.2},
+            2: {"time": 30.7},
+            3: {"time": 30.7},
+            4: {"time": 50.9},
+            5: {"time": 70.1},
+            6: {"time": 80.6},
+            7: {"time": 90.7},
+            8: {"time": 90.7},
+            9: {"time": 80.6},
+            10: {"time": 74.2},
+        }
+
+        nx.set_node_attributes(G, node_attrs)
+
+        assert nx.cd_index(G, 4, time_delta=50) == 0.17
+
+    def test_common_graph_with_weights(self):
+        G = nx.DiGraph()
+        G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+        G.add_edge(4, 2)
+        G.add_edge(4, 0)
+        G.add_edge(4, 1)
+        G.add_edge(4, 3)
+        G.add_edge(5, 2)
+        G.add_edge(6, 2)
+        G.add_edge(6, 4)
+        G.add_edge(7, 4)
+        G.add_edge(8, 4)
+        G.add_edge(9, 4)
+        G.add_edge(9, 1)
+        G.add_edge(9, 3)
+        G.add_edge(10, 4)
+
+        node_attrs = {
+            0: {"time": datetime(1992, 1, 1)},
+            1: {"time": datetime(1992, 1, 1)},
+            2: {"time": datetime(1993, 1, 1)},
+            3: {"time": datetime(1993, 1, 1)},
+            4: {"time": datetime(1995, 1, 1)},
+            5: {"time": datetime(1997, 1, 1)},
+            6: {"time": datetime(1998, 1, 1), "weight": 5},
+            7: {"time": datetime(1999, 1, 1), "weight": 2},
+            8: {"time": datetime(1999, 1, 1), "weight": 6},
+            9: {"time": datetime(1998, 1, 1), "weight": 3},
+            10: {"time": datetime(1997, 4, 1), "weight": 10},
+        }
+
+        nx.set_node_attributes(G, node_attrs)
+        assert nx.cd_index(G, 4, time_delta=_delta, weight="weight") == 0.04
+
+    def test_node_with_no_predecessors(self):
+        G = nx.DiGraph()
+        G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+        G.add_edge(4, 2)
+        G.add_edge(4, 0)
+        G.add_edge(4, 3)
+        G.add_edge(5, 2)
+        G.add_edge(6, 2)
+        G.add_edge(6, 4)
+        G.add_edge(7, 4)
+        G.add_edge(8, 4)
+        G.add_edge(9, 4)
+        G.add_edge(9, 1)
+        G.add_edge(9, 3)
+        G.add_edge(10, 4)
+
+        node_attrs = {
+            0: {"time": datetime(1992, 1, 1)},
+            1: {"time": datetime(1992, 1, 1)},
+            2: {"time": datetime(1993, 1, 1)},
+            3: {"time": datetime(1993, 1, 1)},
+            4: {"time": datetime(1995, 1, 1)},
+            5: {"time": datetime(2005, 1, 1)},
+            6: {"time": datetime(2010, 1, 1)},
+            7: {"time": datetime(2001, 1, 1)},
+            8: {"time": datetime(2020, 1, 1)},
+            9: {"time": datetime(2017, 1, 1)},
+            10: {"time": datetime(2004, 4, 1)},
+        }
+
+        nx.set_node_attributes(G, node_attrs)
+        assert nx.cd_index(G, 4, time_delta=_delta) == 0.0
+
+    def test_node_with_no_successors(self):
+        G = nx.DiGraph()
+        G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+        G.add_edge(8, 2)
+        G.add_edge(6, 0)
+        G.add_edge(6, 3)
+        G.add_edge(5, 2)
+        G.add_edge(6, 2)
+        G.add_edge(6, 4)
+        G.add_edge(7, 4)
+        G.add_edge(8, 4)
+        G.add_edge(9, 4)
+        G.add_edge(9, 1)
+        G.add_edge(9, 3)
+        G.add_edge(10, 4)
+
+        node_attrs = {
+            0: {"time": datetime(1992, 1, 1)},
+            1: {"time": datetime(1992, 1, 1)},
+            2: {"time": datetime(1993, 1, 1)},
+            3: {"time": datetime(1993, 1, 1)},
+            4: {"time": datetime(1995, 1, 1)},
+            5: {"time": datetime(1997, 1, 1)},
+            6: {"time": datetime(1998, 1, 1)},
+            7: {"time": datetime(1999, 1, 1)},
+            8: {"time": datetime(1999, 1, 1)},
+            9: {"time": datetime(1998, 1, 1)},
+            10: {"time": datetime(1997, 4, 1)},
+        }
+
+        nx.set_node_attributes(G, node_attrs)
+        assert nx.cd_index(G, 4, time_delta=_delta) == 1.0
+
+    def test_n_equals_zero(self):
+        G = nx.DiGraph()
+        G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+        G.add_edge(4, 2)
+        G.add_edge(4, 0)
+        G.add_edge(4, 3)
+        G.add_edge(6, 4)
+        G.add_edge(7, 4)
+        G.add_edge(8, 4)
+        G.add_edge(9, 4)
+        G.add_edge(9, 1)
+        G.add_edge(10, 4)
+
+        node_attrs = {
+            0: {"time": datetime(1992, 1, 1)},
+            1: {"time": datetime(1992, 1, 1)},
+            2: {"time": datetime(1993, 1, 1)},
+            3: {"time": datetime(1993, 1, 1)},
+            4: {"time": datetime(1995, 1, 1)},
+            5: {"time": datetime(2005, 1, 1)},
+            6: {"time": datetime(2010, 1, 1)},
+            7: {"time": datetime(2001, 1, 1)},
+            8: {"time": datetime(2020, 1, 1)},
+            9: {"time": datetime(2017, 1, 1)},
+            10: {"time": datetime(2004, 4, 1)},
+        }
+
+        nx.set_node_attributes(G, node_attrs)
+
+        with pytest.raises(
+            nx.NetworkXError, match="The cd index cannot be defined."
+        ) as ve:
+            nx.cd_index(G, 4, time_delta=_delta)
+
+    def test_time_timedelta_compatibility(self):
+        G = nx.DiGraph()
+        G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+        G.add_edge(4, 2)
+        G.add_edge(4, 0)
+        G.add_edge(4, 3)
+        G.add_edge(6, 4)
+        G.add_edge(7, 4)
+        G.add_edge(8, 4)
+        G.add_edge(9, 4)
+        G.add_edge(9, 1)
+        G.add_edge(10, 4)
+
+        node_attrs = {
+            0: {"time": 20.2},
+            1: {"time": 20.2},
+            2: {"time": 30.7},
+            3: {"time": 30.7},
+            4: {"time": 50.9},
+            5: {"time": 70.1},
+            6: {"time": 80.6},
+            7: {"time": 90.7},
+            8: {"time": 90.7},
+            9: {"time": 80.6},
+            10: {"time": 74.2},
+        }
+
+        nx.set_node_attributes(G, node_attrs)
+
+        with pytest.raises(
+            nx.NetworkXError,
+            match="Addition and comparison are not supported between",
+        ) as ve:
+            nx.cd_index(G, 4, time_delta=_delta)
+
+    def test_node_with_no_time(self):
+        G = nx.DiGraph()
+        G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+        G.add_edge(8, 2)
+        G.add_edge(6, 0)
+        G.add_edge(6, 3)
+        G.add_edge(5, 2)
+        G.add_edge(6, 2)
+        G.add_edge(6, 4)
+        G.add_edge(7, 4)
+        G.add_edge(8, 4)
+        G.add_edge(9, 4)
+        G.add_edge(9, 1)
+        G.add_edge(9, 3)
+        G.add_edge(10, 4)
+
+        node_attrs = {
+            0: {"time": datetime(1992, 1, 1)},
+            1: {"time": datetime(1992, 1, 1)},
+            2: {"time": datetime(1993, 1, 1)},
+            3: {"time": datetime(1993, 1, 1)},
+            4: {"time": datetime(1995, 1, 1)},
+            6: {"time": datetime(1998, 1, 1)},
+            7: {"time": datetime(1999, 1, 1)},
+            8: {"time": datetime(1999, 1, 1)},
+            9: {"time": datetime(1998, 1, 1)},
+            10: {"time": datetime(1997, 4, 1)},
+        }
+
+        nx.set_node_attributes(G, node_attrs)
+
+        with pytest.raises(
+            nx.NetworkXError, match="Not all nodes have a 'time' attribute."
+        ) as ve:
+            nx.cd_index(G, 4, time_delta=_delta)
+
+    def test_maximally_consolidating(self):
+        G = nx.DiGraph()
+        G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
+        G.add_edge(5, 1)
+        G.add_edge(5, 2)
+        G.add_edge(5, 3)
+        G.add_edge(5, 4)
+        G.add_edge(6, 1)
+        G.add_edge(6, 5)
+        G.add_edge(7, 1)
+        G.add_edge(7, 5)
+        G.add_edge(8, 2)
+        G.add_edge(8, 5)
+        G.add_edge(9, 5)
+        G.add_edge(9, 3)
+        G.add_edge(10, 5)
+        G.add_edge(10, 3)
+        G.add_edge(10, 4)
+        G.add_edge(11, 5)
+        G.add_edge(11, 4)
+
+        node_attrs = {
+            0: {"time": datetime(1992, 1, 1)},
+            1: {"time": datetime(1992, 1, 1)},
+            2: {"time": datetime(1993, 1, 1)},
+            3: {"time": datetime(1993, 1, 1)},
+            4: {"time": datetime(1995, 1, 1)},
+            5: {"time": datetime(1997, 1, 1)},
+            6: {"time": datetime(1998, 1, 1)},
+            7: {"time": datetime(1999, 1, 1)},
+            8: {"time": datetime(1999, 1, 1)},
+            9: {"time": datetime(1998, 1, 1)},
+            10: {"time": datetime(1997, 4, 1)},
+            11: {"time": datetime(1998, 5, 1)},
+        }
+
+        nx.set_node_attributes(G, node_attrs)
+
+        assert nx.cd_index(G, 5, time_delta=_delta) == -1
+
+    def test_maximally_destabilizing(self):
+        G = nx.DiGraph()
+        G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
+        G.add_edge(5, 1)
+        G.add_edge(5, 2)
+        G.add_edge(5, 3)
+        G.add_edge(5, 4)
+        G.add_edge(6, 5)
+        G.add_edge(7, 5)
+        G.add_edge(8, 5)
+        G.add_edge(9, 5)
+        G.add_edge(10, 5)
+        G.add_edge(11, 5)
+
+        node_attrs = {
+            0: {"time": datetime(1992, 1, 1)},
+            1: {"time": datetime(1992, 1, 1)},
+            2: {"time": datetime(1993, 1, 1)},
+            3: {"time": datetime(1993, 1, 1)},
+            4: {"time": datetime(1995, 1, 1)},
+            5: {"time": datetime(1997, 1, 1)},
+            6: {"time": datetime(1998, 1, 1)},
+            7: {"time": datetime(1999, 1, 1)},
+            8: {"time": datetime(1999, 1, 1)},
+            9: {"time": datetime(1998, 1, 1)},
+            10: {"time": datetime(1997, 4, 1)},
+            11: {"time": datetime(1998, 5, 1)},
+        }
+
+        nx.set_node_attributes(G, node_attrs)
+
+        assert nx.cd_index(G, 5, time_delta=_delta) == 1
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_tournament.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_tournament.py
new file mode 100644
index 00000000..e75abf8f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_tournament.py
@@ -0,0 +1,163 @@
+"""Unit tests for the :mod:`networkx.algorithms.tournament` module."""
+
+from itertools import combinations
+
+import pytest
+
+from networkx import DiGraph
+from networkx.algorithms.tournament import (
+    hamiltonian_path,
+    index_satisfying,
+    is_reachable,
+    is_strongly_connected,
+    is_tournament,
+    random_tournament,
+    score_sequence,
+    tournament_matrix,
+)
+
+
+def test_condition_not_satisfied():
+    condition = lambda x: x > 0
+    iter_in = [0]
+    assert index_satisfying(iter_in, condition) == 1
+
+
+def test_empty_iterable():
+    condition = lambda x: x > 0
+    with pytest.raises(ValueError):
+        index_satisfying([], condition)
+
+
+def test_is_tournament():
+    G = DiGraph()
+    G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)])
+    assert is_tournament(G)
+
+
+def test_self_loops():
+    """A tournament must have no self-loops."""
+    G = DiGraph()
+    G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)])
+    G.add_edge(0, 0)
+    assert not is_tournament(G)
+
+
+def test_missing_edges():
+    """A tournament must not have any pair of nodes without at least
+    one edge joining the pair.
+
+    """
+    G = DiGraph()
+    G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3)])
+    assert not is_tournament(G)
+
+
+def test_bidirectional_edges():
+    """A tournament must not have any pair of nodes with greater
+    than one edge joining the pair.
+
+    """
+    G = DiGraph()
+    G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)])
+    G.add_edge(1, 0)
+    assert not is_tournament(G)
+
+
+def test_graph_is_tournament():
+    for _ in range(10):
+        G = random_tournament(5)
+        assert is_tournament(G)
+
+
+def test_graph_is_tournament_seed():
+    for _ in range(10):
+        G = random_tournament(5, seed=1)
+        assert is_tournament(G)
+
+
+def test_graph_is_tournament_one_node():
+    G = random_tournament(1)
+    assert is_tournament(G)
+
+
+def test_graph_is_tournament_zero_node():
+    G = random_tournament(0)
+    assert is_tournament(G)
+
+
+def test_hamiltonian_empty_graph():
+    path = hamiltonian_path(DiGraph())
+    assert len(path) == 0
+
+
+def test_path_is_hamiltonian():
+    G = DiGraph()
+    G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)])
+    path = hamiltonian_path(G)
+    assert len(path) == 4
+    assert all(v in G[u] for u, v in zip(path, path[1:]))
+
+
+def test_hamiltonian_cycle():
+    """Tests that :func:`networkx.tournament.hamiltonian_path`
+    returns a Hamiltonian cycle when provided a strongly connected
+    tournament.
+
+    """
+    G = DiGraph()
+    G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)])
+    path = hamiltonian_path(G)
+    assert len(path) == 4
+    assert all(v in G[u] for u, v in zip(path, path[1:]))
+    assert path[0] in G[path[-1]]
+
+
+def test_score_sequence_edge():
+    G = DiGraph([(0, 1)])
+    assert score_sequence(G) == [0, 1]
+
+
+def test_score_sequence_triangle():
+    G = DiGraph([(0, 1), (1, 2), (2, 0)])
+    assert score_sequence(G) == [1, 1, 1]
+
+
+def test_tournament_matrix():
+    np = pytest.importorskip("numpy")
+    pytest.importorskip("scipy")
+    npt = np.testing
+    G = DiGraph([(0, 1)])
+    m = tournament_matrix(G)
+    npt.assert_array_equal(m.todense(), np.array([[0, 1], [-1, 0]]))
+
+
+def test_reachable_pair():
+    """Tests for a reachable pair of nodes."""
+    G = DiGraph([(0, 1), (1, 2), (2, 0)])
+    assert is_reachable(G, 0, 2)
+
+
+def test_same_node_is_reachable():
+    """Tests that a node is always reachable from it."""
+    # G is an arbitrary tournament on ten nodes.
+    G = DiGraph(sorted(p) for p in combinations(range(10), 2))
+    assert all(is_reachable(G, v, v) for v in G)
+
+
+def test_unreachable_pair():
+    """Tests for an unreachable pair of nodes."""
+    G = DiGraph([(0, 1), (0, 2), (1, 2)])
+    assert not is_reachable(G, 1, 0)
+
+
+def test_is_strongly_connected():
+    """Tests for a strongly connected tournament."""
+    G = DiGraph([(0, 1), (1, 2), (2, 0)])
+    assert is_strongly_connected(G)
+
+
+def test_not_strongly_connected():
+    """Tests for a tournament that is not strongly connected."""
+    G = DiGraph([(0, 1), (0, 2), (1, 2)])
+    assert not is_strongly_connected(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_triads.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_triads.py
new file mode 100644
index 00000000..62670351
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_triads.py
@@ -0,0 +1,289 @@
+"""Tests for the :mod:`networkx.algorithms.triads` module."""
+
+import itertools
+from collections import defaultdict
+from random import sample
+
+import pytest
+
+import networkx as nx
+
+
def test_all_triplets_deprecated():
    """`all_triplets` emits a deprecation warning when called."""
    G = nx.DiGraph()
    G.add_edges_from([(1, 2), (2, 3), (3, 4)])
    with pytest.deprecated_call():
        nx.all_triplets(G)
+
+
def test_random_triad_deprecated():
    """`random_triad` emits a deprecation warning when called."""
    # Directed path 0 -> 1 -> 2 (same graph as path_graph(3) as a DiGraph).
    G = nx.DiGraph([(0, 1), (1, 2)])
    with pytest.deprecated_call():
        nx.random_triad(G)
+
+
def test_triadic_census():
    """Triadic census of a small digraph matches hand-computed counts."""
    G = nx.DiGraph()
    G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"])
    expected = dict.fromkeys(
        ["210", "120U", "021U", "111U", "030C", "201", "300", "120D"], 0
    )
    expected.update(
        {
            "003": 8,
            "012": 9,
            "102": 3,
            "021D": 9,
            "021C": 2,
            "030T": 2,
            "111D": 1,
            "120C": 1,
        }
    )
    assert nx.triadic_census(G) == expected
+
+
def test_is_triad():
    """Any 3-node subgraph of a directed graph is a triad."""
    G = nx.karate_club_graph().to_directed()
    for _ in range(100):
        triple = sample(sorted(G), 3)
        assert nx.is_triad(G.subgraph(triple))
+
+
def test_all_triplets():
    """Tests the all_triplets function."""
    G = nx.DiGraph()
    G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"])
    # Every unordered triple of the seven node labels "0".."6".
    expected = [
        {str(i), str(j), str(k)} for i, j, k in itertools.combinations(range(7), 3)
    ]
    actual = [set(triplet) for triplet in nx.all_triplets(G)]
    assert all(any(want == got for want in expected) for got in actual)
+
+
def test_all_triads():
    """Tests the all_triads function."""
    G = nx.DiGraph()
    G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"])
    # One induced subgraph per unordered triple of node labels "0".."6".
    expected = [
        G.subgraph(map(str, trio)) for trio in itertools.combinations(range(7), 3)
    ]
    for triad in nx.all_triads(G):
        assert any(nx.is_isomorphic(triad, want) for want in expected)
+
+
def test_triad_type():
    """triad_type labels each of the 16 canonical triads correctly."""
    # (edge list, expected MAN label); nodes 0..2 are added explicitly so
    # isolated vertices are present even when no edge mentions them.
    cases = [
        # 0 edges (1 type)
        ([], "003"),
        # 1 edge (1 type)
        ([(0, 1)], "012"),
        # 2 edges (4 types)
        ([(0, 1), (0, 2)], "021D"),
        ([(0, 1), (1, 0)], "102"),
        ([(0, 1), (2, 1)], "021U"),
        ([(0, 1), (1, 2)], "021C"),
        # 3 edges (4 types)
        ([(0, 1), (1, 0), (2, 1)], "111D"),
        ([(0, 1), (1, 0), (1, 2)], "111U"),
        ([(0, 1), (1, 2), (0, 2)], "030T"),
        ([(0, 1), (1, 2), (2, 0)], "030C"),
        # 4 edges (4 types)
        ([(0, 1), (1, 0), (2, 0), (0, 2)], "201"),
        ([(0, 1), (1, 0), (2, 0), (2, 1)], "120D"),
        ([(0, 1), (1, 0), (0, 2), (1, 2)], "120U"),
        ([(0, 1), (1, 0), (0, 2), (2, 1)], "120C"),
        # 5 edges (1 type)
        ([(0, 1), (1, 0), (2, 1), (1, 2), (0, 2)], "210"),
        # 6 edges (1 type)
        ([(0, 1), (1, 0), (1, 2), (2, 1), (0, 2), (2, 0)], "300"),
    ]
    for edges, label in cases:
        G = nx.DiGraph(edges)
        G.add_nodes_from(range(3))
        assert nx.triad_type(G) == label
+
+
def test_triads_by_type():
    """Tests the triads_by_type function."""
    G = nx.DiGraph()
    G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"])
    # Independently bucket every triad by its type.
    expected = defaultdict(list)
    for triad in nx.all_triads(G):
        expected[nx.triad_type(triad)].append(triad)
    actual = nx.triads_by_type(G)
    assert set(actual) == set(expected)
    for name, triads in actual.items():
        for triad in triads:
            assert any(nx.is_isomorphic(triad, want) for want in expected[name])
+
+
def test_random_triad():
    """random_triad always yields a triad; tiny graphs are rejected."""
    G = nx.karate_club_graph().to_directed()
    for _ in range(100):
        assert nx.is_triad(nx.random_triad(G))

    msg = "at least 3 nodes to form a triad"
    with pytest.raises(nx.NetworkXError, match=msg):
        nx.random_triad(nx.DiGraph())
+
+
def test_triadic_census_short_path_nodelist():
    """Every nodelist over a directed 3-path sees the single 021C triad."""
    G = nx.path_graph("abc", create_using=nx.DiGraph)
    for nl in ["a", "b", "c", "ab", "ac", "bc", "abc"]:
        census = nx.triadic_census(G, nodelist=nl)
        nonzero = {typ: cnt for typ, cnt in census.items() if cnt > 0}
        assert nonzero == {"021C": 1}
+
+
def test_triadic_census_correct_nodelist_values():
    """Duplicate or foreign entries in nodelist raise ValueError."""
    G = nx.path_graph(5, create_using=nx.DiGraph)
    msg = r"nodelist includes duplicate nodes or nodes not in G"
    for bad_nodelist in ([1, 2, 2, 3], [1, 2, "a", 3]):
        with pytest.raises(ValueError, match=msg):
            nx.triadic_census(G, bad_nodelist)
+
+
def test_triadic_census_tiny_graphs():
    """Graphs with fewer than three nodes have an all-zero census."""
    for n in range(3):
        census = nx.triadic_census(nx.empty_graph(n, create_using=nx.DiGraph))
        assert not {typ: cnt for typ, cnt in census.items() if cnt > 0}
    census = nx.triadic_census(nx.DiGraph([(1, 2)]))
    assert not {typ: cnt for typ, cnt in census.items() if cnt > 0}
+
+
def test_triadic_census_selfloops():
    """Self-loops never change the triadic census."""
    base = nx.path_graph("abc", create_using=nx.DiGraph)
    for node in base:
        G = base.copy()
        G.add_edge(node, node)
        nonzero = {typ: cnt for typ, cnt in nx.triadic_census(G).items() if cnt > 0}
        assert nonzero == {"021C": 1}

    G = nx.path_graph("abcde", create_using=nx.DiGraph)
    tbt = nx.triads_by_type(G)
    G.add_edges_from((node, node) for node in list(G))
    census = nx.triadic_census(G)
    assert census == {tt: len(tbt[tt]) for tt in census}
+
+
def test_triadic_census_four_path():
    """A directed 4-path contains two 012 triads and two 021C triads."""
    G = nx.path_graph("abcd", create_using=nx.DiGraph)
    census = nx.triadic_census(G)
    nonzero = {typ: cnt for typ, cnt in census.items() if cnt > 0}
    assert nonzero == {"012": 2, "021C": 2}
+
+
def test_triadic_census_four_path_nodelist():
    """Endpoint and interior nodes of a 4-path see different censuses."""
    G = nx.path_graph("abcd", create_using=nx.DiGraph)
    expected_end = {"012": 2, "021C": 1}
    expected_mid = {"012": 1, "021C": 2}
    per_node = (expected_end, expected_mid, expected_mid, expected_end)
    for node, expected in zip("abcd", per_node):
        census = nx.triadic_census(G, nodelist=[node])
        assert expected == {typ: cnt for typ, cnt in census.items() if cnt > 0}
+
+
def test_triadic_census_nodelist():
    """Summing per-node censuses triple-counts the full census."""
    G = nx.DiGraph()
    G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"])
    expected = {
        "030T": 2,
        "120C": 1,
        "210": 0,
        "120U": 0,
        "012": 9,
        "102": 3,
        "021U": 0,
        "111U": 0,
        "003": 8,
        "030C": 0,
        "021D": 9,
        "201": 0,
        "111D": 1,
        "300": 0,
        "120D": 0,
        "021C": 2,
    }
    # Each triad is seen once per member node, hence the division by 3.
    actual = {
        key: sum(nx.triadic_census(G, nodelist=[node])[key] for node in G) // 3
        for key in expected
    }
    assert expected == actual
+
+
@pytest.mark.parametrize("N", [5, 10])
def test_triadic_census_on_random_graph(N):
    """Census agrees with an explicit triads_by_type enumeration."""
    G = nx.binomial_graph(N, 0.3, directed=True, seed=42)
    full_census = nx.triadic_census(G)
    tbt = nx.triads_by_type(G)
    assert full_census == {tt: len(tbt[tt]) for tt in full_census}

    # For every nodelist of size 1, 2 or 3, the census must count exactly
    # the triads containing at least one of the selected nodes.
    for size in (1, 2, 3):
        for chosen in itertools.combinations(G, size):
            chosen = set(chosen)
            census = nx.triadic_census(G, nodelist=chosen)
            by_hand = {
                tt: sum(
                    1 for t in tbt.get(tt, []) if any(n in chosen for n in t)
                )
                for tt in census
            }
            assert census == by_hand
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_vitality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_vitality.py
new file mode 100644
index 00000000..248206e6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_vitality.py
@@ -0,0 +1,41 @@
+import networkx as nx
+
+
class TestClosenessVitality:
    """Unit tests for closeness vitality on small cycles and paths."""

    def test_unweighted(self):
        """Each node of an unweighted 3-cycle has vitality 2."""
        G = nx.cycle_graph(3)
        assert nx.closeness_vitality(G) == dict.fromkeys(range(3), 2)

    def test_weighted(self):
        """Edge weights scale the vitality values."""
        G = nx.Graph()
        nx.add_cycle(G, [0, 1, 2], weight=2)
        assert nx.closeness_vitality(G, weight="weight") == dict.fromkeys(range(3), 4)

    def test_unweighted_digraph(self):
        """A directed cycle counts both orientations of each pair."""
        G = nx.DiGraph(nx.cycle_graph(3))
        assert nx.closeness_vitality(G) == dict.fromkeys(range(3), 4)

    def test_weighted_digraph(self):
        """Weighted directed cycles in both directions."""
        G = nx.DiGraph()
        nx.add_cycle(G, [0, 1, 2], weight=2)
        nx.add_cycle(G, [2, 1, 0], weight=2)
        assert nx.closeness_vitality(G, weight="weight") == dict.fromkeys(range(3), 8)

    def test_weighted_multidigraph(self):
        """Multigraph variant of the weighted directed cycle."""
        G = nx.MultiDiGraph()
        nx.add_cycle(G, [0, 1, 2], weight=2)
        nx.add_cycle(G, [2, 1, 0], weight=2)
        assert nx.closeness_vitality(G, weight="weight") == dict.fromkeys(range(3), 8)

    def test_disconnecting_graph(self):
        """Tests that the closeness vitality of a node whose removal
        disconnects the graph is negative infinity.

        """
        G = nx.path_graph(3)
        assert nx.closeness_vitality(G, node=1) == float("-inf")
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_voronoi.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_voronoi.py
new file mode 100644
index 00000000..3269ae62
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_voronoi.py
@@ -0,0 +1,103 @@
+import networkx as nx
+from networkx.utils import pairwise
+
+
class TestVoronoiCells:
    """Unit tests for the Voronoi cells function."""

    def test_isolates(self):
        """Isolated non-center nodes all land in the 'unreachable' block."""
        G = nx.empty_graph(5)
        partition = nx.voronoi_cells(G, {0, 2, 4})
        assert partition == {0: {0}, 2: {2}, 4: {4}, "unreachable": {1, 3}}

    def test_undirected_unweighted(self):
        """Opposite centers of a 6-cycle split it into halves."""
        partition = nx.voronoi_cells(nx.cycle_graph(6), {0, 3})
        assert partition == {0: {0, 1, 5}, 3: {2, 3, 4}}

    def test_directed_unweighted(self):
        """Cells follow edge direction on a directed cycle."""
        # The singly-linked directed cycle graph on six nodes.
        G = nx.DiGraph(pairwise(range(6), cyclic=True))
        partition = nx.voronoi_cells(G, {0, 3})
        assert partition == {0: {0, 1, 2}, 3: {3, 4, 5}}

    def test_directed_inward(self):
        """Tests that reversing the graph gives the "inward" Voronoi
        partition.

        """
        # The singly-linked reverse directed cycle graph on six nodes.
        G = nx.DiGraph(pairwise(range(6), cyclic=True)).reverse(copy=False)
        partition = nx.voronoi_cells(G, {0, 3})
        assert partition == {0: {0, 4, 5}, 3: {1, 2, 3}}

    def test_undirected_weighted(self):
        """A heavy edge pushes node 1 into the far cell."""
        G = nx.Graph()
        G.add_weighted_edges_from([(0, 1, 10), (1, 2, 1), (2, 3, 1)])
        partition = nx.voronoi_cells(G, {0, 3})
        assert partition == {0: {0}, 3: {1, 2, 3}}

    def test_directed_weighted(self):
        """Weighted directed edges determine the cells."""
        G = nx.DiGraph()
        G.add_weighted_edges_from(
            [(0, 1, 10), (1, 2, 1), (2, 3, 1), (3, 2, 1), (2, 1, 1)]
        )
        partition = nx.voronoi_cells(G, {0, 3})
        assert partition == {0: {0}, 3: {1, 2, 3}}

    def test_multigraph_unweighted(self):
        """Tests that the Voronoi cells for a multigraph are the same as
        for a simple graph.

        """
        duplicated = 2 * [(0, 1), (1, 2), (2, 3)]
        G = nx.MultiGraph(duplicated)
        H = nx.Graph(G)
        assert nx.voronoi_cells(G, {0, 3}) == nx.voronoi_cells(H, {0, 3})

    def test_multidigraph_unweighted(self):
        """Parallel directed edges do not change the cells."""
        # The twice-singly-linked directed cycle graph on six nodes.
        duplicated = 2 * list(pairwise(range(6), cyclic=True))
        G = nx.MultiDiGraph(duplicated)
        H = nx.DiGraph(G)
        assert nx.voronoi_cells(G, {0, 3}) == nx.voronoi_cells(H, {0, 3})

    def test_multigraph_weighted(self):
        """The minimum weight among parallel edges is the one that counts."""
        G = nx.MultiGraph()
        G.add_weighted_edges_from(
            [(0, 1, 10), (0, 1, 10), (1, 2, 1), (1, 2, 100), (2, 3, 1), (2, 3, 100)]
        )
        partition = nx.voronoi_cells(G, {0, 3})
        assert partition == {0: {0}, 3: {1, 2, 3}}

    def test_multidigraph_weighted(self):
        """Weighted multidigraph variant of the previous test."""
        G = nx.MultiDiGraph()
        G.add_weighted_edges_from(
            [
                (0, 1, 10),
                (0, 1, 10),
                (1, 2, 1),
                (2, 3, 1),
                (3, 2, 10),
                (3, 2, 1),
                (2, 1, 10),
                (2, 1, 1),
            ]
        )
        partition = nx.voronoi_cells(G, {0, 3})
        assert partition == {0: {0}, 3: {1, 2, 3}}
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_walks.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_walks.py
new file mode 100644
index 00000000..7a6b3239
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_walks.py
@@ -0,0 +1,54 @@
+"""Unit tests for the :mod:`networkx.algorithms.walks` module."""
+
+import pytest
+
+import networkx as nx
+
+pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+
def test_directed():
    """Length-3 walks in a directed 3-cycle return only to their origin."""
    G = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
    expected = {u: {v: int(u == v) for v in range(3)} for u in range(3)}
    assert nx.number_of_walks(G, 3) == expected
+
+
def test_undirected():
    """Length-3 walk counts in an undirected 3-cycle."""
    G = nx.cycle_graph(3)
    expected = {u: {v: 2 if u == v else 3 for v in range(3)} for u in range(3)}
    assert nx.number_of_walks(G, 3) == expected
+
+
def test_non_integer_nodes():
    """String-labeled nodes work: length-2 walks go two steps around."""
    G = nx.DiGraph([("A", "B"), ("B", "C"), ("C", "A")])
    expected = {
        "A": {"A": 0, "B": 0, "C": 1},
        "B": {"A": 1, "B": 0, "C": 0},
        "C": {"A": 0, "B": 1, "C": 0},
    }
    assert nx.number_of_walks(G, 2) == expected
+
+
def test_zero_length():
    """A length-0 walk exists only from a node to itself."""
    G = nx.cycle_graph(3)
    expected = {u: {v: int(u == v) for v in range(3)} for u in range(3)}
    assert nx.number_of_walks(G, 0) == expected
+
+
def test_negative_length_exception():
    """`number_of_walks` rejects a negative walk length with ValueError."""
    G = nx.cycle_graph(3)
    with pytest.raises(ValueError):
        nx.number_of_walks(G, -1)
+
+
def test_hidden_weight_attr():
    """Edge weights are ignored: counts match the plain 3-cycle."""
    G = nx.cycle_graph(3)
    G.add_edge(1, 2, weight=5)
    expected = {u: {v: 2 if u == v else 3 for v in range(3)} for u in range(3)}
    assert nx.number_of_walks(G, 3) == expected
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_wiener.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_wiener.py
new file mode 100644
index 00000000..aded9514
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_wiener.py
@@ -0,0 +1,123 @@
+import networkx as nx
+
+
def test_wiener_index_of_disconnected_graph():
    """A disconnected graph has infinite Wiener index."""
    G = nx.empty_graph(2)
    assert nx.wiener_index(G) == float("inf")
+
+
def test_wiener_index_of_directed_graph():
    """Directing a graph counts each unordered pair twice."""
    G = nx.complete_graph(3)
    H = nx.DiGraph(G)
    assert nx.wiener_index(H) == 2 * nx.wiener_index(G)
+
+
def test_wiener_index_of_complete_graph():
    """In K_n every one of the n(n-1)/2 pairs is at distance one."""
    n = 10
    assert nx.wiener_index(nx.complete_graph(n)) == n * (n - 1) / 2
+
+
def test_wiener_index_of_path_graph():
    # In P_n there are n - k pairs of vertices at distance k, so the
    # Wiener index is sum_{k = 1}^{n - 1} k * (n - k).  Pairing the k-th
    # and (n - k)-th terms shows that for odd n this equals
    #
    #     2 \sum_{i = 1}^{(n - 1) / 2} [i * (n - i)]
    #
    # (for even n the middle term (n / 2) ** 2 would be double-counted).
    # Here n = 9 is odd, so the closed form below applies directly.
    n = 9
    G = nx.path_graph(n)
    expected = 2 * sum(i * (n - i) for i in range(1, n // 2 + 1))
    assert nx.wiener_index(G) == expected
+
+
def test_schultz_and_gutman_index_of_disconnected_graph():
    """Both indices are infinite on a disconnected graph."""
    G = nx.Graph([(1, 2), (3, 4)])
    G.add_nodes_from(range(1, 5))
    assert nx.schultz_index(G) == float("inf")
    assert nx.gutman_index(G) == float("inf")
+
+
def test_schultz_and_gutman_index_of_complete_bipartite_graph_1():
    """Closed-form Schultz and Gutman indices on K_{3,3}."""
    n = m = 3
    G = nx.complete_bipartite_graph(n, m)

    schultz_expected = n * m * (n + m) + 2 * n * (n - 1) * m + 2 * m * (m - 1) * n
    assert nx.schultz_index(G) == schultz_expected

    gutman_expected = n * m * (n * m) + n * (n - 1) * m * m + m * (m - 1) * n * n
    assert nx.gutman_index(G) == gutman_expected
+
+
def test_schultz_and_gutman_index_of_complete_bipartite_graph_2():
    """Check closed-form Schultz and Gutman indices on K_{2,5}."""
    n = 2
    m = 5
    cbg = nx.complete_bipartite_graph(n, m)

    expected_1 = n * m * (n + m) + 2 * n * (n - 1) * m + 2 * m * (m - 1) * n
    actual_1 = nx.schultz_index(cbg)

    expected_2 = n * m * (n * m) + n * (n - 1) * m * m + m * (m - 1) * n * n
    actual_2 = nx.gutman_index(cbg)

    assert expected_1 == actual_1
    assert expected_2 == actual_2
+
+
def test_schultz_and_gutman_index_of_complete_graph():
    """Closed-form Schultz and Gutman indices on K_5."""
    n = 5
    G = nx.complete_graph(n)

    assert nx.schultz_index(G) == n * (n - 1) ** 2
    assert nx.gutman_index(G) == n * (n - 1) ** 3 / 2
+
+
def test_schultz_and_gutman_index_of_odd_cycle_graph():
    """On an odd cycle C_{2k+1} both indices equal 2nk(k+1)."""
    k = 5
    n = 2 * k + 1
    G = nx.cycle_graph(n)

    expected = 2 * n * k * (k + 1)
    assert nx.schultz_index(G) == expected
    assert nx.gutman_index(G) == expected
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/threshold.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/threshold.py
new file mode 100644
index 00000000..e8fb8efe
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/threshold.py
@@ -0,0 +1,980 @@
+"""
+Threshold Graphs - Creation, manipulation and identification.
+"""
+
+from math import sqrt
+
+import networkx as nx
+from networkx.utils import py_random_state
+
+__all__ = ["is_threshold_graph", "find_threshold_graph"]
+
+
@nx._dispatchable
def is_threshold_graph(G):
    """
    Returns `True` if `G` is a threshold graph.

    The check reduces the graph to its degree sequence and asks whether
    that sequence can be dismantled by repeatedly removing an isolated
    or a dominating node (see `is_threshold_sequence`).

    Parameters
    ----------
    G : NetworkX graph instance
        An instance of `Graph`, `DiGraph`, `MultiGraph` or `MultiDiGraph`

    Returns
    -------
    bool
        `True` if `G` is a threshold graph, `False` otherwise.

    Examples
    --------
    >>> from networkx.algorithms.threshold import is_threshold_graph
    >>> G = nx.path_graph(3)
    >>> is_threshold_graph(G)
    True
    >>> G = nx.barbell_graph(3, 3)
    >>> is_threshold_graph(G)
    False

    References
    ----------
    .. [1] Threshold graphs: https://en.wikipedia.org/wiki/Threshold_graph
    """
    degrees = [degree for _, degree in G.degree()]
    return is_threshold_sequence(degrees)
+
+
def is_threshold_sequence(degree_sequence):
    """
    Returns True if the sequence is a threshold degree sequence.

    A threshold graph is built by repeatedly adding either an isolated
    or a dominating node, so its degree sequence can be dismantled in
    reverse: strip a zero-degree entry, or remove a node whose degree
    equals ``len - 1`` while decrementing the remaining degrees.  If
    neither step applies, the sequence is not a threshold sequence.
    The input sequence is left unmodified.
    """
    remaining = sorted(degree_sequence)  # sorted working copy
    while remaining:
        if remaining[0] == 0:
            # Peel off an isolated node.
            del remaining[0]
        elif remaining[-1] == len(remaining) - 1:
            # Peel off a dominating node and drop its incident edges.
            remaining.pop()
            remaining = [d - 1 for d in remaining]
        else:
            return False
    return True
+
+
def creation_sequence(degree_sequence, with_labels=False, compact=False):
    """
    Determines the creation sequence for the given threshold degree sequence.

    The creation sequence is a list of single characters 'd'
    or 'i': 'd' for dominating or 'i' for isolated vertices.
    Dominating vertices are connected to all vertices present when it
    is added.  The first node added is by convention 'd'.
    This list can be converted to a string if desired using "".join(cs)

    If with_labels==True:
    Returns a list of 2-tuples containing the vertex number
    and a character 'd' or 'i' which describes the type of vertex.

    If compact==True:
    Returns the creation sequence in a compact form that is the number
    of 'i's and 'd's alternating.
    Examples:
    [1,2,2,3] represents d,i,i,d,d,i,i,i
    [3,1,2] represents d,d,d,i,d,d

    Notice that the first number is the first vertex to be used for
    construction and so is always 'd'.

    with_labels and compact cannot both be True.

    Returns None if the sequence is not a threshold sequence
    """
    if with_labels and compact:
        raise ValueError("compact sequences cannot be labeled")

    # Pair each degree with its label (dict key, or position for lists).
    if isinstance(degree_sequence, dict):
        ds = [[degree, label] for label, degree in degree_sequence.items()]
    else:
        ds = [[degree, index] for index, degree in enumerate(degree_sequence)]
    ds.sort()

    removed = []  # vertices in removal order (reverse of creation order)
    while ds:
        if ds[0][0] == 0:
            # Isolated node; the final node removed must still read 'd'.
            _, v = ds.pop(0)
            removed.append((v, "i" if ds else "d"))
        elif ds[-1][0] == len(ds) - 1:
            # Dominating node: remove it and decrement the other degrees.
            _, v = ds.pop()
            removed.append((v, "d"))
            ds = [[degree - 1, label] for degree, label in ds]
        else:
            return None  # not a threshold degree sequence

    removed.reverse()  # creation order
    if with_labels:
        return removed
    if compact:
        return make_compact(removed)
    return [kind for _, kind in removed]  # not labeled
+
+
def make_compact(creation_sequence):
    """
    Returns the creation sequence in a compact form
    that is the number of 'i's and 'd's alternating.

    Examples
    --------
    >>> from networkx.algorithms.threshold import make_compact
    >>> make_compact(["d", "i", "i", "d", "d", "i", "i", "i"])
    [1, 2, 2, 3]
    >>> make_compact(["d", "d", "d", "i", "d", "d"])
    [3, 1, 2]

    Notice that the first number is the first vertex
    to be used for construction and so is always 'd'.

    Labeled creation sequences lose their labels in the
    compact representation.

    >>> make_compact([3, 1, 2])
    [3, 1, 2]
    """
    head = creation_sequence[0]
    if isinstance(head, str):  # plain creation sequence (list or string)
        symbols = creation_sequence[:]
    elif isinstance(head, tuple):  # labeled creation sequence
        symbols = [entry[1] for entry in creation_sequence]
    elif isinstance(head, int):  # already compact
        return creation_sequence
    else:
        raise TypeError("Not a valid creation sequence type")

    # Run-length encode the symbol sequence.
    runs = []
    current = symbols[0]
    length = 0
    for symbol in symbols:
        if symbol == current:
            length += 1
        else:
            runs.append(length)
            current = symbol
            length = 1
    runs.append(length)  # final run
    return runs
+
+
def uncompact(creation_sequence):
    """
    Converts a compact creation sequence for a threshold
    graph to a standard creation sequence (unlabeled).
    If the creation_sequence is already standard, return it.
    See creation_sequence.
    """
    head = creation_sequence[0]
    if isinstance(head, (str, tuple)):
        # Already a standard (possibly labeled) creation sequence.
        return creation_sequence
    if not isinstance(head, int):
        raise TypeError("Not a valid creation sequence type")
    # Runs alternate 'd', 'i', 'd', ... starting with 'd'.
    expanded = []
    symbol = "d"
    for run_length in creation_sequence:
        expanded.extend([symbol] * run_length)
        symbol = "i" if symbol == "d" else "d"
    return expanded
+
+
def creation_sequence_to_weights(creation_sequence):
    """
    Returns a list of node weights which create the threshold
    graph designated by the creation sequence.  The weights
    are scaled so that the threshold is 1.0.  The order of the
    nodes is the same as that in the creation sequence.
    """
    # Turn input sequence into a labeled creation sequence
    first = creation_sequence[0]
    if isinstance(first, str):  # creation sequence
        if isinstance(creation_sequence, list):
            wseq = creation_sequence[:]
        else:
            wseq = list(creation_sequence)  # string like 'ddidid'
    elif isinstance(first, tuple):  # labeled creation sequence
        wseq = [v[1] for v in creation_sequence]
    elif isinstance(first, int):  # compact creation sequence
        wseq = uncompact(creation_sequence)
    else:
        raise TypeError("Not a valid creation sequence type")
    # pass through twice--first backwards
    # Backward pass: replace each 'i' with the current integer weight `w`;
    # `w` is bumped each time a run of 'i's ends (an 'i' followed by a 'd'
    # when read backwards), so later-created isolated nodes get larger weights.
    wseq.reverse()
    w = 0
    prev = "i"
    for j, s in enumerate(wseq):
        if s == "i":
            wseq[j] = w
            prev = s
        elif prev == "i":
            prev = s
            w += 1
    wseq.reverse()  # now pass through forwards
    # Forward pass: same scheme for the remaining 'd' entries.
    # NOTE(review): `w` and `prev` deliberately carry over from the backward
    # pass, so 'd' weights continue on the same integer scale — do not reset.
    for j, s in enumerate(wseq):
        if s == "d":
            wseq[j] = w
            prev = s
        elif prev == "d":
            prev = s
            w += 1
    # Now scale weights
    # If the sequence ended in a 'd' run, that run never hit a boundary,
    # so account for it before scaling.
    if prev == "d":
        w += 1
    wscale = 1 / w
    return [ww * wscale for ww in wseq]
    # return wseq
+
+
def weights_to_creation_sequence(
    weights, threshold=1, with_labels=False, compact=False
):
    """
    Returns a creation sequence for a threshold graph
    determined by the weights and threshold given as input.
    If the sum of two node weights is greater than the
    threshold value, an edge is created between these nodes.

    The creation sequence is a list of single characters 'd'
    or 'i': 'd' for dominating or 'i' for isolated vertices.
    Dominating vertices are connected to all vertices present
    when it is added.  The first node added is by convention 'd'.

    If with_labels==True:
    Returns a list of 2-tuples containing the vertex number
    and a character 'd' or 'i' which describes the type of vertex.

    If compact==True:
    Returns the creation sequence in a compact form that is the number
    of 'i's and 'd's alternating.
    Examples:
    [1,2,2,3] represents d,i,i,d,d,i,i,i
    [3,1,2] represents d,d,d,i,d,d

    Notice that the first number is the first vertex to be used for
    construction and so is always 'd'.

    with_labels and compact cannot both be True.
    """
    if with_labels and compact:
        raise ValueError("compact sequences cannot be labeled")

    # make an indexed copy
    if isinstance(weights, dict):  # labeled weights
        wseq = [[w, label] for (label, w) in weights.items()]
    else:
        wseq = [[w, i] for i, w in enumerate(weights)]
    wseq.sort()
    cs = []  # creation sequence, built in removal (reverse-creation) order
    # A node is isolated iff its weight plus the current largest weight
    # stays below the threshold.
    cutoff = threshold - wseq[-1][0]
    while wseq:
        if wseq[0][0] < cutoff:  # isolated node
            (w, label) = wseq.pop(0)
            # The last node removed is the first created, which by
            # convention must be 'd' (matches creation_sequence([0])).
            cs.append((label, "i" if wseq else "d"))
        else:
            (w, label) = wseq.pop()
            cs.append((label, "d"))
            # Guard: when the dominating node was the last weight, wseq is
            # empty and there is no new cutoff to compute (previously this
            # raised IndexError for a single weight >= threshold / 2).
            if wseq:
                cutoff = threshold - wseq[-1][0]
        if len(wseq) == 1:  # make sure we start with a d
            (w, label) = wseq.pop()
            cs.append((label, "d"))
    # put in correct order
    cs.reverse()

    if with_labels:
        return cs
    if compact:
        return make_compact(cs)
    return [v[1] for v in cs]  # not labeled
+
+
+# Manipulating NetworkX.Graphs in context of threshold graphs
@nx._dispatchable(graphs=None, returns_graph=True)
def threshold_graph(creation_sequence, create_using=None):
    """
    Create a threshold graph from the creation sequence or compact
    creation_sequence.

    The input sequence can be a

    creation sequence (e.g. ['d','i','d','d','d','i'])
    labeled creation sequence (e.g. [(0,'d'),(2,'d'),(1,'i')])
    compact creation sequence (e.g. [2,1,1,2,0])

    Use cs=creation_sequence(degree_sequence,labeled=True)
    to convert a degree sequence to a creation sequence.

    Returns None if the sequence is not valid
    """
    # Normalize the input into a labeled creation sequence.
    head = creation_sequence[0]
    if isinstance(head, str):
        labeled = list(enumerate(creation_sequence))
    elif isinstance(head, tuple):
        labeled = creation_sequence[:]
    elif isinstance(head, int):
        labeled = list(enumerate(uncompact(creation_sequence)))
    else:
        print("not a valid creation sequence type")
        return None

    G = nx.empty_graph(0, create_using)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    G.name = "Threshold Graph"

    # An 'i' node is simply added; a 'd' node is also wired to every node
    # already present.  Snapshot the node list before adding anything so
    # the graph is never mutated while being iterated.
    for v, node_type in labeled:
        existing = list(G)
        G.add_node(v)
        if node_type == "d":
            for u in existing:
                G.add_edge(v, u)
    return G
+
+
@nx._dispatchable
def find_alternating_4_cycle(G):
    """
    Returns False if there aren't any alternating 4 cycles.
    Otherwise returns the cycle as [a,b,c,d] where (a,b)
    and (c,d) are edges and (a,c) and (b,d) are not.
    """
    # Lazily enumerate candidates in the same order as the triple
    # nested loop over edges, nodes and neighbors; the first candidate
    # produced is returned, or False when the generator is exhausted.
    candidates = (
        [a, b, c, d]
        for a, b in G.edges()
        for c in G.nodes()
        if not G.has_edge(a, c) and a != c
        for d in G.neighbors(c)
        if not G.has_edge(b, d) and b != d
    )
    return next(candidates, False)
+
+
@nx._dispatchable(returns_graph=True)
def find_threshold_graph(G, create_using=None):
    """
    Returns a threshold subgraph that is close to largest in `G`.

    The threshold graph will contain the largest degree node in G.

    Parameters
    ----------
    G : NetworkX graph instance
        An instance of `Graph`, or `MultiDiGraph`
    create_using : NetworkX graph class or `None` (default), optional
        Type of graph to use when constructing the threshold graph.
        If `None`, infer the appropriate graph type from the input.

    Returns
    -------
    graph :
        A graph instance representing the threshold graph

    Examples
    --------
    >>> from networkx.algorithms.threshold import find_threshold_graph
    >>> G = nx.barbell_graph(3, 3)
    >>> T = find_threshold_graph(G)
    >>> T.nodes  # may vary
    NodeView((7, 8, 5, 6))

    References
    ----------
    .. [1] Threshold graphs: https://en.wikipedia.org/wiki/Threshold_graph
    """
    # Greedily extract a creation sequence from G, then realize it as a graph.
    return threshold_graph(find_creation_sequence(G), create_using)
+
+
@nx._dispatchable
def find_creation_sequence(G):
    """
    Find a threshold subgraph that is close to largest in G.
    Returns the labeled creation sequence of that threshold graph.

    The sequence is built greedily: repeatedly record isolated nodes
    as type 'i' and the current maximum-degree node as type 'd', then
    restrict to the subgraph induced by that node's neighbors.
    """
    cs = []
    # get a local pointer to the working part of the graph
    H = G
    while H.order() > 0:
        # get new degree sequence on subgraph
        dsdict = dict(H.degree())
        ds = [(d, v) for v, d in dsdict.items()]
        ds.sort()
        # Update threshold graph nodes
        if ds[-1][0] == 0:  # all are isolated
            # record every remaining node as 'i', except one final 'd'
            cs.extend(zip(dsdict, ["i"] * (len(ds) - 1) + ["d"]))
            break  # Done!
        # pull off isolated nodes
        while ds[0][0] == 0:
            (d, iso) = ds.pop(0)
            cs.append((iso, "i"))
        # find new biggest node
        (d, bigv) = ds.pop()
        # add edges of star to t_g
        cs.append((bigv, "d"))
        # form subgraph of neighbors of big node
        H = H.subgraph(H.neighbors(bigv))
    cs.reverse()  # construction order: earliest-created node first
    return cs
+
+
+# Properties of Threshold Graphs
def triangles(creation_sequence):
    """
    Compute number of triangles in the threshold graph with the
    given creation sequence.

    Returns the count as an int (previously a float, because true
    division was used on what is always an exact integer count;
    the numeric value is unchanged).
    """
    # shortcut algorithm that doesn't require computing number
    # of triangles at each node.
    cs = creation_sequence  # alias
    dr = cs.count("d")  # number of d's in sequence
    # triangles entirely inside the clique formed by the d's: C(dr, 3)
    ntri = dr * (dr - 1) * (dr - 2) // 6
    # each 'i' adds C(dr, 2) triangles, where dr is the number of d's
    # to its right (every pair of later d's closes a triangle with it)
    for typ in cs:
        if typ == "i":
            ntri += dr * (dr - 1) // 2
        else:
            dr -= 1
    return ntri
+
+
def triangle_sequence(creation_sequence):
    """
    Return triangle sequence for the given threshold graph creation sequence.

    Entry i is the number of triangles that contain node i.  Assumes
    the sequence starts with 'd' (true for any valid creation
    sequence); otherwise `prevsym` would be referenced before
    assignment on the first 'i'.
    """
    cs = creation_sequence
    seq = []
    dr = cs.count("d")  # number of d's to the right of the current pos
    dcur = (dr - 1) * (dr - 2) // 2  # number of triangles through a node of clique dr
    irun = 0  # number of i's in the last run
    drun = 0  # number of d's in the last run
    for i, sym in enumerate(cs):
        if sym == "d":
            drun += 1
            tri = dcur + (dr - 1) * irun  # new triangles at this d
        else:  # cs[i]="i":
            if prevsym == "d":  # new string of i's
                dcur += (dr - 1) * irun  # accumulate shared shortest paths
                irun = 0  # reset i run counter
                dr -= drun  # reduce number of d's to right
                drun = 0  # reset d run counter
            irun += 1
            tri = dr * (dr - 1) // 2  # new triangles at this i
        seq.append(tri)
        prevsym = sym
    return seq
+
+
def cluster_sequence(creation_sequence):
    """
    Return cluster sequence for the given threshold graph creation sequence.

    Entry i is the clustering coefficient of node i: triangles through
    the node divided by the number of pairs of its neighbors.  Nodes of
    degree 0 or 1 get coefficient 0.
    """
    cseq = []
    pairs = zip(
        triangle_sequence(creation_sequence), degree_sequence(creation_sequence)
    )
    for tri, deg in pairs:
        # number of neighbor pairs; zero exactly when deg <= 1
        possible = deg * (deg - 1) // 2
        cseq.append(tri / possible if possible else 0)
    return cseq
+
+
def degree_sequence(creation_sequence):
    """
    Return degree sequence for the threshold graph with the given
    creation sequence
    """
    remaining_d = creation_sequence.count("d")  # d's not yet passed
    degrees = []
    for pos, sym in enumerate(creation_sequence):
        if sym == "d":
            # a 'd' node links to everything earlier plus every later 'd'
            remaining_d -= 1
            degrees.append(remaining_d + pos)
        else:
            # an 'i' node links only to the d's created after it
            degrees.append(remaining_d)
    return degrees
+
+
def density(creation_sequence):
    """
    Return the density of the graph with this creation_sequence.
    The density is the fraction of possible edges present.
    """
    n = len(creation_sequence)
    # the degree sum counts each edge twice; n*(n-1) is twice the
    # number of possible edges, so the ratio is exactly the density
    return sum(degree_sequence(creation_sequence)) / (n * (n - 1))
+
+
def degree_correlation(creation_sequence):
    """
    Return the degree-degree correlation over all edges.

    Computes the Pearson correlation of the degrees at the two
    endpoints of every edge.  Returns 1 when all degrees are equal
    (the 0/0 case).  Raises ValueError if the internal edge
    enumeration invariant is violated, or if the denominator is zero
    while the numerator is not.
    """
    cs = creation_sequence
    s1 = 0  # sum over edges of deg_i*deg_j
    s2 = 0  # sum over edges of deg_i^2+deg_j^2
    s3 = 0  # sum over edges of deg_i+deg_j
    m = 0  # number of edges
    rdi = [i for i, sym in enumerate(cs) if sym == "d"]  # index of "d"s
    ds = degree_sequence(cs)
    for i, sym in enumerate(cs):
        if sym == "d":
            # invariant: remaining d positions are consumed in order
            # (was: print + bare `raise ValueError` with no message)
            if i != rdi[0]:
                raise ValueError(
                    f"Logic error in degree_correlation: position {i}, "
                    f"remaining d indices {rdi}"
                )
            rdi.pop(0)
        # every node is adjacent to exactly the d's created after it
        degi = ds[i]
        for dj in rdi:
            degj = ds[dj]
            s1 += degj * degi
            s2 += degi**2 + degj**2
            s3 += degi + degj
            m += 1
    denom = 2 * m * s2 - s3 * s3
    numer = 4 * m * s1 - s3 * s3
    if denom == 0:
        if numer == 0:
            # perfectly uniform degrees: define the correlation as 1
            return 1
        raise ValueError(f"Zero Denominator but Numerator is {numer}")
    return numer / denom
+
+
def shortest_path(creation_sequence, u, v):
    """
    Find the shortest path between u and v in a
    threshold graph G with the given creation_sequence.

    For an unlabeled creation_sequence, the vertices
    u and v must be integers in (0,len(sequence)) referring
    to the position of the desired vertices in the sequence.

    For a labeled creation_sequence, u and v are labels of vertices.

    Use cs=creation_sequence(degree_sequence,with_labels=True)
    to convert a degree sequence to a creation sequence.

    Returns a list of vertices from u to v.
    Example: if they are neighbors, it returns [u,v]

    Returns -1 (not a list) if v is unreachable from u.
    """
    # Turn input sequence into a labeled creation sequence
    first = creation_sequence[0]
    if isinstance(first, str):  # creation sequence
        cs = [(i, creation_sequence[i]) for i in range(len(creation_sequence))]
    elif isinstance(first, tuple):  # labeled creation sequence
        cs = creation_sequence[:]
    elif isinstance(first, int):  # compact creation sequence
        ci = uncompact(creation_sequence)
        cs = [(i, ci[i]) for i in range(len(ci))]
    else:
        raise TypeError("Not a valid creation sequence type")

    verts = [s[0] for s in cs]
    if v not in verts:
        raise ValueError(f"Vertex {v} not in graph from creation_sequence")
    if u not in verts:
        raise ValueError(f"Vertex {u} not in graph from creation_sequence")
    # Done checking
    if u == v:
        return [u]

    uindex = verts.index(u)
    vindex = verts.index(v)
    bigind = max(uindex, vindex)
    if cs[bigind][1] == "d":
        # the later of the two nodes is dominating, so u and v are adjacent
        return [u, v]
    # must be that cs[bigind][1]=='i'
    # any 'd' created after both nodes is adjacent to each, giving a 2-path
    cs = cs[bigind:]
    while cs:
        vert = cs.pop()
        if vert[1] == "d":
            return [u, vert[0], v]
    # All after u are type 'i' so no connection
    return -1
+
+
def shortest_path_length(creation_sequence, i):
    """
    Return the shortest path length from indicated node to
    every other node for the threshold graph with the given
    creation sequence.
    Node is indicated by index i in creation_sequence unless
    creation_sequence is labeled in which case, i is taken to
    be the label of the node.

    Paths lengths in threshold graphs are at most 2.
    Length to unreachable nodes is set to -1.
    """
    # Turn input sequence into an unlabeled creation sequence
    first = creation_sequence[0]
    if isinstance(first, str):  # creation sequence
        cs = list(creation_sequence)
    elif isinstance(first, tuple):  # labeled creation sequence
        cs = [v[1] for v in creation_sequence]
        i = [v[0] for v in creation_sequence].index(i)
    elif isinstance(first, int):  # compact creation sequence
        cs = uncompact(creation_sequence)
    else:
        raise TypeError("Not a valid creation sequence type")

    # Compute
    N = len(cs)
    spl = [2] * N  # length 2 to every node unless closer
    spl[i] = 0  # except self which is 0
    # 1 for all d's to the right (a later 'd' connects to everything earlier)
    for j in range(i + 1, N):
        if cs[j] == "d":
            spl[j] = 1
    if cs[i] == "d":  # 1 for all nodes to the left
        for j in range(i):
            spl[j] = 1
    # -1 for any trailing run of i's: those nodes are isolated
    for j in range(N - 1, 0, -1):
        if cs[j] == "d":
            break
        spl[j] = -1
    # Bug fix: if node i is itself one of the trailing isolated i's,
    # the loop above clobbered spl[i] with -1 and the other entries
    # wrongly kept the value 2 -- an isolated node reaches nothing.
    if spl[i] == -1:
        spl = [-1] * N
        spl[i] = 0
    return spl
+
+
def betweenness_sequence(creation_sequence, normalized=True):
    """
    Return betweenness for the threshold graph with the given creation
    sequence.

    If `normalized` is True (the default), each value is divided by
    (n-1)*(n-2), where n is the number of nodes; otherwise the raw
    (unscaled) betweenness values are returned.

    Assumes the sequence starts with 'd' (true for any valid creation
    sequence); otherwise `b` would be referenced before assignment.
    """
    cs = creation_sequence
    seq = []  # betweenness
    lastchar = "d"  # first node is always a 'd'
    dr = float(cs.count("d"))  # number of d's to the right of current pos
    irun = 0  # number of i's in the last run
    drun = 0  # number of d's in the last run
    dlast = 0.0  # betweenness of last d
    for i, c in enumerate(cs):
        if c == "d":  # cs[i]=="d":
            # betweenness = amt shared with earlier d's and i's
            #             + new isolated nodes covered
            #             + new paths to all previous nodes
            b = dlast + (irun - 1) * irun / dr + 2 * irun * (i - drun - irun) / dr
            drun += 1  # update counter
        else:  # cs[i]="i":
            if lastchar == "d":  # if this is a new run of i's
                dlast = b  # accumulate betweenness
                dr -= drun  # update number of d's to the right
                drun = 0  # reset d counter
                irun = 0  # reset i counter
            b = 0  # isolated nodes have zero betweenness
            irun += 1  # add another i to the run
        seq.append(float(b))
        lastchar = c

    # normalize by the number of possible shortest paths
    if normalized:
        order = len(cs)
        scale = 1.0 / ((order - 1) * (order - 2))
        seq = [s * scale for s in seq]

    return seq
+
+
def eigenvectors(creation_sequence):
    """
    Return a 2-tuple of Laplacian eigenvalues and eigenvectors
    for the threshold network with creation_sequence.
    The first value is a list of eigenvalues.
    The second value is a list of eigenvectors.
    The lists are in the same order so corresponding eigenvectors
    and eigenvalues are in the same position in the two lists.

    Notice that the order of the eigenvalues returned by eigenvalues(cs)
    may not correspond to the order of these eigenvectors.
    """
    ccs = make_compact(creation_sequence)
    N = sum(ccs)  # total number of nodes
    vec = [0] * N  # eigenvectors, filled in below
    val = vec[:]  # eigenvalues, same positions as vec
    # get number of type d nodes to the right (all for first node)
    # NOTE(review): assumes d-runs sit at the even positions of the
    # compact form, i.e. the compact sequence starts with a 'd' run.
    dr = sum(ccs[::2])

    nn = ccs[0]  # size of the first run
    vec[0] = [1.0 / sqrt(N)] * N  # constant vector, eigenvalue 0
    val[0] = 0
    e = dr  # eigenvalue shared by the remaining vectors of this run
    dr -= nn
    type_d = True  # runs alternate in type as we walk the compact form
    i = 1
    dd = 1
    # within-run eigenvectors: unit-norm difference vectors contrasting
    # the first i coordinates against coordinate i, all with eigenvalue e
    while dd < nn:
        scale = 1.0 / sqrt(dd * dd + i)  # normalize to unit length
        vec[i] = i * [-scale] + [dd * scale] + [0] * (N - i - 1)
        val[i] = e
        i += 1
        dd += 1
    if len(ccs) == 1:
        return (val, vec)
    for nn in ccs[1:]:  # remaining runs of the compact sequence
        # vector contrasting all earlier nodes against this whole run
        scale = 1.0 / sqrt(nn * i * (i + nn))
        vec[i] = i * [-nn * scale] + nn * [i * scale] + [0] * (N - i - nn)
        # find eigenvalue
        type_d = not type_d
        if type_d:
            e = i + dr
            dr -= nn
        else:
            e = dr
        val[i] = e
        st = i  # start index of this run
        i += 1
        dd = 1
        # within-run difference vectors, all sharing eigenvalue e
        while dd < nn:
            scale = 1.0 / sqrt(i - st + dd * dd)
            vec[i] = [0] * st + (i - st) * [-scale] + [dd * scale] + [0] * (N - i - 1)
            val[i] = e
            i += 1
            dd += 1
    return (val, vec)
+
+
def spectral_projection(u, eigenpairs):
    """
    Returns the coefficients of each eigenvector
    in a projection of the vector u onto the normalized
    eigenvectors which are contained in eigenpairs.

    eigenpairs should be a list of two objects.  The
    first is a list of eigenvalues and the second a list
    of eigenvectors.  The eigenvectors should be lists.

    There's not a lot of error checking on lengths of
    arrays, etc. so be careful.
    """
    # each coefficient is the dot product of u with one eigenvector
    basis = eigenpairs[1]
    return [sum(comp * uv for comp, uv in zip(vector, u)) for vector in basis]
+
+
def eigenvalues(creation_sequence):
    """
    Return sequence of eigenvalues of the Laplacian of the threshold
    graph for the given creation_sequence.

    Based on the Ferrer's diagram method.  The spectrum is integral
    and is the conjugate of the degree sequence.

    See::

      @Article{degree-merris-1994,
       author = {Russel Merris},
       title = {Degree maximal graphs are Laplacian integral},
       journal = {Linear Algebra Appl.},
       year = {1994},
       volume = {199},
       pages = {381--389},
      }

    """
    degrees = sorted(degree_sequence(creation_sequence))
    spectrum = []  # eigenvalues, one per row of the Ferrers diagram
    eig = 0
    row = len(degrees)
    current = degrees.pop()  # largest remaining degree
    while row:
        if current < row:
            # this row of the conjugate partition is complete
            spectrum.append(eig)
            row -= 1
        else:
            # consume one more degree tall enough to reach this row
            eig += 1
            current = degrees.pop() if degrees else 0
    return spectrum
+
+
+# Threshold graph creation routines
+
+
@py_random_state(2)
def random_threshold_sequence(n, p, seed=None):
    """
    Create a random threshold sequence of size n.
    A creation sequence is built by randomly choosing d's with
    probability p and i's with probability 1-p.

    s=nx.random_threshold_sequence(10,0.5)

    returns a threshold sequence of length 10 with equal
    probably of an i or a d at each position.

    A "random" threshold graph can be built with

    G=nx.threshold_graph(s)

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    """
    if not (0 <= p <= 1):
        raise ValueError("p must be in [0,1]")
    # the first symbol is always 'd'; each later symbol is an
    # independent draw (one seed.random() call per position, in order)
    return ["d"] + ["d" if seed.random() < p else "i" for _ in range(1, n)]
+
+
+# maybe *_d_threshold_sequence routines should
+# be (or be called from) a single routine with a more descriptive name
+# and a keyword parameter?
def right_d_threshold_sequence(n, m):
    """
    Create a skewed threshold graph with a given number
    of vertices (n) and a given number of edges (m).

    The routine returns an unlabeled creation sequence
    for the threshold graph.

    The 'd's are packed toward the right (latest-created) end of the
    sequence, so the edges concentrate on the last nodes created.
    Raises ValueError if m exceeds n*(n-1)/2.
    """
    cs = ["d"] + ["i"] * (n - 1)  # sequence of n nodes, all isolated but the first

    # m < n : not enough edges for a connected graph; a single 'd'
    # at position m contributes exactly m edges
    if m < n:
        cs[m] = "d"
        return cs

    # too many edges
    if m > n * (n - 1) / 2:
        raise ValueError("Too many edges for this many nodes.")

    # connected case m > n-1: fill 'd's from the right until the edge
    # count reaches m (local renamed from `sum`, which shadowed the builtin)
    ind = n - 1
    edge_count = n - 1
    while edge_count < m:
        cs[ind] = "d"
        ind -= 1
        edge_count += ind
    # place the final 'd' so the total lands exactly on m edges
    ind = m - (edge_count - ind)
    cs[ind] = "d"
    return cs
+
+
def left_d_threshold_sequence(n, m):
    """
    Create a skewed threshold graph with a given number
    of vertices (n) and a given number of edges (m).

    The routine returns an unlabeled creation sequence
    for the threshold graph.

    One 'd' is anchored at the far right; the remaining 'd's are
    packed toward the left (earliest) end of the sequence.
    Raises ValueError if m exceeds n*(n-1)/2.
    """
    cs = ["d"] + ["i"] * (n - 1)  # sequence of n nodes, all isolated but the first

    # m < n : not enough edges for a connected graph; a single 'd'
    # at position m contributes exactly m edges
    if m < n:
        cs[m] = "d"
        return cs

    # too many edges
    if m > n * (n - 1) / 2:
        raise ValueError("Too many edges for this many nodes.")

    # Connected case when m > n-1: anchor a 'd' at the end, then fill
    # 'd's from the left (local renamed from `sum`, shadowed the builtin)
    cs[n - 1] = "d"
    edge_count = n - 1
    ind = 1
    while edge_count < m:
        cs[ind] = "d"
        edge_count += ind
        ind += 1
    if edge_count > m:  # overshot: flip one 'd' back, never the first vertex
        cs[edge_count - m] = "i"
    return cs
+
+
@py_random_state(3)
def swap_d(cs, p_split=1.0, p_combine=1.0, seed=None):
    """
    Perform a "swap" operation on a threshold sequence.

    The swap preserves the number of nodes and edges
    in the graph for the given sequence.
    The resulting sequence is still a threshold sequence.

    Perform one split and one combine operation on the
    'd's of a creation sequence for a threshold graph.
    This operation maintains the number of nodes and edges
    in the graph, but shifts the edges from node to node
    maintaining the threshold quality of the graph.

    Mutates and returns `cs` in place.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    """
    # indices of interior 'd's (first and last positions excluded)
    # NOTE(review): enumerate runs over the slice cs[1:-1], so index i
    # here corresponds to cs[i + 1] in the full sequence -- confirm
    # this offset is intended before relying on exact swap positions.
    dlist = [i for (i, node_type) in enumerate(cs[1:-1]) if node_type == "d"]
    # split: replace one 'd' by two 'd's whose positions sum to it
    if seed.random() < p_split:
        choice = seed.choice(dlist)
        split_to = seed.choice(range(choice))
        flip_side = choice - split_to
        # only split onto two distinct positions currently holding 'i'
        if split_to != flip_side and cs[split_to] == "i" and cs[flip_side] == "i":
            cs[choice] = "i"
            cs[split_to] = "d"
            cs[flip_side] = "d"
            dlist.remove(choice)
            # don't add or combine may reverse this action
            # dlist.extend([split_to,flip_side])
    #            print >>sys.stderr,"split at %s to %s and %s"%(choice,split_to,flip_side)
    # combine: merge two distinct 'd's into one at their summed position
    if seed.random() < p_combine and dlist:
        first_choice = seed.choice(dlist)
        second_choice = seed.choice(dlist)
        target = first_choice + second_choice
        # abort (returning cs unchanged by the combine step) when the
        # target is out of range, already a 'd', or the picks coincide
        if target >= len(cs) or cs[target] == "d" or first_choice == second_choice:
            return cs
        # OK to combine
        cs[first_choice] = "i"
        cs[second_choice] = "i"
        cs[target] = "d"
    #        print >>sys.stderr,"combine %s and %s to make %s."%(first_choice,second_choice,target)

    return cs
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/time_dependent.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/time_dependent.py
new file mode 100644
index 00000000..d67cdcf0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/time_dependent.py
@@ -0,0 +1,142 @@
+"""Time dependent algorithms."""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["cd_index"]
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatchable(node_attrs={"time": None, "weight": 1})
def cd_index(G, node, time_delta, *, time="time", weight=None):
    r"""Compute the CD index for `node` within the graph `G`.

    Calculates the CD index for the given node of the graph,
    considering only its predecessors who have the `time` attribute
    smaller than or equal to the `time` attribute of the `node`
    plus `time_delta`.

    Parameters
    ----------
    G : graph
       A directed networkx graph whose nodes have `time` attributes and optionally
       `weight` attributes (if a weight is not given, it is considered 1).
    node : node
       The node for which the CD index is calculated.
    time_delta : numeric or timedelta
       Amount of time after the `time` attribute of the `node`. The value of
       `time_delta` must support comparison with the `time` node attribute. For
       example, if the `time` attribute of the nodes are `datetime.datetime`
       objects, then `time_delta` should be a `datetime.timedelta` object.
    time : string (Optional, default is "time")
        The name of the node attribute that will be used for the calculations.
    weight : string (Optional, default is None)
        The name of the node attribute used as weight.

    Returns
    -------
    float
       The CD index calculated for the node `node` within the graph `G`.

    Raises
    ------
    NetworkXError
       If not all nodes have a `time` attribute or
       `time_delta` and `time` attribute types are not compatible or
       `n` equals 0.

    NetworkXNotImplemented
        If `G` is a non-directed graph or a multigraph.

    Examples
    --------
    >>> from datetime import datetime, timedelta
    >>> G = nx.DiGraph()
    >>> nodes = {
    ...     1: {"time": datetime(2015, 1, 1)},
    ...     2: {"time": datetime(2012, 1, 1), "weight": 4},
    ...     3: {"time": datetime(2010, 1, 1)},
    ...     4: {"time": datetime(2008, 1, 1)},
    ...     5: {"time": datetime(2014, 1, 1)},
    ... }
    >>> G.add_nodes_from([(n, nodes[n]) for n in nodes])
    >>> edges = [(1, 3), (1, 4), (2, 3), (3, 4), (3, 5)]
    >>> G.add_edges_from(edges)
    >>> delta = timedelta(days=5 * 365)
    >>> nx.cd_index(G, 3, time_delta=delta, time="time")
    0.5
    >>> nx.cd_index(G, 3, time_delta=delta, time="time", weight="weight")
    0.12

    Integers can also be used for the time values:
    >>> node_times = {1: 2015, 2: 2012, 3: 2010, 4: 2008, 5: 2014}
    >>> nx.set_node_attributes(G, node_times, "new_time")
    >>> nx.cd_index(G, 3, time_delta=4, time="new_time")
    0.5
    >>> nx.cd_index(G, 3, time_delta=4, time="new_time", weight="weight")
    0.12

    Notes
    -----
    This method implements the algorithm for calculating the CD index,
    as described in the paper by Funk and Owen-Smith [1]_. The CD index
    is used in order to check how consolidating or destabilizing a patent
    is, hence the nodes of the graph represent patents and the edges show
    the citations between these patents. The mathematical model is given
    below:

    .. math::
        CD_{t}=\frac{1}{n_{t}}\sum_{i=1}^{n}\frac{-2f_{it}b_{it}+f_{it}}{w_{it}},

    where `f_{it}` equals 1 if `i` cites the focal patent else 0, `b_{it}` equals
    1 if `i` cites any of the focal patents successors else 0, `n_{t}` is the number
    of forward citations in `i` and `w_{it}` is a matrix of weight for patent `i`
    at time `t`.

    The `datetime.timedelta` package can lead to off-by-one issues when converting
    from years to days. In the example above `timedelta(days=5 * 365)` looks like
    5 years, but it isn't because of leap year days. So it gives the same result
    as `timedelta(days=4 * 365)`. But using `timedelta(days=5 * 365 + 1)` gives
    a 5 year delta **for this choice of years** but may not if the 5 year gap has
    more than 1 leap year. To avoid these issues, use integers to represent years,
    or be very careful when you convert units of time.

    References
    ----------
    .. [1] Funk, Russell J., and Jason Owen-Smith.
           "A dynamic network measure of technological change."
           Management science 63, no. 3 (2017): 791-817.
           http://russellfunk.org/cdindex/static/papers/funk_ms_2017.pdf

    """
    if not all(time in G.nodes[n] for n in G):
        raise nx.NetworkXError("Not all nodes have a 'time' attribute.")

    try:
        # get target_date
        target_date = G.nodes[node][time] + time_delta
        # keep the predecessors that existed before the target date
        pred = {i for i in G.pred[node] if G.nodes[i][time] <= target_date}
    except TypeError as err:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit; incompatible +/<= operand types raise TypeError.
        # Chain the original error so the real cause stays visible.
        raise nx.NetworkXError(
            "Addition and comparison are not supported between 'time_delta' "
            "and 'time' types."
        ) from err

    # -1 if any edge between node's predecessors and node's successors, else 1
    b = [-1 if any(j in G[i] for j in G[node]) else 1 for i in pred]

    # n is size of the union of the focal node's predecessors and its successors' predecessors
    n = len(pred.union(*(G.pred[s].keys() - {node} for s in G[node])))
    if n == 0:
        raise nx.NetworkXError("The cd index cannot be defined.")

    # calculate cd index
    if weight is None:
        return round(sum(b) / n, 2)
    else:
        # If a node has the specified weight attribute, its weight is used in the calculation
        # otherwise, a weight of 1 is assumed for that node
        weights = [G.nodes[i].get(weight, 1) for i in pred]
        return round(sum(bi / wt for bi, wt in zip(b, weights)) / n, 2)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tournament.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tournament.py
new file mode 100644
index 00000000..25c1983e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tournament.py
@@ -0,0 +1,403 @@
+"""Functions concerning tournament graphs.
+
+A `tournament graph`_ is a complete oriented graph. In other words, it
+is a directed graph in which there is exactly one directed edge joining
+each pair of distinct nodes. For each function in this module that
+accepts a graph as input, you must provide a tournament graph. The
+responsibility is on the caller to ensure that the graph is a tournament
+graph:
+
+    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
+    >>> nx.is_tournament(G)
+    True
+
+To access the functions in this module, you must access them through the
+:mod:`networkx.tournament` module::
+
+    >>> nx.tournament.is_reachable(G, 0, 1)
+    True
+
+.. _tournament graph: https://en.wikipedia.org/wiki/Tournament_%28graph_theory%29
+
+"""
+
+from itertools import combinations
+
+import networkx as nx
+from networkx.algorithms.simple_paths import is_simple_path as is_path
+from networkx.utils import arbitrary_element, not_implemented_for, py_random_state
+
+__all__ = [
+    "hamiltonian_path",
+    "is_reachable",
+    "is_strongly_connected",
+    "is_tournament",
+    "random_tournament",
+    "score_sequence",
+]
+
+
def index_satisfying(iterable, condition):
    """Returns the index of the first element in `iterable` that
    satisfies `condition`.

    If the iterable is exhausted without any element satisfying the
    condition, the length of the iterable is returned (that is, one
    greater than its last index).

    `iterable` must not be empty; an empty iterable raises
    :exc:`ValueError`.

    """
    count = 0
    for count, element in enumerate(iterable, start=1):
        if condition(element):
            return count - 1
    # No element satisfied the condition, so `count` now equals the
    # length of the iterable.  A count of zero means the iterable was
    # empty, which violates the precondition.
    if count == 0:
        raise ValueError("iterable must be non-empty")
    return count
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_tournament(G):
    """Returns True if and only if `G` is a tournament.

    A tournament is a directed graph without self-loops or multi-edges
    in which every pair of distinct nodes is joined by exactly one
    directed edge.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph representing a tournament.

    Returns
    -------
    bool
        Whether the given graph is a tournament graph.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
    >>> nx.is_tournament(G)
    True

    Notes
    -----
    Some definitions require a self-loop on each node, but that is not
    the convention used here.

    """
    if nx.number_of_selfloops(G) != 0:
        return False
    # Exactly one of the two possible orientations must be present for
    # each pair of distinct nodes.
    return all((v in G[u]) != (u in G[v]) for u, v in combinations(G, 2))
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatchable
def hamiltonian_path(G):
    """Returns a Hamiltonian path in the given tournament graph.

    Every tournament has a Hamiltonian path.  If the tournament is also
    strongly connected, the returned path becomes a Hamiltonian cycle
    when its endpoints are joined.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph representing a tournament.

    Returns
    -------
    path : list
        A list of nodes which form a Hamiltonian path in `G`.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)])
    >>> nx.is_tournament(G)
    True
    >>> nx.tournament.hamiltonian_path(G)
    [0, 1, 2, 3]

    Notes
    -----
    This is a recursive implementation with an asymptotic running time
    of $O(n^2)$, ignoring multiplicative polylogarithmic factors, where
    $n$ is the number of nodes in the graph.

    """
    if len(G) == 0:
        return []
    if len(G) == 1:
        return [arbitrary_element(G)]
    # Recursively find a Hamiltonian path on all nodes except `node`,
    # then splice `node` in just before the first path node `u` that
    # has no edge to `node` (so the tournament edge runs node -> u).
    node = arbitrary_element(G)
    path = hamiltonian_path(G.subgraph(set(G) - {node}))
    position = len(path)
    for i, u in enumerate(path):
        if node not in G[u]:
            position = i
            break
    path.insert(position, node)
    return path
+
+
@py_random_state(1)
@nx._dispatchable(graphs=None, returns_graph=True)
def random_tournament(n, seed=None):
    r"""Returns a random tournament graph on `n` nodes.

    Parameters
    ----------
    n : int
        The number of nodes in the returned graph.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    G : DiGraph
        A tournament on `n` nodes, with exactly one directed edge joining
        each pair of distinct nodes.

    Notes
    -----
    Each of the `\binom{n}{2}` pairs of distinct nodes receives an edge
    whose orientation is decided by an independent flip of an unbiased
    coin.

    """

    def orient(u, v):
        # One uniform draw per pair decides the direction of its edge.
        return (u, v) if seed.random() < 0.5 else (v, u)

    return nx.DiGraph(orient(u, v) for u, v in combinations(range(n), 2))
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatchable
def score_sequence(G):
    """Returns the score sequence for the given tournament graph.

    The score sequence is the sorted list of the out-degrees of the
    nodes of the graph.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph representing a tournament.

    Returns
    -------
    list
        A sorted list of the out-degrees of the nodes of `G`.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 0), (1, 3), (0, 2), (0, 3), (2, 1), (3, 2)])
    >>> nx.is_tournament(G)
    True
    >>> nx.tournament.score_sequence(G)
    [1, 1, 2, 2]

    """
    scores = [degree for _, degree in G.out_degree()]
    scores.sort()
    return scores
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}})
def tournament_matrix(G):
    r"""Returns the tournament matrix for the given tournament graph.

    This function requires SciPy.

    The *tournament matrix* of a tournament graph with edge set *E* is
    the matrix *T* defined by

    .. math::

       T_{i j} =
       \begin{cases}
       +1 & \text{if } (i, j) \in E \\
       -1 & \text{if } (j, i) \in E \\
       0 & \text{if } i == j.
       \end{cases}

    Equivalently, `T = A - A^T`, where *A* is the adjacency matrix of
    the graph `G`.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph representing a tournament.

    Returns
    -------
    SciPy sparse array
        The tournament matrix of the tournament graph `G`.

    Raises
    ------
    ImportError
        If SciPy is not available.

    """
    adjacency = nx.adjacency_matrix(G)
    return adjacency - adjacency.T
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_reachable(G, s, t):
    """Decides whether there is a path from `s` to `t` in the
    tournament.

    This function is more theoretically efficient than the reachability
    checks in the shortest path algorithms in
    :mod:`networkx.algorithms.shortest_paths`.

    The given graph **must** be a tournament, otherwise this function's
    behavior is undefined.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph representing a tournament.

    s : node
        A node in the graph.

    t : node
        A node in the graph.

    Returns
    -------
    bool
        Whether there is a path from `s` to `t` in `G`.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 0), (1, 3), (1, 2), (2, 3), (2, 0), (3, 0)])
    >>> nx.is_tournament(G)
    True
    >>> nx.tournament.is_reachable(G, 1, 3)
    True
    >>> nx.tournament.is_reachable(G, 3, 2)
    False

    Notes
    -----
    Although this function is more theoretically efficient than the
    generic shortest path functions, a speedup requires the use of
    parallelism. Though it may in the future, the current implementation
    does not use parallelism, thus you may not see much of a speedup.

    This algorithm comes from [1].

    References
    ----------
    .. [1] Tantau, Till.
           "A note on the complexity of the reachability problem for
           tournaments."
           *Electronic Colloquium on Computational Complexity*. 2001.
           <http://eccc.hpi-web.de/report/2001/092/>
    """

    def two_neighborhood(G, v):
        """Returns the set of nodes at distance at most two from `v`.

        `G` must be a graph and `v` a node in that graph.

        The returned set includes the nodes at distance zero (that is,
        the node `v` itself), the nodes at distance one (that is, the
        out-neighbors of `v`), and the nodes at distance two.

        """
        # A node x qualifies if it is v itself, a direct out-neighbor of
        # v, or the endpoint of a two-edge path v -> z -> x for some
        # intermediate node z (checked via is_simple_path).
        return {
            x for x in G if x == v or x in G[v] or any(is_path(G, [v, z, x]) for z in G)
        }

    def is_closed(G, nodes):
        """Decides whether the given set of nodes is closed.

        A set *S* of nodes is *closed* if for each node *u* in the graph
        not in *S* and for each node *v* in *S*, there is an edge from
        *u* to *v*.

        """
        # In a tournament, every edge between an outside node u and an
        # inside node v then points *into* the set, so no directed path
        # can leave a closed set.
        return all(v in G[u] for u in set(G) - nodes for v in nodes)

    # Criterion from [1]: `t` is unreachable from `s` exactly when some
    # closed set contains `s` but not `t`; per the paper it suffices to
    # test the two-neighborhood of each node as the candidate sets.
    neighborhoods = [two_neighborhood(G, v) for v in G]
    return all(not (is_closed(G, S) and s in S and t not in S) for S in neighborhoods)
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatchable(name="tournament_is_strongly_connected")
def is_strongly_connected(G):
    """Decides whether the given tournament is strongly connected.

    This function is more theoretically efficient than the
    :func:`~networkx.algorithms.components.is_strongly_connected`
    function.

    The given graph **must** be a tournament, otherwise this function's
    behavior is undefined.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph representing a tournament.

    Returns
    -------
    bool
        Whether the tournament is strongly connected.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (0, 2), (1, 2), (1, 3), (2, 3), (3, 0)])
    >>> nx.is_tournament(G)
    True
    >>> nx.tournament.is_strongly_connected(G)
    True
    >>> G.remove_edge(3, 0)
    >>> G.add_edge(0, 3)
    >>> nx.is_tournament(G)
    True
    >>> nx.tournament.is_strongly_connected(G)
    False

    Notes
    -----
    Although this function is more theoretically efficient than the
    generic strong connectivity function, a speedup requires the use of
    parallelism. Though it may in the future, the current implementation
    does not use parallelism, thus you may not see much of a speedup.

    This algorithm comes from [1].

    References
    ----------
    .. [1] Tantau, Till.
           "A note on the complexity of the reachability problem for
           tournaments."
           *Electronic Colloquium on Computational Complexity*. 2001.
           <http://eccc.hpi-web.de/report/2001/092/>

    """
    # Strong connectivity means every ordered pair of nodes is joined by
    # a directed path.
    nodes = list(G)
    return all(is_reachable(G, u, v) for u in nodes for v in nodes)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__init__.py
new file mode 100644
index 00000000..93e6cdd0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__init__.py
@@ -0,0 +1,5 @@
+from .beamsearch import *
+from .breadth_first_search import *
+from .depth_first_search import *
+from .edgedfs import *
+from .edgebfs import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/beamsearch.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/beamsearch.py
new file mode 100644
index 00000000..23fbe7bb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/beamsearch.py
@@ -0,0 +1,90 @@
+"""Basic algorithms for breadth-first searching the nodes of a graph."""
+
+import networkx as nx
+
+__all__ = ["bfs_beam_edges"]
+
+
@nx._dispatchable
def bfs_beam_edges(G, source, value, width=None):
    """Iterates over edges in a beam search.

    The beam search is a generalized breadth-first search in which only
    the "best" *w* neighbors of the current node are enqueued, where *w*
    is the beam width and "best" is an application-specific
    heuristic. In general, a beam search with a small beam width might
    not visit each node in the graph.

    .. note::

       With the default value of ``width=None`` or `width` greater than the
       maximum degree of the graph, this function equates to a slower
       version of `~networkx.algorithms.traversal.breadth_first_search.bfs_edges`.
       All nodes will be visited, though the order of the reported edges may
       vary. In such cases, `value` has no effect - consider using `bfs_edges`
       directly instead.

    Parameters
    ----------
    G : NetworkX graph

    source : node
        Starting node for the breadth-first search; only edges in the
        component reachable from this node are reported.

    value : function
        A function that takes a node of the graph as input and returns a
        real number indicating how "good" it is. A higher value means it
        is more likely to be visited sooner during the search. When
        visiting a new node, only the `width` neighbors with the highest
        `value` are enqueued (in decreasing order of `value`).

    width : int (default = None)
        The beam width for the search. This is the number of neighbors
        (ordered by `value`) to enqueue when visiting each new node.

    Yields
    ------
    edge
        Edges in the beam search starting from `source`, given as a pair
        of nodes.

    Examples
    --------
    To give nodes with, for example, a higher centrality precedence
    during the search, set the `value` function to return the centrality
    value of the node:

    >>> G = nx.karate_club_graph()
    >>> centrality = nx.eigenvector_centrality(G)
    >>> list(nx.bfs_beam_edges(G, source=0, value=centrality.get, width=3))
    [(0, 2), (0, 1), (0, 8), (2, 32), (1, 13), (8, 33)]
    """

    if width is None:
        # A beam as wide as the graph never prunes any neighbor.
        width = len(G)

    def best_neighbors(v):
        """Returns an iterator over the `width` best neighbors of `v`.

        Neighbors are ranked by the `value` heuristic, highest first.
        If `v` has fewer than `width` neighbors, all of them are
        returned (the slice simply takes everything).
        """
        # TODO The Python documentation states that for small values, it
        # is better to use `heapq.nlargest`. We should determine the
        # threshold at which its better to use `heapq.nlargest()`
        # instead of `sorted()[:]` and apply that optimization here.
        ranked = sorted(G.neighbors(v), key=value, reverse=True)
        return iter(ranked[:width])

    yield from nx.generic_bfs_edges(G, source, best_neighbors)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/breadth_first_search.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/breadth_first_search.py
new file mode 100644
index 00000000..899dc92b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/breadth_first_search.py
@@ -0,0 +1,575 @@
+"""Basic algorithms for breadth-first searching the nodes of a graph."""
+
+from collections import deque
+
+import networkx as nx
+
+__all__ = [
+    "bfs_edges",
+    "bfs_tree",
+    "bfs_predecessors",
+    "bfs_successors",
+    "descendants_at_distance",
+    "bfs_layers",
+    "bfs_labeled_edges",
+    "generic_bfs_edges",
+]
+
+
@nx._dispatchable
def generic_bfs_edges(G, source, neighbors=None, depth_limit=None):
    """Iterate over edges in a breadth-first search.

    The breadth-first search begins at `source`; the neighbors of each
    newly visited node are produced by the `neighbors` function, which
    controls both which neighbors are considered and in what order.

    Parameters
    ----------
    G : NetworkX graph

    source : node
        Starting node for the breadth-first search; this function
        iterates over only those edges in the component reachable from
        this node.

    neighbors : function
        A function that takes a newly visited node of the graph as input
        and returns an *iterator* (not just a list) of nodes that are
        neighbors of that node with custom ordering. If not specified, this is
        just the ``G.neighbors`` method, but in general it can be any function
        that returns an iterator over some or all of the neighbors of a
        given node, in any order.

    depth_limit : int, optional(default=len(G))
        Specify the maximum search depth.

    Yields
    ------
    edge
        Edges in the breadth-first search starting from `source`.

    Examples
    --------
    >>> G = nx.path_graph(7)
    >>> list(nx.generic_bfs_edges(G, source=0))
    [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6)]
    >>> list(nx.generic_bfs_edges(G, source=2))
    [(2, 1), (2, 3), (1, 0), (3, 4), (4, 5), (5, 6)]
    >>> list(nx.generic_bfs_edges(G, source=2, depth_limit=2))
    [(2, 1), (2, 3), (1, 0), (3, 4)]

    The `neighbors` param can be used to specify the visitation order of each
    node's neighbors generically. In the following example, we modify the default
    neighbor to return *odd* nodes first:

    >>> def odd_first(n):
    ...     return sorted(G.neighbors(n), key=lambda x: x % 2, reverse=True)

    >>> G = nx.star_graph(5)
    >>> list(nx.generic_bfs_edges(G, source=0))  # Default neighbor ordering
    [(0, 1), (0, 2), (0, 3), (0, 4), (0, 5)]
    >>> list(nx.generic_bfs_edges(G, source=0, neighbors=odd_first))
    [(0, 1), (0, 3), (0, 5), (0, 2), (0, 4)]

    Notes
    -----
    This implementation is adapted from `PADS`_, which was in the public
    domain when it was first accessed in July, 2004.  The modifications
    to allow depth limits are based on the Wikipedia article
    "`Depth-limited-search`_".

    .. _PADS: http://www.ics.uci.edu/~eppstein/PADS/BFS.py
    .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search
    """
    if neighbors is None:
        neighbors = G.neighbors
    if depth_limit is None:
        depth_limit = len(G)

    seen = {source}
    # FIFO queue of (node, depth, iterator over that node's neighbors).
    # Each entry's neighbor iterator is fully consumed before the next
    # entry is processed, which reproduces layer-by-layer BFS order.
    queue = deque([(source, 0, neighbors(source))])
    while queue:
        parent, depth, children = queue.popleft()
        for child in children:
            if child not in seen:
                seen.add(child)
                yield parent, child
                # Expand the child only while above the depth limit;
                # edges to nodes at exactly `depth_limit` are still
                # reported, but those nodes are not searched further.
                if depth + 1 < depth_limit:
                    queue.append((child, depth + 1, neighbors(child)))
+
+
@nx._dispatchable
def bfs_edges(G, source, reverse=False, depth_limit=None, sort_neighbors=None):
    """Iterate over edges in a breadth-first-search starting at source.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Specify starting node for breadth-first search; this function
       iterates over only those edges in the component reachable from
       this node.

    reverse : bool, optional
       If True traverse a directed graph in the reverse direction

    depth_limit : int, optional(default=len(G))
        Specify the maximum search depth

    sort_neighbors : function (default=None)
        A function that takes an iterator over nodes as the input, and
        returns an iterable of the same nodes with a custom ordering.
        For example, `sorted` will sort the nodes in increasing order.

    Yields
    ------
    edge: 2-tuple of nodes
       Yields edges resulting from the breadth-first search.

    Examples
    --------
    To get the edges in a breadth-first search::

        >>> G = nx.path_graph(3)
        >>> list(nx.bfs_edges(G, 0))
        [(0, 1), (1, 2)]
        >>> list(nx.bfs_edges(G, source=0, depth_limit=1))
        [(0, 1)]

    To get the nodes in a breadth-first search order::

        >>> G = nx.path_graph(3)
        >>> root = 2
        >>> edges = nx.bfs_edges(G, root)
        >>> nodes = [root] + [v for u, v in edges]
        >>> nodes
        [2, 1, 0]

    Notes
    -----
    The naming of this function is very similar to
    :func:`~networkx.algorithms.traversal.edgebfs.edge_bfs`. The difference
    is that ``edge_bfs`` yields edges even if they extend back to an already
    explored node while this generator yields the edges of the tree that results
    from a breadth-first-search (BFS) so no edges are reported if they extend
    to already explored nodes. That means ``edge_bfs`` reports all edges while
    ``bfs_edges`` only reports those traversed by a node-based BFS.

    Based on the breadth-first search implementation in PADS [1]_
    by D. Eppstein, July 2004; with modifications to allow depth limits
    as described in [2]_.

    References
    ----------
    .. [1] http://www.ics.uci.edu/~eppstein/PADS/BFS.py.
    .. [2] https://en.wikipedia.org/wiki/Depth-limited_search

    See Also
    --------
    bfs_tree
    :func:`~networkx.algorithms.traversal.depth_first_search.dfs_edges`
    :func:`~networkx.algorithms.traversal.edgebfs.edge_bfs`

    """
    # Reverse traversal only makes sense for directed graphs; otherwise
    # neighbors and predecessors coincide.
    if reverse and G.is_directed():
        successors = G.predecessors
    else:
        successors = G.neighbors

    if sort_neighbors is None:
        yield from generic_bfs_edges(G, source, successors, depth_limit)
    else:

        def ordered_successors(node):
            # Wrap the successor lookup with the caller's ordering.
            return iter(sort_neighbors(successors(node)))

        yield from generic_bfs_edges(G, source, ordered_successors, depth_limit)
+
+
@nx._dispatchable(returns_graph=True)
def bfs_tree(G, source, reverse=False, depth_limit=None, sort_neighbors=None):
    """Returns an oriented tree constructed from of a breadth-first-search
    starting at source.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Specify starting node for breadth-first search

    reverse : bool, optional
       If True traverse a directed graph in the reverse direction

    depth_limit : int, optional(default=len(G))
        Specify the maximum search depth

    sort_neighbors : function (default=None)
        A function that takes an iterator over nodes as the input, and
        returns an iterable of the same nodes with a custom ordering.
        For example, `sorted` will sort the nodes in increasing order.

    Returns
    -------
    T: NetworkX DiGraph
       An oriented tree

    Examples
    --------
    >>> G = nx.path_graph(3)
    >>> list(nx.bfs_tree(G, 1).edges())
    [(1, 0), (1, 2)]
    >>> H = nx.Graph()
    >>> nx.add_path(H, [0, 1, 2, 3, 4, 5, 6])
    >>> nx.add_path(H, [2, 7, 8, 9, 10])
    >>> sorted(list(nx.bfs_tree(H, source=3, depth_limit=3).edges()))
    [(1, 0), (2, 1), (2, 7), (3, 2), (3, 4), (4, 5), (5, 6), (7, 8)]


    Notes
    -----
    Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py
    by D. Eppstein, July 2004. The modifications
    to allow depth limits based on the Wikipedia article
    "`Depth-limited-search`_".

    .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search

    See Also
    --------
    dfs_tree
    bfs_edges
    edge_bfs
    """
    tree = nx.DiGraph()
    # The source is added explicitly so an isolated source still yields
    # a one-node tree.
    tree.add_node(source)
    tree.add_edges_from(
        bfs_edges(
            G,
            source,
            reverse=reverse,
            depth_limit=depth_limit,
            sort_neighbors=sort_neighbors,
        )
    )
    return tree
+
+
@nx._dispatchable
def bfs_predecessors(G, source, depth_limit=None, sort_neighbors=None):
    """Returns an iterator of predecessors in breadth-first-search from source.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Specify starting node for breadth-first search

    depth_limit : int, optional(default=len(G))
        Specify the maximum search depth

    sort_neighbors : function (default=None)
        A function that takes an iterator over nodes as the input, and
        returns an iterable of the same nodes with a custom ordering.
        For example, `sorted` will sort the nodes in increasing order.

    Returns
    -------
    pred: iterator
        (node, predecessor) iterator where `predecessor` is the predecessor of
        `node` in a breadth first search starting from `source`.

    Examples
    --------
    >>> G = nx.path_graph(3)
    >>> dict(nx.bfs_predecessors(G, 0))
    {1: 0, 2: 1}
    >>> H = nx.Graph()
    >>> H.add_edges_from([(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)])
    >>> dict(nx.bfs_predecessors(H, 0))
    {1: 0, 2: 0, 3: 1, 4: 1, 5: 2, 6: 2}
    >>> M = nx.Graph()
    >>> nx.add_path(M, [0, 1, 2, 3, 4, 5, 6])
    >>> nx.add_path(M, [2, 7, 8, 9, 10])
    >>> sorted(nx.bfs_predecessors(M, source=1, depth_limit=3))
    [(0, 1), (2, 1), (3, 2), (4, 3), (7, 2), (8, 7)]
    >>> N = nx.DiGraph()
    >>> nx.add_path(N, [0, 1, 2, 3, 4, 7])
    >>> nx.add_path(N, [3, 5, 6, 7])
    >>> sorted(nx.bfs_predecessors(N, source=2))
    [(3, 2), (4, 3), (5, 3), (6, 5), (7, 4)]

    Notes
    -----
    Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py
    by D. Eppstein, July 2004. The modifications
    to allow depth limits based on the Wikipedia article
    "`Depth-limited-search`_".

    .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search

    See Also
    --------
    bfs_tree
    bfs_edges
    edge_bfs
    """
    # Each BFS tree edge (parent, child) is reported flipped, so the
    # mapping reads child -> its unique BFS predecessor.
    edges = bfs_edges(G, source, depth_limit=depth_limit, sort_neighbors=sort_neighbors)
    for parent, child in edges:
        yield (child, parent)
+
+
@nx._dispatchable
def bfs_successors(G, source, depth_limit=None, sort_neighbors=None):
    """Returns an iterator of successors in breadth-first-search from source.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Specify starting node for breadth-first search

    depth_limit : int, optional(default=len(G))
        Specify the maximum search depth

    sort_neighbors : function (default=None)
        A function that takes an iterator over nodes as the input, and
        returns an iterable of the same nodes with a custom ordering.
        For example, `sorted` will sort the nodes in increasing order.

    Returns
    -------
    succ: iterator
       (node, successors) iterator where `successors` is the non-empty list of
       successors of `node` in a breadth first search from `source`.
       To appear in the iterator, `node` must have successors.

    Examples
    --------
    >>> G = nx.path_graph(3)
    >>> dict(nx.bfs_successors(G, 0))
    {0: [1], 1: [2]}
    >>> H = nx.Graph()
    >>> H.add_edges_from([(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)])
    >>> dict(nx.bfs_successors(H, 0))
    {0: [1, 2], 1: [3, 4], 2: [5, 6]}
    >>> G = nx.Graph()
    >>> nx.add_path(G, [0, 1, 2, 3, 4, 5, 6])
    >>> nx.add_path(G, [2, 7, 8, 9, 10])
    >>> dict(nx.bfs_successors(G, source=1, depth_limit=3))
    {1: [0, 2], 2: [3, 7], 3: [4], 7: [8]}
    >>> G = nx.DiGraph()
    >>> nx.add_path(G, [0, 1, 2, 3, 4, 5])
    >>> dict(nx.bfs_successors(G, source=3))
    {3: [4], 4: [5]}

    Notes
    -----
    Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py
    by D. Eppstein, July 2004.The modifications
    to allow depth limits based on the Wikipedia article
    "`Depth-limited-search`_".

    .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search

    See Also
    --------
    bfs_tree
    bfs_edges
    edge_bfs
    """
    # BFS reports all tree edges of one parent consecutively, so the
    # successor lists can be built by grouping consecutive edges that
    # share a parent.
    current = source
    group = []
    for parent, child in bfs_edges(
        G, source, depth_limit=depth_limit, sort_neighbors=sort_neighbors
    ):
        if parent != current:
            # A new parent begins: emit the finished group.
            yield (current, group)
            current, group = parent, [child]
        else:
            group.append(child)
    yield (current, group)
+
+
@nx._dispatchable
def bfs_layers(G, sources):
    """Returns an iterator of all the layers in breadth-first search traversal.

    Parameters
    ----------
    G : NetworkX graph
        A graph over which to find the layers using breadth-first search.

    sources : node in `G` or list of nodes in `G`
        Specify starting nodes for single source or multiple sources breadth-first search

    Yields
    ------
    layer: list of nodes
        Yields list of nodes at the same distance from sources

    Raises
    ------
    NetworkXError
        If any of the source nodes is not in `G`.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> dict(enumerate(nx.bfs_layers(G, [0, 4])))
    {0: [0, 4], 1: [1, 3], 2: [2]}
    >>> H = nx.Graph()
    >>> H.add_edges_from([(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)])
    >>> dict(enumerate(nx.bfs_layers(H, [1])))
    {0: [1], 1: [0, 3, 4], 2: [2], 3: [5, 6]}
    >>> dict(enumerate(nx.bfs_layers(H, [1, 6])))
    {0: [1, 6], 1: [0, 3, 4, 2], 2: [5]}
    """
    if sources in G:
        sources = [sources]

    current_layer = list(sources)
    # Build the visited set from the materialized list, NOT from
    # `sources` again: if `sources` is a one-shot iterator it is already
    # exhausted by the list() call above, and re-iterating it would
    # leave the visited set empty, letting source nodes reappear in
    # later layers.
    visited = set(current_layer)

    for source in current_layer:
        if source not in G:
            raise nx.NetworkXError(f"The node {source} is not in the graph.")

    # this is basically BFS, except that the current layer only stores the nodes at
    # same distance from sources at each iteration
    while current_layer:
        yield current_layer
        next_layer = []
        for node in current_layer:
            for child in G[node]:
                if child not in visited:
                    visited.add(child)
                    next_layer.append(child)
        current_layer = next_layer
+
+
# Edge-label constants yielded by `bfs_labeled_edges` below.
REVERSE_EDGE = "reverse"
TREE_EDGE = "tree"
FORWARD_EDGE = "forward"
LEVEL_EDGE = "level"


@nx._dispatchable
def bfs_labeled_edges(G, sources):
    """Iterate over edges in a breadth-first search (BFS) labeled by type.

    We generate triple of the form (*u*, *v*, *d*), where (*u*, *v*) is the
    edge being explored in the breadth-first search and *d* is one of the
    strings 'tree', 'forward', 'level', or 'reverse'.  A 'tree' edge is one in
    which *v* is first discovered and placed into the layer below *u*.  A
    'forward' edge is one in which *u* is on the layer above *v* and *v* has
    already been discovered.  A 'level' edge is one in which both *u* and *v*
    occur on the same layer.  A 'reverse' edge is one in which *u* is on a layer
    below *v*.

    We emit each edge exactly once.  In an undirected graph, 'reverse' edges do
    not occur, because each is discovered either as a 'tree' or 'forward' edge.

    Parameters
    ----------
    G : NetworkX graph
        A graph over which to find the layers using breadth-first search.

    sources : node in `G` or list of nodes in `G`
        Starting nodes for single source or multiple sources breadth-first search

    Yields
    ------
    edges: generator
       A generator of triples (*u*, *v*, *d*) where (*u*, *v*) is the edge being
       explored and *d* is described above.

    Examples
    --------
    >>> G = nx.cycle_graph(4, create_using=nx.DiGraph)
    >>> list(nx.bfs_labeled_edges(G, 0))
    [(0, 1, 'tree'), (1, 2, 'tree'), (2, 3, 'tree'), (3, 0, 'reverse')]
    >>> G = nx.complete_graph(3)
    >>> list(nx.bfs_labeled_edges(G, 0))
    [(0, 1, 'tree'), (0, 2, 'tree'), (1, 2, 'level')]
    >>> list(nx.bfs_labeled_edges(G, [0, 1]))
    [(0, 1, 'level'), (0, 2, 'tree'), (1, 2, 'forward')]
    """
    # A single node is promoted to a one-element source list.
    if sources in G:
        sources = [sources]

    neighbors = G._adj
    directed = G.is_directed()
    visited = set()
    visit = visited.discard if directed else visited.add
    # We use visited in a negative sense, so the visited set stays empty for the
    # directed case and level edges are reported on their first occurrence in
    # the undirected case.  Note our use of visited.discard -- this is built-in
    # thus somewhat faster than a python-defined def nop(x): pass
    depth = {s: 0 for s in sources}
    queue = deque(depth.items())
    push = queue.append
    pop = queue.popleft
    while queue:
        # `u` is the node being expanded, `du` its BFS layer index.
        u, du = pop()
        for v in neighbors[u]:
            if v not in depth:
                # First discovery of v: tree edge, one layer below u.
                depth[v] = dv = du + 1
                push((v, dv))
                yield u, v, TREE_EDGE
            else:
                dv = depth[v]
                if du == dv:
                    # Same layer.  In the undirected case the visited
                    # set (filled by `visit`) suppresses the second
                    # report of the same level edge.
                    if v not in visited:
                        yield u, v, LEVEL_EDGE
                elif du < dv:
                    yield u, v, FORWARD_EDGE
                elif directed:
                    # u lies below v; only meaningful for directed graphs
                    # (undirected edges were already seen from above).
                    yield u, v, REVERSE_EDGE
        visit(u)
+
+
@nx._dispatchable
def descendants_at_distance(G, source, distance):
    """Returns all nodes at a fixed `distance` from `source` in `G`.

    Parameters
    ----------
    G : NetworkX graph
        A graph
    source : node in `G`
    distance : the distance of the wanted nodes from `source`

    Returns
    -------
    set()
        The descendants of `source` in `G` at the given `distance` from `source`

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> nx.descendants_at_distance(G, 2, 2)
    {0, 4}
    >>> H = nx.DiGraph()
    >>> H.add_edges_from([(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)])
    >>> nx.descendants_at_distance(H, 0, 2)
    {3, 4, 5, 6}
    >>> nx.descendants_at_distance(H, 5, 0)
    {5}
    >>> nx.descendants_at_distance(H, 5, 1)
    set()
    """
    if source not in G:
        raise nx.NetworkXError(f"The node {source} is not in the graph.")

    # Walk the BFS layers, counting down to the requested distance.
    remaining = distance
    for layer in nx.bfs_layers(G, source):
        if remaining == 0:
            return set(layer)
        remaining -= 1
    # `distance` exceeds the eccentricity of `source`: no such nodes.
    return set()
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/depth_first_search.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/depth_first_search.py
new file mode 100644
index 00000000..5bac5ecf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/depth_first_search.py
@@ -0,0 +1,529 @@
+"""Basic algorithms for depth-first searching the nodes of a graph."""
+
+from collections import defaultdict
+
+import networkx as nx
+
+__all__ = [
+    "dfs_edges",
+    "dfs_tree",
+    "dfs_predecessors",
+    "dfs_successors",
+    "dfs_preorder_nodes",
+    "dfs_postorder_nodes",
+    "dfs_labeled_edges",
+]
+
+
+@nx._dispatchable
+def dfs_edges(G, source=None, depth_limit=None, *, sort_neighbors=None):
+    """Iterate over edges in a depth-first-search (DFS).
+
+    Perform a depth-first-search over the nodes of `G` and yield
+    the edges in order. This may not generate all edges in `G`
+    (see `~networkx.algorithms.traversal.edgedfs.edge_dfs`).
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node, optional
+       Specify starting node for depth-first search and yield edges in
+       the component reachable from source.
+
+    depth_limit : int, optional (default=len(G))
+       Specify the maximum search depth.
+
+    sort_neighbors : function (default=None)
+        A function that takes an iterator over nodes as the input, and
+        returns an iterable of the same nodes with a custom ordering.
+        For example, `sorted` will sort the nodes in increasing order.
+
+    Yields
+    ------
+    edge: 2-tuple of nodes
+       Yields edges resulting from the depth-first-search.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> list(nx.dfs_edges(G, source=0))
+    [(0, 1), (1, 2), (2, 3), (3, 4)]
+    >>> list(nx.dfs_edges(G, source=0, depth_limit=2))
+    [(0, 1), (1, 2)]
+
+    Notes
+    -----
+    If a source is not specified then a source is chosen arbitrarily and
+    repeatedly until all components in the graph are searched.
+
+    The implementation of this function is adapted from David Eppstein's
+    depth-first search function in PADS [1]_, with modifications
+    to allow depth limits based on the Wikipedia article
+    "Depth-limited search" [2]_.
+
+    See Also
+    --------
+    dfs_preorder_nodes
+    dfs_postorder_nodes
+    dfs_labeled_edges
+    :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs`
+    :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_edges`
+
+    References
+    ----------
+    .. [1] http://www.ics.uci.edu/~eppstein/PADS
+    .. [2] https://en.wikipedia.org/wiki/Depth-limited_search
+    """
+    if source is None:
+        # edges for all components
+        nodes = G
+    else:
+        # edges for components with source
+        nodes = [source]
+    if depth_limit is None:
+        depth_limit = len(G)
+
+    get_children = (
+        G.neighbors
+        if sort_neighbors is None
+        else lambda n: iter(sort_neighbors(G.neighbors(n)))
+    )
+
+    visited = set()
+    for start in nodes:
+        if start in visited:
+            continue
+        visited.add(start)
+        stack = [(start, get_children(start))]
+        depth_now = 1
+        while stack:
+            parent, children = stack[-1]
+            for child in children:
+                if child not in visited:
+                    yield parent, child
+                    visited.add(child)
+                    if depth_now < depth_limit:
+                        stack.append((child, get_children(child)))
+                        depth_now += 1
+                        break
+            else:
+                stack.pop()
+                depth_now -= 1
+
+
+@nx._dispatchable(returns_graph=True)
+def dfs_tree(G, source=None, depth_limit=None, *, sort_neighbors=None):
+    """Returns oriented tree constructed from a depth-first-search from source.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node, optional
+       Specify starting node for depth-first search.
+
+    depth_limit : int, optional (default=len(G))
+       Specify the maximum search depth.
+
+    sort_neighbors : function (default=None)
+        A function that takes an iterator over nodes as the input, and
+        returns an iterable of the same nodes with a custom ordering.
+        For example, `sorted` will sort the nodes in increasing order.
+
+    Returns
+    -------
+    T : NetworkX DiGraph
+       An oriented tree
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> T = nx.dfs_tree(G, source=0, depth_limit=2)
+    >>> list(T.edges())
+    [(0, 1), (1, 2)]
+    >>> T = nx.dfs_tree(G, source=0)
+    >>> list(T.edges())
+    [(0, 1), (1, 2), (2, 3), (3, 4)]
+
+    See Also
+    --------
+    dfs_preorder_nodes
+    dfs_postorder_nodes
+    dfs_labeled_edges
+    :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs`
+    :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree`
+    """
+    T = nx.DiGraph()
+    if source is None:
+        T.add_nodes_from(G)
+    else:
+        T.add_node(source)
+    T.add_edges_from(dfs_edges(G, source, depth_limit, sort_neighbors=sort_neighbors))
+    return T
+
+
+@nx._dispatchable
+def dfs_predecessors(G, source=None, depth_limit=None, *, sort_neighbors=None):
+    """Returns dictionary of predecessors in depth-first-search from source.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node, optional
+       Specify starting node for depth-first search.
+       Note that you will get predecessors for all nodes in the
+       component containing `source`. This input only specifies
+       where the DFS starts.
+
+    depth_limit : int, optional (default=len(G))
+       Specify the maximum search depth.
+
+    sort_neighbors : function (default=None)
+        A function that takes an iterator over nodes as the input, and
+        returns an iterable of the same nodes with a custom ordering.
+        For example, `sorted` will sort the nodes in increasing order.
+
+    Returns
+    -------
+    pred: dict
+       A dictionary with nodes as keys and predecessor nodes as values.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4)
+    >>> nx.dfs_predecessors(G, source=0)
+    {1: 0, 2: 1, 3: 2}
+    >>> nx.dfs_predecessors(G, source=0, depth_limit=2)
+    {1: 0, 2: 1}
+
+    Notes
+    -----
+    If a source is not specified then a source is chosen arbitrarily and
+    repeatedly until all components in the graph are searched.
+
+    The implementation of this function is adapted from David Eppstein's
+    depth-first search function in `PADS`_, with modifications
+    to allow depth limits based on the Wikipedia article
+    "`Depth-limited search`_".
+
+    .. _PADS: http://www.ics.uci.edu/~eppstein/PADS
+    .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search
+
+    See Also
+    --------
+    dfs_preorder_nodes
+    dfs_postorder_nodes
+    dfs_labeled_edges
+    :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs`
+    :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree`
+    """
+    return {
+        t: s
+        for s, t in dfs_edges(G, source, depth_limit, sort_neighbors=sort_neighbors)
+    }
+
+
+@nx._dispatchable
+def dfs_successors(G, source=None, depth_limit=None, *, sort_neighbors=None):
+    """Returns dictionary of successors in depth-first-search from source.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node, optional
+       Specify starting node for depth-first search.
+       Note that you will get successors for all nodes in the
+       component containing `source`. This input only specifies
+       where the DFS starts.
+
+    depth_limit : int, optional (default=len(G))
+       Specify the maximum search depth.
+
+    sort_neighbors : function (default=None)
+        A function that takes an iterator over nodes as the input, and
+        returns an iterable of the same nodes with a custom ordering.
+        For example, `sorted` will sort the nodes in increasing order.
+
+    Returns
+    -------
+    succ: dict
+       A dictionary with nodes as keys and list of successor nodes as values.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> nx.dfs_successors(G, source=0)
+    {0: [1], 1: [2], 2: [3], 3: [4]}
+    >>> nx.dfs_successors(G, source=0, depth_limit=2)
+    {0: [1], 1: [2]}
+
+    Notes
+    -----
+    If a source is not specified then a source is chosen arbitrarily and
+    repeatedly until all components in the graph are searched.
+
+    The implementation of this function is adapted from David Eppstein's
+    depth-first search function in `PADS`_, with modifications
+    to allow depth limits based on the Wikipedia article
+    "`Depth-limited search`_".
+
+    .. _PADS: http://www.ics.uci.edu/~eppstein/PADS
+    .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search
+
+    See Also
+    --------
+    dfs_preorder_nodes
+    dfs_postorder_nodes
+    dfs_labeled_edges
+    :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs`
+    :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree`
+    """
+    d = defaultdict(list)
+    for s, t in dfs_edges(
+        G,
+        source=source,
+        depth_limit=depth_limit,
+        sort_neighbors=sort_neighbors,
+    ):
+        d[s].append(t)
+    return dict(d)
+
+
+@nx._dispatchable
+def dfs_postorder_nodes(G, source=None, depth_limit=None, *, sort_neighbors=None):
+    """Generate nodes in a depth-first-search post-ordering starting at source.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node, optional
+       Specify starting node for depth-first search.
+
+    depth_limit : int, optional (default=len(G))
+       Specify the maximum search depth.
+
+    sort_neighbors : function (default=None)
+        A function that takes an iterator over nodes as the input, and
+        returns an iterable of the same nodes with a custom ordering.
+        For example, `sorted` will sort the nodes in increasing order.
+
+    Returns
+    -------
+    nodes: generator
+       A generator of nodes in a depth-first-search post-ordering.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> list(nx.dfs_postorder_nodes(G, source=0))
+    [4, 3, 2, 1, 0]
+    >>> list(nx.dfs_postorder_nodes(G, source=0, depth_limit=2))
+    [1, 0]
+
+    Notes
+    -----
+    If a source is not specified then a source is chosen arbitrarily and
+    repeatedly until all components in the graph are searched.
+
+    The implementation of this function is adapted from David Eppstein's
+    depth-first search function in `PADS`_, with modifications
+    to allow depth limits based on the Wikipedia article
+    "`Depth-limited search`_".
+
+    .. _PADS: http://www.ics.uci.edu/~eppstein/PADS
+    .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search
+
+    See Also
+    --------
+    dfs_edges
+    dfs_preorder_nodes
+    dfs_labeled_edges
+    :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs`
+    :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree`
+    """
+    edges = nx.dfs_labeled_edges(
+        G, source=source, depth_limit=depth_limit, sort_neighbors=sort_neighbors
+    )
+    return (v for u, v, d in edges if d == "reverse")
+
+
+@nx._dispatchable
+def dfs_preorder_nodes(G, source=None, depth_limit=None, *, sort_neighbors=None):
+    """Generate nodes in a depth-first-search pre-ordering starting at source.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node, optional
+       Specify starting node for depth-first search and return nodes in
+       the component reachable from source.
+
+    depth_limit : int, optional (default=len(G))
+       Specify the maximum search depth.
+
+    sort_neighbors : function (default=None)
+        A function that takes an iterator over nodes as the input, and
+        returns an iterable of the same nodes with a custom ordering.
+        For example, `sorted` will sort the nodes in increasing order.
+
+    Returns
+    -------
+    nodes: generator
+       A generator of nodes in a depth-first-search pre-ordering.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> list(nx.dfs_preorder_nodes(G, source=0))
+    [0, 1, 2, 3, 4]
+    >>> list(nx.dfs_preorder_nodes(G, source=0, depth_limit=2))
+    [0, 1, 2]
+
+    Notes
+    -----
+    If a source is not specified then a source is chosen arbitrarily and
+    repeatedly until all components in the graph are searched.
+
+    The implementation of this function is adapted from David Eppstein's
+    depth-first search function in `PADS`_, with modifications
+    to allow depth limits based on the Wikipedia article
+    "`Depth-limited search`_".
+
+    .. _PADS: http://www.ics.uci.edu/~eppstein/PADS
+    .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search
+
+    See Also
+    --------
+    dfs_edges
+    dfs_postorder_nodes
+    dfs_labeled_edges
+    :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_edges`
+    """
+    edges = nx.dfs_labeled_edges(
+        G, source=source, depth_limit=depth_limit, sort_neighbors=sort_neighbors
+    )
+    return (v for u, v, d in edges if d == "forward")
+
+
+@nx._dispatchable
+def dfs_labeled_edges(G, source=None, depth_limit=None, *, sort_neighbors=None):
+    """Iterate over edges in a depth-first-search (DFS) labeled by type.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node, optional
+       Specify starting node for depth-first search and return edges in
+       the component reachable from source.
+
+    depth_limit : int, optional (default=len(G))
+       Specify the maximum search depth.
+
+    sort_neighbors : function (default=None)
+        A function that takes an iterator over nodes as the input, and
+        returns an iterable of the same nodes with a custom ordering.
+        For example, `sorted` will sort the nodes in increasing order.
+
+    Returns
+    -------
+    edges: generator
+       A generator of triples of the form (*u*, *v*, *d*), where (*u*,
+       *v*) is the edge being explored in the depth-first search and *d*
+       is one of the strings 'forward', 'nontree', 'reverse', or 'reverse-depth_limit'.
+       A 'forward' edge is one in which *u* has been visited but *v* has
+       not. A 'nontree' edge is one in which both *u* and *v* have been
+       visited but the edge is not in the DFS tree. A 'reverse' edge is
+       one in which both *u* and *v* have been visited and the edge is in
+       the DFS tree. When the `depth_limit` is reached via a 'forward' edge,
+       a 'reverse' edge is immediately generated rather than the subtree
+       being explored. To indicate this flavor of 'reverse' edge, the string
+       yielded is 'reverse-depth_limit'.
+
+    Examples
+    --------
+
+    The labels reveal the complete transcript of the depth-first search
+    algorithm in more detail than, for example, :func:`dfs_edges`::
+
+        >>> from pprint import pprint
+        >>>
+        >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 1)])
+        >>> pprint(list(nx.dfs_labeled_edges(G, source=0)))
+        [(0, 0, 'forward'),
+         (0, 1, 'forward'),
+         (1, 2, 'forward'),
+         (2, 1, 'nontree'),
+         (1, 2, 'reverse'),
+         (0, 1, 'reverse'),
+         (0, 0, 'reverse')]
+
+    Notes
+    -----
+    If a source is not specified then a source is chosen arbitrarily and
+    repeatedly until all components in the graph are searched.
+
+    The implementation of this function is adapted from David Eppstein's
+    depth-first search function in `PADS`_, with modifications
+    to allow depth limits based on the Wikipedia article
+    "`Depth-limited search`_".
+
+    .. _PADS: http://www.ics.uci.edu/~eppstein/PADS
+    .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search
+
+    See Also
+    --------
+    dfs_edges
+    dfs_preorder_nodes
+    dfs_postorder_nodes
+    """
+    # Based on http://www.ics.uci.edu/~eppstein/PADS/DFS.py
+    # by D. Eppstein, July 2004.
+    if source is None:
+        # edges for all components
+        nodes = G
+    else:
+        # edges for components with source
+        nodes = [source]
+    if depth_limit is None:
+        depth_limit = len(G)
+
+    get_children = (
+        G.neighbors
+        if sort_neighbors is None
+        else lambda n: iter(sort_neighbors(G.neighbors(n)))
+    )
+
+    visited = set()
+    for start in nodes:
+        if start in visited:
+            continue
+        yield start, start, "forward"
+        visited.add(start)
+        stack = [(start, get_children(start))]
+        depth_now = 1
+        while stack:
+            parent, children = stack[-1]
+            for child in children:
+                if child in visited:
+                    yield parent, child, "nontree"
+                else:
+                    yield parent, child, "forward"
+                    visited.add(child)
+                    if depth_now < depth_limit:
+                        stack.append((child, iter(get_children(child))))
+                        depth_now += 1
+                        break
+                    else:
+                        yield parent, child, "reverse-depth_limit"
+            else:
+                stack.pop()
+                depth_now -= 1
+                if stack:
+                    yield stack[-1][0], parent, "reverse"
+        yield start, start, "reverse"
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgebfs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgebfs.py
new file mode 100644
index 00000000..6320ddc2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgebfs.py
@@ -0,0 +1,178 @@
+"""
+=============================
+Breadth First Search on Edges
+=============================
+
+Algorithms for a breadth-first traversal of edges in a graph.
+
+"""
+
+from collections import deque
+
+import networkx as nx
+
+FORWARD = "forward"
+REVERSE = "reverse"
+
+__all__ = ["edge_bfs"]
+
+
+@nx._dispatchable
+def edge_bfs(G, source=None, orientation=None):
+    """A directed, breadth-first-search of edges in `G`, beginning at `source`.
+
+    Yield the edges of G in a breadth-first-search order continuing until
+    all edges are generated.
+
+    Parameters
+    ----------
+    G : graph
+        A directed/undirected graph/multigraph.
+
+    source : node, list of nodes
+        The node from which the traversal begins. If None, then a source
+        is chosen arbitrarily and repeatedly until all edges from each node in
+        the graph are searched.
+
+    orientation : None | 'original' | 'reverse' | 'ignore' (default: None)
+        For directed graphs and directed multigraphs, edge traversals need not
+        respect the original orientation of the edges.
+        When set to 'reverse' every edge is traversed in the reverse direction.
+        When set to 'ignore', every edge is treated as undirected.
+        When set to 'original', every edge is treated as directed.
+        In all three cases, the yielded edge tuples add a last entry to
+        indicate the direction in which that edge was traversed.
+        If orientation is None, the yielded edge has no direction indicated.
+        The direction is respected, but not reported.
+
+    Yields
+    ------
+    edge : directed edge
+        A directed edge indicating the path taken by the breadth-first-search.
+        For graphs, `edge` is of the form `(u, v)` where `u` and `v`
+        are the tail and head of the edge as determined by the traversal.
+        For multigraphs, `edge` is of the form `(u, v, key)`, where `key` is
+        the key of the edge. When the graph is directed, then `u` and `v`
+        are always in the order of the actual directed edge.
+        If orientation is not None then the edge tuple is extended to include
+        the direction of traversal ('forward' or 'reverse') on that edge.
+
+    Examples
+    --------
+    >>> nodes = [0, 1, 2, 3]
+    >>> edges = [(0, 1), (1, 0), (1, 0), (2, 0), (2, 1), (3, 1)]
+
+    >>> list(nx.edge_bfs(nx.Graph(edges), nodes))
+    [(0, 1), (0, 2), (1, 2), (1, 3)]
+
+    >>> list(nx.edge_bfs(nx.DiGraph(edges), nodes))
+    [(0, 1), (1, 0), (2, 0), (2, 1), (3, 1)]
+
+    >>> list(nx.edge_bfs(nx.MultiGraph(edges), nodes))
+    [(0, 1, 0), (0, 1, 1), (0, 1, 2), (0, 2, 0), (1, 2, 0), (1, 3, 0)]
+
+    >>> list(nx.edge_bfs(nx.MultiDiGraph(edges), nodes))
+    [(0, 1, 0), (1, 0, 0), (1, 0, 1), (2, 0, 0), (2, 1, 0), (3, 1, 0)]
+
+    >>> list(nx.edge_bfs(nx.DiGraph(edges), nodes, orientation="ignore"))
+    [(0, 1, 'forward'), (1, 0, 'reverse'), (2, 0, 'reverse'), (2, 1, 'reverse'), (3, 1, 'reverse')]
+
+    >>> list(nx.edge_bfs(nx.MultiDiGraph(edges), nodes, orientation="ignore"))
+    [(0, 1, 0, 'forward'), (1, 0, 0, 'reverse'), (1, 0, 1, 'reverse'), (2, 0, 0, 'reverse'), (2, 1, 0, 'reverse'), (3, 1, 0, 'reverse')]
+
+    Notes
+    -----
+    The goal of this function is to visit edges. It differs from the more
+    familiar breadth-first-search of nodes, as provided by
+    :func:`networkx.algorithms.traversal.breadth_first_search.bfs_edges`, in
+    that it does not stop once every node has been visited. In a directed graph
+    with edges [(0, 1), (1, 2), (2, 1)], the edge (2, 1) would not be visited
+    if not for the functionality provided by this function.
+
+    The naming of this function is very similar to bfs_edges. The difference
+    is that 'edge_bfs' yields edges even if they extend back to an already
+    explored node while 'bfs_edges' yields the edges of the tree that results
+    from a breadth-first-search (BFS) so no edges are reported if they extend
+    to already explored nodes. That means 'edge_bfs' reports all edges while
+    'bfs_edges' only report those traversed by a node-based BFS. Yet another
+    description is that 'bfs_edges' reports the edges traversed during BFS
+    while 'edge_bfs' reports all edges in the order they are explored.
+
+    See Also
+    --------
+    bfs_edges
+    bfs_tree
+    edge_dfs
+
+    """
+    nodes = list(G.nbunch_iter(source))
+    if not nodes:
+        return
+
+    directed = G.is_directed()
+    kwds = {"data": False}
+    if G.is_multigraph() is True:
+        kwds["keys"] = True
+
+    # set up edge lookup
+    if orientation is None:
+
+        def edges_from(node):
+            return iter(G.edges(node, **kwds))
+
+    elif not directed or orientation == "original":
+
+        def edges_from(node):
+            for e in G.edges(node, **kwds):
+                yield e + (FORWARD,)
+
+    elif orientation == "reverse":
+
+        def edges_from(node):
+            for e in G.in_edges(node, **kwds):
+                yield e + (REVERSE,)
+
+    elif orientation == "ignore":
+
+        def edges_from(node):
+            for e in G.edges(node, **kwds):
+                yield e + (FORWARD,)
+            for e in G.in_edges(node, **kwds):
+                yield e + (REVERSE,)
+
+    else:
+        raise nx.NetworkXError("invalid orientation argument.")
+
+    if directed:
+        neighbors = G.successors
+
+        def edge_id(edge):
+            # remove direction indicator
+            return edge[:-1] if orientation is not None else edge
+
+    else:
+        neighbors = G.neighbors
+
+        def edge_id(edge):
+            return (frozenset(edge[:2]),) + edge[2:]
+
+    check_reverse = directed and orientation in ("reverse", "ignore")
+
+    # start BFS
+    visited_nodes = set(nodes)
+    visited_edges = set()
+    queue = deque([(n, edges_from(n)) for n in nodes])
+    while queue:
+        parent, children_edges = queue.popleft()
+        for edge in children_edges:
+            if check_reverse and edge[-1] == REVERSE:
+                child = edge[0]
+            else:
+                child = edge[1]
+            if child not in visited_nodes:
+                visited_nodes.add(child)
+                queue.append((child, edges_from(child)))
+            edgeid = edge_id(edge)
+            if edgeid not in visited_edges:
+                visited_edges.add(edgeid)
+                yield edge
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgedfs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgedfs.py
new file mode 100644
index 00000000..8f657f39
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgedfs.py
@@ -0,0 +1,176 @@
+"""
+===========================
+Depth First Search on Edges
+===========================
+
+Algorithms for a depth-first traversal of edges in a graph.
+
+"""
+
+import networkx as nx
+
+FORWARD = "forward"
+REVERSE = "reverse"
+
+__all__ = ["edge_dfs"]
+
+
+@nx._dispatchable
+def edge_dfs(G, source=None, orientation=None):
+    """A directed, depth-first-search of edges in `G`, beginning at `source`.
+
+    Yield the edges of G in a depth-first-search order continuing until
+    all edges are generated.
+
+    Parameters
+    ----------
+    G : graph
+        A directed/undirected graph/multigraph.
+
+    source : node, list of nodes
+        The node from which the traversal begins. If None, then a source
+        is chosen arbitrarily and repeatedly until all edges from each node in
+        the graph are searched.
+
+    orientation : None | 'original' | 'reverse' | 'ignore' (default: None)
+        For directed graphs and directed multigraphs, edge traversals need not
+        respect the original orientation of the edges.
+        When set to 'reverse' every edge is traversed in the reverse direction.
+        When set to 'ignore', every edge is treated as undirected.
+        When set to 'original', every edge is treated as directed.
+        In all three cases, the yielded edge tuples add a last entry to
+        indicate the direction in which that edge was traversed.
+        If orientation is None, the yielded edge has no direction indicated.
+        The direction is respected, but not reported.
+
+    Yields
+    ------
+    edge : directed edge
+        A directed edge indicating the path taken by the depth-first traversal.
+        For graphs, `edge` is of the form `(u, v)` where `u` and `v`
+        are the tail and head of the edge as determined by the traversal.
+        For multigraphs, `edge` is of the form `(u, v, key)`, where `key` is
+        the key of the edge. When the graph is directed, then `u` and `v`
+        are always in the order of the actual directed edge.
+        If orientation is not None then the edge tuple is extended to include
+        the direction of traversal ('forward' or 'reverse') on that edge.
+
+    Examples
+    --------
+    >>> nodes = [0, 1, 2, 3]
+    >>> edges = [(0, 1), (1, 0), (1, 0), (2, 1), (3, 1)]
+
+    >>> list(nx.edge_dfs(nx.Graph(edges), nodes))
+    [(0, 1), (1, 2), (1, 3)]
+
+    >>> list(nx.edge_dfs(nx.DiGraph(edges), nodes))
+    [(0, 1), (1, 0), (2, 1), (3, 1)]
+
+    >>> list(nx.edge_dfs(nx.MultiGraph(edges), nodes))
+    [(0, 1, 0), (1, 0, 1), (0, 1, 2), (1, 2, 0), (1, 3, 0)]
+
+    >>> list(nx.edge_dfs(nx.MultiDiGraph(edges), nodes))
+    [(0, 1, 0), (1, 0, 0), (1, 0, 1), (2, 1, 0), (3, 1, 0)]
+
+    >>> list(nx.edge_dfs(nx.DiGraph(edges), nodes, orientation="ignore"))
+    [(0, 1, 'forward'), (1, 0, 'forward'), (2, 1, 'reverse'), (3, 1, 'reverse')]
+
+    >>> list(nx.edge_dfs(nx.MultiDiGraph(edges), nodes, orientation="ignore"))
+    [(0, 1, 0, 'forward'), (1, 0, 0, 'forward'), (1, 0, 1, 'reverse'), (2, 1, 0, 'reverse'), (3, 1, 0, 'reverse')]
+
+    Notes
+    -----
+    The goal of this function is to visit edges. It differs from the more
+    familiar depth-first traversal of nodes, as provided by
+    :func:`~networkx.algorithms.traversal.depth_first_search.dfs_edges`, in
+    that it does not stop once every node has been visited. In a directed graph
+    with edges [(0, 1), (1, 2), (2, 1)], the edge (2, 1) would not be visited
+    if not for the functionality provided by this function.
+
+    See Also
+    --------
+    :func:`~networkx.algorithms.traversal.depth_first_search.dfs_edges`
+
+    """
+    nodes = list(G.nbunch_iter(source))
+    if not nodes:
+        return
+
+    directed = G.is_directed()
+    kwds = {"data": False}
+    if G.is_multigraph() is True:
+        kwds["keys"] = True
+
+    # set up edge lookup
+    if orientation is None:
+
+        def edges_from(node):
+            return iter(G.edges(node, **kwds))
+
+    elif not directed or orientation == "original":
+
+        def edges_from(node):
+            for e in G.edges(node, **kwds):
+                yield e + (FORWARD,)
+
+    elif orientation == "reverse":
+
+        def edges_from(node):
+            for e in G.in_edges(node, **kwds):
+                yield e + (REVERSE,)
+
+    elif orientation == "ignore":
+
+        def edges_from(node):
+            for e in G.edges(node, **kwds):
+                yield e + (FORWARD,)
+            for e in G.in_edges(node, **kwds):
+                yield e + (REVERSE,)
+
+    else:
+        raise nx.NetworkXError("invalid orientation argument.")
+
+    # set up formation of edge_id to easily look up if edge already returned
+    if directed:
+
+        def edge_id(edge):
+            # remove direction indicator
+            return edge[:-1] if orientation is not None else edge
+
+    else:
+
+        def edge_id(edge):
+            # single id for undirected requires frozenset on nodes
+            return (frozenset(edge[:2]),) + edge[2:]
+
+    # Basic setup
+    check_reverse = directed and orientation in ("reverse", "ignore")
+
+    visited_edges = set()
+    visited_nodes = set()
+    edges = {}
+
+    # start DFS
+    for start_node in nodes:
+        stack = [start_node]
+        while stack:
+            current_node = stack[-1]
+            if current_node not in visited_nodes:
+                edges[current_node] = edges_from(current_node)
+                visited_nodes.add(current_node)
+
+            try:
+                edge = next(edges[current_node])
+            except StopIteration:
+                # No more edges from the current node.
+                stack.pop()
+            else:
+                edgeid = edge_id(edge)
+                if edgeid not in visited_edges:
+                    visited_edges.add(edgeid)
+                    # Mark the traversed "to" node as to-be-explored.
+                    if check_reverse and edge[-1] == REVERSE:
+                        stack.append(edge[0])
+                    else:
+                        stack.append(edge[1])
+                    yield edge
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_beamsearch.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_beamsearch.py
new file mode 100644
index 00000000..049f116b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_beamsearch.py
@@ -0,0 +1,25 @@
+"""Unit tests for the beam search functions."""
+
+import pytest
+
+import networkx as nx
+
+
+def test_narrow():
+    """Tests that a narrow beam width may cause an incomplete search."""
+    # In this search, we enqueue only the neighbor 3 at the first
+    # step, then only the neighbor 2 at the second step. Once at
+    # node 2, the search chooses node 3, since it has a higher value
+    # than node 1, but node 3 has already been visited, so the
+    # search terminates.
+    G = nx.cycle_graph(4)
+    edges = nx.bfs_beam_edges(G, source=0, value=lambda n: n, width=1)
+    assert list(edges) == [(0, 3), (3, 2)]
+
+
+@pytest.mark.parametrize("width", (2, None))
+def test_wide(width):
+    """All nodes are searched when `width` is None or >= max degree"""
+    G = nx.cycle_graph(4)
+    edges = nx.bfs_beam_edges(G, source=0, value=lambda n: n, width=width)
+    assert list(edges) == [(0, 3), (0, 1), (3, 2)]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_bfs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_bfs.py
new file mode 100644
index 00000000..fcfbbc68
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_bfs.py
@@ -0,0 +1,203 @@
+from functools import partial
+
+import pytest
+
+import networkx as nx
+
+
+class TestBFS:
+    @classmethod
+    def setup_class(cls):
+        # simple graph
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4)])
+        cls.G = G
+
+    def test_successor(self):
+        assert dict(nx.bfs_successors(self.G, source=0)) == {0: [1], 1: [2, 3], 2: [4]}
+
+    def test_predecessor(self):
+        assert dict(nx.bfs_predecessors(self.G, source=0)) == {1: 0, 2: 1, 3: 1, 4: 2}
+
+    def test_bfs_tree(self):
+        T = nx.bfs_tree(self.G, source=0)
+        assert sorted(T.nodes()) == sorted(self.G.nodes())
+        assert sorted(T.edges()) == [(0, 1), (1, 2), (1, 3), (2, 4)]
+
+    def test_bfs_edges(self):
+        edges = nx.bfs_edges(self.G, source=0)
+        assert list(edges) == [(0, 1), (1, 2), (1, 3), (2, 4)]
+
+    def test_bfs_edges_reverse(self):
+        D = nx.DiGraph()
+        D.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4)])
+        edges = nx.bfs_edges(D, source=4, reverse=True)
+        assert list(edges) == [(4, 2), (4, 3), (2, 1), (1, 0)]
+
+    def test_bfs_edges_sorting(self):
+        D = nx.DiGraph()
+        D.add_edges_from([(0, 1), (0, 2), (1, 4), (1, 3), (2, 5)])
+        sort_desc = partial(sorted, reverse=True)
+        edges_asc = nx.bfs_edges(D, source=0, sort_neighbors=sorted)
+        edges_desc = nx.bfs_edges(D, source=0, sort_neighbors=sort_desc)
+        assert list(edges_asc) == [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5)]
+        assert list(edges_desc) == [(0, 2), (0, 1), (2, 5), (1, 4), (1, 3)]
+
+    def test_bfs_tree_isolates(self):
+        G = nx.Graph()
+        G.add_node(1)
+        G.add_node(2)
+        T = nx.bfs_tree(G, source=1)
+        assert sorted(T.nodes()) == [1]
+        assert sorted(T.edges()) == []
+
+    def test_bfs_layers(self):
+        expected = {
+            0: [0],
+            1: [1],
+            2: [2, 3],
+            3: [4],
+        }
+        assert dict(enumerate(nx.bfs_layers(self.G, sources=[0]))) == expected
+        assert dict(enumerate(nx.bfs_layers(self.G, sources=0))) == expected
+
+    def test_bfs_layers_missing_source(self):
+        with pytest.raises(nx.NetworkXError):
+            next(nx.bfs_layers(self.G, sources="abc"))
+        with pytest.raises(nx.NetworkXError):
+            next(nx.bfs_layers(self.G, sources=["abc"]))
+
+    def test_descendants_at_distance(self):
+        for distance, descendants in enumerate([{0}, {1}, {2, 3}, {4}]):
+            assert nx.descendants_at_distance(self.G, 0, distance) == descendants
+
+    def test_descendants_at_distance_missing_source(self):
+        with pytest.raises(nx.NetworkXError):
+            nx.descendants_at_distance(self.G, "abc", 0)
+
+    def test_bfs_labeled_edges_directed(self):
+        D = nx.cycle_graph(5, create_using=nx.DiGraph)
+        expected = [
+            (0, 1, "tree"),
+            (1, 2, "tree"),
+            (2, 3, "tree"),
+            (3, 4, "tree"),
+            (4, 0, "reverse"),
+        ]
+        answer = list(nx.bfs_labeled_edges(D, 0))
+        assert expected == answer
+
+        D.add_edge(4, 4)
+        expected.append((4, 4, "level"))
+        answer = list(nx.bfs_labeled_edges(D, 0))
+        assert expected == answer
+
+        D.add_edge(0, 2)
+        D.add_edge(1, 5)
+        D.add_edge(2, 5)
+        D.remove_edge(4, 4)
+        expected = [
+            (0, 1, "tree"),
+            (0, 2, "tree"),
+            (1, 2, "level"),
+            (1, 5, "tree"),
+            (2, 3, "tree"),
+            (2, 5, "forward"),
+            (3, 4, "tree"),
+            (4, 0, "reverse"),
+        ]
+        answer = list(nx.bfs_labeled_edges(D, 0))
+        assert expected == answer
+
+        G = D.to_undirected()
+        G.add_edge(4, 4)
+        expected = [
+            (0, 1, "tree"),
+            (0, 2, "tree"),
+            (0, 4, "tree"),
+            (1, 2, "level"),
+            (1, 5, "tree"),
+            (2, 3, "tree"),
+            (2, 5, "forward"),
+            (4, 3, "forward"),
+            (4, 4, "level"),
+        ]
+        answer = list(nx.bfs_labeled_edges(G, 0))
+        assert expected == answer
+
+
+class TestBreadthLimitedSearch:
+    @classmethod
+    def setup_class(cls):
+        # a tree
+        G = nx.Graph()
+        nx.add_path(G, [0, 1, 2, 3, 4, 5, 6])
+        nx.add_path(G, [2, 7, 8, 9, 10])
+        cls.G = G
+        # a disconnected graph
+        D = nx.Graph()
+        D.add_edges_from([(0, 1), (2, 3)])
+        nx.add_path(D, [2, 7, 8, 9, 10])
+        cls.D = D
+
+    def test_limited_bfs_successor(self):
+        assert dict(nx.bfs_successors(self.G, source=1, depth_limit=3)) == {
+            1: [0, 2],
+            2: [3, 7],
+            3: [4],
+            7: [8],
+        }
+        result = {
+            n: sorted(s) for n, s in nx.bfs_successors(self.D, source=7, depth_limit=2)
+        }
+        assert result == {8: [9], 2: [3], 7: [2, 8]}
+
+    def test_limited_bfs_predecessor(self):
+        assert dict(nx.bfs_predecessors(self.G, source=1, depth_limit=3)) == {
+            0: 1,
+            2: 1,
+            3: 2,
+            4: 3,
+            7: 2,
+            8: 7,
+        }
+        assert dict(nx.bfs_predecessors(self.D, source=7, depth_limit=2)) == {
+            2: 7,
+            3: 2,
+            8: 7,
+            9: 8,
+        }
+
+    def test_limited_bfs_tree(self):
+        T = nx.bfs_tree(self.G, source=3, depth_limit=1)
+        assert sorted(T.edges()) == [(3, 2), (3, 4)]
+
+    def test_limited_bfs_edges(self):
+        edges = nx.bfs_edges(self.G, source=9, depth_limit=4)
+        assert list(edges) == [(9, 8), (9, 10), (8, 7), (7, 2), (2, 1), (2, 3)]
+
+    def test_limited_bfs_layers(self):
+        assert dict(enumerate(nx.bfs_layers(self.G, sources=[0]))) == {
+            0: [0],
+            1: [1],
+            2: [2],
+            3: [3, 7],
+            4: [4, 8],
+            5: [5, 9],
+            6: [6, 10],
+        }
+        assert dict(enumerate(nx.bfs_layers(self.D, sources=2))) == {
+            0: [2],
+            1: [3, 7],
+            2: [8],
+            3: [9],
+            4: [10],
+        }
+
+    def test_limited_descendants_at_distance(self):
+        for distance, descendants in enumerate(
+            [{0}, {1}, {2}, {3, 7}, {4, 8}, {5, 9}, {6, 10}]
+        ):
+            assert nx.descendants_at_distance(self.G, 0, distance) == descendants
+        for distance, descendants in enumerate([{2}, {3, 7}, {8}, {9}, {10}]):
+            assert nx.descendants_at_distance(self.D, 2, distance) == descendants
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_dfs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_dfs.py
new file mode 100644
index 00000000..e43d7d61
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_dfs.py
@@ -0,0 +1,305 @@
+import networkx as nx
+
+
+class TestDFS:
+    @classmethod
+    def setup_class(cls):
+        # simple graph
+        G = nx.Graph()
+        G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 0), (0, 4)])
+        cls.G = G
+        # simple graph, disconnected
+        D = nx.Graph()
+        D.add_edges_from([(0, 1), (2, 3)])
+        cls.D = D
+
+    def test_preorder_nodes(self):
+        assert list(nx.dfs_preorder_nodes(self.G, source=0)) == [0, 1, 2, 4, 3]
+        assert list(nx.dfs_preorder_nodes(self.D)) == [0, 1, 2, 3]
+        assert list(nx.dfs_preorder_nodes(self.D, source=2)) == [2, 3]
+
+    def test_postorder_nodes(self):
+        assert list(nx.dfs_postorder_nodes(self.G, source=0)) == [4, 2, 3, 1, 0]
+        assert list(nx.dfs_postorder_nodes(self.D)) == [1, 0, 3, 2]
+        assert list(nx.dfs_postorder_nodes(self.D, source=0)) == [1, 0]
+
+    def test_successor(self):
+        assert nx.dfs_successors(self.G, source=0) == {0: [1], 1: [2, 3], 2: [4]}
+        assert nx.dfs_successors(self.G, source=1) == {0: [3, 4], 1: [0], 4: [2]}
+        assert nx.dfs_successors(self.D) == {0: [1], 2: [3]}
+        assert nx.dfs_successors(self.D, source=1) == {1: [0]}
+
+    def test_predecessor(self):
+        assert nx.dfs_predecessors(self.G, source=0) == {1: 0, 2: 1, 3: 1, 4: 2}
+        assert nx.dfs_predecessors(self.D) == {1: 0, 3: 2}
+
+    def test_dfs_tree(self):
+        exp_nodes = sorted(self.G.nodes())
+        exp_edges = [(0, 1), (1, 2), (1, 3), (2, 4)]
+        # Search from first node
+        T = nx.dfs_tree(self.G, source=0)
+        assert sorted(T.nodes()) == exp_nodes
+        assert sorted(T.edges()) == exp_edges
+        # Check source=None
+        T = nx.dfs_tree(self.G, source=None)
+        assert sorted(T.nodes()) == exp_nodes
+        assert sorted(T.edges()) == exp_edges
+        # Check source=None is the default
+        T = nx.dfs_tree(self.G)
+        assert sorted(T.nodes()) == exp_nodes
+        assert sorted(T.edges()) == exp_edges
+
+    def test_dfs_edges(self):
+        edges = nx.dfs_edges(self.G, source=0)
+        assert list(edges) == [(0, 1), (1, 2), (2, 4), (1, 3)]
+        edges = nx.dfs_edges(self.D)
+        assert list(edges) == [(0, 1), (2, 3)]
+
+    def test_dfs_edges_sorting(self):
+        G = nx.Graph([(0, 1), (1, 2), (1, 3), (2, 4), (3, 0), (0, 4)])
+        edges_asc = nx.dfs_edges(G, source=0, sort_neighbors=sorted)
+        sorted_desc = lambda x: sorted(x, reverse=True)
+        edges_desc = nx.dfs_edges(G, source=0, sort_neighbors=sorted_desc)
+        assert list(edges_asc) == [(0, 1), (1, 2), (2, 4), (1, 3)]
+        assert list(edges_desc) == [(0, 4), (4, 2), (2, 1), (1, 3)]
+
+    def test_dfs_labeled_edges(self):
+        edges = list(nx.dfs_labeled_edges(self.G, source=0))
+        forward = [(u, v) for (u, v, d) in edges if d == "forward"]
+        assert forward == [(0, 0), (0, 1), (1, 2), (2, 4), (1, 3)]
+        assert edges == [
+            (0, 0, "forward"),
+            (0, 1, "forward"),
+            (1, 0, "nontree"),
+            (1, 2, "forward"),
+            (2, 1, "nontree"),
+            (2, 4, "forward"),
+            (4, 2, "nontree"),
+            (4, 0, "nontree"),
+            (2, 4, "reverse"),
+            (1, 2, "reverse"),
+            (1, 3, "forward"),
+            (3, 1, "nontree"),
+            (3, 0, "nontree"),
+            (1, 3, "reverse"),
+            (0, 1, "reverse"),
+            (0, 3, "nontree"),
+            (0, 4, "nontree"),
+            (0, 0, "reverse"),
+        ]
+
+    def test_dfs_labeled_edges_sorting(self):
+        G = nx.Graph([(0, 1), (1, 2), (1, 3), (2, 4), (3, 0), (0, 4)])
+        edges_asc = nx.dfs_labeled_edges(G, source=0, sort_neighbors=sorted)
+        sorted_desc = lambda x: sorted(x, reverse=True)
+        edges_desc = nx.dfs_labeled_edges(G, source=0, sort_neighbors=sorted_desc)
+        assert list(edges_asc) == [
+            (0, 0, "forward"),
+            (0, 1, "forward"),
+            (1, 0, "nontree"),
+            (1, 2, "forward"),
+            (2, 1, "nontree"),
+            (2, 4, "forward"),
+            (4, 0, "nontree"),
+            (4, 2, "nontree"),
+            (2, 4, "reverse"),
+            (1, 2, "reverse"),
+            (1, 3, "forward"),
+            (3, 0, "nontree"),
+            (3, 1, "nontree"),
+            (1, 3, "reverse"),
+            (0, 1, "reverse"),
+            (0, 3, "nontree"),
+            (0, 4, "nontree"),
+            (0, 0, "reverse"),
+        ]
+        assert list(edges_desc) == [
+            (0, 0, "forward"),
+            (0, 4, "forward"),
+            (4, 2, "forward"),
+            (2, 4, "nontree"),
+            (2, 1, "forward"),
+            (1, 3, "forward"),
+            (3, 1, "nontree"),
+            (3, 0, "nontree"),
+            (1, 3, "reverse"),
+            (1, 2, "nontree"),
+            (1, 0, "nontree"),
+            (2, 1, "reverse"),
+            (4, 2, "reverse"),
+            (4, 0, "nontree"),
+            (0, 4, "reverse"),
+            (0, 3, "nontree"),
+            (0, 1, "nontree"),
+            (0, 0, "reverse"),
+        ]
+
+    def test_dfs_labeled_disconnected_edges(self):
+        edges = list(nx.dfs_labeled_edges(self.D))
+        forward = [(u, v) for (u, v, d) in edges if d == "forward"]
+        assert forward == [(0, 0), (0, 1), (2, 2), (2, 3)]
+        assert edges == [
+            (0, 0, "forward"),
+            (0, 1, "forward"),
+            (1, 0, "nontree"),
+            (0, 1, "reverse"),
+            (0, 0, "reverse"),
+            (2, 2, "forward"),
+            (2, 3, "forward"),
+            (3, 2, "nontree"),
+            (2, 3, "reverse"),
+            (2, 2, "reverse"),
+        ]
+
+    def test_dfs_tree_isolates(self):
+        G = nx.Graph()
+        G.add_node(1)
+        G.add_node(2)
+        T = nx.dfs_tree(G, source=1)
+        assert sorted(T.nodes()) == [1]
+        assert sorted(T.edges()) == []
+        T = nx.dfs_tree(G, source=None)
+        assert sorted(T.nodes()) == [1, 2]
+        assert sorted(T.edges()) == []
+
+
+class TestDepthLimitedSearch:
+    @classmethod
+    def setup_class(cls):
+        # a tree
+        G = nx.Graph()
+        nx.add_path(G, [0, 1, 2, 3, 4, 5, 6])
+        nx.add_path(G, [2, 7, 8, 9, 10])
+        cls.G = G
+        # a disconnected graph
+        D = nx.Graph()
+        D.add_edges_from([(0, 1), (2, 3)])
+        nx.add_path(D, [2, 7, 8, 9, 10])
+        cls.D = D
+
+    def test_dls_preorder_nodes(self):
+        assert list(nx.dfs_preorder_nodes(self.G, source=0, depth_limit=2)) == [0, 1, 2]
+        assert list(nx.dfs_preorder_nodes(self.D, source=1, depth_limit=2)) == ([1, 0])
+
+    def test_dls_postorder_nodes(self):
+        assert list(nx.dfs_postorder_nodes(self.G, source=3, depth_limit=3)) == [
+            1,
+            7,
+            2,
+            5,
+            4,
+            3,
+        ]
+        assert list(nx.dfs_postorder_nodes(self.D, source=2, depth_limit=2)) == (
+            [3, 7, 2]
+        )
+
+    def test_dls_successor(self):
+        result = nx.dfs_successors(self.G, source=4, depth_limit=3)
+        assert {n: set(v) for n, v in result.items()} == {
+            2: {1, 7},
+            3: {2},
+            4: {3, 5},
+            5: {6},
+        }
+        result = nx.dfs_successors(self.D, source=7, depth_limit=2)
+        assert {n: set(v) for n, v in result.items()} == {8: {9}, 2: {3}, 7: {8, 2}}
+
+    def test_dls_predecessor(self):
+        assert nx.dfs_predecessors(self.G, source=0, depth_limit=3) == {
+            1: 0,
+            2: 1,
+            3: 2,
+            7: 2,
+        }
+        assert nx.dfs_predecessors(self.D, source=2, depth_limit=3) == {
+            8: 7,
+            9: 8,
+            3: 2,
+            7: 2,
+        }
+
+    def test_dls_tree(self):
+        T = nx.dfs_tree(self.G, source=3, depth_limit=1)
+        assert sorted(T.edges()) == [(3, 2), (3, 4)]
+
+    def test_dls_edges(self):
+        edges = nx.dfs_edges(self.G, source=9, depth_limit=4)
+        assert list(edges) == [(9, 8), (8, 7), (7, 2), (2, 1), (2, 3), (9, 10)]
+
+    def test_dls_labeled_edges_depth_1(self):
+        edges = list(nx.dfs_labeled_edges(self.G, source=5, depth_limit=1))
+        forward = [(u, v) for (u, v, d) in edges if d == "forward"]
+        assert forward == [(5, 5), (5, 4), (5, 6)]
+        # Note: reverse-depth_limit edge types were not reported before gh-6240
+        assert edges == [
+            (5, 5, "forward"),
+            (5, 4, "forward"),
+            (5, 4, "reverse-depth_limit"),
+            (5, 6, "forward"),
+            (5, 6, "reverse-depth_limit"),
+            (5, 5, "reverse"),
+        ]
+
+    def test_dls_labeled_edges_depth_2(self):
+        edges = list(nx.dfs_labeled_edges(self.G, source=6, depth_limit=2))
+        forward = [(u, v) for (u, v, d) in edges if d == "forward"]
+        assert forward == [(6, 6), (6, 5), (5, 4)]
+        assert edges == [
+            (6, 6, "forward"),
+            (6, 5, "forward"),
+            (5, 4, "forward"),
+            (5, 4, "reverse-depth_limit"),
+            (5, 6, "nontree"),
+            (6, 5, "reverse"),
+            (6, 6, "reverse"),
+        ]
+
+    def test_dls_labeled_disconnected_edges(self):
+        edges = list(nx.dfs_labeled_edges(self.D, depth_limit=1))
+        assert edges == [
+            (0, 0, "forward"),
+            (0, 1, "forward"),
+            (0, 1, "reverse-depth_limit"),
+            (0, 0, "reverse"),
+            (2, 2, "forward"),
+            (2, 3, "forward"),
+            (2, 3, "reverse-depth_limit"),
+            (2, 7, "forward"),
+            (2, 7, "reverse-depth_limit"),
+            (2, 2, "reverse"),
+            (8, 8, "forward"),
+            (8, 7, "nontree"),
+            (8, 9, "forward"),
+            (8, 9, "reverse-depth_limit"),
+            (8, 8, "reverse"),
+            (10, 10, "forward"),
+            (10, 9, "nontree"),
+            (10, 10, "reverse"),
+        ]
+        # large depth_limit has no impact
+        edges = list(nx.dfs_labeled_edges(self.D, depth_limit=19))
+        assert edges == [
+            (0, 0, "forward"),
+            (0, 1, "forward"),
+            (1, 0, "nontree"),
+            (0, 1, "reverse"),
+            (0, 0, "reverse"),
+            (2, 2, "forward"),
+            (2, 3, "forward"),
+            (3, 2, "nontree"),
+            (2, 3, "reverse"),
+            (2, 7, "forward"),
+            (7, 2, "nontree"),
+            (7, 8, "forward"),
+            (8, 7, "nontree"),
+            (8, 9, "forward"),
+            (9, 8, "nontree"),
+            (9, 10, "forward"),
+            (10, 9, "nontree"),
+            (9, 10, "reverse"),
+            (8, 9, "reverse"),
+            (7, 8, "reverse"),
+            (2, 7, "reverse"),
+            (2, 2, "reverse"),
+        ]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgebfs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgebfs.py
new file mode 100644
index 00000000..1bf3fae0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgebfs.py
@@ -0,0 +1,147 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms.traversal.edgedfs import FORWARD, REVERSE
+
+
+class TestEdgeBFS:
+    @classmethod
+    def setup_class(cls):
+        cls.nodes = [0, 1, 2, 3]
+        cls.edges = [(0, 1), (1, 0), (1, 0), (2, 0), (2, 1), (3, 1)]
+
+    def test_empty(self):
+        G = nx.Graph()
+        edges = list(nx.edge_bfs(G))
+        assert edges == []
+
+    def test_graph_single_source(self):
+        G = nx.Graph(self.edges)
+        G.add_edge(4, 5)
+        x = list(nx.edge_bfs(G, [0]))
+        x_ = [(0, 1), (0, 2), (1, 2), (1, 3)]
+        assert x == x_
+
+    def test_graph(self):
+        G = nx.Graph(self.edges)
+        x = list(nx.edge_bfs(G, self.nodes))
+        x_ = [(0, 1), (0, 2), (1, 2), (1, 3)]
+        assert x == x_
+
+    def test_digraph(self):
+        G = nx.DiGraph(self.edges)
+        x = list(nx.edge_bfs(G, self.nodes))
+        x_ = [(0, 1), (1, 0), (2, 0), (2, 1), (3, 1)]
+        assert x == x_
+
+    def test_digraph_orientation_invalid(self):
+        G = nx.DiGraph(self.edges)
+        edge_iterator = nx.edge_bfs(G, self.nodes, orientation="hello")
+        pytest.raises(nx.NetworkXError, list, edge_iterator)
+
+    def test_digraph_orientation_none(self):
+        G = nx.DiGraph(self.edges)
+        x = list(nx.edge_bfs(G, self.nodes, orientation=None))
+        x_ = [(0, 1), (1, 0), (2, 0), (2, 1), (3, 1)]
+        assert x == x_
+
+    def test_digraph_orientation_original(self):
+        G = nx.DiGraph(self.edges)
+        x = list(nx.edge_bfs(G, self.nodes, orientation="original"))
+        x_ = [
+            (0, 1, FORWARD),
+            (1, 0, FORWARD),
+            (2, 0, FORWARD),
+            (2, 1, FORWARD),
+            (3, 1, FORWARD),
+        ]
+        assert x == x_
+
+    def test_digraph2(self):
+        G = nx.DiGraph()
+        nx.add_path(G, range(4))
+        x = list(nx.edge_bfs(G, [0]))
+        x_ = [(0, 1), (1, 2), (2, 3)]
+        assert x == x_
+
+    def test_digraph_rev(self):
+        G = nx.DiGraph(self.edges)
+        x = list(nx.edge_bfs(G, self.nodes, orientation="reverse"))
+        x_ = [
+            (1, 0, REVERSE),
+            (2, 0, REVERSE),
+            (0, 1, REVERSE),
+            (2, 1, REVERSE),
+            (3, 1, REVERSE),
+        ]
+        assert x == x_
+
+    def test_digraph_rev2(self):
+        G = nx.DiGraph()
+        nx.add_path(G, range(4))
+        x = list(nx.edge_bfs(G, [3], orientation="reverse"))
+        x_ = [(2, 3, REVERSE), (1, 2, REVERSE), (0, 1, REVERSE)]
+        assert x == x_
+
+    def test_multigraph(self):
+        G = nx.MultiGraph(self.edges)
+        x = list(nx.edge_bfs(G, self.nodes))
+        x_ = [(0, 1, 0), (0, 1, 1), (0, 1, 2), (0, 2, 0), (1, 2, 0), (1, 3, 0)]
+        # This is an example of where hash randomization can break.
+        # There are 3! * 2 alternative outputs, such as:
+        #    [(0, 1, 1), (1, 0, 0), (0, 1, 2), (1, 3, 0), (1, 2, 0)]
+        # But note, the edges (1,2,0) and (1,3,0) always follow the (0,1,k)
+        # edges. So the algorithm only guarantees a partial order. A total
+        # order is guaranteed only if the graph data structures are ordered.
+        assert x == x_
+
+    def test_multidigraph(self):
+        G = nx.MultiDiGraph(self.edges)
+        x = list(nx.edge_bfs(G, self.nodes))
+        x_ = [(0, 1, 0), (1, 0, 0), (1, 0, 1), (2, 0, 0), (2, 1, 0), (3, 1, 0)]
+        assert x == x_
+
+    def test_multidigraph_rev(self):
+        G = nx.MultiDiGraph(self.edges)
+        x = list(nx.edge_bfs(G, self.nodes, orientation="reverse"))
+        x_ = [
+            (1, 0, 0, REVERSE),
+            (1, 0, 1, REVERSE),
+            (2, 0, 0, REVERSE),
+            (0, 1, 0, REVERSE),
+            (2, 1, 0, REVERSE),
+            (3, 1, 0, REVERSE),
+        ]
+        assert x == x_
+
+    def test_digraph_ignore(self):
+        G = nx.DiGraph(self.edges)
+        x = list(nx.edge_bfs(G, self.nodes, orientation="ignore"))
+        x_ = [
+            (0, 1, FORWARD),
+            (1, 0, REVERSE),
+            (2, 0, REVERSE),
+            (2, 1, REVERSE),
+            (3, 1, REVERSE),
+        ]
+        assert x == x_
+
+    def test_digraph_ignore2(self):
+        G = nx.DiGraph()
+        nx.add_path(G, range(4))
+        x = list(nx.edge_bfs(G, [0], orientation="ignore"))
+        x_ = [(0, 1, FORWARD), (1, 2, FORWARD), (2, 3, FORWARD)]
+        assert x == x_
+
+    def test_multidigraph_ignore(self):
+        G = nx.MultiDiGraph(self.edges)
+        x = list(nx.edge_bfs(G, self.nodes, orientation="ignore"))
+        x_ = [
+            (0, 1, 0, FORWARD),
+            (1, 0, 0, REVERSE),
+            (1, 0, 1, REVERSE),
+            (2, 0, 0, REVERSE),
+            (2, 1, 0, REVERSE),
+            (3, 1, 0, REVERSE),
+        ]
+        assert x == x_
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgedfs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgedfs.py
new file mode 100644
index 00000000..7c1967cc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgedfs.py
@@ -0,0 +1,131 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms import edge_dfs
+from networkx.algorithms.traversal.edgedfs import FORWARD, REVERSE
+
+# These tests can fail with hash randomization. The easiest and clearest way
+# to write these unit tests is for the edges to be output in an expected total
+# order, but we cannot guarantee the order amongst outgoing edges from a node,
+# unless each class uses an ordered data structure for neighbors. This is
+# painful to do with the current API. The alternative is that the tests are
+# written (IMO confusingly) so that there is not a total order over the edges,
+# but only a partial order. Due to the small size of the graphs, hopefully
+# failures due to hash randomization will not occur. For an example of how
+# this can fail, see TestEdgeDFS.test_multigraph.
+
+
+class TestEdgeDFS:
+    @classmethod
+    def setup_class(cls):
+        cls.nodes = [0, 1, 2, 3]
+        cls.edges = [(0, 1), (1, 0), (1, 0), (2, 1), (3, 1)]
+
+    def test_empty(self):
+        G = nx.Graph()
+        edges = list(edge_dfs(G))
+        assert edges == []
+
+    def test_graph(self):
+        G = nx.Graph(self.edges)
+        x = list(edge_dfs(G, self.nodes))
+        x_ = [(0, 1), (1, 2), (1, 3)]
+        assert x == x_
+
+    def test_digraph(self):
+        G = nx.DiGraph(self.edges)
+        x = list(edge_dfs(G, self.nodes))
+        x_ = [(0, 1), (1, 0), (2, 1), (3, 1)]
+        assert x == x_
+
+    def test_digraph_orientation_invalid(self):
+        G = nx.DiGraph(self.edges)
+        edge_iterator = edge_dfs(G, self.nodes, orientation="hello")
+        pytest.raises(nx.NetworkXError, list, edge_iterator)
+
+    def test_digraph_orientation_none(self):
+        G = nx.DiGraph(self.edges)
+        x = list(edge_dfs(G, self.nodes, orientation=None))
+        x_ = [(0, 1), (1, 0), (2, 1), (3, 1)]
+        assert x == x_
+
+    def test_digraph_orientation_original(self):
+        G = nx.DiGraph(self.edges)
+        x = list(edge_dfs(G, self.nodes, orientation="original"))
+        x_ = [(0, 1, FORWARD), (1, 0, FORWARD), (2, 1, FORWARD), (3, 1, FORWARD)]
+        assert x == x_
+
+    def test_digraph2(self):
+        G = nx.DiGraph()
+        nx.add_path(G, range(4))
+        x = list(edge_dfs(G, [0]))
+        x_ = [(0, 1), (1, 2), (2, 3)]
+        assert x == x_
+
+    def test_digraph_rev(self):
+        G = nx.DiGraph(self.edges)
+        x = list(edge_dfs(G, self.nodes, orientation="reverse"))
+        x_ = [(1, 0, REVERSE), (0, 1, REVERSE), (2, 1, REVERSE), (3, 1, REVERSE)]
+        assert x == x_
+
+    def test_digraph_rev2(self):
+        G = nx.DiGraph()
+        nx.add_path(G, range(4))
+        x = list(edge_dfs(G, [3], orientation="reverse"))
+        x_ = [(2, 3, REVERSE), (1, 2, REVERSE), (0, 1, REVERSE)]
+        assert x == x_
+
+    def test_multigraph(self):
+        G = nx.MultiGraph(self.edges)
+        x = list(edge_dfs(G, self.nodes))
+        x_ = [(0, 1, 0), (1, 0, 1), (0, 1, 2), (1, 2, 0), (1, 3, 0)]
+        # This is an example of where hash randomization can break.
+        # There are 3! * 2 alternative outputs, such as:
+        #    [(0, 1, 1), (1, 0, 0), (0, 1, 2), (1, 3, 0), (1, 2, 0)]
+        # But note, the edges (1,2,0) and (1,3,0) always follow the (0,1,k)
+        # edges. So the algorithm only guarantees a partial order. A total
+        # order is guaranteed only if the graph data structures are ordered.
+        assert x == x_
+
+    def test_multidigraph(self):
+        G = nx.MultiDiGraph(self.edges)
+        x = list(edge_dfs(G, self.nodes))
+        x_ = [(0, 1, 0), (1, 0, 0), (1, 0, 1), (2, 1, 0), (3, 1, 0)]
+        assert x == x_
+
+    def test_multidigraph_rev(self):
+        G = nx.MultiDiGraph(self.edges)
+        x = list(edge_dfs(G, self.nodes, orientation="reverse"))
+        x_ = [
+            (1, 0, 0, REVERSE),
+            (0, 1, 0, REVERSE),
+            (1, 0, 1, REVERSE),
+            (2, 1, 0, REVERSE),
+            (3, 1, 0, REVERSE),
+        ]
+        assert x == x_
+
+    def test_digraph_ignore(self):
+        G = nx.DiGraph(self.edges)
+        x = list(edge_dfs(G, self.nodes, orientation="ignore"))
+        x_ = [(0, 1, FORWARD), (1, 0, FORWARD), (2, 1, REVERSE), (3, 1, REVERSE)]
+        assert x == x_
+
+    def test_digraph_ignore2(self):
+        G = nx.DiGraph()
+        nx.add_path(G, range(4))
+        x = list(edge_dfs(G, [0], orientation="ignore"))
+        x_ = [(0, 1, FORWARD), (1, 2, FORWARD), (2, 3, FORWARD)]
+        assert x == x_
+
+    def test_multidigraph_ignore(self):
+        G = nx.MultiDiGraph(self.edges)
+        x = list(edge_dfs(G, self.nodes, orientation="ignore"))
+        x_ = [
+            (0, 1, 0, FORWARD),
+            (1, 0, 0, FORWARD),
+            (1, 0, 1, REVERSE),
+            (2, 1, 0, REVERSE),
+            (3, 1, 0, REVERSE),
+        ]
+        assert x == x_
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__init__.py
new file mode 100644
index 00000000..7120d4bc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__init__.py
@@ -0,0 +1,6 @@
+from .branchings import *
+from .coding import *
+from .mst import *
+from .recognition import *
+from .operations import *
+from .decomposition import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/branchings.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/branchings.py
new file mode 100644
index 00000000..cc9c7cf1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/branchings.py
@@ -0,0 +1,1042 @@
+"""
+Algorithms for finding optimum branchings and spanning arborescences.
+
+This implementation is based on:
+
+    J. Edmonds, Optimum branchings, J. Res. Natl. Bur. Standards 71B (1967),
+    233–240. URL: http://archive.org/details/jresv71Bn4p233
+
+"""
+
+# TODO: Implement method from Gabow, Galil, Spence and Tarjan:
+#
+# @article{
+#    year={1986},
+#    issn={0209-9683},
+#    journal={Combinatorica},
+#    volume={6},
+#    number={2},
+#    doi={10.1007/BF02579168},
+#    title={Efficient algorithms for finding minimum spanning trees in
+#        undirected and directed graphs},
+#    url={https://doi.org/10.1007/BF02579168},
+#    publisher={Springer-Verlag},
+#    keywords={68 B 15; 68 C 05},
+#    author={Gabow, Harold N. and Galil, Zvi and Spencer, Thomas and Tarjan,
+#        Robert E.},
+#    pages={109-122},
+#    language={English}
+# }
+import string
+from dataclasses import dataclass, field
+from operator import itemgetter
+from queue import PriorityQueue
+
+import networkx as nx
+from networkx.utils import py_random_state
+
+from .recognition import is_arborescence, is_branching
+
+__all__ = [
+    "branching_weight",
+    "greedy_branching",
+    "maximum_branching",
+    "minimum_branching",
+    "minimal_branching",
+    "maximum_spanning_arborescence",
+    "minimum_spanning_arborescence",
+    "ArborescenceIterator",
+]
+
+# Valid values for the ``kind`` argument of :func:`greedy_branching`.
+KINDS = {"max", "min"}
+
+# Maps user-facing style names onto the two canonical style categories.
+STYLES = {
+    "branching": "branching",
+    "arborescence": "arborescence",
+    "spanning arborescence": "arborescence",
+}
+
+# Sentinel used when scanning edge weights for minima/maxima.
+INF = float("inf")
+
+
+@py_random_state(1)
+def random_string(L=15, seed=None):
+    return "".join([seed.choice(string.ascii_letters) for n in range(L)])
+
+
+def _min_weight(weight):
+    # Negating the weight lets maximization machinery find minima.
+    return -weight
+
+
+def _max_weight(weight):
+    # Identity mapping on weights, for symmetry with ``_min_weight``.
+    return weight
+
+
+@nx._dispatchable(edge_attrs={"attr": "default"})
+def branching_weight(G, attr="weight", default=1):
+    """
+    Returns the total weight of a branching.
+
+    You must access this function through the networkx.algorithms.tree module.
+
+    Parameters
+    ----------
+    G : DiGraph
+        The directed graph.
+    attr : str
+        The attribute to use as weights. If None, then each edge will be
+        treated equally with a weight of 1.
+    default : float
+        When `attr` is not None, then if an edge does not have that attribute,
+        `default` specifies what value it should take.
+
+    Returns
+    -------
+    weight: int or float
+        The total weight of the branching.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> G.add_weighted_edges_from([(0, 1, 2), (1, 2, 4), (2, 3, 3), (3, 4, 2)])
+    >>> nx.tree.branching_weight(G)
+    11
+
+    """
+    return sum(edge[2].get(attr, default) for edge in G.edges(data=True))
+
+
+@py_random_state(4)
+@nx._dispatchable(edge_attrs={"attr": "default"}, returns_graph=True)
+def greedy_branching(G, attr="weight", default=1, kind="max", seed=None):
+    """
+    Returns a branching obtained through a greedy algorithm.
+
+    This algorithm is wrong, and cannot give a proper optimal branching.
+    However, we include it for pedagogical reasons, as it can be helpful to
+    see what its outputs are.
+
+    The output is a branching, and possibly, a spanning arborescence. However,
+    it is not guaranteed to be optimal in either case.
+
+    Parameters
+    ----------
+    G : DiGraph
+        The directed graph to scan.
+    attr : str
+        The attribute to use as weights. If None, then each edge will be
+        treated equally with a weight of 1.
+    default : float
+        When `attr` is not None, then if an edge does not have that attribute,
+        `default` specifies what value it should take.
+    kind : str
+        The type of optimum to search for: 'min' or 'max' greedy branching.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    B : directed graph
+        The greedily obtained branching.
+
+    """
+    if kind not in KINDS:
+        raise nx.NetworkXException("Unknown value for `kind`.")
+
+    if kind == "min":
+        reverse = False
+    else:
+        reverse = True
+
+    if attr is None:
+        # Generate a random string the graph probably won't have.
+        attr = random_string(seed=seed)
+
+    edges = [(u, v, data.get(attr, default)) for (u, v, data) in G.edges(data=True)]
+
+    # We sort by weight, but also by nodes to normalize behavior across runs.
+    try:
+        edges.sort(key=itemgetter(2, 0, 1), reverse=reverse)
+    except TypeError:
+        # This will fail in Python 3.x if the nodes are of varying types.
+        # In that case, we use the arbitrary order.
+        edges.sort(key=itemgetter(2), reverse=reverse)
+
+    # The branching begins with a forest of no edges.
+    B = nx.DiGraph()
+    B.add_nodes_from(G)
+
+    # Now we add edges greedily so long we maintain the branching.
+    uf = nx.utils.UnionFind()
+    for i, (u, v, w) in enumerate(edges):
+        if uf[u] == uf[v]:
+            # Adding this edge would form a directed cycle.
+            continue
+        elif B.in_degree(v) == 1:
+            # The edge would increase the degree to be greater than one.
+            continue
+        else:
+            # If attr was None, then don't insert weights...
+            data = {}
+            if attr is not None:
+                data[attr] = w
+            B.add_edge(u, v, **data)
+            uf.union(u, v)
+
+    return B
+
+
+@nx._dispatchable(preserve_edge_attrs=True, returns_graph=True)
+def maximum_branching(
+    G,
+    attr="weight",
+    default=1,
+    preserve_attrs=False,
+    partition=None,
+):
+    # NOTE: the public docstring is attached after this definition via
+    # ``docstring_branching.format(...)`` at the bottom of this module.
+    # The implementation follows Edmonds (1967); the "step I1/I2/I3"
+    # names below refer to the steps as named in that paper.
+    #######################################
+    ### Data Structure Helper Functions ###
+    #######################################
+
+    def edmonds_add_edge(G, edge_index, u, v, key, **d):
+        """
+        Adds an edge to `G` while also updating the edge index.
+
+        This algorithm requires the use of an external dictionary to track
+        the edge keys since it is possible that the source or destination
+        node of an edge will be changed and the default key-handling
+        capabilities of the MultiDiGraph class do not account for this.
+
+        Parameters
+        ----------
+        G : MultiDiGraph
+            The graph to insert an edge into.
+        edge_index : dict
+            A mapping from integers to the edges of the graph.
+        u : node
+            The source node of the new edge.
+        v : node
+            The destination node of the new edge.
+        key : int
+            The key to use from `edge_index`.
+        d : keyword arguments, optional
+            Other attributes to store on the new edge.
+        """
+
+        if key in edge_index:
+            uu, vv, _ = edge_index[key]
+            if (u != uu) or (v != vv):
+                raise Exception(f"Key {key!r} is already in use.")
+
+        G.add_edge(u, v, key, **d)
+        edge_index[key] = (u, v, G.succ[u][v][key])
+
+    def edmonds_remove_node(G, edge_index, n):
+        """
+        Remove a node from the graph, updating the edge index to match.
+
+        Parameters
+        ----------
+        G : MultiDiGraph
+            The graph to remove an edge from.
+        edge_index : dict
+            A mapping from integers to the edges of the graph.
+        n : node
+            The node to remove from `G`.
+        """
+        keys = set()
+        for keydict in G.pred[n].values():
+            keys.update(keydict)
+        for keydict in G.succ[n].values():
+            keys.update(keydict)
+
+        for key in keys:
+            del edge_index[key]
+
+        G.remove_node(n)
+
+    #######################
+    ### Algorithm Setup ###
+    #######################
+
+    # Pick an attribute name that the original graph is unlikely to have
+    candidate_attr = "edmonds' secret candidate attribute"
+    new_node_base_name = "edmonds new node base name "
+
+    G_original = G
+    G = nx.MultiDiGraph()
+    G.__networkx_cache__ = None  # Disable caching
+
+    # A dict to reliably track mutations to the edges using the key of the edge.
+    G_edge_index = {}
+    # Each edge is given an arbitrary numerical key
+    for key, (u, v, data) in enumerate(G_original.edges(data=True)):
+        d = {attr: data.get(attr, default)}
+
+        if data.get(partition) is not None:
+            d[partition] = data.get(partition)
+
+        if preserve_attrs:
+            for d_k, d_v in data.items():
+                if d_k != attr:
+                    d[d_k] = d_v
+
+        edmonds_add_edge(G, G_edge_index, u, v, key, **d)
+
+    level = 0  # Stores the number of contracted nodes
+
+    # These are the buckets from the paper.
+    #
+    # In the paper, G^i are modified versions of the original graph.
+    # D^i and E^i are the nodes and edges of the maximal edges that are
+    # consistent with G^i. In this implementation, D^i and E^i are stored
+    # together as the graph B^i. We will have strictly more B^i than the
+    # paper will have.
+    #
+    # Note that the data in graphs and branchings are tuples with the graph as
+    # the first element and the edge index as the second.
+    B = nx.MultiDiGraph()
+    B_edge_index = {}
+    graphs = []  # G^i list
+    branchings = []  # B^i list
+    selected_nodes = set()  # D^i bucket
+    uf = nx.utils.UnionFind()
+
+    # A list of lists of edge indices. Each list is a circuit for graph G^i.
+    # Note the edge list is not required to be a circuit in G^0.
+    circuits = []
+
+    # Stores the index of the minimum edge in the circuit found in G^i and B^i.
+    # The edge ordering appears to preserve the weight ordering from G^0, so
+    # even when a stored circuit is not a circuit in G^0, its recorded minimum
+    # edge is still the minimum among the corresponding edges of G^0 (despite
+    # their weights differing).
+    minedge_circuit = []
+
+    ###########################
+    ### Algorithm Structure ###
+    ###########################
+
+    # Each step listed in the algorithm is an inner function. Thus, the overall
+    # loop structure is:
+    #
+    # while True:
+    #     step_I1()
+    #     if cycle detected:
+    #         step_I2()
+    #     elif every node of G is in D and E is a branching:
+    #         break
+
+    ##################################
+    ### Algorithm Helper Functions ###
+    ##################################
+
+    def edmonds_find_desired_edge(v):
+        """
+        Find the edge directed towards v with maximal weight.
+
+        If an edge partition exists in this graph, return the included
+        edge if it exists and never return any excluded edge.
+
+        Note: There can only be one included edge for each vertex otherwise
+        the edge partition is empty.
+
+        Parameters
+        ----------
+        v : node
+            The node to search for the maximal weight incoming edge.
+        """
+        edge = None
+        max_weight = -INF
+        for u, _, key, data in G.in_edges(v, data=True, keys=True):
+            # Skip excluded edges
+            if data.get(partition) == nx.EdgePartition.EXCLUDED:
+                continue
+
+            new_weight = data[attr]
+
+            # Return the included edge
+            if data.get(partition) == nx.EdgePartition.INCLUDED:
+                max_weight = new_weight
+                edge = (u, v, key, new_weight, data)
+                break
+
+            # Find the best open edge
+            if new_weight > max_weight:
+                max_weight = new_weight
+                edge = (u, v, key, new_weight, data)
+
+        return edge, max_weight
+
+    def edmonds_step_I2(v, desired_edge, level):
+        """
+        Perform step I2 from Edmonds' paper
+
+        First, check if the last step I1 created a cycle. If it did not, do nothing.
+        If it did, store the cycle for later reference and contract it.
+
+        Parameters
+        ----------
+        v : node
+            The current node to consider
+        desired_edge : edge
+            The minimum desired edge to remove from the cycle.
+        level : int
+            The current level, i.e. the number of cycles that have already been removed.
+        """
+        u = desired_edge[0]
+
+        Q_nodes = nx.shortest_path(B, v, u)
+        Q_edges = [
+            list(B[Q_nodes[i]][vv].keys())[0] for i, vv in enumerate(Q_nodes[1:])
+        ]
+        Q_edges.append(desired_edge[2])  # Add the new edge key to complete the circuit
+
+        # Get the edge in the circuit with the minimum weight.
+        # Also, save the incoming weights for each node.
+        minweight = INF
+        minedge = None
+        Q_incoming_weight = {}
+        for edge_key in Q_edges:
+            u, v, data = B_edge_index[edge_key]
+            w = data[attr]
+            # We cannot remove an included edge, even if it is the
+            # minimum edge in the circuit
+            Q_incoming_weight[v] = w
+            if data.get(partition) == nx.EdgePartition.INCLUDED:
+                continue
+            if w < minweight:
+                minweight = w
+                minedge = edge_key
+
+        circuits.append(Q_edges)
+        minedge_circuit.append(minedge)
+        graphs.append((G.copy(), G_edge_index.copy()))
+        branchings.append((B.copy(), B_edge_index.copy()))
+
+        # Mutate the graph to contract the circuit
+        new_node = new_node_base_name + str(level)
+        G.add_node(new_node)
+        new_edges = []
+        for u, v, key, data in G.edges(data=True, keys=True):
+            if u in Q_incoming_weight:
+                if v in Q_incoming_weight:
+                    # Circuit edge. For the moment do nothing,
+                    # eventually it will be removed.
+                    continue
+                else:
+                    # Outgoing edge from a node in the circuit.
+                    # Make it come from the new node instead
+                    dd = data.copy()
+                    new_edges.append((new_node, v, key, dd))
+            else:
+                if v in Q_incoming_weight:
+                    # Incoming edge to the circuit.
+                    # Update its weight
+                    w = data[attr]
+                    w += minweight - Q_incoming_weight[v]
+                    dd = data.copy()
+                    dd[attr] = w
+                    new_edges.append((u, new_node, key, dd))
+                else:
+                    # Outside edge. No modification needed
+                    continue
+
+        for node in Q_nodes:
+            edmonds_remove_node(G, G_edge_index, node)
+            edmonds_remove_node(B, B_edge_index, node)
+
+        selected_nodes.difference_update(set(Q_nodes))
+
+        for u, v, key, data in new_edges:
+            edmonds_add_edge(G, G_edge_index, u, v, key, **data)
+            if candidate_attr in data:
+                del data[candidate_attr]
+                edmonds_add_edge(B, B_edge_index, u, v, key, **data)
+                uf.union(u, v)
+
+    def is_root(G, u, edgekeys):
+        """
+        Returns True if `u` is a root node in G.
+
+        Node `u` is a root node if its in-degree over the specified edges is zero.
+
+        Parameters
+        ----------
+        G : Graph
+            The current graph.
+        u : node
+            The node in `G` to check if it is a root.
+        edgekeys : iterable of edges
+            The edges for which to check if `u` is a root of.
+        """
+        if u not in G:
+            raise Exception(f"{u!r} not in G")
+
+        for v in G.pred[u]:
+            for edgekey in G.pred[u][v]:
+                if edgekey in edgekeys:
+                    return False, edgekey
+        else:
+            return True, None
+
+    nodes = iter(list(G.nodes))
+    while True:
+        try:
+            v = next(nodes)
+        except StopIteration:
+            # If there are no more new nodes to consider, then we should
+            # meet stopping condition (b) from the paper:
+            #   (b) every node of G^i is in D^i and E^i is a branching
+            assert len(G) == len(B)
+            if len(B):
+                assert is_branching(B)
+
+            graphs.append((G.copy(), G_edge_index.copy()))
+            branchings.append((B.copy(), B_edge_index.copy()))
+            circuits.append([])
+            minedge_circuit.append(None)
+
+            break
+        else:
+            #####################
+            ### BEGIN STEP I1 ###
+            #####################
+
+            # This is a very simple step, so I don't think it needs a method of its own
+            if v in selected_nodes:
+                continue
+
+        selected_nodes.add(v)
+        B.add_node(v)
+        desired_edge, desired_edge_weight = edmonds_find_desired_edge(v)
+
+        # There might be no desired edge if all edges are excluded or
+        # v is the last node to be added to B, the ultimate root of the branching
+        # NOTE(review): edges with non-positive weight are never added to the
+        # branching here; callers that need a spanning result pre-shift all
+        # weights to be positive (see maximum_spanning_arborescence).
+        if desired_edge is not None and desired_edge_weight > 0:
+            u = desired_edge[0]
+            # Flag adding the edge will create a circuit before merging the two
+            # connected components of u and v in B
+            circuit = uf[u] == uf[v]
+            dd = {attr: desired_edge_weight}
+            if desired_edge[4].get(partition) is not None:
+                dd[partition] = desired_edge[4].get(partition)
+
+            edmonds_add_edge(B, B_edge_index, u, v, desired_edge[2], **dd)
+            G[u][v][desired_edge[2]][candidate_attr] = True
+            uf.union(u, v)
+
+            ###################
+            ### END STEP I1 ###
+            ###################
+
+            #####################
+            ### BEGIN STEP I2 ###
+            #####################
+
+            if circuit:
+                edmonds_step_I2(v, desired_edge, level)
+                nodes = iter(list(G.nodes()))
+                level += 1
+
+            ###################
+            ### END STEP I2 ###
+            ###################
+
+    #####################
+    ### BEGIN STEP I3 ###
+    #####################
+
+    # Create a new graph of the same class as the input graph
+    H = G_original.__class__()
+
+    # Start with the branching edges in the last level.
+    edges = set(branchings[level][1])
+    while level > 0:
+        level -= 1
+
+        # The current level is i, and we start counting from 0.
+        #
+        # We need the node at level i+1 that results from merging a circuit
+        # at level i. basename_0 is the first merged node and this happens
+        # at level 1. That is basename_0 is a node at level 1 that results
+        # from merging a circuit at level 0.
+
+        merged_node = new_node_base_name + str(level)
+        circuit = circuits[level]
+        isroot, edgekey = is_root(graphs[level + 1][0], merged_node, edges)
+        edges.update(circuit)
+
+        if isroot:
+            minedge = minedge_circuit[level]
+            if minedge is None:
+                raise Exception
+
+            # Remove the edge in the cycle with minimum weight
+            edges.remove(minedge)
+        else:
+            # We have identified an edge at the next higher level that
+            # transitions into the merged node at this level. That edge
+            # transitions to some corresponding node at the current level.
+            #
+            # We want to remove an edge from the cycle that transitions
+            # into the corresponding node, otherwise the result would not
+            # be a branching.
+
+            G, G_edge_index = graphs[level]
+            target = G_edge_index[edgekey][1]
+            for edgekey in circuit:
+                u, v, data = G_edge_index[edgekey]
+                if v == target:
+                    break
+            else:
+                raise Exception("Couldn't find edge incoming to merged node.")
+
+            edges.remove(edgekey)
+
+    H.add_nodes_from(G_original)
+    for edgekey in edges:
+        u, v, d = graphs[0][1][edgekey]
+        dd = {attr: d[attr]}
+
+        if preserve_attrs:
+            for key, value in d.items():
+                if key not in [attr, candidate_attr]:
+                    dd[key] = value
+
+        H.add_edge(u, v, **dd)
+
+    ###################
+    ### END STEP I3 ###
+    ###################
+
+    return H
+
+
+@nx._dispatchable(preserve_edge_attrs=True, mutates_input=True, returns_graph=True)
+def minimum_branching(
+    G, attr="weight", default=1, preserve_attrs=False, partition=None
+):
+    for _, _, d in G.edges(data=True):
+        d[attr] = -d.get(attr, default)
+    nx._clear_cache(G)
+
+    B = maximum_branching(G, attr, default, preserve_attrs, partition)
+
+    for _, _, d in G.edges(data=True):
+        d[attr] = -d.get(attr, default)
+    nx._clear_cache(G)
+
+    for _, _, d in B.edges(data=True):
+        d[attr] = -d.get(attr, default)
+    nx._clear_cache(B)
+
+    return B
+
+
+@nx._dispatchable(preserve_edge_attrs=True, mutates_input=True, returns_graph=True)
+def minimal_branching(
+    G, /, *, attr="weight", default=1, preserve_attrs=False, partition=None
+):
+    """
+    Returns a minimal branching from `G`.
+
+    A minimal branching is a branching similar to a minimal arborescence but
+    without the requirement that the result is actually a spanning arborescence.
+    This allows minimal branchinges to be computed over graphs which may not
+    have arborescence (such as multiple components).
+
+    Parameters
+    ----------
+    G : (multi)digraph-like
+        The graph to be searched.
+    attr : str
+        The edge attribute used in determining optimality.
+    default : float
+        The value of the edge attribute used if an edge does not have
+        the attribute `attr`.
+    preserve_attrs : bool
+        If True, preserve the other attributes of the original graph (that are not
+        passed to `attr`)
+    partition : str
+        The key for the edge attribute containing the partition
+        data on the graph. Edges can be included, excluded or open using the
+        `EdgePartition` enum.
+
+    Returns
+    -------
+    B : (multi)digraph-like
+        A minimal branching.
+    """
+    max_weight = -INF
+    min_weight = INF
+    for _, _, w in G.edges(data=attr, default=default):
+        if w > max_weight:
+            max_weight = w
+        if w < min_weight:
+            min_weight = w
+
+    for _, _, d in G.edges(data=True):
+        # Transform the weights so that the minimum weight is larger than
+        # the difference between the max and min weights. This is important
+        # in order to prevent the edge weights from becoming negative during
+        # computation
+        d[attr] = max_weight + 1 + (max_weight - min_weight) - d.get(attr, default)
+    nx._clear_cache(G)
+
+    B = maximum_branching(G, attr, default, preserve_attrs, partition)
+
+    # Reverse the weight transformations
+    for _, _, d in G.edges(data=True):
+        d[attr] = max_weight + 1 + (max_weight - min_weight) - d.get(attr, default)
+    nx._clear_cache(G)
+
+    for _, _, d in B.edges(data=True):
+        d[attr] = max_weight + 1 + (max_weight - min_weight) - d.get(attr, default)
+    nx._clear_cache(B)
+
+    return B
+
+
+@nx._dispatchable(preserve_edge_attrs=True, mutates_input=True, returns_graph=True)
+def maximum_spanning_arborescence(
+    G, attr="weight", default=1, preserve_attrs=False, partition=None
+):
+    # In order to use the same algorithm is the maximum branching, we need to adjust
+    # the weights of the graph. The branching algorithm can choose to not include an
+    # edge if it doesn't help find a branching, mainly triggered by edges with negative
+    # weights.
+    #
+    # To prevent this from happening while trying to find a spanning arborescence, we
+    # just have to tweak the edge weights so that they are all positive and cannot
+    # become negative during the branching algorithm, find the maximum branching and
+    # then return them to their original values.
+
+    min_weight = INF
+    max_weight = -INF
+    for _, _, w in G.edges(data=attr, default=default):
+        if w < min_weight:
+            min_weight = w
+        if w > max_weight:
+            max_weight = w
+
+    for _, _, d in G.edges(data=True):
+        d[attr] = d.get(attr, default) - min_weight + 1 - (min_weight - max_weight)
+    nx._clear_cache(G)
+
+    B = maximum_branching(G, attr, default, preserve_attrs, partition)
+
+    for _, _, d in G.edges(data=True):
+        d[attr] = d.get(attr, default) + min_weight - 1 + (min_weight - max_weight)
+    nx._clear_cache(G)
+
+    for _, _, d in B.edges(data=True):
+        d[attr] = d.get(attr, default) + min_weight - 1 + (min_weight - max_weight)
+    nx._clear_cache(B)
+
+    if not is_arborescence(B):
+        raise nx.exception.NetworkXException("No maximum spanning arborescence in G.")
+
+    return B
+
+
+@nx._dispatchable(preserve_edge_attrs=True, mutates_input=True, returns_graph=True)
+def minimum_spanning_arborescence(
+    G, attr="weight", default=1, preserve_attrs=False, partition=None
+):
+    B = minimal_branching(
+        G,
+        attr=attr,
+        default=default,
+        preserve_attrs=preserve_attrs,
+        partition=partition,
+    )
+
+    if not is_arborescence(B):
+        raise nx.exception.NetworkXException("No minimum spanning arborescence in G.")
+
+    return B
+
+
+# The four public optimum-branching functions share a docstring skeleton.
+# The templates below are formatted with the optimization sense
+# ("minimum"/"maximum") and result style, then attached to each function
+# after its definition.
+docstring_branching = """
+Returns a {kind} {style} from G.
+
+Parameters
+----------
+G : (multi)digraph-like
+    The graph to be searched.
+attr : str
+    The edge attribute used to in determining optimality.
+default : float
+    The value of the edge attribute used if an edge does not have
+    the attribute `attr`.
+preserve_attrs : bool
+    If True, preserve the other attributes of the original graph (that are not
+    passed to `attr`)
+partition : str
+    The key for the edge attribute containing the partition
+    data on the graph. Edges can be included, excluded or open using the
+    `EdgePartition` enum.
+
+Returns
+-------
+B : (multi)digraph-like
+    A {kind} {style}.
+"""
+
+# Arborescence variants additionally document the "no arborescence" failure.
+docstring_arborescence = (
+    docstring_branching
+    + """
+Raises
+------
+NetworkXException
+    If the graph does not contain a {kind} {style}.
+
+"""
+)
+
+maximum_branching.__doc__ = docstring_branching.format(
+    kind="maximum", style="branching"
+)
+
+minimum_branching.__doc__ = (
+    docstring_branching.format(kind="minimum", style="branching")
+    + """
+See Also
+--------
+    minimal_branching
+"""
+)
+
+maximum_spanning_arborescence.__doc__ = docstring_arborescence.format(
+    kind="maximum", style="spanning arborescence"
+)
+
+minimum_spanning_arborescence.__doc__ = docstring_arborescence.format(
+    kind="minimum", style="spanning arborescence"
+)
+
+
+class ArborescenceIterator:
+    """
+    Iterate over all spanning arborescences of a graph in either increasing or
+    decreasing cost.
+
+    Notes
+    -----
+    This iterator uses the partition scheme from [1]_ (included edges,
+    excluded edges and open edges). It generates minimum spanning
+    arborescences using a modified Edmonds' Algorithm which respects the
+    partition of edges. For arborescences with the same weight, ties are
+    broken arbitrarily.
+
+    References
+    ----------
+    .. [1] G.K. Janssens, K. Sörensen, An algorithm to generate all spanning
+           trees in order of increasing cost, Pesquisa Operacional, 2005-08,
+           Vol. 25 (2), p. 219-229,
+           https://www.scielo.br/j/pope/a/XHswBwRwJyrfL88dmMwYNWp/?lang=en
+    """
+
+    @dataclass(order=True)
+    class Partition:
+        """
+        This dataclass represents a partition and stores a dict with the edge
+        data and the weight of the minimum spanning arborescence of the
+        partition dict.
+        """
+
+        # Ordering (and hence PriorityQueue priority) is by weight only;
+        # the partition dict is excluded from comparison.
+        mst_weight: float
+        partition_dict: dict = field(compare=False)
+
+        def __copy__(self):
+            return ArborescenceIterator.Partition(
+                self.mst_weight, self.partition_dict.copy()
+            )
+
+    def __init__(self, G, weight="weight", minimum=True, init_partition=None):
+        """
+        Initialize the iterator
+
+        Parameters
+        ----------
+        G : nx.DiGraph
+            The directed graph which we need to iterate trees over
+
+        weight : String, default = "weight"
+            The edge attribute used to store the weight of the edge
+
+        minimum : bool, default = True
+            Return the trees in increasing order while true and decreasing order
+            while false.
+
+        init_partition : tuple, default = None
+            In the case that certain edges have to be included or excluded from
+            the arborescences, `init_partition` should be in the form
+            `(included_edges, excluded_edges)` where each edges is a
+            `(u, v)`-tuple inside an iterable such as a list or set.
+
+        """
+        self.G = G.copy()
+        self.weight = weight
+        self.minimum = minimum
+        self.method = (
+            minimum_spanning_arborescence if minimum else maximum_spanning_arborescence
+        )
+        # Edge-attribute key used to hold the partition data; chosen to be
+        # unlikely to collide with any attribute already on the graph.
+        self.partition_key = (
+            "ArborescenceIterators super secret partition attribute name"
+        )
+        if init_partition is not None:
+            partition_dict = {}
+            for e in init_partition[0]:
+                partition_dict[e] = nx.EdgePartition.INCLUDED
+            for e in init_partition[1]:
+                partition_dict[e] = nx.EdgePartition.EXCLUDED
+            self.init_partition = ArborescenceIterator.Partition(0, partition_dict)
+        else:
+            self.init_partition = None
+
+    def __iter__(self):
+        """
+        Returns
+        -------
+        ArborescenceIterator
+            The iterator object for this graph
+        """
+        self.partition_queue = PriorityQueue()
+        self._clear_partition(self.G)
+
+        # Write the initial partition if it exists.
+        if self.init_partition is not None:
+            self._write_partition(self.init_partition)
+
+        mst_weight = self.method(
+            self.G,
+            self.weight,
+            partition=self.partition_key,
+            preserve_attrs=True,
+        ).size(weight=self.weight)
+
+        # For maximum iteration the weight is negated so the PriorityQueue
+        # still pops the best (largest) arborescence first.
+        self.partition_queue.put(
+            self.Partition(
+                mst_weight if self.minimum else -mst_weight,
+                (
+                    {}
+                    if self.init_partition is None
+                    else self.init_partition.partition_dict
+                ),
+            )
+        )
+
+        return self
+
+    def __next__(self):
+        """
+        Returns
+        -------
+        (multi)Graph
+            The spanning tree of next greatest weight, with ties broken
+            arbitrarily.
+        """
+        if self.partition_queue.empty():
+            del self.G, self.partition_queue
+            raise StopIteration
+
+        partition = self.partition_queue.get()
+        self._write_partition(partition)
+        next_arborescence = self.method(
+            self.G,
+            self.weight,
+            partition=self.partition_key,
+            preserve_attrs=True,
+        )
+        self._partition(partition, next_arborescence)
+
+        self._clear_partition(next_arborescence)
+        return next_arborescence
+
+    def _partition(self, partition, partition_arborescence):
+        """
+        Create new partitions based on the minimum spanning tree of the
+        current minimum partition.
+
+        Parameters
+        ----------
+        partition : Partition
+            The Partition instance used to generate the current minimum spanning
+            tree.
+        partition_arborescence : nx.Graph
+            The minimum spanning arborescence of the input partition.
+        """
+        # create two new partitions with the data from the input partition dict
+        p1 = self.Partition(0, partition.partition_dict.copy())
+        p2 = self.Partition(0, partition.partition_dict.copy())
+        for e in partition_arborescence.edges:
+            # determine if the edge was open or included
+            if e not in partition.partition_dict:
+                # This is an open edge
+                p1.partition_dict[e] = nx.EdgePartition.EXCLUDED
+                p2.partition_dict[e] = nx.EdgePartition.INCLUDED
+
+                self._write_partition(p1)
+                try:
+                    p1_mst = self.method(
+                        self.G,
+                        self.weight,
+                        partition=self.partition_key,
+                        preserve_attrs=True,
+                    )
+
+                    p1_mst_weight = p1_mst.size(weight=self.weight)
+                    p1.mst_weight = p1_mst_weight if self.minimum else -p1_mst_weight
+                    self.partition_queue.put(p1.__copy__())
+                except nx.NetworkXException:
+                    # Infeasible partition (no spanning arborescence): drop it.
+                    pass
+
+                p1.partition_dict = p2.partition_dict.copy()
+
+    def _write_partition(self, partition):
+        """
+        Writes the desired partition into the graph to calculate the minimum
+        spanning tree. Also, if one incoming edge is included, mark all others
+        as excluded so that if that vertex is merged during Edmonds' algorithm
+        we cannot still pick another of that vertex's included edges.
+
+        Parameters
+        ----------
+        partition : Partition
+            A Partition dataclass describing a partition on the edges of the
+            graph.
+        """
+        for u, v, d in self.G.edges(data=True):
+            if (u, v) in partition.partition_dict:
+                d[self.partition_key] = partition.partition_dict[(u, v)]
+            else:
+                d[self.partition_key] = nx.EdgePartition.OPEN
+        nx._clear_cache(self.G)
+
+        for n in self.G:
+            included_count = 0
+            excluded_count = 0
+            for u, v, d in self.G.in_edges(nbunch=n, data=True):
+                if d.get(self.partition_key) == nx.EdgePartition.INCLUDED:
+                    included_count += 1
+                elif d.get(self.partition_key) == nx.EdgePartition.EXCLUDED:
+                    excluded_count += 1
+            # Check that if there is an included edge, all other incoming ones
+            # are excluded. If not fix it!
+            if included_count == 1 and excluded_count != self.G.in_degree(n) - 1:
+                for u, v, d in self.G.in_edges(nbunch=n, data=True):
+                    if d.get(self.partition_key) != nx.EdgePartition.INCLUDED:
+                        d[self.partition_key] = nx.EdgePartition.EXCLUDED
+
+    def _clear_partition(self, G):
+        """
+        Removes partition data from the graph
+        """
+        for u, v, d in G.edges(data=True):
+            if self.partition_key in d:
+                del d[self.partition_key]
+        # NOTE(review): this clears the cache of ``self.G`` even when called
+        # with a different graph (e.g. the returned arborescence in
+        # ``__next__``) — confirm whether ``G``'s cache should be cleared
+        # here instead.
+        nx._clear_cache(self.G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/coding.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/coding.py
new file mode 100644
index 00000000..f33089f7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/coding.py
@@ -0,0 +1,413 @@
+"""Functions for encoding and decoding trees.
+
+Since a tree is a highly restricted form of graph, it can be represented
+concisely in several ways. This module includes functions for encoding
+and decoding trees in the form of nested tuples and Prüfer
+sequences. The former requires a rooted tree, whereas the latter can be
+applied to unrooted trees. Furthermore, there is a bijection from Prüfer
+sequences to labeled trees.
+
+"""
+
+from collections import Counter
+from itertools import chain
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = [
+    "from_nested_tuple",
+    "from_prufer_sequence",
+    "NotATree",
+    "to_nested_tuple",
+    "to_prufer_sequence",
+]
+
+
class NotATree(nx.NetworkXException):
    """Exception signaling that a tree was expected but not provided.

    A tree is a connected undirected graph with no cycles; functions in
    this module raise this exception when handed any other kind of
    graph.
    """
+
+
@not_implemented_for("directed")
@nx._dispatchable(graphs="T")
def to_nested_tuple(T, root, canonical_form=False):
    """Returns a nested tuple representation of the given tree.

    The nested tuple representation of a tree is defined
    recursively. The tree with one node and no edges is represented by
    the empty tuple, ``()``. A tree with ``k`` subtrees is represented
    by a tuple of length ``k`` in which each element is the nested tuple
    representation of a subtree.

    Parameters
    ----------
    T : NetworkX graph
        An undirected graph object representing a tree.

    root : node
        The node in ``T`` to interpret as the root of the tree.

    canonical_form : bool
        If ``True``, each tuple is sorted so that the function returns
        a canonical form for rooted trees. This means "lighter" subtrees
        will appear as nested tuples before "heavier" subtrees. In this
        way, each isomorphic rooted tree has the same nested tuple
        representation.

    Returns
    -------
    tuple
        A nested tuple representation of the tree.

    Notes
    -----
    This function is *not* the inverse of :func:`from_nested_tuple`; the
    only guarantee is that the rooted trees are isomorphic.

    See also
    --------
    from_nested_tuple
    to_prufer_sequence

    Examples
    --------
    The tree need not be a balanced binary tree::

        >>> T = nx.Graph()
        >>> T.add_edges_from([(0, 1), (0, 2), (0, 3)])
        >>> T.add_edges_from([(1, 4), (1, 5)])
        >>> T.add_edges_from([(3, 6), (3, 7)])
        >>> root = 0
        >>> nx.to_nested_tuple(T, root)
        (((), ()), (), ((), ()))

    Continuing the above example, if ``canonical_form`` is ``True``, the
    nested tuples will be sorted::

        >>> nx.to_nested_tuple(T, root, canonical_form=True)
        ((), ((), ()), ((), ()))

    Even the path graph can be interpreted as a tree::

        >>> T = nx.path_graph(4)
        >>> root = 0
        >>> nx.to_nested_tuple(T, root)
        ((((),),),)

    """

    def _encode(node, parent):
        """Return the nested tuple for the subtree rooted at ``node``.

        ``parent`` is the neighbor through which ``node`` was reached,
        or ``None`` when ``node`` is the root of the whole tree; it is
        used to tell children apart from the parent among the
        neighbors of ``node``.
        """
        # Every neighbor except the parent is a child of ``node``.
        children = set(T[node]) - {parent}
        subtuples = [_encode(child, node) for child in children]
        if canonical_form:
            subtuples.sort()
        # A leaf yields the empty tuple, ending the recursion.
        return tuple(subtuples)

    # Validate the input before encoding.
    if not nx.is_tree(T):
        raise nx.NotATree("provided graph is not a tree")
    if root not in T:
        raise nx.NodeNotFound(f"Graph {T} contains no node {root}")

    return _encode(root, None)
+
+
@nx._dispatchable(graphs=None, returns_graph=True)
def from_nested_tuple(sequence, sensible_relabeling=False):
    """Returns the rooted tree corresponding to the given nested tuple.

    The nested tuple representation of a tree is defined
    recursively. The tree with one node and no edges is represented by
    the empty tuple, ``()``. A tree with ``k`` subtrees is represented
    by a tuple of length ``k`` in which each element is the nested tuple
    representation of a subtree.

    Parameters
    ----------
    sequence : tuple
        A nested tuple representing a rooted tree.

    sensible_relabeling : bool
        Whether to relabel the nodes of the tree so that nodes are
        labeled in increasing order according to their breadth-first
        search order from the root node.

    Returns
    -------
    NetworkX graph
        The tree corresponding to the given nested tuple, whose root
        node is node 0. If ``sensible_relabeling`` is ``True``, nodes
        will be labeled in breadth-first search order starting from the
        root node.

    Notes
    -----
    This function is *not* the inverse of :func:`to_nested_tuple`; the
    only guarantee is that the rooted trees are isomorphic.

    See also
    --------
    to_nested_tuple
    from_prufer_sequence

    Examples
    --------
    Sensible relabeling ensures that the nodes are labeled from the root
    starting at 0::

        >>> balanced = (((), ()), ((), ()))
        >>> T = nx.from_nested_tuple(balanced, sensible_relabeling=True)
        >>> edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)]
        >>> all((u, v) in T.edges() or (v, u) in T.edges() for (u, v) in edges)
        True

    """

    def _build(subsequence):
        """Decode one nested tuple into a rooted tree with root node 0."""
        # The empty tuple encodes the single-node tree.
        if not subsequence:
            return nx.empty_graph(1)
        # Otherwise decode each child tuple and join the resulting
        # subtrees at a fresh root; after joining, the root is node 0.
        return nx.tree.join_trees([(_build(child), 0) for child in subsequence])

    T = _build(sequence)
    if sensible_relabeling:
        # Walk the nodes in breadth-first order from the root (node 0)
        # and relabel them 0, 1, 2, ... in that order.
        bfs_order = chain([0], (child for _, child in nx.bfs_edges(T, 0)))
        mapping = {node: label for label, node in enumerate(bfs_order)}
        # ``relabel_nodes`` with ``copy=False`` would reject mappings
        # that cannot be applied in a topological order, so copy.
        T = nx.relabel_nodes(T, mapping)
    return T
+
+
@not_implemented_for("directed")
@nx._dispatchable(graphs="T")
def to_prufer_sequence(T):
    r"""Returns the Prüfer sequence of the given tree.

    A *Prüfer sequence* is a list of *n* - 2 numbers between 0 and
    *n* - 1, inclusive. The tree corresponding to a given Prüfer
    sequence can be recovered by repeatedly joining a node in the
    sequence with a node with the smallest potential degree according to
    the sequence.

    Parameters
    ----------
    T : NetworkX graph
        An undirected graph object representing a tree.

    Returns
    -------
    list
        The Prüfer sequence of the given tree.

    Raises
    ------
    NetworkXPointlessConcept
        If the number of nodes in `T` is less than two.

    NotATree
        If `T` is not a tree.

    KeyError
        If the set of nodes in `T` is not {0, …, *n* - 1}.

    Notes
    -----
    There is a bijection from labeled trees to Prüfer sequences. This
    function is the inverse of the :func:`from_prufer_sequence`
    function.

    Sometimes Prüfer sequences use nodes labeled from 1 to *n* instead
    of from 0 to *n* - 1. This function requires nodes to be labeled in
    the latter form. You can use :func:`~networkx.relabel_nodes` to
    relabel the nodes of your tree to the appropriate format.

    This implementation is from [1]_ and has a running time of
    $O(n)$.

    See also
    --------
    to_nested_tuple
    from_prufer_sequence

    References
    ----------
    .. [1] Wang, Xiaodong, Lei Wang, and Yingjie Wu.
           "An optimal algorithm for Prufer codes."
           *Journal of Software Engineering and Applications* 2.02 (2009): 111.
           <https://doi.org/10.4236/jsea.2009.22016>

    Examples
    --------
    There is a bijection between Prüfer sequences and labeled trees, so
    this function is the inverse of the :func:`from_prufer_sequence`
    function:

    >>> edges = [(0, 3), (1, 3), (2, 3), (3, 4), (4, 5)]
    >>> tree = nx.Graph(edges)
    >>> sequence = nx.to_prufer_sequence(tree)
    >>> sequence
    [3, 3, 3, 4]
    >>> tree2 = nx.from_prufer_sequence(sequence)
    >>> list(tree2.edges()) == edges
    True

    """
    # Perform some sanity checks on the input.
    n = len(T)
    if n < 2:
        msg = "Prüfer sequence undefined for trees with fewer than two nodes"
        raise nx.NetworkXPointlessConcept(msg)
    if not nx.is_tree(T):
        raise nx.NotATree("provided graph is not a tree")
    if set(T) != set(range(n)):
        raise KeyError("tree must have node labels {0, ..., n - 1}")

    # Remaining degree of each node as leaves are (conceptually) removed
    # from the tree; the graph itself is never mutated.
    degree = dict(T.degree())

    def parents(u):
        # The parent of leaf ``u`` is its unique neighbor that is still
        # internal (remaining degree > 1) in the shrinking tree.
        return next(v for v in T[u] if degree[v] > 1)

    # ``index`` is the smallest label up to which all leaves have been
    # processed; ``u`` is the current leaf to remove. Scanning ``range``
    # only forward from ``index`` keeps the total work O(n).
    index = u = next(k for k in range(n) if degree[k] == 1)
    result = []
    for i in range(n - 2):
        v = parents(u)
        result.append(v)
        degree[v] -= 1
        # If removing ``u`` turned ``v`` into a leaf with a label smaller
        # than ``index``, it must be processed immediately; otherwise
        # resume the forward scan for the next unprocessed leaf.
        if v < index and degree[v] == 1:
            u = v
        else:
            index = u = next(k for k in range(index + 1, n) if degree[k] == 1)
    return result
+
+
@nx._dispatchable(graphs=None, returns_graph=True)
def from_prufer_sequence(sequence):
    r"""Returns the tree corresponding to the given Prüfer sequence.

    A *Prüfer sequence* is a list of *n* - 2 numbers between 0 and
    *n* - 1, inclusive. The tree corresponding to a given Prüfer
    sequence can be recovered by repeatedly joining a node in the
    sequence with a node with the smallest potential degree according to
    the sequence.

    Parameters
    ----------
    sequence : list
        A Prüfer sequence, which is a list of *n* - 2 integers between
        zero and *n* - 1, inclusive.

    Returns
    -------
    NetworkX graph
        The tree corresponding to the given Prüfer sequence.

    Raises
    ------
    NetworkXError
        If the Prüfer sequence is not valid.

    Notes
    -----
    There is a bijection from labeled trees to Prüfer sequences. This
    function is the inverse of the :func:`to_prufer_sequence` function.

    Sometimes Prüfer sequences use nodes labeled from 1 to *n* instead
    of from 0 to *n* - 1. This function requires nodes to be labeled in
    the latter form. You can use :func:`networkx.relabel_nodes` to
    relabel the nodes of your tree to the appropriate format.

    This implementation is from [1]_ and has a running time of
    $O(n)$.

    References
    ----------
    .. [1] Wang, Xiaodong, Lei Wang, and Yingjie Wu.
           "An optimal algorithm for Prufer codes."
           *Journal of Software Engineering and Applications* 2.02 (2009): 111.
           <https://doi.org/10.4236/jsea.2009.22016>

    See also
    --------
    from_nested_tuple
    to_prufer_sequence

    Examples
    --------
    There is a bijection between Prüfer sequences and labeled trees, so
    this function is the inverse of the :func:`to_prufer_sequence`
    function:

    >>> edges = [(0, 3), (1, 3), (2, 3), (3, 4), (4, 5)]
    >>> tree = nx.Graph(edges)
    >>> sequence = nx.to_prufer_sequence(tree)
    >>> sequence
    [3, 3, 3, 4]
    >>> tree2 = nx.from_prufer_sequence(sequence)
    >>> list(tree2.edges()) == edges
    True

    """
    n = len(sequence) + 2
    # `degree` stores the remaining degree (plus one) for each node. The
    # degree of a node in the decoded tree is one more than the number
    # of times it appears in the code.
    degree = Counter(chain(sequence, range(n)))
    T = nx.empty_graph(n)
    # `not_orphaned` is the set of nodes that have a parent in the
    # tree. After the loop, there should be exactly two nodes that are
    # not in this set.
    not_orphaned = set()
    # ``index`` is the smallest label up to which all leaves have been
    # attached; ``u`` is the current leaf to attach. Scanning ``range``
    # only forward from ``index`` keeps the total work O(n).
    index = u = next(k for k in range(n) if degree[k] == 1)
    for v in sequence:
        # check the validity of the prufer sequence
        if v < 0 or v > n - 1:
            raise nx.NetworkXError(
                f"Invalid Prufer sequence: Values must be between 0 and {n-1}, got {v}"
            )
        T.add_edge(u, v)
        not_orphaned.add(u)
        degree[v] -= 1
        # If attaching ``u`` turned ``v`` into a leaf with a label
        # smaller than ``index``, it must be processed immediately;
        # otherwise resume the forward scan for the next leaf.
        if v < index and degree[v] == 1:
            u = v
        else:
            index = u = next(k for k in range(index + 1, n) if degree[k] == 1)
    # At this point, there must be exactly two orphaned nodes; join them.
    orphans = set(T) - not_orphaned
    u, v = orphans
    T.add_edge(u, v)
    return T
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/decomposition.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/decomposition.py
new file mode 100644
index 00000000..c8b8f247
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/decomposition.py
@@ -0,0 +1,88 @@
+r"""Function for computing a junction tree of a graph."""
+
+from itertools import combinations
+
+import networkx as nx
+from networkx.algorithms import chordal_graph_cliques, complete_to_chordal_graph, moral
+from networkx.utils import not_implemented_for
+
+__all__ = ["junction_tree"]
+
+
@not_implemented_for("multigraph")
@nx._dispatchable(returns_graph=True)
def junction_tree(G):
    r"""Returns a junction tree of a given graph.

    A junction tree (or clique tree) is constructed from a (un)directed graph G.
    The tree is constructed based on a moralized and triangulated version of G.
    The tree's nodes consist of maximal cliques and sepsets of the revised graph.
    The sepset of two cliques is the intersection of the nodes of these cliques,
    e.g. the sepset of (A,B,C) and (A,C,E,F) is (A,C). These nodes are often called
    "variables" in this literature. The tree is bipartite with each sepset
    connected to its two cliques.

    Junction Trees are not unique as the order of clique consideration determines
    which sepsets are included.

    The junction tree algorithm consists of five steps [1]_:

    1. Moralize the graph
    2. Triangulate the graph
    3. Find maximal cliques
    4. Build the tree from cliques, connecting cliques with shared
       nodes, set edge-weight to number of shared variables
    5. Find maximum spanning tree


    Parameters
    ----------
    G : networkx.Graph
        Directed or undirected graph.

    Returns
    -------
    junction_tree : networkx.Graph
        The corresponding junction tree of `G`.

    Raises
    ------
    NetworkXNotImplemented
        Raised if `G` is an instance of `MultiGraph` or `MultiDiGraph`.

    References
    ----------
    .. [1] Junction tree algorithm:
       https://en.wikipedia.org/wiki/Junction_tree_algorithm

    .. [2] Finn V. Jensen and Frank Jensen. 1994. Optimal
       junction trees. In Proceedings of the Tenth international
       conference on Uncertainty in artificial intelligence (UAI’94).
       Morgan Kaufmann Publishers Inc., San Francisco, CA, USA, 360–366.
    """
    # Steps 1 and 2: moralize (directed input only), then triangulate.
    if G.is_directed():
        G = moral.moral_graph(G)
    chordal_G, _ = complete_to_chordal_graph(G)

    # Step 3: each maximal clique becomes a node, represented as a
    # sorted tuple so it is hashable and deterministic.
    cliques = [tuple(sorted(clique)) for clique in chordal_graph_cliques(chordal_G)]

    # Step 4: connect every pair of overlapping cliques, weighting the
    # edge by the size of their intersection (the sepset).
    clique_graph = nx.Graph()
    clique_graph.add_nodes_from(cliques, type="clique")
    for clique_a, clique_b in combinations(cliques, 2):
        shared = set(clique_a) & set(clique_b)
        if shared:
            sepset = tuple(sorted(shared))
            clique_graph.add_edge(clique_a, clique_b, weight=len(sepset), sepset=sepset)

    # Step 5: a maximum spanning tree of the clique graph is a junction tree.
    tree = nx.maximum_spanning_tree(clique_graph)

    # Make the tree bipartite: replace each clique--clique edge with a
    # clique--sepset--clique path through a dedicated sepset node.
    for clique_a, clique_b, attrs in list(tree.edges(data=True)):
        sepset = attrs["sepset"]
        tree.add_node(sepset, type="sepset")
        tree.add_edge(clique_a, sepset)
        tree.add_edge(clique_b, sepset)
        tree.remove_edge(clique_a, clique_b)

    return tree
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/mst.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/mst.py
new file mode 100644
index 00000000..554613b8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/mst.py
@@ -0,0 +1,1284 @@
+"""
+Algorithms for calculating min/max spanning trees/forests.
+
+"""
+
+from dataclasses import dataclass, field
+from enum import Enum
+from heapq import heappop, heappush
+from itertools import count
+from math import isnan
+from operator import itemgetter
+from queue import PriorityQueue
+
+import networkx as nx
+from networkx.utils import UnionFind, not_implemented_for, py_random_state
+
+__all__ = [
+    "minimum_spanning_edges",
+    "maximum_spanning_edges",
+    "minimum_spanning_tree",
+    "maximum_spanning_tree",
+    "number_of_spanning_trees",
+    "random_spanning_tree",
+    "partition_spanning_tree",
+    "EdgePartition",
+    "SpanningTreeIterator",
+]
+
+
class EdgePartition(Enum):
    """States an edge may take in an edge partition.

    The state is written to the edges of a graph before the graph is
    passed to `kruskal_mst_edges`:

    - ``EdgePartition.OPEN``: the edge may or may not be used.
    - ``EdgePartition.INCLUDED``: the edge must appear in the tree.
    - ``EdgePartition.EXCLUDED``: the edge must not appear in the tree.
    """

    OPEN = 0
    INCLUDED = 1
    EXCLUDED = 2
+
+
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data")
def boruvka_mst_edges(
    G, minimum=True, weight="weight", keys=False, data=True, ignore_nan=False
):
    """Iterate over edges of a Borůvka's algorithm min/max spanning tree.

    Parameters
    ----------
    G : NetworkX Graph
        The edges of `G` must have distinct weights,
        otherwise the edges may not form a tree.

    minimum : bool (default: True)
        Find the minimum (True) or maximum (False) spanning tree.

    keys : bool (default: False)
        This argument is ignored since this function is not
        implemented for multigraphs; it exists only for consistency
        with the other minimum spanning tree functions.

    weight : string (default: 'weight')
        The name of the edge attribute holding the edge weights.

    data : bool (default: True)
        Flag for whether to yield edge attribute dicts.
        If True, yield edges `(u, v, d)`, where `d` is the attribute dict.
        If False, yield edges `(u, v)`.

    ignore_nan : bool (default: False)
        If a NaN is found as an edge weight normally an exception is raised.
        If `ignore_nan is True` then that edge is ignored instead.

    """
    # Initialize a forest, assuming initially that it is the discrete
    # partition of the nodes of the graph.
    forest = UnionFind(G)

    def best_edge(component):
        """Returns the optimum (minimum or maximum) edge on the edge
        boundary of the given set of nodes.

        A return value of ``None`` indicates an empty boundary.

        """
        # Negating the weights turns the maximization into a
        # minimization, so the loop below can always seek the smallest
        # signed weight.
        sign = 1 if minimum else -1
        minwt = float("inf")
        boundary = None
        for e in nx.edge_boundary(G, component, data=True):
            wt = e[-1].get(weight, 1) * sign
            if isnan(wt):
                if ignore_nan:
                    continue
                msg = f"NaN found as an edge weight. Edge {e}"
                raise ValueError(msg)
            if wt < minwt:
                minwt = wt
                boundary = e
        return boundary

    # Determine the optimum edge in the edge boundary of each component
    # in the forest.
    # (This initial pass only decides whether to enter the loop at all;
    # the loop body recomputes the boundary edges before using them,
    # emulating a do-while loop.)
    best_edges = (best_edge(component) for component in forest.to_sets())
    best_edges = [edge for edge in best_edges if edge is not None]
    # If each entry was ``None``, that means the graph was disconnected,
    # so we are done generating the forest.
    while best_edges:
        # Determine the optimum edge in the edge boundary of each
        # component in the forest.
        #
        # This must be a sequence, not an iterator. In this list, the
        # same edge may appear twice, in different orientations (but
        # that's okay, since a union operation will be called on the
        # endpoints the first time it is seen, but not the second time).
        #
        # Any ``None`` indicates that the edge boundary for that
        # component was empty, so that part of the forest has been
        # completed.
        #
        # TODO This can be parallelized, both in the outer loop over
        # each component in the forest and in the computation of the
        # minimum. (Same goes for the identical lines outside the loop.)
        best_edges = (best_edge(component) for component in forest.to_sets())
        best_edges = [edge for edge in best_edges if edge is not None]
        # Join trees in the forest using the best edges, and yield that
        # edge, since it is part of the spanning tree.
        #
        # TODO This loop can be parallelized, to an extent (the union
        # operation must be atomic).
        for u, v, d in best_edges:
            # The same edge may appear in both orientations; the union
            # check guarantees each tree-joining edge is yielded once.
            if forest[u] != forest[v]:
                if data:
                    yield u, v, d
                else:
                    yield u, v
                forest.union(u, v)
+
+
@nx._dispatchable(
    edge_attrs={"weight": None, "partition": None}, preserve_edge_attrs="data"
)
def kruskal_mst_edges(
    G, minimum, weight="weight", keys=True, data=True, ignore_nan=False, partition=None
):
    """
    Iterate over edges of a Kruskal's algorithm min/max spanning tree.

    Parameters
    ----------
    G : NetworkX Graph
        The graph holding the tree of interest.

    minimum : bool (default: True)
        Find the minimum (True) or maximum (False) spanning tree.

    weight : string (default: 'weight')
        The name of the edge attribute holding the edge weights.

    keys : bool (default: True)
        If `G` is a multigraph, `keys` controls whether edge keys are yielded.
        Otherwise `keys` is ignored.

    data : bool (default: True)
        Flag for whether to yield edge attribute dicts.
        If True, yield edges `(u, v, d)`, where `d` is the attribute dict.
        If False, yield edges `(u, v)`.

    ignore_nan : bool (default: False)
        If a NaN is found as an edge weight normally an exception is raised.
        If `ignore_nan is True` then that edge is ignored instead.

    partition : string (default: None)
        The name of the edge attribute holding the partition data, if it exists.
        Partition data is written to the edges using the `EdgePartition` enum.
        If a partition exists, all included edges and none of the excluded edges
        will appear in the final tree. Open edges may or may not be used.

    Yields
    ------
    edge tuple
        The edges as discovered by Kruskal's method. Each edge can
        take the following forms: `(u, v)`, `(u, v, d)` or `(u, v, k, d)`
        depending on the `key` and `data` parameters
    """
    subtrees = UnionFind()
    if G.is_multigraph():
        edges = G.edges(keys=True, data=True)
    else:
        edges = G.edges(data=True)

    # Order the edges of the graph with respect to the partition data.
    # Edges are considered in the following order:
    #
    # * Included edges (these must appear in the tree, so they come first)
    # * Open edges, from smallest to largest weight (reversed for a
    #   maximum spanning tree)
    # * Excluded edges are dropped entirely and never yielded.
    #
    # (This was previously written as a bare triple-quoted string, which
    # is an executed no-op expression, not a comment.)
    included_edges = []
    open_edges = []
    for e in edges:
        d = e[-1]
        wt = d.get(weight, 1)
        if isnan(wt):
            if ignore_nan:
                continue
            raise ValueError(f"NaN found as an edge weight. Edge {e}")

        # Prepend the weight so open edges can be sorted by it below.
        edge = (wt,) + e
        if d.get(partition) == EdgePartition.INCLUDED:
            included_edges.append(edge)
        elif d.get(partition) == EdgePartition.EXCLUDED:
            continue
        else:
            open_edges.append(edge)

    if minimum:
        sorted_open_edges = sorted(open_edges, key=itemgetter(0))
    else:
        sorted_open_edges = sorted(open_edges, key=itemgetter(0), reverse=True)

    # Condense the lists into one
    included_edges.extend(sorted_open_edges)
    sorted_edges = included_edges
    del open_edges, sorted_open_edges, included_edges

    # Multigraphs need to handle edge keys in addition to edge data.
    if G.is_multigraph():
        for wt, u, v, k, d in sorted_edges:
            # Only edges joining two distinct subtrees belong to the tree.
            if subtrees[u] != subtrees[v]:
                if keys:
                    if data:
                        yield u, v, k, d
                    else:
                        yield u, v, k
                else:
                    if data:
                        yield u, v, d
                    else:
                        yield u, v
                subtrees.union(u, v)
    else:
        for wt, u, v, d in sorted_edges:
            if subtrees[u] != subtrees[v]:
                if data:
                    yield u, v, d
                else:
                    yield u, v
                subtrees.union(u, v)
+
+
@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data")
def prim_mst_edges(G, minimum, weight="weight", keys=True, data=True, ignore_nan=False):
    """Iterate over edges of Prim's algorithm min/max spanning tree.

    Parameters
    ----------
    G : NetworkX Graph
        The graph holding the tree of interest.

    minimum : bool (default: True)
        Find the minimum (True) or maximum (False) spanning tree.

    weight : string (default: 'weight')
        The name of the edge attribute holding the edge weights.

    keys : bool (default: True)
        If `G` is a multigraph, `keys` controls whether edge keys are yielded.
        Otherwise `keys` is ignored.

    data : bool (default: True)
        Flag for whether to yield edge attribute dicts.
        If True, yield edges `(u, v, d)`, where `d` is the attribute dict.
        If False, yield edges `(u, v)`.

    ignore_nan : bool (default: False)
        If a NaN is found as an edge weight normally an exception is raised.
        If `ignore_nan is True` then that edge is ignored instead.

    """
    is_multigraph = G.is_multigraph()
    # Bind the heap operations to local names, hoisting the lookups out
    # of the inner loops.
    push = heappush
    pop = heappop

    nodes = set(G)
    # Monotonic counter used as a tiebreaker in heap entries so that
    # equal-weight edges never force a comparison of the (possibly
    # unorderable) node or data elements that follow it in the tuple.
    c = count()

    # Negating the weights turns the maximization into a minimization,
    # so the min-heap always pops the optimum frontier edge.
    sign = 1 if minimum else -1

    # The outer loop restarts Prim's algorithm once per connected
    # component, yielding a spanning forest on disconnected graphs.
    while nodes:
        u = nodes.pop()
        frontier = []
        visited = {u}
        # Seed the frontier with every edge incident to the start node.
        if is_multigraph:
            for v, keydict in G.adj[u].items():
                for k, d in keydict.items():
                    wt = d.get(weight, 1) * sign
                    if isnan(wt):
                        if ignore_nan:
                            continue
                        msg = f"NaN found as an edge weight. Edge {(u, v, k, d)}"
                        raise ValueError(msg)
                    push(frontier, (wt, next(c), u, v, k, d))
        else:
            for v, d in G.adj[u].items():
                wt = d.get(weight, 1) * sign
                if isnan(wt):
                    if ignore_nan:
                        continue
                    msg = f"NaN found as an edge weight. Edge {(u, v, d)}"
                    raise ValueError(msg)
                push(frontier, (wt, next(c), u, v, d))
        while nodes and frontier:
            if is_multigraph:
                W, _, u, v, k, d = pop(frontier)
            else:
                W, _, u, v, d = pop(frontier)
            # Skip stale heap entries whose endpoint has already been
            # absorbed into the tree.
            if v in visited or v not in nodes:
                continue
            # Multigraphs need to handle edge keys in addition to edge data.
            if is_multigraph and keys:
                if data:
                    yield u, v, k, d
                else:
                    yield u, v, k
            else:
                if data:
                    yield u, v, d
                else:
                    yield u, v
            # update frontier
            visited.add(v)
            nodes.discard(v)
            if is_multigraph:
                for w, keydict in G.adj[v].items():
                    if w in visited:
                        continue
                    for k2, d2 in keydict.items():
                        new_weight = d2.get(weight, 1) * sign
                        if isnan(new_weight):
                            if ignore_nan:
                                continue
                            msg = f"NaN found as an edge weight. Edge {(v, w, k2, d2)}"
                            raise ValueError(msg)
                        push(frontier, (new_weight, next(c), v, w, k2, d2))
            else:
                for w, d2 in G.adj[v].items():
                    if w in visited:
                        continue
                    new_weight = d2.get(weight, 1) * sign
                    if isnan(new_weight):
                        if ignore_nan:
                            continue
                        msg = f"NaN found as an edge weight. Edge {(v, w, d2)}"
                        raise ValueError(msg)
                    push(frontier, (new_weight, next(c), v, w, d2))
+
+
# Mapping from user-facing algorithm name to the generator implementing
# it. Both the ASCII and the diacritic spelling of Borůvka are accepted.
ALGORITHMS = {
    "boruvka": boruvka_mst_edges,
    "borůvka": boruvka_mst_edges,
    "kruskal": kruskal_mst_edges,
    "prim": prim_mst_edges,
}
+
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data")
def minimum_spanning_edges(
    G, algorithm="kruskal", weight="weight", keys=True, data=True, ignore_nan=False
):
    """Generate edges in a minimum spanning forest of an undirected
    weighted graph.

    A minimum spanning tree is a subgraph of the graph (a tree)
    with the minimum sum of edge weights.  A spanning forest is a
    union of the spanning trees for each connected component of the graph.

    Parameters
    ----------
    G : undirected Graph
       An undirected graph. If `G` is connected, then the algorithm finds a
       spanning tree. Otherwise, a spanning forest is found.

    algorithm : string
       The algorithm to use when finding a minimum spanning tree. Valid
       choices are 'kruskal', 'prim', or 'boruvka'. The default is 'kruskal'.

    weight : string
       Edge data key to use for weight (default 'weight').

    keys : bool
       Whether to yield edge key in multigraphs in addition to the edge.
       If `G` is not a multigraph, this is ignored.

    data : bool, optional
       If True yield the edge data along with the edge.

    ignore_nan : bool (default: False)
        If a NaN is found as an edge weight normally an exception is raised.
        If `ignore_nan is True` then that edge is ignored instead.

    Returns
    -------
    edges : iterator
       An iterator over edges in a minimum spanning tree of `G`.
       Edges connecting nodes `u` and `v` are represented as tuples:
       `(u, v, k, d)` or `(u, v, k)` or `(u, v, d)` or `(u, v)`

       If `G` is a multigraph, `keys` indicates whether the edge key `k` will
       be reported in the third position in the edge tuple. `data` indicates
       whether the edge datadict `d` will appear at the end of the edge tuple.

       If `G` is not a multigraph, the tuples are `(u, v, d)` if `data` is True
       or `(u, v)` if `data` is False.

    Examples
    --------
    >>> from networkx.algorithms import tree

    Find minimum spanning edges by Kruskal's algorithm

    >>> G = nx.cycle_graph(4)
    >>> G.add_edge(0, 3, weight=2)
    >>> mst = tree.minimum_spanning_edges(G, algorithm="kruskal", data=False)
    >>> edgelist = list(mst)
    >>> sorted(sorted(e) for e in edgelist)
    [[0, 1], [1, 2], [2, 3]]

    Find minimum spanning edges by Prim's algorithm

    >>> G = nx.cycle_graph(4)
    >>> G.add_edge(0, 3, weight=2)
    >>> mst = tree.minimum_spanning_edges(G, algorithm="prim", data=False)
    >>> edgelist = list(mst)
    >>> sorted(sorted(e) for e in edgelist)
    [[0, 1], [1, 2], [2, 3]]

    Notes
    -----
    For Borůvka's algorithm, each edge must have a weight attribute, and
    each edge weight must be distinct.

    For the other algorithms, if the graph edges do not have a weight
    attribute a default weight of 1 will be used.

    Modified code from David Eppstein, April 2006
    http://www.ics.uci.edu/~eppstein/PADS/

    """
    # An unknown algorithm name is reported as ValueError (not KeyError) so
    # callers get a consistent exception type for bad arguments.
    try:
        algo = ALGORITHMS[algorithm]
    except KeyError as err:
        msg = f"{algorithm} is not a valid choice for an algorithm."
        raise ValueError(msg) from err

    return algo(
        G, minimum=True, weight=weight, keys=keys, data=data, ignore_nan=ignore_nan
    )
+
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data")
def maximum_spanning_edges(
    G, algorithm="kruskal", weight="weight", keys=True, data=True, ignore_nan=False
):
    """Generate the edges of a maximum spanning forest of an undirected
    weighted graph.

    A maximum spanning tree is a subgraph of the graph (a tree) whose edge
    weights sum to the largest possible value.  When `G` is not connected,
    a spanning forest (one spanning tree per connected component) is
    generated instead.

    Parameters
    ----------
    G : undirected Graph
       An undirected graph. If `G` is connected, then the algorithm finds a
       spanning tree. Otherwise, a spanning forest is found.

    algorithm : string
       Which algorithm to use when finding a maximum spanning tree. Valid
       choices are 'kruskal', 'prim', or 'boruvka'. The default is 'kruskal'.

    weight : string
       Edge data key to use for weight (default 'weight').

    keys : bool
       For multigraphs, whether to yield the edge key in addition to the
       edge. Ignored when `G` is not a multigraph.

    data : bool, optional
       If True, yield the edge data dict along with the edge.

    ignore_nan : bool (default: False)
        Normally a NaN edge weight raises an exception. If `ignore_nan`
        is True, such edges are skipped instead.

    Returns
    -------
    edges : iterator
       An iterator over the edges of a maximum spanning tree of `G`.
       Each edge joining nodes `u` and `v` is a tuple of the form
       `(u, v, k, d)`, `(u, v, k)`, `(u, v, d)` or `(u, v)`.

       For multigraphs, `keys` controls whether the edge key `k` appears in
       the third position of the tuple and `data` controls whether the edge
       data dict `d` is appended.

       For non-multigraphs, the tuples are `(u, v, d)` if `data` is True
       and `(u, v)` if `data` is False.

    Examples
    --------
    >>> from networkx.algorithms import tree

    Find maximum spanning edges by Kruskal's algorithm

    >>> G = nx.cycle_graph(4)
    >>> G.add_edge(0, 3, weight=2)
    >>> mst = tree.maximum_spanning_edges(G, algorithm="kruskal", data=False)
    >>> edgelist = list(mst)
    >>> sorted(sorted(e) for e in edgelist)
    [[0, 1], [0, 3], [1, 2]]

    Find maximum spanning edges by Prim's algorithm

    >>> G = nx.cycle_graph(4)
    >>> G.add_edge(0, 3, weight=2)  # assign weight 2 to edge 0-3
    >>> mst = tree.maximum_spanning_edges(G, algorithm="prim", data=False)
    >>> edgelist = list(mst)
    >>> sorted(sorted(e) for e in edgelist)
    [[0, 1], [0, 3], [2, 3]]

    Notes
    -----
    Borůvka's algorithm requires every edge to carry a weight attribute and
    all edge weights to be distinct. The other algorithms fall back to a
    default weight of 1 for edges without a weight attribute.

    Modified code from David Eppstein, April 2006
    http://www.ics.uci.edu/~eppstein/PADS/
    """
    # Look up the generator implementing the requested algorithm; report an
    # unknown name as a ValueError rather than leaking the KeyError.
    try:
        edge_generator = ALGORITHMS[algorithm]
    except KeyError as err:
        raise ValueError(
            f"{algorithm} is not a valid choice for an algorithm."
        ) from err

    # Every algorithm accepts a `minimum` flag; maximizing is handled
    # internally by negating the edge weights.
    return edge_generator(
        G, minimum=False, weight=weight, keys=keys, data=data, ignore_nan=ignore_nan
    )
+
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def minimum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=False):
    """Returns a minimum spanning tree or forest on an undirected graph `G`.

    Parameters
    ----------
    G : undirected graph
        An undirected graph. If `G` is connected, the result is a spanning
        tree; otherwise it is a spanning forest.

    weight : str
       Data key to use for edge weights.

    algorithm : string
       Which algorithm to use when finding a minimum spanning tree. Valid
       choices are 'kruskal', 'prim', or 'boruvka'. The default is
       'kruskal'.

    ignore_nan : bool (default: False)
        Normally a NaN edge weight raises an exception. If `ignore_nan`
        is True, such edges are skipped instead.

    Returns
    -------
    G : NetworkX Graph
       A minimum spanning tree or forest.

    Examples
    --------
    >>> G = nx.cycle_graph(4)
    >>> G.add_edge(0, 3, weight=2)
    >>> T = nx.minimum_spanning_tree(G)
    >>> sorted(T.edges(data=True))
    [(0, 1, {}), (1, 2, {}), (2, 3, {})]


    Notes
    -----
    Borůvka's algorithm requires every edge to carry a weight attribute and
    all edge weights to be distinct. The other algorithms fall back to a
    default weight of 1 for edges without a weight attribute.

    There may be more than one tree with the same minimum or maximum weight.
    See :mod:`networkx.tree.recognition` for more detailed definitions.

    Isolated nodes with self-loops are in the tree as edgeless isolated nodes.

    """
    mst_edges = minimum_spanning_edges(
        G, algorithm, weight, keys=True, data=True, ignore_nan=ignore_nan
    )
    # Assemble the result in a fresh graph of the same class as G, carrying
    # over the graph-level and node-level attributes, then stream the MST
    # edges straight from the generator.
    tree = G.__class__()
    tree.graph.update(G.graph)
    tree.add_nodes_from(G.nodes.items())
    tree.add_edges_from(mst_edges)
    return tree
+
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def partition_spanning_tree(
    G, minimum=True, weight="weight", partition="partition", ignore_nan=False
):
    """
    Find a spanning tree while respecting a partition of edges.

    Edges can be flagged as `INCLUDED`, which must appear in the returned
    tree, `EXCLUDED`, which cannot appear in the returned tree, or `OPEN`.

    This is used by the SpanningTreeIterator to create new partitions
    following the algorithm of Sörensen and Janssens [1]_.

    Parameters
    ----------
    G : undirected graph
        An undirected graph.

    minimum : bool (default: True)
        If True, return the minimum spanning tree of the partition;
        otherwise return the maximum one.

    weight : str
        Data key to use for edge weights.

    partition : str
        The key for the edge attribute containing the partition data on the
        graph. Edges are marked as included, excluded or open using the
        `EdgePartition` enum.

    ignore_nan : bool (default: False)
        Normally a NaN edge weight raises an exception. If `ignore_nan`
        is True, such edges are skipped instead.


    Returns
    -------
    G : NetworkX Graph
        A minimum spanning tree using all of the included edges in the graph and
        none of the excluded edges.

    References
    ----------
    .. [1] G.K. Janssens, K. Sörensen, An algorithm to generate all spanning
           trees in order of increasing cost, Pesquisa Operacional, 2005-08,
           Vol. 25 (2), p. 219-229,
           https://www.scielo.br/j/pope/a/XHswBwRwJyrfL88dmMwYNWp/?lang=en
    """
    # Only Kruskal's implementation understands the `partition` keyword, so
    # it is called directly instead of going through the ALGORITHMS table.
    partition_edges = kruskal_mst_edges(
        G,
        minimum,
        weight,
        keys=True,
        data=True,
        ignore_nan=ignore_nan,
        partition=partition,
    )
    # Assemble the result in a fresh graph of the same class as G,
    # preserving graph- and node-level attributes.
    tree = G.__class__()
    tree.graph.update(G.graph)
    tree.add_nodes_from(G.nodes.items())
    tree.add_edges_from(partition_edges)
    return tree
+
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def maximum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=False):
    """Returns a maximum spanning tree or forest on an undirected graph `G`.

    Parameters
    ----------
    G : undirected graph
        An undirected graph. If `G` is connected, then the algorithm finds a
        spanning tree. Otherwise, a spanning forest is found.

    weight : str
       Data key to use for edge weights.

    algorithm : string
       The algorithm to use when finding a maximum spanning tree. Valid
       choices are 'kruskal', 'prim', or 'boruvka'. The default is
       'kruskal'.

    ignore_nan : bool (default: False)
        If a NaN is found as an edge weight normally an exception is raised.
        If `ignore_nan is True` then that edge is ignored instead.


    Returns
    -------
    G : NetworkX Graph
       A maximum spanning tree or forest.


    Examples
    --------
    >>> G = nx.cycle_graph(4)
    >>> G.add_edge(0, 3, weight=2)
    >>> T = nx.maximum_spanning_tree(G)
    >>> sorted(T.edges(data=True))
    [(0, 1, {}), (0, 3, {'weight': 2}), (1, 2, {})]


    Notes
    -----
    For Borůvka's algorithm, each edge must have a weight attribute, and
    each edge weight must be distinct.

    For the other algorithms, if the graph edges do not have a weight
    attribute a default weight of 1 will be used.

    There may be more than one tree with the same minimum or maximum weight.
    See :mod:`networkx.tree.recognition` for more detailed definitions.

    Isolated nodes with self-loops are in the tree as edgeless isolated nodes.

    """
    edges = maximum_spanning_edges(
        G, algorithm, weight, keys=True, data=True, ignore_nan=ignore_nan
    )
    T = G.__class__()  # Same graph class as G
    T.graph.update(G.graph)
    T.add_nodes_from(G.nodes.items())
    # Stream the edges straight into T. Materializing them in a list first
    # (`edges = list(edges)`, as this function previously did) only raised
    # peak memory without changing the result; the sibling
    # minimum_spanning_tree consumes the iterator the same way.
    T.add_edges_from(edges)
    return T
+
+
@py_random_state(3)
@nx._dispatchable(preserve_edge_attrs=True, returns_graph=True)
def random_spanning_tree(G, weight=None, *, multiplicative=True, seed=None):
    """
    Sample a random spanning tree using the edges weights of `G`.

    This function supports two different methods for determining the
    probability of the graph. If ``multiplicative=True``, the probability
    is based on the product of edge weights, and if ``multiplicative=False``
    it is based on the sum of the edge weight. However, since it is
    easier to determine the total weight of all spanning trees for the
    multiplicative version, that is significantly faster and should be used if
    possible. Additionally, setting `weight` to `None` will cause a spanning tree
    to be selected with uniform probability.

    The function uses algorithm A8 in [1]_ .

    Parameters
    ----------
    G : nx.Graph
        An undirected version of the original graph.

    weight : string
        The edge key for the edge attribute holding edge weight.

    multiplicative : bool, default=True
        If `True`, the probability of each tree is the product of its edge weight
        over the sum of the product of all the spanning trees in the graph. If
        `False`, the probability is the sum of its edge weight over the sum of
        the sum of weights for all spanning trees in the graph.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    nx.Graph
        A spanning tree using the distribution defined by the weight of the tree.

    References
    ----------
    .. [1] V. Kulkarni, Generating random combinatorial objects, Journal of
       Algorithms, 11 (1990), pp. 185–207
    """

    def find_node(merged_nodes, node):
        """
        We can think of clusters of contracted nodes as having one
        representative in the graph. Each node which is not in merged_nodes
        is still its own representative. Since a representative can be later
        contracted, we need to recursively search though the dict to find
        the final representative, but once we know it we can use path
        compression to speed up the access of the representative for next time.

        This cannot be replaced by the standard NetworkX union_find since that
        data structure will merge nodes with less representing nodes into the
        one with more representing nodes but this function requires we merge
        them using the order that contract_edges contracts using.

        Parameters
        ----------
        merged_nodes : dict
            The dict storing the mapping from node to representative
        node
            The node whose representative we seek

        Returns
        -------
        The representative of the `node`
        """
        if node not in merged_nodes:
            return node
        else:
            rep = find_node(merged_nodes, merged_nodes[node])
            merged_nodes[node] = rep
            return rep

    def prepare_graph():
        """
        For the graph `G`, remove all edges not in the set `V` and then
        contract all edges in the set `U`.

        Returns
        -------
        A copy of `G` which has had all edges not in `V` removed and all edges
        in `U` contracted.
        """

        # The result is a MultiGraph version of G so that parallel edges are
        # allowed during edge contraction
        result = nx.MultiGraph(incoming_graph_data=G)

        # Remove all edges not in V
        edges_to_remove = set(result.edges()).difference(V)
        result.remove_edges_from(edges_to_remove)

        # Contract all edges in U
        #
        # Imagine that you have two edges to contract and they share an
        # endpoint like this:
        #                        [0] ----- [1] ----- [2]
        # If we contract (0, 1) first, the contraction function will always
        # delete the second node it is passed so the resulting graph would be
        #                             [0] ----- [2]
        # and edge (1, 2) no longer exists but (0, 2) would need to be contracted
        # in its place now. That is why I use the below dict as a merge-find
        # data structure with path compression to track how the nodes are merged.
        merged_nodes = {}

        for u, v in U:
            u_rep = find_node(merged_nodes, u)
            v_rep = find_node(merged_nodes, v)
            # We cannot contract a node with itself
            if u_rep == v_rep:
                continue
            nx.contracted_nodes(result, u_rep, v_rep, self_loops=False, copy=False)
            merged_nodes[v_rep] = u_rep

        return merged_nodes, result

    def spanning_tree_total_weight(G, weight):
        """
        Find the sum of weights of the spanning trees of `G` using the
        appropriate `method`.

        This is easy if the chosen method is 'multiplicative', since we can
        use Kirchhoff's Tree Matrix Theorem directly. However, with the
        'additive' method, this process is slightly more complex and less
        computationally efficient as we have to find the number of spanning
        trees which contain each possible edge in the graph.

        Parameters
        ----------
        G : NetworkX Graph
            The graph to find the total weight of all spanning trees on.

        weight : string
            The key for the weight edge attribute of the graph.

        Returns
        -------
        float
            The sum of either the multiplicative or additive weight for all
            spanning trees in the graph.
        """
        if multiplicative:
            return nx.total_spanning_tree_weight(G, weight)
        else:
            # There are two cases for the total spanning tree additive weight.
            # 1. There is one edge in the graph. Then the only spanning tree is
            #    that edge itself, which will have a total weight of that edge
            #    itself.
            if G.number_of_edges() == 1:
                return G.edges(data=weight).__iter__().__next__()[2]
            # 2. There are no edges or two or more edges in the graph. Then, we find the
            #    total weight of the spanning trees using the formula in the
            #    reference paper: take the weight of each edge and multiply it by
            #    the number of spanning trees which include that edge. This
            #    can be accomplished by contracting the edge and finding the
            #    multiplicative total spanning tree weight if the weight of each edge
            #    is assumed to be 1, which is conveniently built into networkx already,
            #    by calling total_spanning_tree_weight with weight=None.
            #    Note that with no edges the returned value is just zero.
            else:
                total = 0
                for u, v, w in G.edges(data=weight):
                    total += w * nx.total_spanning_tree_weight(
                        nx.contracted_edge(G, edge=(u, v), self_loops=False), None
                    )
                return total

    if G.number_of_nodes() < 2:
        # no edges in the spanning tree
        return nx.empty_graph(G.nodes)

    # U holds the edges accepted so far (these get contracted inside
    # prepare_graph), V holds the edges still eligible for the tree, and
    # st_cached_value is the running additive weight of the edges in U
    # (used only when multiplicative=False).
    U = set()
    st_cached_value = 0
    V = set(G.edges())
    shuffled_edges = list(G.edges())
    seed.shuffle(shuffled_edges)

    for u, v in shuffled_edges:
        e_weight = G[u][v][weight] if weight is not None else 1
        node_map, prepared_G = prepare_graph()
        G_total_tree_weight = spanning_tree_total_weight(prepared_G, weight)
        # Add the edge to U so that we can compute the total tree weight
        # assuming we include that edge
        # Now, if (u, v) cannot exist in G because it is fully contracted out
        # of existence, then it by definition cannot influence G_e's Kirchhoff
        # value. But, we also cannot pick it.
        rep_edge = (find_node(node_map, u), find_node(node_map, v))
        # Check to see if the 'representative edge' for the current edge is
        # in prepared_G. If so, then we can pick it.
        if rep_edge in prepared_G.edges:
            prepared_G_e = nx.contracted_edge(
                prepared_G, edge=rep_edge, self_loops=False
            )
            G_e_total_tree_weight = spanning_tree_total_weight(prepared_G_e, weight)
            if multiplicative:
                threshold = e_weight * G_e_total_tree_weight / G_total_tree_weight
            else:
                numerator = (
                    st_cached_value + e_weight
                ) * nx.total_spanning_tree_weight(prepared_G_e) + G_e_total_tree_weight
                denominator = (
                    st_cached_value * nx.total_spanning_tree_weight(prepared_G)
                    + G_total_tree_weight
                )
                threshold = numerator / denominator
        else:
            threshold = 0.0
        # Accept the current edge with probability `threshold`.
        z = seed.uniform(0.0, 1.0)
        if z > threshold:
            # Remove the edge from V since we did not pick it.
            V.remove((u, v))
        else:
            # Add the edge to U since we picked it.
            st_cached_value += e_weight
            U.add((u, v))
        # If we decide to keep an edge, it may complete the spanning tree.
        if len(U) == G.number_of_nodes() - 1:
            spanning_tree = nx.Graph()
            spanning_tree.add_edges_from(U)
            return spanning_tree
    raise Exception(f"Something went wrong! Only {len(U)} edges in the spanning tree!")
+
+
class SpanningTreeIterator:
    """
    Iterate over all spanning trees of a graph in either increasing or
    decreasing cost.

    Notes
    -----
    This iterator uses the partition scheme from [1]_ (included edges,
    excluded edges and open edges) as well as a modified Kruskal's Algorithm
    to generate minimum spanning trees which respect the partition of edges.
    For spanning trees with the same weight, ties are broken arbitrarily.

    References
    ----------
    .. [1] G.K. Janssens, K. Sörensen, An algorithm to generate all spanning
           trees in order of increasing cost, Pesquisa Operacional, 2005-08,
           Vol. 25 (2), p. 219-229,
           https://www.scielo.br/j/pope/a/XHswBwRwJyrfL88dmMwYNWp/?lang=en
    """

    @dataclass(order=True)
    class Partition:
        """
        This dataclass represents a partition and stores a dict with the edge
        data and the weight of the minimum spanning tree of the partition dict.
        """

        # Weight of the partition's spanning tree (negated for maximum-order
        # iteration); the only field considered by the priority queue ordering.
        mst_weight: float
        # Maps an edge to its EdgePartition label; edges absent from the dict
        # are OPEN. Excluded from ordering comparisons.
        partition_dict: dict = field(compare=False)

        def __copy__(self):
            return SpanningTreeIterator.Partition(
                self.mst_weight, self.partition_dict.copy()
            )

    def __init__(self, G, weight="weight", minimum=True, ignore_nan=False):
        """
        Initialize the iterator

        Parameters
        ----------
        G : nx.Graph
            The directed graph which we need to iterate trees over

        weight : String, default = "weight"
            The edge attribute used to store the weight of the edge

        minimum : bool, default = True
            Return the trees in increasing order while true and decreasing order
            while false.

        ignore_nan : bool, default = False
            If a NaN is found as an edge weight normally an exception is raised.
            If `ignore_nan is True` then that edge is ignored instead.
        """
        self.G = G.copy()
        self.G.__networkx_cache__ = None  # Disable caching
        self.weight = weight
        self.minimum = minimum
        self.ignore_nan = ignore_nan
        # A fixed edge-attribute name, chosen to be unlikely to collide with
        # any user attribute, under which each edge's partition label is
        # stored while computing partition spanning trees.
        self.partition_key = (
            "SpanningTreeIterators super secret partition attribute name"
        )

    def __iter__(self):
        """
        Returns
        -------
        SpanningTreeIterator
            The iterator object for this graph
        """
        self.partition_queue = PriorityQueue()
        self._clear_partition(self.G)
        # Seed the queue with the single all-OPEN partition; its priority is
        # the weight of the graph's unconstrained spanning tree (negated for
        # maximum-order iteration so the queue always pops the next tree).
        mst_weight = partition_spanning_tree(
            self.G, self.minimum, self.weight, self.partition_key, self.ignore_nan
        ).size(weight=self.weight)

        self.partition_queue.put(
            self.Partition(mst_weight if self.minimum else -mst_weight, {})
        )

        return self

    def __next__(self):
        """
        Returns
        -------
        (multi)Graph
            The spanning tree of next greatest weight, which ties broken
            arbitrarily.
        """
        if self.partition_queue.empty():
            del self.G, self.partition_queue
            raise StopIteration

        partition = self.partition_queue.get()
        self._write_partition(partition)
        next_tree = partition_spanning_tree(
            self.G, self.minimum, self.weight, self.partition_key, self.ignore_nan
        )
        self._partition(partition, next_tree)

        self._clear_partition(next_tree)
        return next_tree

    def _partition(self, partition, partition_tree):
        """
        Create new partitions based of the minimum spanning tree of the
        current minimum partition.

        Parameters
        ----------
        partition : Partition
            The Partition instance used to generate the current minimum spanning
            tree.
        partition_tree : nx.Graph
            The minimum spanning tree of the input partition.
        """
        # create two new partitions with the data from the input partition dict
        p1 = self.Partition(0, partition.partition_dict.copy())
        p2 = self.Partition(0, partition.partition_dict.copy())
        for e in partition_tree.edges:
            # determine if the edge was open or included
            if e not in partition.partition_dict:
                # This is an open edge
                # Split on e: p1 EXCLUDES it while p2 INCLUDES it, so the two
                # partitions cover the remaining search space without overlap.
                p1.partition_dict[e] = EdgePartition.EXCLUDED
                p2.partition_dict[e] = EdgePartition.INCLUDED

                self._write_partition(p1)
                p1_mst = partition_spanning_tree(
                    self.G,
                    self.minimum,
                    self.weight,
                    self.partition_key,
                    self.ignore_nan,
                )
                p1_mst_weight = p1_mst.size(weight=self.weight)
                # A disconnected result means no spanning tree satisfies p1's
                # constraints, so that partition is a dead end and not queued.
                if nx.is_connected(p1_mst):
                    p1.mst_weight = p1_mst_weight if self.minimum else -p1_mst_weight
                    self.partition_queue.put(p1.__copy__())
                # Continue the loop from p2's dict so that edge e remains
                # INCLUDED in every partition generated by later iterations.
                p1.partition_dict = p2.partition_dict.copy()

    def _write_partition(self, partition):
        """
        Writes the desired partition into the graph to calculate the minimum
        spanning tree.

        Parameters
        ----------
        partition : Partition
            A Partition dataclass describing a partition on the edges of the
            graph.
        """

        partition_dict = partition.partition_dict
        partition_key = self.partition_key
        G = self.G

        edges = (
            G.edges(keys=True, data=True) if G.is_multigraph() else G.edges(data=True)
        )
        # Edges not present in the partition dict default to OPEN.
        for *e, d in edges:
            d[partition_key] = partition_dict.get(tuple(e), EdgePartition.OPEN)

    def _clear_partition(self, G):
        """
        Removes partition data from the graph
        """
        partition_key = self.partition_key
        edges = (
            G.edges(keys=True, data=True) if G.is_multigraph() else G.edges(data=True)
        )
        for *e, d in edges:
            if partition_key in d:
                del d[partition_key]
+
+
@nx._dispatchable(edge_attrs="weight")
def number_of_spanning_trees(G, *, root=None, weight=None):
    """Returns the number of spanning trees in `G`.

    For an undirected graph, a spanning tree is a tree that connects every
    node of the graph. For a directed graph, the analog of a spanning tree
    is a (spanning) arborescence rooted at `root`: it contains a unique
    directed path from `root` to every other node. The directed graph must
    be weakly connected and the root must be a node that includes all nodes
    as successors [3]_. To avoid discussing sink-roots and
    reverse-arborescences, the edge orientation here is reversed relative
    to [3]_ and the in-degree Laplacian is used.

    With ``weight=None`` this function counts spanning trees (undirected)
    or arborescences from `root` (directed). When `weight` names an edge
    attribute, the result is instead the sum, over all spanning trees (or
    arborescences), of each tree's multiplicative weight — the product of
    its edge weights.

    By Kirchhoff's Tree Matrix Theorem, any cofactor of the Laplacian
    matrix of a graph equals the number of spanning trees; a diagonal
    cofactor is the determinant of the Laplacian with one row and its
    matching column removed. The weighted Laplacian yields the weighted
    count instead. This is also known as Kirchhoff's theorem [1]_ and the
    Matrix-Tree theorem [2]_. For directed graphs, the analogous result
    (Tutte's Theorem) uses the cofactor whose removed row and column
    correspond to `root`.

    Parameters
    ----------
    G : NetworkX graph

    root : node
       A node in the directed graph `G` that has all nodes as descendants.
       (This is ignored for undirected graphs.)

    weight : string or None, optional (default=None)
        The name of the edge attribute holding the edge weight.
        If `None`, then each edge is assumed to have a weight of 1.

    Returns
    -------
    Number
        Undirected graphs:
            The number of spanning trees of the graph `G`, or the sum of all
            spanning tree weights where a tree's weight is the product of
            its edge weights.
        Directed graphs:
            The number of arborescences of `G` rooted at `root`, or the
            analogous weighted sum.

    Raises
    ------
    NetworkXPointlessConcept
        If `G` does not contain any nodes.

    NetworkXError
        If the graph `G` is directed and the root node
        is not specified or is not in G.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> round(nx.number_of_spanning_trees(G))
    125

    >>> G = nx.Graph()
    >>> G.add_edge(1, 2, weight=2)
    >>> G.add_edge(1, 3, weight=1)
    >>> G.add_edge(2, 3, weight=1)
    >>> round(nx.number_of_spanning_trees(G, weight="weight"))
    5

    Notes
    -----
    Self-loops are excluded. Multi-edges are contracted in one edge
    equal to the sum of the weights.

    References
    ----------
    .. [1] Wikipedia
       "Kirchhoff's theorem."
       https://en.wikipedia.org/wiki/Kirchhoff%27s_theorem
    .. [2] Kirchhoff, G. R.
        Über die Auflösung der Gleichungen, auf welche man
        bei der Untersuchung der linearen Vertheilung
        Galvanischer Ströme geführt wird
        Annalen der Physik und Chemie, vol. 72, pp. 497-508, 1847.
    .. [3] Margoliash, J.
        "Matrix-Tree Theorem for Directed Graphs"
        https://www.math.uchicago.edu/~may/VIGRE/VIGRE2010/REUPapers/Margoliash.pdf
    """
    import numpy as np

    if G.number_of_nodes() == 0:
        raise nx.NetworkXPointlessConcept("Graph G must contain at least one node.")

    if not G.is_directed():
        # Kirchhoff: any diagonal cofactor of the Laplacian counts the
        # (weighted) spanning trees; a disconnected graph has none.
        if not nx.is_connected(G):
            return 0
        laplacian = nx.laplacian_matrix(G, weight=weight).toarray()
        return float(np.linalg.det(laplacian[1:, 1:]))

    # Directed case: Tutte's theorem with the cofactor taken at the root.
    if root is None:
        raise nx.NetworkXError("Input `root` must be provided when G is directed")
    if root not in G:
        raise nx.NetworkXError("The node root is not in the graph G.")
    if not nx.is_weakly_connected(G):
        return 0

    # Order the nodes with `root` first so that deleting the first row and
    # column of the in-degree Laplacian removes exactly the root's entries.
    nodelist = [root, *(n for n in G if n != root)]
    A = nx.adjacency_matrix(G, nodelist=nodelist, weight=weight)
    in_degree_laplacian = np.diag(A.sum(axis=0)) - A

    return float(np.linalg.det(in_degree_laplacian[1:, 1:]))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/operations.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/operations.py
new file mode 100644
index 00000000..6c3e8394
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/operations.py
@@ -0,0 +1,105 @@
+"""Operations on trees."""
+
+from functools import partial
+from itertools import accumulate, chain
+
+import networkx as nx
+
+__all__ = ["join_trees"]
+
+
+# Argument types don't match dispatching, but allow manual selection of backend
+@nx._dispatchable(graphs=None, returns_graph=True)
+def join_trees(rooted_trees, *, label_attribute=None, first_label=0):
+    """Returns a new rooted tree made by joining `rooted_trees`
+
+    Constructs a new tree by joining each tree in `rooted_trees`.
+    A new root node is added and connected to each of the roots
+    of the input trees. While copying the nodes from the trees,
+    relabeling to integers occurs. If the `label_attribute` is provided,
+    the old node labels will be stored in the new tree under this attribute.
+
+    Parameters
+    ----------
+    rooted_trees : list
+        A list of pairs in which each left element is a NetworkX graph
+        object representing a tree and each right element is the root
+        node of that tree. The nodes of these trees will be relabeled to
+        integers.
+
+    label_attribute : str
+        If provided, the old node labels will be stored in the new tree
+        under this node attribute. If not provided, the original labels
+        of the nodes in the input trees are not stored.
+
+    first_label : int, optional (default=0)
+        Specifies the label for the new root node. If provided, the root node of the joined tree
+        will have this label. If not provided, the root node will default to a label of 0.
+
+    Returns
+    -------
+    NetworkX graph
+        The rooted tree resulting from joining the provided `rooted_trees`. The new tree has a root node
+        labeled as specified by `first_label` (defaulting to 0 if not provided). Subtrees from the input
+        `rooted_trees` are attached to this new root node. Each non-root node, if the `label_attribute`
+        is provided, has an attribute that indicates the original label of the node in the input tree.
+
+    Notes
+    -----
+    Trees are stored in NetworkX as NetworkX Graphs. There is no specific
+    enforcement of the fact that these are trees. Testing for each tree
+    can be done using :func:`networkx.is_tree`.
+
+    Graph, edge, and node attributes are propagated from the given
+    rooted trees to the created tree. If there are any overlapping graph
+    attributes, those from later trees will overwrite those from earlier
+    trees in the tuple of positional arguments.
+
+    Examples
+    --------
+    Join two full balanced binary trees of height *h* to get a full
+    balanced binary tree of depth *h* + 1::
+
+        >>> h = 4
+        >>> left = nx.balanced_tree(2, h)
+        >>> right = nx.balanced_tree(2, h)
+        >>> joined_tree = nx.join_trees([(left, 0), (right, 0)])
+        >>> nx.is_isomorphic(joined_tree, nx.balanced_tree(2, h + 1))
+        True
+
+    """
+    if not rooted_trees:
+        return nx.empty_graph(1)
+
+    # Unzip the zipped list of (tree, root) pairs.
+    trees, roots = zip(*rooted_trees)
+
+    # The join of the trees has the same type as the type of the first tree.
+    R = type(trees[0])()
+
+    lengths = (len(tree) for tree in trees[:-1])
+    first_labels = list(accumulate(lengths, initial=first_label + 1))
+
+    new_roots = []
+    for tree, root, first_node in zip(trees, roots, first_labels):
+        new_root = first_node + list(tree.nodes()).index(root)
+        new_roots.append(new_root)
+
+    # Relabel the nodes so that their union is the integers starting at first_label.
+    relabel = partial(
+        nx.convert_node_labels_to_integers, label_attribute=label_attribute
+    )
+    new_trees = [
+        relabel(tree, first_label=first_label)
+        for tree, first_label in zip(trees, first_labels)
+    ]
+
+    # Add all sets of nodes and edges, attributes
+    for tree in new_trees:
+        R.update(tree)
+
+    # Finally, join the subtrees at the root. We know first_label is unused by the way we relabeled the subtrees.
+    R.add_node(first_label)
+    R.add_edges_from((first_label, root) for root in new_roots)
+
+    return R
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/recognition.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/recognition.py
new file mode 100644
index 00000000..a9eae987
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/recognition.py
@@ -0,0 +1,273 @@
+"""
+Recognition Tests
+=================
+
+A *forest* is an acyclic, undirected graph, and a *tree* is a connected forest.
+Depending on the subfield, there are various conventions for generalizing these
+definitions to directed graphs.
+
+In one convention, directed variants of forest and tree are defined in an
+identical manner, except that the direction of the edges is ignored. In effect,
+each directed edge is treated as a single undirected edge. Then, additional
+restrictions are imposed to define *branchings* and *arborescences*.
+
+In another convention, directed variants of forest and tree correspond to
+the previous convention's branchings and arborescences, respectively. Then two
+new terms, *polyforest* and *polytree*, are defined to correspond to the other
+convention's forest and tree.
+
+Summarizing::
+
+   +-----------------------------+
+   | Convention A | Convention B |
+   +=============================+
+   | forest       | polyforest   |
+   | tree         | polytree     |
+   | branching    | forest       |
+   | arborescence | tree         |
+   +-----------------------------+
+
+Each convention has its reasons. The first convention emphasizes definitional
+similarity in that directed forests and trees are only concerned with
+acyclicity and do not have an in-degree constraint, just as their undirected
+counterparts do not. The second convention emphasizes functional similarity
+in the sense that the directed analog of a spanning tree is a spanning
+arborescence. That is, take any spanning tree and choose one node as the root.
+Then every edge is assigned a direction such that there is a directed path from the
+root to every other node. The result is a spanning arborescence.
+
+NetworkX follows convention "A". Explicitly, these are:
+
+undirected forest
+   An undirected graph with no undirected cycles.
+
+undirected tree
+   A connected, undirected forest.
+
+directed forest
+   A directed graph with no undirected cycles. Equivalently, the underlying
+   graph structure (which ignores edge orientations) is an undirected forest.
+   In convention B, this is known as a polyforest.
+
+directed tree
+   A weakly connected, directed forest. Equivalently, the underlying graph
+   structure (which ignores edge orientations) is an undirected tree. In
+   convention B, this is known as a polytree.
+
+branching
+   A directed forest with each node having, at most, one parent. So the maximum
+   in-degree is equal to 1. In convention B, this is known as a forest.
+
+arborescence
+   A directed tree with each node having, at most, one parent. So the maximum
+   in-degree is equal to 1. In convention B, this is known as a tree.
+
+For trees and arborescences, the adjective "spanning" may be added to designate
+that the graph, when considered as a forest/branching, consists of a single
+tree/arborescence that includes all nodes in the graph. It is true, by
+definition, that every tree/arborescence is spanning with respect to the nodes
+that define the tree/arborescence and so, it might seem redundant to introduce
+the notion of "spanning". However, the nodes may represent a subset of
+nodes from a larger graph, and it is in this context that the term "spanning"
+becomes a useful notion.
+
+"""
+
+import networkx as nx
+
+__all__ = ["is_arborescence", "is_branching", "is_forest", "is_tree"]
+
+
+@nx.utils.not_implemented_for("undirected")
+@nx._dispatchable
+def is_arborescence(G):
+    """
+    Returns True if `G` is an arborescence.
+
+    An arborescence is a directed tree with maximum in-degree equal to 1.
+
+    Parameters
+    ----------
+    G : graph
+        The graph to test.
+
+    Returns
+    -------
+    b : bool
+        A boolean that is True if `G` is an arborescence.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(0, 1), (0, 2), (2, 3), (3, 4)])
+    >>> nx.is_arborescence(G)
+    True
+    >>> G.remove_edge(0, 1)
+    >>> G.add_edge(1, 2)  # maximum in-degree is 2
+    >>> nx.is_arborescence(G)
+    False
+
+    Notes
+    -----
+    In another convention, an arborescence is known as a *tree*.
+
+    See Also
+    --------
+    is_tree
+
+    """
+    return is_tree(G) and max(d for n, d in G.in_degree()) <= 1
+
+
+@nx.utils.not_implemented_for("undirected")
+@nx._dispatchable
+def is_branching(G):
+    """
+    Returns True if `G` is a branching.
+
+    A branching is a directed forest with maximum in-degree equal to 1.
+
+    Parameters
+    ----------
+    G : directed graph
+        The directed graph to test.
+
+    Returns
+    -------
+    b : bool
+        A boolean that is True if `G` is a branching.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4)])
+    >>> nx.is_branching(G)
+    True
+    >>> G.remove_edge(2, 3)
+    >>> G.add_edge(3, 1)  # maximum in-degree is 2
+    >>> nx.is_branching(G)
+    False
+
+    Notes
+    -----
+    In another convention, a branching is also known as a *forest*.
+
+    See Also
+    --------
+    is_forest
+
+    """
+    return is_forest(G) and max(d for n, d in G.in_degree()) <= 1
+
+
+@nx._dispatchable
+def is_forest(G):
+    """
+    Returns True if `G` is a forest.
+
+    A forest is a graph with no undirected cycles.
+
+    For directed graphs, `G` is a forest if the underlying graph is a forest.
+    The underlying graph is obtained by treating each directed edge as a single
+    undirected edge in a multigraph.
+
+    Parameters
+    ----------
+    G : graph
+        The graph to test.
+
+    Returns
+    -------
+    b : bool
+        A boolean that is True if `G` is a forest.
+
+    Raises
+    ------
+    NetworkXPointlessConcept
+        If `G` is empty.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> G.add_edges_from([(1, 2), (1, 3), (2, 4), (2, 5)])
+    >>> nx.is_forest(G)
+    True
+    >>> G.add_edge(4, 1)
+    >>> nx.is_forest(G)
+    False
+
+    Notes
+    -----
+    In another convention, a directed forest is known as a *polyforest* and
+    then *forest* corresponds to a *branching*.
+
+    See Also
+    --------
+    is_branching
+
+    """
+    if len(G) == 0:
+        raise nx.exception.NetworkXPointlessConcept("G has no nodes.")
+
+    if G.is_directed():
+        components = (G.subgraph(c) for c in nx.weakly_connected_components(G))
+    else:
+        components = (G.subgraph(c) for c in nx.connected_components(G))
+
+    return all(len(c) - 1 == c.number_of_edges() for c in components)
+
+
+@nx._dispatchable
+def is_tree(G):
+    """
+    Returns True if `G` is a tree.
+
+    A tree is a connected graph with no undirected cycles.
+
+    For directed graphs, `G` is a tree if the underlying graph is a tree. The
+    underlying graph is obtained by treating each directed edge as a single
+    undirected edge in a multigraph.
+
+    Parameters
+    ----------
+    G : graph
+        The graph to test.
+
+    Returns
+    -------
+    b : bool
+        A boolean that is True if `G` is a tree.
+
+    Raises
+    ------
+    NetworkXPointlessConcept
+        If `G` is empty.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> G.add_edges_from([(1, 2), (1, 3), (2, 4), (2, 5)])
+    >>> nx.is_tree(G)  # n-1 edges
+    True
+    >>> G.add_edge(3, 4)
+    >>> nx.is_tree(G)  # n edges
+    False
+
+    Notes
+    -----
+    In another convention, a directed tree is known as a *polytree* and then
+    *tree* corresponds to an *arborescence*.
+
+    See Also
+    --------
+    is_arborescence
+
+    """
+    if len(G) == 0:
+        raise nx.exception.NetworkXPointlessConcept("G has no nodes.")
+
+    if G.is_directed():
+        is_connected = nx.is_weakly_connected
+    else:
+        is_connected = nx.is_connected
+
+    # A connected graph with no cycles has n-1 edges.
+    return len(G) - 1 == G.number_of_edges() and is_connected(G)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_branchings.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_branchings.py
new file mode 100644
index 00000000..e19ddee3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_branchings.py
@@ -0,0 +1,624 @@
+import math
+from operator import itemgetter
+
+import pytest
+
+np = pytest.importorskip("numpy")
+
+import networkx as nx
+from networkx.algorithms.tree import branchings, recognition
+
+#
+# Explicitly discussed examples from Edmonds paper.
+#
+
+# Used in Figures A-F.
+#
+# fmt: off
+G_array = np.array([
+    # 0   1   2   3   4   5   6   7   8
+    [0, 0, 12, 0, 12, 0, 0, 0, 0],  # 0
+    [4, 0, 0, 0, 0, 13, 0, 0, 0],  # 1
+    [0, 17, 0, 21, 0, 12, 0, 0, 0],  # 2
+    [5, 0, 0, 0, 17, 0, 18, 0, 0],  # 3
+    [0, 0, 0, 0, 0, 0, 0, 12, 0],  # 4
+    [0, 0, 0, 0, 0, 0, 14, 0, 12],  # 5
+    [0, 0, 21, 0, 0, 0, 0, 0, 15],  # 6
+    [0, 0, 0, 19, 0, 0, 15, 0, 0],  # 7
+    [0, 0, 0, 0, 0, 0, 0, 18, 0],  # 8
+], dtype=int)
+
+# Two copies of the graph from the original paper as disconnected components
+G_big_array = np.zeros(np.array(G_array.shape) * 2, dtype=int)
+G_big_array[:G_array.shape[0], :G_array.shape[1]] = G_array
+G_big_array[G_array.shape[0]:, G_array.shape[1]:] = G_array
+
+# fmt: on
+
+
+def G1():
+    G = nx.from_numpy_array(G_array, create_using=nx.MultiDiGraph)
+    return G
+
+
+def G2():
+    # Now we shift all the weights by -10.
+    # Should not affect optimal arborescence, but does affect optimal branching.
+    Garr = G_array.copy()
+    Garr[np.nonzero(Garr)] -= 10
+    G = nx.from_numpy_array(Garr, create_using=nx.MultiDiGraph)
+    return G
+
+
+# An optimal branching for G1 that is also a spanning arborescence. So it is
+# also an optimal spanning arborescence.
+#
+optimal_arborescence_1 = [
+    (0, 2, 12),
+    (2, 1, 17),
+    (2, 3, 21),
+    (1, 5, 13),
+    (3, 4, 17),
+    (3, 6, 18),
+    (6, 8, 15),
+    (8, 7, 18),
+]
+
+# For G2, the optimal branching of G1 (with shifted weights) is no longer
+# an optimal branching, but it is still an optimal spanning arborescence
+# (just with shifted weights). An optimal branching for G2 is similar to what
+# appears in figure G (this is greedy_subopt_branching_1a below), but with the
+# edge (3, 0, 5), which is now (3, 0, -5), removed. Thus, the optimal branching
+# is not a spanning arborescence. The code finds optimal_branching_2a.
+# An alternative and equivalent branching is optimal_branching_2b. We would
+# need to modify the code to iterate through all equivalent optimal branchings.
+#
+# These are maximal branchings or arborescences.
+optimal_branching_2a = [
+    (5, 6, 4),
+    (6, 2, 11),
+    (6, 8, 5),
+    (8, 7, 8),
+    (2, 1, 7),
+    (2, 3, 11),
+    (3, 4, 7),
+]
+optimal_branching_2b = [
+    (8, 7, 8),
+    (7, 3, 9),
+    (3, 4, 7),
+    (3, 6, 8),
+    (6, 2, 11),
+    (2, 1, 7),
+    (1, 5, 3),
+]
+optimal_arborescence_2 = [
+    (0, 2, 2),
+    (2, 1, 7),
+    (2, 3, 11),
+    (1, 5, 3),
+    (3, 4, 7),
+    (3, 6, 8),
+    (6, 8, 5),
+    (8, 7, 8),
+]
+
+# Two suboptimal maximal branchings on G1 obtained from a greedy algorithm.
+# 1a matches what is shown in Figure G in Edmonds's paper.
+greedy_subopt_branching_1a = [
+    (5, 6, 14),
+    (6, 2, 21),
+    (6, 8, 15),
+    (8, 7, 18),
+    (2, 1, 17),
+    (2, 3, 21),
+    (3, 0, 5),
+    (3, 4, 17),
+]
+greedy_subopt_branching_1b = [
+    (8, 7, 18),
+    (7, 6, 15),
+    (6, 2, 21),
+    (2, 1, 17),
+    (2, 3, 21),
+    (1, 5, 13),
+    (3, 0, 5),
+    (3, 4, 17),
+]
+
+
+def build_branching(edges, double=False):
+    G = nx.DiGraph()
+    for u, v, weight in edges:
+        G.add_edge(u, v, weight=weight)
+        if double:
+            G.add_edge(u + 9, v + 9, weight=weight)
+    return G
+
+
+def sorted_edges(G, attr="weight", default=1):
+    edges = [(u, v, data.get(attr, default)) for (u, v, data) in G.edges(data=True)]
+    edges = sorted(edges, key=lambda x: (x[2], x[1], x[0]))
+    return edges
+
+
+def assert_equal_branchings(G1, G2, attr="weight", default=1):
+    edges1 = list(G1.edges(data=True))
+    edges2 = list(G2.edges(data=True))
+    assert len(edges1) == len(edges2)
+
+    # Grab the weights only.
+    e1 = sorted_edges(G1, attr, default)
+    e2 = sorted_edges(G2, attr, default)
+
+    for a, b in zip(e1, e2):
+        assert a[:2] == b[:2]
+        np.testing.assert_almost_equal(a[2], b[2])
+
+
+################
+
+
+def test_optimal_branching1():
+    G = build_branching(optimal_arborescence_1)
+    assert recognition.is_arborescence(G), True
+    assert branchings.branching_weight(G) == 131
+
+
+def test_optimal_branching2a():
+    G = build_branching(optimal_branching_2a)
+    assert recognition.is_arborescence(G), True
+    assert branchings.branching_weight(G) == 53
+
+
+def test_optimal_branching2b():
+    G = build_branching(optimal_branching_2b)
+    assert recognition.is_arborescence(G), True
+    assert branchings.branching_weight(G) == 53
+
+
+def test_optimal_arborescence2():
+    G = build_branching(optimal_arborescence_2)
+    assert recognition.is_arborescence(G), True
+    assert branchings.branching_weight(G) == 51
+
+
+def test_greedy_suboptimal_branching1a():
+    G = build_branching(greedy_subopt_branching_1a)
+    assert recognition.is_arborescence(G), True
+    assert branchings.branching_weight(G) == 128
+
+
+def test_greedy_suboptimal_branching1b():
+    G = build_branching(greedy_subopt_branching_1b)
+    assert recognition.is_arborescence(G), True
+    assert branchings.branching_weight(G) == 127
+
+
+def test_greedy_max1():
+    # Standard test.
+    #
+    G = G1()
+    B = branchings.greedy_branching(G)
+    # There are only two possible greedy branchings. The sorting is such
+    # that it should equal the second suboptimal branching: 1b.
+    B_ = build_branching(greedy_subopt_branching_1b)
+    assert_equal_branchings(B, B_)
+
+
+def test_greedy_branching_kwarg_kind():
+    G = G1()
+    with pytest.raises(nx.NetworkXException, match="Unknown value for `kind`."):
+        B = branchings.greedy_branching(G, kind="lol")
+
+
+def test_greedy_branching_for_unsortable_nodes():
+    G = nx.DiGraph()
+    G.add_weighted_edges_from([((2, 3), 5, 1), (3, "a", 1), (2, 4, 5)])
+    edges = [(u, v, data.get("weight", 1)) for (u, v, data) in G.edges(data=True)]
+    with pytest.raises(TypeError):
+        edges.sort(key=itemgetter(2, 0, 1), reverse=True)
+    B = branchings.greedy_branching(G, kind="max").edges(data=True)
+    assert list(B) == [
+        ((2, 3), 5, {"weight": 1}),
+        (3, "a", {"weight": 1}),
+        (2, 4, {"weight": 5}),
+    ]
+
+
+def test_greedy_max2():
+    # Different default weight.
+    #
+    G = G1()
+    del G[1][0][0]["weight"]
+    B = branchings.greedy_branching(G, default=6)
+    # Chosen so that edge (3,0,5) is not selected and (1,0,6) is instead.
+
+    edges = [
+        (1, 0, 6),
+        (1, 5, 13),
+        (7, 6, 15),
+        (2, 1, 17),
+        (3, 4, 17),
+        (8, 7, 18),
+        (2, 3, 21),
+        (6, 2, 21),
+    ]
+    B_ = build_branching(edges)
+    assert_equal_branchings(B, B_)
+
+
+def test_greedy_max3():
+    # All equal weights.
+    #
+    G = G1()
+    B = branchings.greedy_branching(G, attr=None)
+
+    # This is mostly arbitrary...the output was generated by running the algo.
+    edges = [
+        (2, 1, 1),
+        (3, 0, 1),
+        (3, 4, 1),
+        (5, 8, 1),
+        (6, 2, 1),
+        (7, 3, 1),
+        (7, 6, 1),
+        (8, 7, 1),
+    ]
+    B_ = build_branching(edges)
+    assert_equal_branchings(B, B_, default=1)
+
+
+def test_greedy_min():
+    G = G1()
+    B = branchings.greedy_branching(G, kind="min")
+
+    edges = [
+        (1, 0, 4),
+        (0, 2, 12),
+        (0, 4, 12),
+        (2, 5, 12),
+        (4, 7, 12),
+        (5, 8, 12),
+        (5, 6, 14),
+        (7, 3, 19),
+    ]
+    B_ = build_branching(edges)
+    assert_equal_branchings(B, B_)
+
+
+def test_edmonds1_maxbranch():
+    G = G1()
+    x = branchings.maximum_branching(G)
+    x_ = build_branching(optimal_arborescence_1)
+    assert_equal_branchings(x, x_)
+
+
+def test_edmonds1_maxarbor():
+    G = G1()
+    x = branchings.maximum_spanning_arborescence(G)
+    x_ = build_branching(optimal_arborescence_1)
+    assert_equal_branchings(x, x_)
+
+
+def test_edmonds1_minimal_branching():
+    # graph will have something like a minimum arborescence but no spanning one
+    G = nx.from_numpy_array(G_big_array, create_using=nx.DiGraph)
+    B = branchings.minimal_branching(G)
+    edges = [
+        (3, 0, 5),
+        (0, 2, 12),
+        (0, 4, 12),
+        (2, 5, 12),
+        (4, 7, 12),
+        (5, 8, 12),
+        (5, 6, 14),
+        (2, 1, 17),
+    ]
+    B_ = build_branching(edges, double=True)
+    assert_equal_branchings(B, B_)
+
+
+def test_edmonds2_maxbranch():
+    G = G2()
+    x = branchings.maximum_branching(G)
+    x_ = build_branching(optimal_branching_2a)
+    assert_equal_branchings(x, x_)
+
+
+def test_edmonds2_maxarbor():
+    G = G2()
+    x = branchings.maximum_spanning_arborescence(G)
+    x_ = build_branching(optimal_arborescence_2)
+    assert_equal_branchings(x, x_)
+
+
+def test_edmonds2_minarbor():
+    G = G1()
+    x = branchings.minimum_spanning_arborescence(G)
+    # This was obtained from algorithm. Need to verify it independently.
+    # Branch weight is: 96
+    edges = [
+        (3, 0, 5),
+        (0, 2, 12),
+        (0, 4, 12),
+        (2, 5, 12),
+        (4, 7, 12),
+        (5, 8, 12),
+        (5, 6, 14),
+        (2, 1, 17),
+    ]
+    x_ = build_branching(edges)
+    assert_equal_branchings(x, x_)
+
+
+def test_edmonds3_minbranch1():
+    G = G1()
+    x = branchings.minimum_branching(G)
+    edges = []
+    x_ = build_branching(edges)
+    assert_equal_branchings(x, x_)
+
+
+def test_edmonds3_minbranch2():
+    G = G1()
+    G.add_edge(8, 9, weight=-10)
+    x = branchings.minimum_branching(G)
+    edges = [(8, 9, -10)]
+    x_ = build_branching(edges)
+    assert_equal_branchings(x, x_)
+
+
+# Need more tests
+
+
+def test_mst():
+    # Make sure we get the same results for undirected graphs.
+    # Example from: https://en.wikipedia.org/wiki/Kruskal's_algorithm
+    G = nx.Graph()
+    edgelist = [
+        (0, 3, [("weight", 5)]),
+        (0, 1, [("weight", 7)]),
+        (1, 3, [("weight", 9)]),
+        (1, 2, [("weight", 8)]),
+        (1, 4, [("weight", 7)]),
+        (3, 4, [("weight", 15)]),
+        (3, 5, [("weight", 6)]),
+        (2, 4, [("weight", 5)]),
+        (4, 5, [("weight", 8)]),
+        (4, 6, [("weight", 9)]),
+        (5, 6, [("weight", 11)]),
+    ]
+    G.add_edges_from(edgelist)
+    G = G.to_directed()
+    x = branchings.minimum_spanning_arborescence(G)
+
+    edges = [
+        ({0, 1}, 7),
+        ({0, 3}, 5),
+        ({3, 5}, 6),
+        ({1, 4}, 7),
+        ({4, 2}, 5),
+        ({4, 6}, 9),
+    ]
+
+    assert x.number_of_edges() == len(edges)
+    for u, v, d in x.edges(data=True):
+        assert ({u, v}, d["weight"]) in edges
+
+
+def test_mixed_nodetypes():
+    # Smoke test to make sure no TypeError is raised for mixed node types.
+    G = nx.Graph()
+    edgelist = [(0, 3, [("weight", 5)]), (0, "1", [("weight", 5)])]
+    G.add_edges_from(edgelist)
+    G = G.to_directed()
+    x = branchings.minimum_spanning_arborescence(G)
+
+
+def test_edmonds1_minbranch():
+    # Using -G_array and min should give the same as optimal_arborescence_1,
+    # but with all edges negative.
+    edges = [(u, v, -w) for (u, v, w) in optimal_arborescence_1]
+
+    G = nx.from_numpy_array(-G_array, create_using=nx.DiGraph)
+
+    # Quickly make sure max branching is empty.
+    x = branchings.maximum_branching(G)
+    x_ = build_branching([])
+    assert_equal_branchings(x, x_)
+
+    # Now test the min branching.
+    x = branchings.minimum_branching(G)
+    x_ = build_branching(edges)
+    assert_equal_branchings(x, x_)
+
+
+def test_edge_attribute_preservation_normal_graph():
+    # Test that edge attributes are preserved when finding an optimum graph
+    # using the Edmonds class for normal graphs.
+    G = nx.Graph()
+
+    edgelist = [
+        (0, 1, [("weight", 5), ("otherattr", 1), ("otherattr2", 3)]),
+        (0, 2, [("weight", 5), ("otherattr", 2), ("otherattr2", 2)]),
+        (1, 2, [("weight", 6), ("otherattr", 3), ("otherattr2", 1)]),
+    ]
+    G.add_edges_from(edgelist)
+
+    B = branchings.maximum_branching(G, preserve_attrs=True)
+
+    assert B[0][1]["otherattr"] == 1
+    assert B[0][1]["otherattr2"] == 3
+
+
+def test_edge_attribute_preservation_multigraph():
+    # Test that edge attributes are preserved when finding an optimum graph
+    # using the Edmonds class for multigraphs.
+    G = nx.MultiGraph()
+
+    edgelist = [
+        (0, 1, [("weight", 5), ("otherattr", 1), ("otherattr2", 3)]),
+        (0, 2, [("weight", 5), ("otherattr", 2), ("otherattr2", 2)]),
+        (1, 2, [("weight", 6), ("otherattr", 3), ("otherattr2", 1)]),
+    ]
+    G.add_edges_from(edgelist * 2)  # Make sure we have duplicate edge paths
+
+    B = branchings.maximum_branching(G, preserve_attrs=True)
+
+    assert B[0][1][0]["otherattr"] == 1
+    assert B[0][1][0]["otherattr2"] == 3
+
+
+def test_edge_attribute_discard():
+    # Test that edge attributes are discarded if we do not specify to keep them
+    G = nx.Graph()
+
+    edgelist = [
+        (0, 1, [("weight", 5), ("otherattr", 1), ("otherattr2", 3)]),
+        (0, 2, [("weight", 5), ("otherattr", 2), ("otherattr2", 2)]),
+        (1, 2, [("weight", 6), ("otherattr", 3), ("otherattr2", 1)]),
+    ]
+    G.add_edges_from(edgelist)
+
+    B = branchings.maximum_branching(G, preserve_attrs=False)
+
+    edge_dict = B[0][1]
+    with pytest.raises(KeyError):
+        _ = edge_dict["otherattr"]
+
+
+def test_partition_spanning_arborescence():
+    """
+    Test that we can generate minimum spanning arborescences which respect the
+    given partition.
+    """
+    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
+    G[3][0]["partition"] = nx.EdgePartition.EXCLUDED
+    G[2][3]["partition"] = nx.EdgePartition.INCLUDED
+    G[7][3]["partition"] = nx.EdgePartition.EXCLUDED
+    G[0][2]["partition"] = nx.EdgePartition.EXCLUDED
+    G[6][2]["partition"] = nx.EdgePartition.INCLUDED
+
+    actual_edges = [
+        (0, 4, 12),
+        (1, 0, 4),
+        (1, 5, 13),
+        (2, 3, 21),
+        (4, 7, 12),
+        (5, 6, 14),
+        (5, 8, 12),
+        (6, 2, 21),
+    ]
+
+    B = branchings.minimum_spanning_arborescence(G, partition="partition")
+    assert_equal_branchings(build_branching(actual_edges), B)
+
+
+def test_arborescence_iterator_min():
+    """
+    Tests the arborescence iterator.
+
+    A brute force method found 680 arborescences in this graph.
+    This test will not verify all of them individually, but will check two
+    things
+
+    * The iterator returns 680 arborescences
+    * The weight of the arborescences is non-strictly increasing
+
+    for more information please visit
+    https://mjschwenne.github.io/2021/06/10/implementing-the-iterators.html
+    """
+    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
+
+    arborescence_count = 0
+    arborescence_weight = -math.inf
+    for B in branchings.ArborescenceIterator(G):
+        arborescence_count += 1
+        new_arborescence_weight = B.size(weight="weight")
+        assert new_arborescence_weight >= arborescence_weight
+        arborescence_weight = new_arborescence_weight
+
+    assert arborescence_count == 680
+
+
+def test_arborescence_iterator_max():
+    """
+    Tests the arborescence iterator.
+
+    A brute force method found 680 arborescences in this graph.
+    This test will not verify all of them individually, but will check two
+    things
+
+    * The iterator returns 680 arborescences
+    * The weight of the arborescences is non-strictly decreasing
+
+    for more information please visit
+    https://mjschwenne.github.io/2021/06/10/implementing-the-iterators.html
+    """
+    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
+
+    arborescence_count = 0
+    arborescence_weight = math.inf
+    for B in branchings.ArborescenceIterator(G, minimum=False):
+        arborescence_count += 1
+        new_arborescence_weight = B.size(weight="weight")
+        assert new_arborescence_weight <= arborescence_weight
+        arborescence_weight = new_arborescence_weight
+
+    assert arborescence_count == 680
+
+
+def test_arborescence_iterator_initial_partition():
+    """
+    Tests the arborescence iterator with three included edges and three excluded
+    in the initial partition.
+
+    A brute force method similar to the one used in the above tests found that
+    there are 16 arborescences which contain the included edges and not the
+    excluded edges.
+    """
+    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
+    included_edges = [(1, 0), (5, 6), (8, 7)]
+    excluded_edges = [(0, 2), (3, 6), (1, 5)]
+
+    arborescence_count = 0
+    arborescence_weight = -math.inf
+    for B in branchings.ArborescenceIterator(
+        G, init_partition=(included_edges, excluded_edges)
+    ):
+        arborescence_count += 1
+        new_arborescence_weight = B.size(weight="weight")
+        assert new_arborescence_weight >= arborescence_weight
+        arborescence_weight = new_arborescence_weight
+        for e in included_edges:
+            assert e in B.edges
+        for e in excluded_edges:
+            assert e not in B.edges
+    assert arborescence_count == 16
+
+
+def test_branchings_with_default_weights():
+    """
+    Tests that various branching algorithms work on graphs without weights.
+    For more information, see issue #7279.
+    """
+    graph = nx.erdos_renyi_graph(10, p=0.2, directed=True, seed=123)
+
+    assert all(
+        "weight" not in d for (u, v, d) in graph.edges(data=True)
+    ), "test is for graphs without a weight attribute"
+
+    # Calling these functions will modify graph inplace to add weights
+    # copy the graph to avoid this.
+    nx.minimum_spanning_arborescence(graph.copy())
+    nx.maximum_spanning_arborescence(graph.copy())
+    nx.minimum_branching(graph.copy())
+    nx.maximum_branching(graph.copy())
+    nx.algorithms.tree.minimal_branching(graph.copy())
+    nx.algorithms.tree.branching_weight(graph.copy())
+    nx.algorithms.tree.greedy_branching(graph.copy())
+
+    assert all(
+        "weight" not in d for (u, v, d) in graph.edges(data=True)
+    ), "The above calls should not modify the initial graph in-place"
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_coding.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_coding.py
new file mode 100644
index 00000000..26bd4083
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_coding.py
@@ -0,0 +1,114 @@
+"""Unit tests for the :mod:`~networkx.algorithms.tree.coding` module."""
+
+from itertools import product
+
+import pytest
+
+import networkx as nx
+from networkx.utils import edges_equal, nodes_equal
+
+
+class TestPruferSequence:
+    """Unit tests for the Prüfer sequence encoding and decoding
+    functions.
+
+    """
+
+    def test_nontree(self):
+        with pytest.raises(nx.NotATree):
+            G = nx.cycle_graph(3)
+            nx.to_prufer_sequence(G)
+
+    def test_null_graph(self):
+        with pytest.raises(nx.NetworkXPointlessConcept):
+            nx.to_prufer_sequence(nx.null_graph())
+
+    def test_trivial_graph(self):
+        with pytest.raises(nx.NetworkXPointlessConcept):
+            nx.to_prufer_sequence(nx.trivial_graph())
+
+    def test_bad_integer_labels(self):
+        with pytest.raises(KeyError):
+            T = nx.Graph(nx.utils.pairwise("abc"))
+            nx.to_prufer_sequence(T)
+
+    def test_encoding(self):
+        """Tests for encoding a tree as a Prüfer sequence using the
+        iterative strategy.
+
+        """
+        # Example from Wikipedia.
+        tree = nx.Graph([(0, 3), (1, 3), (2, 3), (3, 4), (4, 5)])
+        sequence = nx.to_prufer_sequence(tree)
+        assert sequence == [3, 3, 3, 4]
+
+    def test_decoding(self):
+        """Tests for decoding a tree from a Prüfer sequence."""
+        # Example from Wikipedia.
+        sequence = [3, 3, 3, 4]
+        tree = nx.from_prufer_sequence(sequence)
+        assert nodes_equal(list(tree), list(range(6)))
+        edges = [(0, 3), (1, 3), (2, 3), (3, 4), (4, 5)]
+        assert edges_equal(list(tree.edges()), edges)
+
+    def test_decoding2(self):
+        # Example from "An Optimal Algorithm for Prufer Codes".
+        sequence = [2, 4, 0, 1, 3, 3]
+        tree = nx.from_prufer_sequence(sequence)
+        assert nodes_equal(list(tree), list(range(8)))
+        edges = [(0, 1), (0, 4), (1, 3), (2, 4), (2, 5), (3, 6), (3, 7)]
+        assert edges_equal(list(tree.edges()), edges)
+
+    def test_inverse(self):
+        """Tests that the encoding and decoding functions are inverses."""
+        for T in nx.nonisomorphic_trees(4):
+            T2 = nx.from_prufer_sequence(nx.to_prufer_sequence(T))
+            assert nodes_equal(list(T), list(T2))
+            assert edges_equal(list(T.edges()), list(T2.edges()))
+
+        for seq in product(range(4), repeat=2):
+            seq2 = nx.to_prufer_sequence(nx.from_prufer_sequence(seq))
+            assert list(seq) == seq2
+
+
+class TestNestedTuple:
+    """Unit tests for the nested tuple encoding and decoding functions."""
+
+    def test_nontree(self):
+        with pytest.raises(nx.NotATree):
+            G = nx.cycle_graph(3)
+            nx.to_nested_tuple(G, 0)
+
+    def test_unknown_root(self):
+        with pytest.raises(nx.NodeNotFound):
+            G = nx.path_graph(2)
+            nx.to_nested_tuple(G, "bogus")
+
+    def test_encoding(self):
+        T = nx.full_rary_tree(2, 2**3 - 1)
+        expected = (((), ()), ((), ()))
+        actual = nx.to_nested_tuple(T, 0)
+        assert nodes_equal(expected, actual)
+
+    def test_canonical_form(self):
+        T = nx.Graph()
+        T.add_edges_from([(0, 1), (0, 2), (0, 3)])
+        T.add_edges_from([(1, 4), (1, 5)])
+        T.add_edges_from([(3, 6), (3, 7)])
+        root = 0
+        actual = nx.to_nested_tuple(T, root, canonical_form=True)
+        expected = ((), ((), ()), ((), ()))
+        assert actual == expected
+
+    def test_decoding(self):
+        balanced = (((), ()), ((), ()))
+        expected = nx.full_rary_tree(2, 2**3 - 1)
+        actual = nx.from_nested_tuple(balanced)
+        assert nx.is_isomorphic(expected, actual)
+
+    def test_sensible_relabeling(self):
+        balanced = (((), ()), ((), ()))
+        T = nx.from_nested_tuple(balanced, sensible_relabeling=True)
+        edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)]
+        assert nodes_equal(list(T), list(range(2**3 - 1)))
+        assert edges_equal(list(T.edges()), edges)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_decomposition.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_decomposition.py
new file mode 100644
index 00000000..8c376053
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_decomposition.py
@@ -0,0 +1,79 @@
+import networkx as nx
+from networkx.algorithms.tree.decomposition import junction_tree
+
+
+def test_junction_tree_directed_confounders():
+    B = nx.DiGraph()
+    B.add_edges_from([("A", "C"), ("B", "C"), ("C", "D"), ("C", "E")])
+
+    G = junction_tree(B)
+    J = nx.Graph()
+    J.add_edges_from(
+        [
+            (("C", "E"), ("C",)),
+            (("C",), ("A", "B", "C")),
+            (("A", "B", "C"), ("C",)),
+            (("C",), ("C", "D")),
+        ]
+    )
+
+    assert nx.is_isomorphic(G, J)
+
+
+def test_junction_tree_directed_unconnected_nodes():
+    B = nx.DiGraph()
+    B.add_nodes_from([("A", "B", "C", "D")])
+    G = junction_tree(B)
+
+    J = nx.Graph()
+    J.add_nodes_from([("A", "B", "C", "D")])
+
+    assert nx.is_isomorphic(G, J)
+
+
+def test_junction_tree_directed_cascade():
+    B = nx.DiGraph()
+    B.add_edges_from([("A", "B"), ("B", "C"), ("C", "D")])
+    G = junction_tree(B)
+
+    J = nx.Graph()
+    J.add_edges_from(
+        [
+            (("A", "B"), ("B",)),
+            (("B",), ("B", "C")),
+            (("B", "C"), ("C",)),
+            (("C",), ("C", "D")),
+        ]
+    )
+    assert nx.is_isomorphic(G, J)
+
+
+def test_junction_tree_directed_unconnected_edges():
+    B = nx.DiGraph()
+    B.add_edges_from([("A", "B"), ("C", "D"), ("E", "F")])
+    G = junction_tree(B)
+
+    J = nx.Graph()
+    J.add_nodes_from([("A", "B"), ("C", "D"), ("E", "F")])
+
+    assert nx.is_isomorphic(G, J)
+
+
+def test_junction_tree_undirected():
+    B = nx.Graph()
+    B.add_edges_from([("A", "C"), ("A", "D"), ("B", "C"), ("C", "E")])
+    G = junction_tree(B)
+
+    J = nx.Graph()
+    J.add_edges_from(
+        [
+            (("A", "D"), ("A",)),
+            (("A",), ("A", "C")),
+            (("A", "C"), ("C",)),
+            (("C",), ("B", "C")),
+            (("B", "C"), ("C",)),
+            (("C",), ("C", "E")),
+        ]
+    )
+
+    assert nx.is_isomorphic(G, J)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_mst.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_mst.py
new file mode 100644
index 00000000..f8945a71
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_mst.py
@@ -0,0 +1,918 @@
+"""Unit tests for the :mod:`networkx.algorithms.tree.mst` module."""
+
+import pytest
+
+import networkx as nx
+from networkx.utils import edges_equal, nodes_equal
+
+
+def test_unknown_algorithm():
+    with pytest.raises(ValueError):
+        nx.minimum_spanning_tree(nx.Graph(), algorithm="random")
+    with pytest.raises(
+        ValueError, match="random is not a valid choice for an algorithm."
+    ):
+        nx.maximum_spanning_edges(nx.Graph(), algorithm="random")
+
+
+class MinimumSpanningTreeTestBase:
+    """Base class for test classes for minimum spanning tree algorithms.
+    This class contains some common tests that will be inherited by
+    subclasses. Each subclass must have a class attribute
+    :data:`algorithm` that is a string representing the algorithm to
+    run, as described under the ``algorithm`` keyword argument for the
+    :func:`networkx.minimum_spanning_edges` function.  Subclasses can
+    then implement any algorithm-specific tests.
+    """
+
+    def setup_method(self, method):
+        """Creates an example graph and stores the expected minimum and
+        maximum spanning tree edges.
+        """
+        # This stores the class attribute `algorithm` in an instance attribute.
+        self.algo = self.algorithm
+        # This example graph comes from Wikipedia:
+        # https://en.wikipedia.org/wiki/Kruskal's_algorithm
+        edges = [
+            (0, 1, 7),
+            (0, 3, 5),
+            (1, 2, 8),
+            (1, 3, 9),
+            (1, 4, 7),
+            (2, 4, 5),
+            (3, 4, 15),
+            (3, 5, 6),
+            (4, 5, 8),
+            (4, 6, 9),
+            (5, 6, 11),
+        ]
+        self.G = nx.Graph()
+        self.G.add_weighted_edges_from(edges)
+        self.minimum_spanning_edgelist = [
+            (0, 1, {"weight": 7}),
+            (0, 3, {"weight": 5}),
+            (1, 4, {"weight": 7}),
+            (2, 4, {"weight": 5}),
+            (3, 5, {"weight": 6}),
+            (4, 6, {"weight": 9}),
+        ]
+        self.maximum_spanning_edgelist = [
+            (0, 1, {"weight": 7}),
+            (1, 2, {"weight": 8}),
+            (1, 3, {"weight": 9}),
+            (3, 4, {"weight": 15}),
+            (4, 6, {"weight": 9}),
+            (5, 6, {"weight": 11}),
+        ]
+
+    def test_minimum_edges(self):
+        edges = nx.minimum_spanning_edges(self.G, algorithm=self.algo)
+        # Edges from the spanning edges functions don't come in sorted
+        # orientation, so we need to sort each edge individually.
+        actual = sorted((min(u, v), max(u, v), d) for u, v, d in edges)
+        assert edges_equal(actual, self.minimum_spanning_edgelist)
+
+    def test_maximum_edges(self):
+        edges = nx.maximum_spanning_edges(self.G, algorithm=self.algo)
+        # Edges from the spanning edges functions don't come in sorted
+        # orientation, so we need to sort each edge individually.
+        actual = sorted((min(u, v), max(u, v), d) for u, v, d in edges)
+        assert edges_equal(actual, self.maximum_spanning_edgelist)
+
+    def test_without_data(self):
+        edges = nx.minimum_spanning_edges(self.G, algorithm=self.algo, data=False)
+        # Edges from the spanning edges functions don't come in sorted
+        # orientation, so we need to sort each edge individually.
+        actual = sorted((min(u, v), max(u, v)) for u, v in edges)
+        expected = [(u, v) for u, v, d in self.minimum_spanning_edgelist]
+        assert edges_equal(actual, expected)
+
+    def test_nan_weights(self):
+        # Edge weights NaN never appear in the spanning tree. see #2164
+        G = self.G
+        G.add_edge(0, 12, weight=float("nan"))
+        edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, data=False, ignore_nan=True
+        )
+        actual = sorted((min(u, v), max(u, v)) for u, v in edges)
+        expected = [(u, v) for u, v, d in self.minimum_spanning_edgelist]
+        assert edges_equal(actual, expected)
+        # Now test for raising exception
+        edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, data=False, ignore_nan=False
+        )
+        with pytest.raises(ValueError):
+            list(edges)
+        # test default for ignore_nan as False
+        edges = nx.minimum_spanning_edges(G, algorithm=self.algo, data=False)
+        with pytest.raises(ValueError):
+            list(edges)
+
+    def test_nan_weights_MultiGraph(self):
+        G = nx.MultiGraph()
+        G.add_edge(0, 12, weight=float("nan"))
+        edges = nx.minimum_spanning_edges(
+            G, algorithm="prim", data=False, ignore_nan=False
+        )
+        with pytest.raises(ValueError):
+            list(edges)
+        # test default for ignore_nan as False
+        edges = nx.minimum_spanning_edges(G, algorithm="prim", data=False)
+        with pytest.raises(ValueError):
+            list(edges)
+
+    def test_nan_weights_order(self):
+        # now try again with a nan edge at the beginning of G.nodes
+        edges = [
+            (0, 1, 7),
+            (0, 3, 5),
+            (1, 2, 8),
+            (1, 3, 9),
+            (1, 4, 7),
+            (2, 4, 5),
+            (3, 4, 15),
+            (3, 5, 6),
+            (4, 5, 8),
+            (4, 6, 9),
+            (5, 6, 11),
+        ]
+        G = nx.Graph()
+        G.add_weighted_edges_from([(u + 1, v + 1, wt) for u, v, wt in edges])
+        G.add_edge(0, 7, weight=float("nan"))
+        edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, data=False, ignore_nan=True
+        )
+        actual = sorted((min(u, v), max(u, v)) for u, v in edges)
+        shift = [(u + 1, v + 1) for u, v, d in self.minimum_spanning_edgelist]
+        assert edges_equal(actual, shift)
+
+    def test_isolated_node(self):
+        # now try again with an isolated node
+        edges = [
+            (0, 1, 7),
+            (0, 3, 5),
+            (1, 2, 8),
+            (1, 3, 9),
+            (1, 4, 7),
+            (2, 4, 5),
+            (3, 4, 15),
+            (3, 5, 6),
+            (4, 5, 8),
+            (4, 6, 9),
+            (5, 6, 11),
+        ]
+        G = nx.Graph()
+        G.add_weighted_edges_from([(u + 1, v + 1, wt) for u, v, wt in edges])
+        G.add_node(0)
+        edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, data=False, ignore_nan=True
+        )
+        actual = sorted((min(u, v), max(u, v)) for u, v in edges)
+        shift = [(u + 1, v + 1) for u, v, d in self.minimum_spanning_edgelist]
+        assert edges_equal(actual, shift)
+
+    def test_minimum_tree(self):
+        T = nx.minimum_spanning_tree(self.G, algorithm=self.algo)
+        actual = sorted(T.edges(data=True))
+        assert edges_equal(actual, self.minimum_spanning_edgelist)
+
+    def test_maximum_tree(self):
+        T = nx.maximum_spanning_tree(self.G, algorithm=self.algo)
+        actual = sorted(T.edges(data=True))
+        assert edges_equal(actual, self.maximum_spanning_edgelist)
+
+    def test_disconnected(self):
+        G = nx.Graph([(0, 1, {"weight": 1}), (2, 3, {"weight": 2})])
+        T = nx.minimum_spanning_tree(G, algorithm=self.algo)
+        assert nodes_equal(list(T), list(range(4)))
+        assert edges_equal(list(T.edges()), [(0, 1), (2, 3)])
+
+    def test_empty_graph(self):
+        G = nx.empty_graph(3)
+        T = nx.minimum_spanning_tree(G, algorithm=self.algo)
+        assert nodes_equal(sorted(T), list(range(3)))
+        assert T.number_of_edges() == 0
+
+    def test_attributes(self):
+        G = nx.Graph()
+        G.add_edge(1, 2, weight=1, color="red", distance=7)
+        G.add_edge(2, 3, weight=1, color="green", distance=2)
+        G.add_edge(1, 3, weight=10, color="blue", distance=1)
+        G.graph["foo"] = "bar"
+        T = nx.minimum_spanning_tree(G, algorithm=self.algo)
+        assert T.graph == G.graph
+        assert nodes_equal(T, G)
+        for u, v in T.edges():
+            assert T.adj[u][v] == G.adj[u][v]
+
+    def test_weight_attribute(self):
+        G = nx.Graph()
+        G.add_edge(0, 1, weight=1, distance=7)
+        G.add_edge(0, 2, weight=30, distance=1)
+        G.add_edge(1, 2, weight=1, distance=1)
+        G.add_node(3)
+        T = nx.minimum_spanning_tree(G, algorithm=self.algo, weight="distance")
+        assert nodes_equal(sorted(T), list(range(4)))
+        assert edges_equal(sorted(T.edges()), [(0, 2), (1, 2)])
+        T = nx.maximum_spanning_tree(G, algorithm=self.algo, weight="distance")
+        assert nodes_equal(sorted(T), list(range(4)))
+        assert edges_equal(sorted(T.edges()), [(0, 1), (0, 2)])
+
+
+class TestBoruvka(MinimumSpanningTreeTestBase):
+    """Unit tests for computing a minimum (or maximum) spanning tree
+    using Borůvka's algorithm.
+    """
+
+    algorithm = "boruvka"
+
+    def test_unicode_name(self):
+        """Tests that using a Unicode string can correctly indicate
+        Borůvka's algorithm.
+        """
+        edges = nx.minimum_spanning_edges(self.G, algorithm="borůvka")
+        # Edges from the spanning edges functions don't come in sorted
+        # orientation, so we need to sort each edge individually.
+        actual = sorted((min(u, v), max(u, v), d) for u, v, d in edges)
+        assert edges_equal(actual, self.minimum_spanning_edgelist)
+
+
+class MultigraphMSTTestBase(MinimumSpanningTreeTestBase):
+    # Abstract class
+
+    def test_multigraph_keys_min(self):
+        """Tests that the minimum spanning edges of a multigraph
+        preserves edge keys.
+        """
+        G = nx.MultiGraph()
+        G.add_edge(0, 1, key="a", weight=2)
+        G.add_edge(0, 1, key="b", weight=1)
+        min_edges = nx.minimum_spanning_edges
+        mst_edges = min_edges(G, algorithm=self.algo, data=False)
+        assert edges_equal([(0, 1, "b")], list(mst_edges))
+
+    def test_multigraph_keys_max(self):
+        """Tests that the maximum spanning edges of a multigraph
+        preserves edge keys.
+        """
+        G = nx.MultiGraph()
+        G.add_edge(0, 1, key="a", weight=2)
+        G.add_edge(0, 1, key="b", weight=1)
+        max_edges = nx.maximum_spanning_edges
+        mst_edges = max_edges(G, algorithm=self.algo, data=False)
+        assert edges_equal([(0, 1, "a")], list(mst_edges))
+
+
+class TestKruskal(MultigraphMSTTestBase):
+    """Unit tests for computing a minimum (or maximum) spanning tree
+    using Kruskal's algorithm.
+    """
+
+    algorithm = "kruskal"
+
+    def test_key_data_bool(self):
+        """Tests that the keys and data values are included in
+        MST edges based on whether keys and data parameters are
+        true or false"""
+        G = nx.MultiGraph()
+        G.add_edge(1, 2, key=1, weight=2)
+        G.add_edge(1, 2, key=2, weight=3)
+        G.add_edge(3, 2, key=1, weight=2)
+        G.add_edge(3, 1, key=1, weight=4)
+
+        # keys are included and data is not included
+        mst_edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, keys=True, data=False
+        )
+        assert edges_equal([(1, 2, 1), (2, 3, 1)], list(mst_edges))
+
+        # keys are not included and data is included
+        mst_edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, keys=False, data=True
+        )
+        assert edges_equal(
+            [(1, 2, {"weight": 2}), (2, 3, {"weight": 2})], list(mst_edges)
+        )
+
+        # both keys and data are not included
+        mst_edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, keys=False, data=False
+        )
+        assert edges_equal([(1, 2), (2, 3)], list(mst_edges))
+
+        # both keys and data are included
+        mst_edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, keys=True, data=True
+        )
+        assert edges_equal(
+            [(1, 2, 1, {"weight": 2}), (2, 3, 1, {"weight": 2})], list(mst_edges)
+        )
+
+
+class TestPrim(MultigraphMSTTestBase):
+    """Unit tests for computing a minimum (or maximum) spanning tree
+    using Prim's algorithm.
+    """
+
+    algorithm = "prim"
+
+    def test_prim_mst_edges_simple_graph(self):
+        H = nx.Graph()
+        H.add_edge(1, 2, key=2, weight=3)
+        H.add_edge(3, 2, key=1, weight=2)
+        H.add_edge(3, 1, key=1, weight=4)
+
+        mst_edges = nx.minimum_spanning_edges(H, algorithm=self.algo, ignore_nan=True)
+        assert edges_equal(
+            [(1, 2, {"key": 2, "weight": 3}), (2, 3, {"key": 1, "weight": 2})],
+            list(mst_edges),
+        )
+
+    def test_ignore_nan(self):
+        """Tests that the edges with NaN weights are ignored or
+        raise an Error based on ignore_nan is true or false"""
+        H = nx.MultiGraph()
+        H.add_edge(1, 2, key=1, weight=float("nan"))
+        H.add_edge(1, 2, key=2, weight=3)
+        H.add_edge(3, 2, key=1, weight=2)
+        H.add_edge(3, 1, key=1, weight=4)
+
+        # NaN weight edges are ignored when ignore_nan=True
+        mst_edges = nx.minimum_spanning_edges(H, algorithm=self.algo, ignore_nan=True)
+        assert edges_equal(
+            [(1, 2, 2, {"weight": 3}), (2, 3, 1, {"weight": 2})], list(mst_edges)
+        )
+
+        # NaN weight edges raise Error when ignore_nan=False
+        with pytest.raises(ValueError):
+            list(nx.minimum_spanning_edges(H, algorithm=self.algo, ignore_nan=False))
+
+    def test_multigraph_keys_tree(self):
+        G = nx.MultiGraph()
+        G.add_edge(0, 1, key="a", weight=2)
+        G.add_edge(0, 1, key="b", weight=1)
+        T = nx.minimum_spanning_tree(G, algorithm=self.algo)
+        assert edges_equal([(0, 1, 1)], list(T.edges(data="weight")))
+
+    def test_multigraph_keys_tree_max(self):
+        G = nx.MultiGraph()
+        G.add_edge(0, 1, key="a", weight=2)
+        G.add_edge(0, 1, key="b", weight=1)
+        T = nx.maximum_spanning_tree(G, algorithm=self.algo)
+        assert edges_equal([(0, 1, 2)], list(T.edges(data="weight")))
+
+
+class TestSpanningTreeIterator:
+    """
+    Tests the spanning tree iterator on the example graph in the 2005 Sörensen
+    and Janssens paper An Algorithm to Generate all Spanning Trees of a Graph in
+    Order of Increasing Cost
+    """
+
+    def setup_method(self):
+        # Original Graph
+        edges = [(0, 1, 5), (1, 2, 4), (1, 4, 6), (2, 3, 5), (2, 4, 7), (3, 4, 3)]
+        self.G = nx.Graph()
+        self.G.add_weighted_edges_from(edges)
+        # List of lists of spanning trees in increasing order
+        self.spanning_trees = [
+            # 1, MST, cost = 17
+            [
+                (0, 1, {"weight": 5}),
+                (1, 2, {"weight": 4}),
+                (2, 3, {"weight": 5}),
+                (3, 4, {"weight": 3}),
+            ],
+            # 2, cost = 18
+            [
+                (0, 1, {"weight": 5}),
+                (1, 2, {"weight": 4}),
+                (1, 4, {"weight": 6}),
+                (3, 4, {"weight": 3}),
+            ],
+            # 3, cost = 19
+            [
+                (0, 1, {"weight": 5}),
+                (1, 4, {"weight": 6}),
+                (2, 3, {"weight": 5}),
+                (3, 4, {"weight": 3}),
+            ],
+            # 4, cost = 19
+            [
+                (0, 1, {"weight": 5}),
+                (1, 2, {"weight": 4}),
+                (2, 4, {"weight": 7}),
+                (3, 4, {"weight": 3}),
+            ],
+            # 5, cost = 20
+            [
+                (0, 1, {"weight": 5}),
+                (1, 2, {"weight": 4}),
+                (1, 4, {"weight": 6}),
+                (2, 3, {"weight": 5}),
+            ],
+            # 6, cost = 21
+            [
+                (0, 1, {"weight": 5}),
+                (1, 4, {"weight": 6}),
+                (2, 4, {"weight": 7}),
+                (3, 4, {"weight": 3}),
+            ],
+            # 7, cost = 21
+            [
+                (0, 1, {"weight": 5}),
+                (1, 2, {"weight": 4}),
+                (2, 3, {"weight": 5}),
+                (2, 4, {"weight": 7}),
+            ],
+            # 8, cost = 23
+            [
+                (0, 1, {"weight": 5}),
+                (1, 4, {"weight": 6}),
+                (2, 3, {"weight": 5}),
+                (2, 4, {"weight": 7}),
+            ],
+        ]
+
+    def test_minimum_spanning_tree_iterator(self):
+        """
+        Tests that the spanning trees are correctly returned in increasing order
+        """
+        tree_index = 0
+        for tree in nx.SpanningTreeIterator(self.G):
+            actual = sorted(tree.edges(data=True))
+            assert edges_equal(actual, self.spanning_trees[tree_index])
+            tree_index += 1
+
+    def test_maximum_spanning_tree_iterator(self):
+        """
+        Tests that the spanning trees are correctly returned in decreasing order
+        """
+        tree_index = 7
+        for tree in nx.SpanningTreeIterator(self.G, minimum=False):
+            actual = sorted(tree.edges(data=True))
+            assert edges_equal(actual, self.spanning_trees[tree_index])
+            tree_index -= 1
+
+
+class TestSpanningTreeMultiGraphIterator:
+    """
+    Uses the same graph as the above class but with an added edge of twice the weight.
+    """
+
+    def setup_method(self):
+        # New graph
+        edges = [
+            (0, 1, 5),
+            (0, 1, 10),
+            (1, 2, 4),
+            (1, 2, 8),
+            (1, 4, 6),
+            (1, 4, 12),
+            (2, 3, 5),
+            (2, 3, 10),
+            (2, 4, 7),
+            (2, 4, 14),
+            (3, 4, 3),
+            (3, 4, 6),
+        ]
+        self.G = nx.MultiGraph()
+        self.G.add_weighted_edges_from(edges)
+
+        # There are 128 trees. I'd rather not list all 128 here, and computing them
+        # on such a small graph actually doesn't take that long.
+        from itertools import combinations
+
+        self.spanning_trees = []
+        for e in combinations(self.G.edges, 4):
+            tree = self.G.edge_subgraph(e)
+            if nx.is_tree(tree):
+                self.spanning_trees.append(sorted(tree.edges(keys=True, data=True)))
+
+    def test_minimum_spanning_tree_iterator_multigraph(self):
+        """
+        Tests that the spanning trees are correctly returned in increasing order
+        """
+        tree_index = 0
+        last_weight = 0
+        for tree in nx.SpanningTreeIterator(self.G):
+            actual = sorted(tree.edges(keys=True, data=True))
+            weight = sum([e[3]["weight"] for e in actual])
+            assert actual in self.spanning_trees
+            assert weight >= last_weight
+            tree_index += 1
+
+    def test_maximum_spanning_tree_iterator_multigraph(self):
+        """
+        Tests that the spanning trees are correctly returned in decreasing order
+        """
+        tree_index = 127
+        # Maximum weight tree is 46
+        last_weight = 50
+        for tree in nx.SpanningTreeIterator(self.G, minimum=False):
+            actual = sorted(tree.edges(keys=True, data=True))
+            weight = sum([e[3]["weight"] for e in actual])
+            assert actual in self.spanning_trees
+            assert weight <= last_weight
+            tree_index -= 1
+
+
+def test_random_spanning_tree_multiplicative_small():
+    """
+    Using a fixed seed, sample one tree for repeatability.
+    """
+    from math import exp
+
+    pytest.importorskip("scipy")
+
+    gamma = {
+        (0, 1): -0.6383,
+        (0, 2): -0.6827,
+        (0, 5): 0,
+        (1, 2): -1.0781,
+        (1, 4): 0,
+        (2, 3): 0,
+        (5, 3): -0.2820,
+        (5, 4): -0.3327,
+        (4, 3): -0.9927,
+    }
+
+    # The undirected support of gamma
+    G = nx.Graph()
+    for u, v in gamma:
+        G.add_edge(u, v, lambda_key=exp(gamma[(u, v)]))
+
+    solution_edges = [(2, 3), (3, 4), (0, 5), (5, 4), (4, 1)]
+    solution = nx.Graph()
+    solution.add_edges_from(solution_edges)
+
+    sampled_tree = nx.random_spanning_tree(G, "lambda_key", seed=42)
+
+    assert nx.utils.edges_equal(solution.edges, sampled_tree.edges)
+
+
+@pytest.mark.slow
+def test_random_spanning_tree_multiplicative_large():
+    """
+    Sample many trees from the distribution created in the last test
+    """
+    from math import exp
+    from random import Random
+
+    pytest.importorskip("numpy")
+    stats = pytest.importorskip("scipy.stats")
+
+    gamma = {
+        (0, 1): -0.6383,
+        (0, 2): -0.6827,
+        (0, 5): 0,
+        (1, 2): -1.0781,
+        (1, 4): 0,
+        (2, 3): 0,
+        (5, 3): -0.2820,
+        (5, 4): -0.3327,
+        (4, 3): -0.9927,
+    }
+
+    # The undirected support of gamma
+    G = nx.Graph()
+    for u, v in gamma:
+        G.add_edge(u, v, lambda_key=exp(gamma[(u, v)]))
+
+    # Find the multiplicative weight for each tree.
+    total_weight = 0
+    tree_expected = {}
+    for t in nx.SpanningTreeIterator(G):
+        # Find the multiplicative weight of the spanning tree
+        weight = 1
+        for u, v, d in t.edges(data="lambda_key"):
+            weight *= d
+        tree_expected[t] = weight
+        total_weight += weight
+
+    # Assert that every tree has an entry in the expected distribution
+    assert len(tree_expected) == 75
+
+    # Set the sample size and then calculate the expected number of times we
+    # expect to see each tree. This test uses a near minimum sample size where
+    # the most unlikely tree has an expected frequency of 5.15.
+    # (Minimum required is 5)
+    #
+    # Here we also initialize the tree_actual dict so that we know the keys
+    # match between the two. We will later take advantage of the fact that since
+    # python 3.7 dict order is guaranteed so the expected and actual data will
+    # have the same order.
+    sample_size = 1200
+    tree_actual = {}
+    for t in tree_expected:
+        tree_expected[t] = (tree_expected[t] / total_weight) * sample_size
+        tree_actual[t] = 0
+
+    # Sample the spanning trees
+    #
+    # Assert that they are actually trees and record which of the 75 trees we
+    # have sampled.
+    #
+    # For repeatability, we want to take advantage of the decorators in NetworkX
+    # to randomly sample the same sample each time. However, if we pass in a
+    # constant seed to sample_spanning_tree we will get the same tree each time.
+    # Instead, we can create our own random number generator with a fixed seed
+    # and pass those into sample_spanning_tree.
+    rng = Random(37)
+    for _ in range(sample_size):
+        sampled_tree = nx.random_spanning_tree(G, "lambda_key", seed=rng)
+        assert nx.is_tree(sampled_tree)
+
+        for t in tree_expected:
+            if nx.utils.edges_equal(t.edges, sampled_tree.edges):
+                tree_actual[t] += 1
+                break
+
+    # Conduct a Chi squared test to see if the actual distribution matches the
+    # expected one at an alpha = 0.05 significance level.
+    #
+    # H_0: The distribution of trees in tree_actual matches the normalized product
+    # of the edge weights in the tree.
+    #
+    # H_a: The distribution of trees in tree_actual follows some other
+    # distribution of spanning trees.
+    _, p = stats.chisquare(list(tree_actual.values()), list(tree_expected.values()))
+
+    # Assert that p is greater than the significance level so that we do not
+    # reject the null hypothesis
+    assert not p < 0.05
+
+
+def test_random_spanning_tree_additive_small():
+    """
+    Sample a single spanning tree from the additive method.
+    """
+    pytest.importorskip("scipy")
+
+    edges = {
+        (0, 1): 1,
+        (0, 2): 1,
+        (0, 5): 3,
+        (1, 2): 2,
+        (1, 4): 3,
+        (2, 3): 3,
+        (5, 3): 4,
+        (5, 4): 5,
+        (4, 3): 4,
+    }
+
+    # Build the graph
+    G = nx.Graph()
+    for u, v in edges:
+        G.add_edge(u, v, weight=edges[(u, v)])
+
+    solution_edges = [(0, 2), (1, 2), (2, 3), (3, 4), (3, 5)]
+    solution = nx.Graph()
+    solution.add_edges_from(solution_edges)
+
+    sampled_tree = nx.random_spanning_tree(
+        G, weight="weight", multiplicative=False, seed=37
+    )
+
+    assert nx.utils.edges_equal(solution.edges, sampled_tree.edges)
+
+
+@pytest.mark.slow
+def test_random_spanning_tree_additive_large():
+    """
+    Sample many spanning trees from the additive method.
+    """
+    from random import Random
+
+    pytest.importorskip("numpy")
+    stats = pytest.importorskip("scipy.stats")
+
+    edges = {
+        (0, 1): 1,
+        (0, 2): 1,
+        (0, 5): 3,
+        (1, 2): 2,
+        (1, 4): 3,
+        (2, 3): 3,
+        (5, 3): 4,
+        (5, 4): 5,
+        (4, 3): 4,
+    }
+
+    # Build the graph
+    G = nx.Graph()
+    for u, v in edges:
+        G.add_edge(u, v, weight=edges[(u, v)])
+
+    # Find the additive weight for each tree.
+    total_weight = 0
+    tree_expected = {}
+    for t in nx.SpanningTreeIterator(G):
+        # Find the additive weight of the spanning tree
+        weight = 0
+        for u, v, d in t.edges(data="weight"):
+            weight += d
+        tree_expected[t] = weight
+        total_weight += weight
+
+    # Assert that every tree has an entry in the expected distribution
+    assert len(tree_expected) == 75
+
+    # Set the sample size and then calculate the expected number of times we
+    # expect to see each tree. This test uses a near minimum sample size where
+    # the most unlikely tree has an expected frequency of 5.07.
+    # (Minimum required is 5)
+    #
+    # Here we also initialize the tree_actual dict so that we know the keys
+    # match between the two. We will later take advantage of the fact that since
+    # python 3.7 dict order is guaranteed so the expected and actual data will
+    # have the same order.
+    sample_size = 500
+    tree_actual = {}
+    for t in tree_expected:
+        tree_expected[t] = (tree_expected[t] / total_weight) * sample_size
+        tree_actual[t] = 0
+
+    # Sample the spanning trees
+    #
+    # Assert that they are actually trees and record which of the 75 trees we
+    # have sampled.
+    #
+    # For repeatability, we want to take advantage of the decorators in NetworkX
+    # to randomly sample the same sample each time. However, if we pass in a
+    # constant seed to sample_spanning_tree we will get the same tree each time.
+    # Instead, we can create our own random number generator with a fixed seed
+    # and pass those into sample_spanning_tree.
+    rng = Random(37)
+    for _ in range(sample_size):
+        sampled_tree = nx.random_spanning_tree(
+            G, "weight", multiplicative=False, seed=rng
+        )
+        assert nx.is_tree(sampled_tree)
+
+        for t in tree_expected:
+            if nx.utils.edges_equal(t.edges, sampled_tree.edges):
+                tree_actual[t] += 1
+                break
+
+    # Conduct a Chi squared test to see if the actual distribution matches the
+    # expected one at an alpha = 0.05 significance level.
+    #
+    # H_0: The distribution of trees in tree_actual matches the normalized product
+    # of the edge weights in the tree.
+    #
+    # H_a: The distribution of trees in tree_actual follows some other
+    # distribution of spanning trees.
+    _, p = stats.chisquare(list(tree_actual.values()), list(tree_expected.values()))
+
+    # Assert that p is greater than the significance level so that we do not
+    # reject the null hypothesis
+    assert not p < 0.05
+
+
def test_random_spanning_tree_empty_graph():
    """A spanning tree of the null graph is the null graph."""
    tree = nx.tree.random_spanning_tree(nx.Graph())
    assert tree.number_of_nodes() == 0
    assert tree.number_of_edges() == 0
+
+
def test_random_spanning_tree_single_node_graph():
    """A spanning tree of a single-node graph keeps the node, no edges."""
    G = nx.Graph()
    G.add_node(0)
    tree = nx.tree.random_spanning_tree(G)
    assert tree.number_of_nodes() == 1
    assert tree.number_of_edges() == 0
+
+
def test_random_spanning_tree_single_node_loop():
    """A self-loop is never part of a spanning tree."""
    G = nx.Graph()
    G.add_edge(0, 0)  # also adds node 0
    tree = nx.tree.random_spanning_tree(G)
    assert tree.number_of_nodes() == 1
    assert tree.number_of_edges() == 0
+
+
class TestNumberSpanningTrees:
    """Tests for `nx.number_of_spanning_trees` (matrix-tree theorem)."""

    @classmethod
    def setup_class(cls):
        # The assertions below use numpy directly, and the implementation
        # requires scipy; skip the whole class if either is missing.
        # Fix: the original bound scipy to an unused local (`sp = ...`);
        # only the importorskip side effect is needed.
        global np
        np = pytest.importorskip("numpy")
        pytest.importorskip("scipy")

    def test_nst_disconnected(self):
        # A disconnected graph has no spanning tree.
        G = nx.empty_graph(2)
        assert np.isclose(nx.number_of_spanning_trees(G), 0)

    def test_nst_no_nodes(self):
        G = nx.Graph()
        with pytest.raises(nx.NetworkXPointlessConcept):
            nx.number_of_spanning_trees(G)

    def test_nst_weight(self):
        G = nx.Graph()
        G.add_edge(1, 2, weight=1)
        G.add_edge(1, 3, weight=1)
        G.add_edge(2, 3, weight=2)
        # weights are ignored
        assert np.isclose(nx.number_of_spanning_trees(G), 3)
        # including weight
        assert np.isclose(nx.number_of_spanning_trees(G, weight="weight"), 5)

    def test_nst_negative_weight(self):
        G = nx.Graph()
        G.add_edge(1, 2, weight=1)
        G.add_edge(1, 3, weight=-1)
        G.add_edge(2, 3, weight=-2)
        # weights are ignored
        assert np.isclose(nx.number_of_spanning_trees(G), 3)
        # including weight
        assert np.isclose(nx.number_of_spanning_trees(G, weight="weight"), -1)

    def test_nst_selfloop(self):
        # self-loops are ignored
        G = nx.complete_graph(3)
        G.add_edge(1, 1)
        assert np.isclose(nx.number_of_spanning_trees(G), 3)

    def test_nst_multigraph(self):
        # Parallel edges count as distinct tree edges.
        G = nx.MultiGraph()
        G.add_edge(1, 2)
        G.add_edge(1, 2)
        G.add_edge(1, 3)
        G.add_edge(2, 3)
        assert np.isclose(nx.number_of_spanning_trees(G), 5)

    def test_nst_complete_graph(self):
        # this is known as Cayley's formula: N ** (N - 2)
        N = 5
        G = nx.complete_graph(N)
        assert np.isclose(nx.number_of_spanning_trees(G), N ** (N - 2))

    def test_nst_path_graph(self):
        G = nx.path_graph(5)
        assert np.isclose(nx.number_of_spanning_trees(G), 1)

    def test_nst_cycle_graph(self):
        G = nx.cycle_graph(5)
        assert np.isclose(nx.number_of_spanning_trees(G), 5)

    def test_nst_directed_noroot(self):
        # Directed graphs require a root for counting arborescences.
        G = nx.empty_graph(3, create_using=nx.MultiDiGraph)
        with pytest.raises(nx.NetworkXError):
            nx.number_of_spanning_trees(G)

    def test_nst_directed_root_not_exist(self):
        G = nx.empty_graph(3, create_using=nx.MultiDiGraph)
        with pytest.raises(nx.NetworkXError):
            nx.number_of_spanning_trees(G, root=42)

    def test_nst_directed_not_weak_connected(self):
        G = nx.DiGraph()
        G.add_edge(1, 2)
        G.add_edge(3, 4)
        assert np.isclose(nx.number_of_spanning_trees(G, root=1), 0)

    def test_nst_directed_cycle_graph(self):
        G = nx.DiGraph()
        G = nx.cycle_graph(7, G)
        assert np.isclose(nx.number_of_spanning_trees(G, root=0), 1)

    def test_nst_directed_complete_graph(self):
        G = nx.DiGraph()
        G = nx.complete_graph(7, G)
        assert np.isclose(nx.number_of_spanning_trees(G, root=0), 7**5)

    def test_nst_directed_multi(self):
        G = nx.MultiDiGraph()
        G = nx.cycle_graph(3, G)
        G.add_edge(1, 2)
        assert np.isclose(nx.number_of_spanning_trees(G, root=0), 2)

    def test_nst_directed_selfloop(self):
        G = nx.MultiDiGraph()
        G = nx.cycle_graph(3, G)
        G.add_edge(1, 1)
        assert np.isclose(nx.number_of_spanning_trees(G, root=0), 1)

    def test_nst_directed_weak_connected(self):
        G = nx.MultiDiGraph()
        G = nx.cycle_graph(3, G)
        G.remove_edge(1, 2)
        assert np.isclose(nx.number_of_spanning_trees(G, root=0), 0)

    def test_nst_directed_weighted(self):
        # from root=1:
        # arborescence 1: 1->2, 1->3, weight=2*1
        # arborescence 2: 1->2, 2->3, weight=2*3
        G = nx.DiGraph()
        G.add_edge(1, 2, weight=2)
        G.add_edge(1, 3, weight=1)
        G.add_edge(2, 3, weight=3)
        Nst = nx.number_of_spanning_trees(G, root=1, weight="weight")
        assert np.isclose(Nst, 8)
        Nst = nx.number_of_spanning_trees(G, root=2, weight="weight")
        assert np.isclose(Nst, 0)
        Nst = nx.number_of_spanning_trees(G, root=3, weight="weight")
        assert np.isclose(Nst, 0)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_operations.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_operations.py
new file mode 100644
index 00000000..284d94e2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_operations.py
@@ -0,0 +1,53 @@
+from itertools import chain
+
+import networkx as nx
+from networkx.utils import edges_equal, nodes_equal
+
+
def _check_custom_label_attribute(input_trees, res_tree, label_attribute):
    """Return True iff `label_attribute` in `res_tree` stores exactly the
    original node labels of every input tree."""
    stored = set(nx.get_node_attributes(res_tree, label_attribute).values())
    original = {node for tree, _root in input_trees for node in tree}
    return stored == original
+
+
def test_empty_sequence():
    """Joining the empty sequence yields only the new root node."""
    joined = nx.join_trees([])
    assert joined.number_of_nodes() == 1
    assert joined.number_of_edges() == 0
+
+
def test_single():
    """Joining one single-node tree gives a two-node path (root + subtree)."""
    trees = [(nx.empty_graph(1), 0)]
    actual = nx.join_trees(trees, label_attribute="custom_label")
    expected = nx.path_graph(2)
    assert nodes_equal(list(expected), list(actual))
    assert edges_equal(list(expected.edges()), list(actual.edges()))
+
+
def test_basic():
    """Joining two full binary trees at a new root gives the next-size one."""
    subtrees = [(nx.full_rary_tree(2, 2**2 - 1), 0) for _ in range(2)]
    expected = nx.full_rary_tree(2, 2**3 - 1)

    labeled = nx.join_trees(subtrees, label_attribute="old_labels")
    assert nx.is_isomorphic(labeled, expected)
    assert _check_custom_label_attribute(subtrees, labeled, "old_labels")

    unlabeled = nx.join_trees(subtrees)
    assert nx.is_isomorphic(unlabeled, expected)
    # No label attribute requested, so no node data should be stored.
    assert all(not data for _, data in unlabeled.nodes(data=True))
+
+
def test_first_label():
    """The first_label argument offsets every node label in the result."""
    trees = [(nx.path_graph(3), 0), (nx.path_graph(2), 0)]
    actual = nx.join_trees(trees, first_label=10)
    assert set(actual.nodes()) == set(range(10, 16))
    # The new root (10) connects to the two relabeled subtree roots.
    assert set(actual.neighbors(10)) == {11, 14}
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_recognition.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_recognition.py
new file mode 100644
index 00000000..105f5a89
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_recognition.py
@@ -0,0 +1,174 @@
+import pytest
+
+import networkx as nx
+
+
class TestTreeRecognition:
    """Recognition tests for trees and forests.

    Subclasses override `graph`/`multigraph` to re-run the suite on other
    graph classes. Fixture naming: T* are trees, F* forests, N* neither.
    """

    graph = nx.Graph
    multigraph = nx.MultiGraph

    @classmethod
    def setup_class(cls):
        cls.T1 = cls.graph()  # null graph

        cls.T2 = cls.graph()  # single node
        cls.T2.add_node(1)

        cls.T3 = cls.graph()  # path on 5 nodes
        nx.add_path(cls.T3, range(5))

        cls.T5 = cls.multigraph()  # same path, multigraph flavor
        nx.add_path(cls.T5, range(5))

        cls.T6 = cls.graph()  # single edge
        cls.T6.add_edge(6, 7)

        cls.F1 = nx.compose(cls.T6, cls.T3)  # two-component forest

        cls.N4 = cls.graph()  # self-loop
        cls.N4.add_edge(1, 1)

        cls.N5 = cls.graph()  # five isolated nodes
        cls.N5.add_nodes_from(range(5))

        cls.N6 = cls.graph()  # triangle
        cls.N6.add_edges_from([(0, 1), (1, 2), (2, 0)])

        cls.NF1 = nx.compose(cls.T6, cls.N6)  # edge + triangle: not a forest

    def test_null_tree(self):
        with pytest.raises(nx.NetworkXPointlessConcept):
            nx.is_tree(self.graph())

    def test_null_tree2(self):
        with pytest.raises(nx.NetworkXPointlessConcept):
            nx.is_tree(self.multigraph())

    def test_null_forest(self):
        with pytest.raises(nx.NetworkXPointlessConcept):
            nx.is_forest(self.graph())

    def test_null_forest2(self):
        with pytest.raises(nx.NetworkXPointlessConcept):
            nx.is_forest(self.multigraph())

    def test_is_tree(self):
        for T in (self.T2, self.T3, self.T5):
            assert nx.is_tree(T)

    def test_is_not_tree(self):
        for N in (self.N4, self.N5, self.N6):
            assert not nx.is_tree(N)

    def test_is_forest(self):
        for F in (self.T2, self.T3, self.T5, self.F1, self.N5):
            assert nx.is_forest(F)

    def test_is_not_forest(self):
        for N in (self.N4, self.N6, self.NF1):
            assert not nx.is_forest(N)
+
+
class TestDirectedTreeRecognition(TestTreeRecognition):
    """Re-run the full recognition suite on directed graph classes."""

    graph = nx.DiGraph
    multigraph = nx.MultiDiGraph
+
+
def test_disconnected_graph():
    # A graph with more than one component is never a tree.
    # https://github.com/networkx/networkx/issues/1144
    edges = [(0, 1), (1, 2), (2, 0), (3, 4)]
    assert not nx.is_tree(nx.Graph(edges))
    assert not nx.is_tree(nx.DiGraph(edges))
+
+
def test_dag_nontree():
    # A DAG with a "shortcut" edge: acyclic, but node 2 has in-degree 2.
    G = nx.DiGraph([(0, 1), (0, 2), (1, 2)])
    assert nx.is_directed_acyclic_graph(G)
    assert not nx.is_tree(G)
+
+
def test_multicycle():
    # Parallel edges: still a DAG, but not a tree.
    G = nx.MultiDiGraph([(0, 1), (0, 1)])
    assert nx.is_directed_acyclic_graph(G)
    assert not nx.is_tree(G)
+
+
def test_emptybranch():
    # An edgeless digraph is a branching but cannot be an arborescence
    # (it is not weakly connected).
    G = nx.empty_graph(10, create_using=nx.DiGraph)
    assert nx.is_branching(G)
    assert not nx.is_arborescence(G)
+
+
def test_is_branching_empty_graph_raises():
    """The null graph is a pointless concept for branching checks."""
    with pytest.raises(nx.NetworkXPointlessConcept, match="G has no nodes."):
        nx.is_branching(nx.DiGraph())
+
+
def test_path():
    # A directed path is both a branching and an arborescence.
    G = nx.path_graph(5, create_using=nx.DiGraph)
    assert nx.is_branching(G)
    assert nx.is_arborescence(G)
+
+
def test_notbranching1():
    # Acyclic violation: a 2-cycle.
    G = nx.MultiDiGraph([(0, 1), (1, 0)])
    G.add_nodes_from(range(10))
    assert not nx.is_branching(G)
    assert not nx.is_arborescence(G)
+
+
def test_notbranching2():
    # In-degree violation: node 2 has two parents.
    G = nx.MultiDiGraph([(0, 1), (0, 2), (3, 2)])
    G.add_nodes_from(range(10))
    assert not nx.is_branching(G)
    assert not nx.is_arborescence(G)
+
+
def test_notarborescence1():
    # A valid branching, but not spanning from a single root.
    G = nx.MultiDiGraph([(0, 1), (0, 2), (1, 3), (5, 6)])
    G.add_nodes_from(range(10))
    assert nx.is_branching(G)
    assert not nx.is_arborescence(G)
+
+
def test_notarborescence2():
    # In-degree violation: node 4 gains a second parent (6).
    G = nx.MultiDiGraph((i, i + 1) for i in range(4))
    G.add_edge(6, 4)
    assert not nx.is_branching(G)
    assert not nx.is_arborescence(G)
+
+
def test_is_arborescense_empty_graph_raises():
    """The null graph is a pointless concept for arborescence checks."""
    with pytest.raises(nx.NetworkXPointlessConcept, match="G has no nodes."):
        nx.is_arborescence(nx.DiGraph())
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/triads.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/triads.py
new file mode 100644
index 00000000..640fc304
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/triads.py
@@ -0,0 +1,604 @@
+# See https://github.com/networkx/networkx/pull/1474
+# Copyright 2011 Reya Group <http://www.reyagroup.com>
+# Copyright 2011 Alex Levenson <alex@isnotinvain.com>
+# Copyright 2011 Diederik van Liere <diederik.vanliere@rotman.utoronto.ca>
+"""Functions for analyzing triads of a graph."""
+
+from collections import defaultdict
+from itertools import combinations, permutations
+
+import networkx as nx
+from networkx.utils import not_implemented_for, py_random_state
+
# Public API of this module; helpers such as _tricode stay private.
__all__ = [
    "triadic_census",
    "is_triad",
    "all_triplets",
    "all_triads",
    "triads_by_type",
    "triad_type",
    "random_triad",
]
+
#: The integer codes representing each type of triad, indexed by the 6-bit
#: edge mask produced by ``_tricode``.
#:
#: Triads that are the same up to symmetry have the same code.
TRICODES = (
    1, 2, 2, 3, 2, 4, 6, 8, 2, 6, 5, 7, 3, 8, 7, 11,
    2, 6, 4, 8, 5, 9, 9, 13, 6, 10, 9, 14, 7, 14, 12, 15,
    2, 5, 6, 7, 6, 9, 10, 14, 4, 9, 9, 12, 8, 13, 14, 15,
    3, 7, 8, 11, 7, 12, 14, 15, 8, 14, 13, 15, 11, 15, 15, 16,
)

#: The names of each type of triad. The order of the elements is
#: important: it corresponds to the tricodes given in :data:`TRICODES`.
TRIAD_NAMES = (
    "003", "012", "102", "021D", "021U", "021C", "111D", "111U",
    "030T", "030C", "201", "120D", "120U", "120C", "210", "300",
)

#: A dictionary mapping each 6-bit edge mask to its triad name.
TRICODE_TO_NAME = {mask: TRIAD_NAMES[code - 1] for mask, code in enumerate(TRICODES)}
+
+
def _tricode(G, v, u, w):
    """Return the integer code of the triad on nodes `v`, `u`, `w`.

    Following Batagelj and Mrvar, each of the six possible directed edges
    among the three nodes contributes one bit to a 6-bit integer.
    (The original implementation reused ``u``/``v`` as comprehension
    variables, shadowing the parameters; renamed here for clarity.)
    """
    links = ((v, u, 1), (u, v, 2), (v, w, 4), (w, v, 8), (u, w, 16), (w, u, 32))
    return sum(bit for tail, head, bit in links if head in G[tail])
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def triadic_census(G, nodelist=None):
    """Determines the triadic census of a directed graph.

    The triadic census is a count of how many of the 16 possible types of
    triads are present in a directed graph. If a list of nodes is passed, then
    only those triads are taken into account which have elements of nodelist in them.

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph
    nodelist : list
        List of nodes for which you want to calculate triadic census

    Returns
    -------
    census : dict
       Dictionary with triad type as keys and number of occurrences as values.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4), (4, 1), (4, 2)])
    >>> triadic_census = nx.triadic_census(G)
    >>> for key, value in triadic_census.items():
    ...     print(f"{key}: {value}")
    003: 0
    012: 0
    102: 0
    021D: 0
    021U: 0
    021C: 0
    111D: 0
    111U: 0
    030T: 2
    030C: 2
    201: 0
    120D: 0
    120U: 0
    120C: 0
    210: 0
    300: 0

    Notes
    -----
    This algorithm has complexity $O(m)$ where $m$ is the number of edges in
    the graph.

    For undirected graphs, the triadic census can be computed by first converting
    the graph into a directed graph using the ``G.to_directed()`` method.
    After this conversion, only the triad types 003, 102, 201 and 300 will be
    present in the undirected scenario.

    Raises
    ------
    ValueError
        If `nodelist` contains duplicate nodes or nodes not in `G`.
        If you want to ignore this you can preprocess with `set(nodelist) & G.nodes`

    See also
    --------
    triad_graph

    References
    ----------
    .. [1] Vladimir Batagelj and Andrej Mrvar, A subquadratic triad census
        algorithm for large sparse networks with small maximum degree,
        University of Ljubljana,
        http://vlado.fmf.uni-lj.si/pub/networks/doc/triads/triads.pdf

    """
    nodeset = set(G.nbunch_iter(nodelist))
    if nodelist is not None and len(nodelist) != len(nodeset):
        raise ValueError("nodelist includes duplicate nodes or nodes not in G")

    N = len(G)
    Nnot = N - len(nodeset)  # can signal special counting for subset of nodes

    # create an ordering of nodes with nodeset nodes first
    m = {n: i for i, n in enumerate(nodeset)}
    if Nnot:
        # add non-nodeset nodes later in the ordering
        not_nodeset = G.nodes - nodeset
        m.update((n, i + N) for i, n in enumerate(not_nodeset))

    # build all_neighbor dicts for easy counting
    # After Python 3.8 can leave off these keys(). Speedup also using G._pred
    # nbrs = {n: G._pred[n].keys() | G._succ[n].keys() for n in G}
    nbrs = {n: G.pred[n].keys() | G.succ[n].keys() for n in G}
    dbl_nbrs = {n: G.pred[n].keys() & G.succ[n].keys() for n in G}

    if Nnot:
        # sgl_nbrs: neighbors connected by exactly one directed edge
        sgl_nbrs = {n: G.pred[n].keys() ^ G.succ[n].keys() for n in not_nodeset}
        # find number of edges not incident to nodes in nodeset
        sgl = sum(1 for n in not_nodeset for nbr in sgl_nbrs[n] if nbr not in nodeset)
        sgl_edges_outside = sgl // 2
        dbl = sum(1 for n in not_nodeset for nbr in dbl_nbrs[n] if nbr not in nodeset)
        dbl_edges_outside = dbl // 2

    # Initialize the count for each triad to be zero.
    census = {name: 0 for name in TRIAD_NAMES}
    # Main loop over nodes
    for v in nodeset:
        vnbrs = nbrs[v]
        dbl_vnbrs = dbl_nbrs[v]
        if Nnot:
            # set up counts of edges attached to v.
            sgl_unbrs_bdy = sgl_unbrs_out = dbl_unbrs_bdy = dbl_unbrs_out = 0
        for u in vnbrs:
            # visit each connected pair {v, u} only once, with v earlier in m
            if m[u] <= m[v]:
                continue
            unbrs = nbrs[u]
            neighbors = (vnbrs | unbrs) - {u, v}
            # Count connected triads.
            for w in neighbors:
                # the ordering condition ensures each triad is coded exactly once
                if m[u] < m[w] or (m[v] < m[w] < m[u] and v not in nbrs[w]):
                    code = _tricode(G, v, u, w)
                    census[TRICODE_TO_NAME[code]] += 1

            # Use a formula for dyadic triads with edge incident to v
            if u in dbl_vnbrs:
                census["102"] += N - len(neighbors) - 2
            else:
                census["012"] += N - len(neighbors) - 2

            # Count edges attached to v. Subtract later to get triads with v isolated
            # _out are (u,unbr) for unbrs outside boundary of nodeset
            # _bdy are (u,unbr) for unbrs on boundary of nodeset (get double counted)
            if Nnot and u not in nodeset:
                sgl_unbrs = sgl_nbrs[u]
                sgl_unbrs_bdy += len(sgl_unbrs & vnbrs - nodeset)
                sgl_unbrs_out += len(sgl_unbrs - vnbrs - nodeset)
                dbl_unbrs = dbl_nbrs[u]
                dbl_unbrs_bdy += len(dbl_unbrs & vnbrs - nodeset)
                dbl_unbrs_out += len(dbl_unbrs - vnbrs - nodeset)
        # if nodeset == G.nodes, skip this b/c we will find the edge later.
        if Nnot:
            # Count edges outside nodeset not connected with v (v isolated triads)
            census["012"] += sgl_edges_outside - (sgl_unbrs_out + sgl_unbrs_bdy // 2)
            census["102"] += dbl_edges_outside - (dbl_unbrs_out + dbl_unbrs_bdy // 2)

    # calculate null triads: "003"
    # null triads = total number of possible triads - all found triads
    total_triangles = (N * (N - 1) * (N - 2)) // 6
    triangles_without_nodeset = (Nnot * (Nnot - 1) * (Nnot - 2)) // 6
    total_census = total_triangles - triangles_without_nodeset
    census["003"] = total_census - sum(census.values())

    return census
+
+
@nx._dispatchable
def is_triad(G):
    """Returns True if the graph G is a triad, else False.

    A triad is a directed graph on exactly three nodes with no self-loops.

    Parameters
    ----------
    G : graph
       A NetworkX Graph

    Returns
    -------
    istriad : boolean
       Whether G is a valid triad

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
    >>> nx.is_triad(G)
    True
    >>> G.add_edge(0, 1)
    >>> nx.is_triad(G)
    False
    """
    if not isinstance(G, nx.Graph):
        return False
    if G.order() != 3 or not nx.is_directed(G):
        return False
    # Self-loops disqualify a triad.
    return not any((n, n) in G.edges() for n in G.nodes())
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def all_triplets(G):
    """Returns a generator of all possible sets of 3 nodes in a DiGraph.

    .. deprecated:: 3.3

       all_triplets is deprecated and will be removed in NetworkX version 3.5.
       Use `itertools.combinations` instead::

          all_triplets = itertools.combinations(G, 3)

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph

    Returns
    -------
    triplets : generator of 3-tuples
       Generator of tuples of 3 nodes

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 4)])
    >>> list(nx.all_triplets(G))
    [(1, 2, 3), (1, 2, 4), (1, 3, 4), (2, 3, 4)]

    """
    import warnings

    warnings.warn(
        (
            "\n\nall_triplets is deprecated and will be removed in v3.5.\n"
            "Use `itertools.combinations(G, 3)` instead."
        ),
        category=DeprecationWarning,
        stacklevel=4,
    )
    # Thin deprecated wrapper around itertools.combinations.
    return combinations(G.nodes(), 3)
+
+
@not_implemented_for("undirected")
@nx._dispatchable(returns_graph=True)
def all_triads(G):
    """A generator of all possible triads in G.

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph

    Returns
    -------
    all_triads : generator of DiGraphs
       Generator of triads (order-3 DiGraphs)

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4), (4, 1), (4, 2)])
    >>> for triad in nx.all_triads(G):
    ...     print(triad.edges)
    [(1, 2), (2, 3), (3, 1)]
    [(1, 2), (4, 1), (4, 2)]
    [(3, 1), (3, 4), (4, 1)]
    [(2, 3), (3, 4), (4, 2)]

    """
    # Copy each induced subgraph so callers get independent graphs.
    for triple in combinations(G.nodes(), 3):
        yield G.subgraph(triple).copy()
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def triads_by_type(G):
    """Returns a list of all triads for each triad type in a directed graph.
    There are exactly 16 different types of triads possible. Suppose 1, 2, 3 are three
    nodes, they will be classified as a particular triad type if their connections
    are as follows:

    - 003: 1, 2, 3
    - 012: 1 -> 2, 3
    - 102: 1 <-> 2, 3
    - 021D: 1 <- 2 -> 3
    - 021U: 1 -> 2 <- 3
    - 021C: 1 -> 2 -> 3
    - 111D: 1 <-> 2 <- 3
    - 111U: 1 <-> 2 -> 3
    - 030T: 1 -> 2 -> 3, 1 -> 3
    - 030C: 1 <- 2 <- 3, 1 -> 3
    - 201: 1 <-> 2 <-> 3
    - 120D: 1 <- 2 -> 3, 1 <-> 3
    - 120U: 1 -> 2 <- 3, 1 <-> 3
    - 120C: 1 -> 2 -> 3, 1 <-> 3
    - 210: 1 -> 2 <-> 3, 1 <-> 3
    - 300: 1 <-> 2 <-> 3, 1 <-> 3

    Refer to the :doc:`example gallery </auto_examples/graph/plot_triad_types>`
    for visual examples of the triad types.

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph

    Returns
    -------
    tri_by_type : dict
       Dictionary with triad types as keys and lists of triads as values.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 1), (5, 6), (5, 4), (6, 7)])
    >>> dict = nx.triads_by_type(G)
    >>> dict["120C"][0].edges()
    OutEdgeView([(1, 2), (1, 3), (2, 3), (3, 1)])
    >>> dict["012"][0].edges()
    OutEdgeView([(1, 2)])

    References
    ----------
    .. [1] Snijders, T. (2012). "Transitivity and triads." University of
        Oxford.
        https://web.archive.org/web/20170830032057/http://www.stats.ox.ac.uk/~snijders/Trans_Triads_ha.pdf
    """
    # Classify every order-3 induced subgraph by its sociological triad type.
    # NOTE: this enumerates O(n^3) triads, which can be expensive on large G.
    classified = defaultdict(list)
    for triad in all_triads(G):
        classified[triad_type(triad)].append(triad)
    return classified
+
+
@not_implemented_for("undirected")
@nx._dispatchable
def triad_type(G):
    """Returns the sociological triad type for a triad.

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph with 3 nodes

    Returns
    -------
    triad_type : str
       A string identifying the triad type

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
    >>> nx.triad_type(G)
    '030C'
    >>> G.add_edge(1, 3)
    >>> nx.triad_type(G)
    '120C'

    Notes
    -----
    There can be 6 unique edges in a triad (order-3 DiGraph) (so 2^6=64 unique
    triads given 3 nodes). These 64 triads each display exactly 1 of 16
    topologies of triads (topologies can be permuted). These topologies are
    identified by the following notation:

    {m}{a}{n}{type} (for example: 111D, 210, 102)

    Here:

    {m}     = number of mutual ties (takes 0, 1, 2, 3); a mutual tie is (0,1)
              AND (1,0)
    {a}     = number of asymmetric ties (takes 0, 1, 2, 3); an asymmetric tie
              is (0,1) BUT NOT (1,0) or vice versa
    {n}     = number of null ties (takes 0, 1, 2, 3); a null tie is NEITHER
              (0,1) NOR (1,0)
    {type}  = a letter (takes U, D, C, T) corresponding to up, down, cyclical
              and transitive. This is only used for topologies that can have
              more than one form (eg: 021D and 021U).

    References
    ----------
    .. [1] Snijders, T. (2012). "Transitivity and triads." University of
        Oxford.
        https://web.archive.org/web/20170830032057/http://www.stats.ox.ac.uk/~snijders/Trans_Triads_ha.pdf
    """
    if not is_triad(G):
        raise nx.NetworkXAlgorithmError("G is not a triad (order-3 DiGraph)")
    num_edges = len(G.edges())
    if num_edges == 0:
        return "003"
    elif num_edges == 1:
        return "012"
    elif num_edges == 2:
        e1, e2 = G.edges()
        # Same endpoint pair twice means one mutual tie.
        if set(e1) == set(e2):
            return "102"
        elif e1[0] == e2[0]:
            return "021D"  # common source
        elif e1[1] == e2[1]:
            return "021U"  # common target
        elif e1[1] == e2[0] or e2[1] == e1[0]:
            return "021C"  # chain
    elif num_edges == 3:
        # Search edge orderings until one matches a recognizable pattern.
        for e1, e2, e3 in permutations(G.edges(), 3):
            if set(e1) == set(e2):
                # e1/e2 form a mutual tie; e3's direction decides U vs D.
                if e3[0] in e1:
                    return "111U"
                # e3[1] in e1:
                return "111D"
            elif set(e1).symmetric_difference(set(e2)) == set(e3):
                # NOTE(review): the first two members of this chained comparison
                # are the same expression, so it reduces to checking that the
                # three edge sources cover all three nodes (a directed cycle).
                if {e1[0], e2[0], e3[0]} == {e1[0], e2[0], e3[0]} == set(G.nodes()):
                    return "030C"
                # e3 == (e1[0], e2[1]) and e2 == (e1[1], e3[1]):
                return "030T"
    elif num_edges == 4:
        for e1, e2, e3, e4 in permutations(G.edges(), 4):
            if set(e1) == set(e2):
                # identify pair of symmetric edges (which necessarily exists)
                if set(e3) == set(e4):
                    return "201"
                if {e3[0]} == {e4[0]} == set(e3).intersection(set(e4)):
                    return "120D"
                if {e3[1]} == {e4[1]} == set(e3).intersection(set(e4)):
                    return "120U"
                if e3[1] == e4[0]:
                    return "120C"
    elif num_edges == 5:
        return "210"
    elif num_edges == 6:
        return "300"
+
+
@not_implemented_for("undirected")
@py_random_state(1)
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def random_triad(G, seed=None):
    """Returns a random triad from a directed graph.

    .. deprecated:: 3.3

       random_triad is deprecated and will be removed in version 3.5.
       Use random sampling directly instead::

          G.subgraph(random.sample(list(G), 3))

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    G2 : subgraph
       A randomly selected triad (order-3 NetworkX DiGraph)

    Raises
    ------
    NetworkXError
        If the input Graph has less than 3 nodes.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 1), (5, 6), (5, 4), (6, 7)])
    >>> triad = nx.random_triad(G, seed=1)
    >>> triad.edges
    OutEdgeView([(1, 2)])

    """
    import warnings

    warnings.warn(
        (
            "\n\nrandom_triad is deprecated and will be removed in NetworkX v3.5.\n"
            "Use random.sample instead, e.g.::\n\n"
            "\tG.subgraph(random.sample(list(G), 3))\n"
        ),
        category=DeprecationWarning,
        stacklevel=5,
    )
    if len(G) < 3:
        raise nx.NetworkXError(
            f"G needs at least 3 nodes to form a triad; (it has {len(G)} nodes)"
        )
    # seed is a random.Random-compatible state injected by @py_random_state.
    chosen_nodes = seed.sample(list(G.nodes()), 3)
    return G.subgraph(chosen_nodes)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/vitality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/vitality.py
new file mode 100644
index 00000000..bf4b016e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/vitality.py
@@ -0,0 +1,76 @@
+"""
+Vitality measures.
+"""
+
+from functools import partial
+
+import networkx as nx
+
+__all__ = ["closeness_vitality"]
+
+
@nx._dispatchable(edge_attrs="weight")
def closeness_vitality(G, node=None, weight=None, wiener_index=None):
    """Returns the closeness vitality for nodes in the graph.

    The *closeness vitality* of a node, defined in Section 3.6.2 of [1],
    is the change in the sum of distances between all node pairs when
    excluding that node.

    Parameters
    ----------
    G : NetworkX graph
        A strongly-connected graph.

    weight : string
        The name of the edge attribute used as weight. This is passed
        directly to the :func:`~networkx.wiener_index` function.

    node : object
        If specified, only the closeness vitality for this node will be
        returned. Otherwise, a dictionary mapping each node to its
        closeness vitality will be returned.

    Other parameters
    ----------------
    wiener_index : number
        If you have already computed the Wiener index of the graph
        `G`, you can provide that value here. Otherwise, it will be
        computed for you.

    Returns
    -------
    dictionary or float
        If `node` is None, this function returns a dictionary
        with nodes as keys and closeness vitality as the
        value. Otherwise, it returns only the closeness vitality for the
        specified `node`.

        The closeness vitality of a node may be negative infinity if
        removing that node would disconnect the graph.

    Examples
    --------
    >>> G = nx.cycle_graph(3)
    >>> nx.closeness_vitality(G)
    {0: 2.0, 1: 2.0, 2: 2.0}

    See Also
    --------
    closeness_centrality

    References
    ----------
    .. [1] Ulrik Brandes, Thomas Erlebach (eds.).
           *Network Analysis: Methodological Foundations*.
           Springer, 2005.
           <http://books.google.com/books?id=TTNhSm7HYrIC>

    """
    # Compute the whole-graph Wiener index once; every per-node call
    # below reuses it via the `wiener_index` keyword.
    if wiener_index is None:
        wiener_index = nx.wiener_index(G, weight=weight)
    if node is not None:
        # Vitality of a single node: drop it and measure how much the
        # sum of pairwise distances shrinks.
        remaining = set(G) - {node}
        index_without_node = nx.wiener_index(G.subgraph(remaining), weight=weight)
        return wiener_index - index_without_node
    # One single-node recursive call per node of the graph.
    return {
        v: closeness_vitality(G, node=v, weight=weight, wiener_index=wiener_index)
        for v in G
    }
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/voronoi.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/voronoi.py
new file mode 100644
index 00000000..609a68de
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/voronoi.py
@@ -0,0 +1,86 @@
+"""Functions for computing the Voronoi cells of a graph."""
+
+import networkx as nx
+from networkx.utils import groups
+
+__all__ = ["voronoi_cells"]
+
+
@nx._dispatchable(edge_attrs="weight")
def voronoi_cells(G, center_nodes, weight="weight"):
    """Returns the Voronoi cells centered at `center_nodes` with respect
    to the shortest-path distance metric.

    If $C$ is a set of nodes in the graph and $c$ is an element of $C$,
    the *Voronoi cell* centered at a node $c$ is the set of all nodes
    $v$ that are closer to $c$ than to any other center node in $C$ with
    respect to the shortest-path distance metric. [1]_

    For directed graphs, this will compute the "outward" Voronoi cells,
    as defined in [1]_, in which distance is measured from the center
    nodes to the target node. For the "inward" Voronoi cells, use the
    :meth:`DiGraph.reverse` method to reverse the orientation of the
    edges before invoking this function on the directed graph.

    Parameters
    ----------
    G : NetworkX graph

    center_nodes : set
        A nonempty set of nodes in the graph `G` that represent the
        center of the Voronoi cells.

    weight : string or function
        The edge attribute (or an arbitrary function) representing the
        weight of an edge. This keyword argument is as described in the
        documentation for :func:`~networkx.multi_source_dijkstra_path`,
        for example.

    Returns
    -------
    dictionary
        A mapping from center node to set of all nodes in the graph
        closer to that center node than to any other center node. The
        keys of the dictionary are the element of `center_nodes`, and
        the values of the dictionary form a partition of the nodes of
        `G`.

    Examples
    --------
    To get only the partition of the graph induced by the Voronoi cells,
    take the collection of all values in the returned dictionary::

        >>> G = nx.path_graph(6)
        >>> center_nodes = {0, 3}
        >>> cells = nx.voronoi_cells(G, center_nodes)
        >>> partition = set(map(frozenset, cells.values()))
        >>> sorted(map(sorted, partition))
        [[0, 1], [2, 3, 4, 5]]

    Raises
    ------
    ValueError
        If `center_nodes` is empty.

    References
    ----------
    .. [1] Erwig, Martin. (2000),"The graph Voronoi diagram with applications."
        *Networks*, 36: 156--163.
        https://doi.org/10.1002/1097-0037(200010)36:3<156::AID-NET2>3.0.CO;2-L

    """
    # Shortest path from the nearest center to every reachable node.
    # multi_source_dijkstra_path raises ValueError if `center_nodes` is empty.
    shortest_paths = nx.multi_source_dijkstra_path(G, center_nodes, weight=weight)
    # The first node on each returned path is the center it originated from.
    nearest_center = {node: path[0] for node, path in shortest_paths.items()}
    # Invert the node -> center mapping into center -> set-of-nodes cells.
    cells = groups(nearest_center)
    # Nodes with no path from any center go under a dedicated key.
    missing = set(G) - nearest_center.keys()
    if missing:
        cells["unreachable"] = missing
    return cells
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/walks.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/walks.py
new file mode 100644
index 00000000..0ef9dac1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/walks.py
@@ -0,0 +1,79 @@
+"""Function for computing walks in a graph."""
+
+import networkx as nx
+
+__all__ = ["number_of_walks"]
+
+
@nx._dispatchable
def number_of_walks(G, walk_length):
    """Returns the number of walks connecting each pair of nodes in `G`

    A *walk* is a sequence of nodes in which each adjacent pair of nodes
    in the sequence is adjacent in the graph. A walk can repeat the same
    edge and go in the opposite direction just as people can walk on a
    set of paths, but standing still is not counted as part of the walk.

    This function only counts the walks with `walk_length` edges. Note that
    the number of nodes in the walk sequence is one more than `walk_length`.
    The number of walks can grow very quickly on a larger graph
    and with a larger walk length.

    Parameters
    ----------
    G : NetworkX graph

    walk_length : int
        A nonnegative integer representing the length of a walk.

    Returns
    -------
    dict
        A dictionary of dictionaries in which outer keys are source
        nodes, inner keys are target nodes, and inner values are the
        number of walks of length `walk_length` connecting those nodes.

    Raises
    ------
    ValueError
        If `walk_length` is negative

    Examples
    --------

    >>> G = nx.Graph([(0, 1), (1, 2)])
    >>> walks = nx.number_of_walks(G, 2)
    >>> walks
    {0: {0: 1, 1: 0, 2: 1}, 1: {0: 0, 1: 2, 2: 0}, 2: {0: 1, 1: 0, 2: 1}}
    >>> total_walks = sum(sum(tgts.values()) for _, tgts in walks.items())

    You can also get the number of walks from a specific source node using the
    returned dictionary. For example, number of walks of length 1 from node 0
    can be found as follows:

    >>> walks = nx.number_of_walks(G, 1)
    >>> walks[0]
    {0: 0, 1: 1, 2: 0}
    >>> sum(walks[0].values())  # walks from 0 of length 1
    1

    Similarly, a target node can also be specified:

    >>> walks[0][1]
    1

    """
    import numpy as np

    if walk_length < 0:
        raise ValueError(f"`walk_length` cannot be negative: {walk_length}")

    adjacency = nx.adjacency_matrix(G, weight=None)
    # Entry (i, j) of A**k counts the walks of length k from node i to node j.
    # TODO: Use matrix_power from scipy.sparse when available
    counts = np.linalg.matrix_power(adjacency.toarray(), walk_length)
    nodes = list(G)
    return {
        source: {target: counts.item(i, j) for j, target in enumerate(nodes)}
        for i, source in enumerate(nodes)
    }
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/wiener.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/wiener.py
new file mode 100644
index 00000000..ac3abe4a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/wiener.py
@@ -0,0 +1,226 @@
+"""Functions related to the Wiener Index of a graph.
+
+The Wiener Index is a topological measure of a graph
+related to the distance between nodes and their degree.
+The Schultz Index and Gutman Index are similar measures.
+They are used categorize molecules via the network of
+atoms connected by chemical bonds. The indices are
+correlated with functional aspects of the molecules.
+
+References
+----------
+.. [1] `Wikipedia: Wiener Index <https://en.wikipedia.org/wiki/Wiener_index>`_
+.. [2] M.V. Diudeaa and I. Gutman, Wiener-Type Topological Indices,
+       Croatica Chemica Acta, 71 (1998), 21-51.
+       https://hrcak.srce.hr/132323
+"""
+
+import itertools as it
+
+import networkx as nx
+
+__all__ = ["wiener_index", "schultz_index", "gutman_index"]
+
+
@nx._dispatchable(edge_attrs="weight")
def wiener_index(G, weight=None):
    """Returns the Wiener index of the given graph.

    The *Wiener index* of a graph is the sum of the shortest-path
    (weighted) distances between each pair of reachable nodes.
    For pairs of nodes in undirected graphs, only one orientation
    of the pair is counted.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default: None)
        If None, every edge has weight 1.
        If a string, use this edge attribute as the edge weight.
        Any edge attribute not present defaults to 1.
        The edge weights are used to computing shortest-path distances.

    Returns
    -------
    number
        The Wiener index of the graph `G`.

    Raises
    ------
    NetworkXError
        If the graph `G` is not connected.

    Notes
    -----
    If a pair of nodes is not reachable, the distance is assumed to be
    infinity. This means that for graphs that are not
    strongly-connected, this function returns ``inf``.

    The Wiener index is not usually defined for directed graphs, however
    this function uses the natural generalization of the Wiener index to
    directed graphs.

    Examples
    --------
    The Wiener index of the (unweighted) complete graph on *n* nodes
    equals the number of pairs of the *n* nodes, since each pair of
    nodes is at distance one::

        >>> n = 10
        >>> G = nx.complete_graph(n)
        >>> nx.wiener_index(G) == n * (n - 1) / 2
        True

    Graphs that are not strongly-connected have infinite Wiener index::

        >>> G = nx.empty_graph(2)
        >>> nx.wiener_index(G)
        inf

    References
    ----------
    .. [1] `Wikipedia: Wiener Index <https://en.wikipedia.org/wiki/Wiener_index>`_
    """
    directed = G.is_directed()
    if directed:
        connected = nx.is_strongly_connected(G)
    else:
        connected = nx.is_connected(G)
    # Any unreachable pair puts the index at infinity, so bail out early.
    if not connected:
        return float("inf")

    distances = nx.shortest_path_length(G, weight=weight)
    total = sum(dist for _, targets in distances for dist in targets.values())
    if directed:
        return total
    # An all-pairs sweep of an undirected graph visits each unordered
    # pair twice; halve so every pair is counted once.
    return total / 2
+
+
@nx.utils.not_implemented_for("directed")
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def schultz_index(G, weight=None):
    r"""Returns the Schultz Index (of the first kind) of `G`

    The *Schultz Index* [3]_ of a graph is the sum over all node pairs of
    distances times the sum of degrees. Consider an undirected graph `G`.
    For each node pair ``(u, v)`` compute ``dist(u, v) * (deg(u) + deg(v))``
    where ``dist`` is the shortest path length between two nodes and ``deg``
    is the degree of a node.

    The Schultz Index is the sum of these quantities over all (unordered)
    pairs of nodes.

    Parameters
    ----------
    G : NetworkX graph
        The undirected graph of interest.
    weight : string or None, optional (default: None)
        If None, every edge has weight 1.
        If a string, use this edge attribute as the edge weight.
        Any edge attribute not present defaults to 1.
        The edge weights are used to compute shortest-path distances
        and weighted node degrees.

    Returns
    -------
    number
        The first kind of Schultz Index of the graph `G`.

    Examples
    --------
    The Schultz Index of the (unweighted) complete graph on *n* nodes
    equals the number of pairs of the *n* nodes times ``2 * (n - 1)``,
    since each pair of nodes is at distance one and the sum of degree
    of two nodes is ``2 * (n - 1)``.

    >>> n = 10
    >>> G = nx.complete_graph(n)
    >>> nx.schultz_index(G) == (n * (n - 1) / 2) * (2 * (n - 1))
    True

    Graph that is disconnected

    >>> nx.schultz_index(nx.empty_graph(2))
    inf

    References
    ----------
    .. [1] I. Gutman, Selected properties of the Schultz molecular topological index,
           J. Chem. Inf. Comput. Sci. 34 (1994), 1087–1089.
           https://doi.org/10.1021/ci00021a009
    .. [2] M.V. Diudeaa and I. Gutman, Wiener-Type Topological Indices,
           Croatica Chemica Acta, 71 (1998), 21-51.
           https://hrcak.srce.hr/132323
    .. [3] H. P. Schultz, Topological organic chemistry. 1.
           Graph theory and topological indices of alkanes,
           J. Chem. Inf. Comput. Sci. 29 (1989), 239–257.

    """
    # A disconnected graph has at least one pair at infinite distance.
    if not nx.is_connected(G):
        return float("inf")

    spl = nx.shortest_path_length(G, weight=weight)
    # Bug fix: the previous ``dict(G.degree, weight=weight)`` passed ``weight``
    # as a keyword to dict(), which injected a spurious "weight" key (clobbering
    # any node literally named "weight") and never applied edge weights to the
    # degrees.  Calling the degree view with ``weight`` yields the intended
    # (possibly weighted) degree of each node.
    d = dict(G.degree(weight=weight))
    # The all-pairs sweep visits each unordered pair twice, hence the / 2.
    return sum(dist * (d[u] + d[v]) for u, info in spl for v, dist in info.items()) / 2
+
+
@nx.utils.not_implemented_for("directed")
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def gutman_index(G, weight=None):
    r"""Returns the Gutman Index for the graph `G`.

    The *Gutman Index* measures the topology of networks, especially for molecule
    networks of atoms connected by bonds [1]_. It is also called the Schultz Index
    of the second kind [2]_.

    Consider an undirected graph `G` with node set ``V``.
    The Gutman Index of a graph is the sum over all (unordered) pairs of nodes
    of nodes ``(u, v)``, with distance ``dist(u, v)`` and degrees ``deg(u)``
    and ``deg(v)``, of ``dist(u, v) * deg(u) * deg(v)``

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default: None)
        If None, every edge has weight 1.
        If a string, use this edge attribute as the edge weight.
        Any edge attribute not present defaults to 1.
        The edge weights are used to compute shortest-path distances
        and weighted node degrees.

    Returns
    -------
    number
        The Gutman Index of the graph `G`.

    Examples
    --------
    The Gutman Index of the (unweighted) complete graph on *n* nodes
    equals the number of pairs of the *n* nodes times ``(n - 1) * (n - 1)``,
    since each pair of nodes is at distance one and the product of degree of two
    vertices is ``(n - 1) * (n - 1)``.

    >>> n = 10
    >>> G = nx.complete_graph(n)
    >>> nx.gutman_index(G) == (n * (n - 1) / 2) * ((n - 1) * (n - 1))
    True

    Graphs that are disconnected

    >>> G = nx.empty_graph(2)
    >>> nx.gutman_index(G)
    inf

    References
    ----------
    .. [1] M.V. Diudeaa and I. Gutman, Wiener-Type Topological Indices,
           Croatica Chemica Acta, 71 (1998), 21-51.
           https://hrcak.srce.hr/132323
    .. [2] I. Gutman, Selected properties of the Schultz molecular topological index,
           J. Chem. Inf. Comput. Sci. 34 (1994), 1087–1089.
           https://doi.org/10.1021/ci00021a009

    """
    # A disconnected graph has at least one pair at infinite distance.
    if not nx.is_connected(G):
        return float("inf")

    spl = nx.shortest_path_length(G, weight=weight)
    # Bug fix: the previous ``dict(G.degree, weight=weight)`` passed ``weight``
    # as a keyword to dict(), which injected a spurious "weight" key (clobbering
    # any node literally named "weight") and never applied edge weights to the
    # degrees.  Calling the degree view with ``weight`` yields the intended
    # (possibly weighted) degree of each node.
    d = dict(G.degree(weight=weight))
    # The all-pairs sweep visits each unordered pair twice, hence the / 2.
    return sum(dist * d[u] * d[v] for u, vinfo in spl for v, dist in vinfo.items()) / 2