From 68d354558e147c9845100420e6be618e6ec8b239 Mon Sep 17 00:00:00 2001 From: Gareth Simons Date: Sat, 26 Oct 2024 07:56:34 +0100 Subject: [PATCH] adds nx_snap_gapped_endings --- .github/workflows/firebase-hosting-merge.yml | 2 +- .../firebase-hosting-pull-request.yml | 2 +- .github/workflows/publish_package.yml | 6 +- .github/workflows/publish_package_dev.yml | 6 +- .vscode/extensions.json | 0 .vscode/settings.json | 20 +- pyproject.toml | 9 +- pysrc/cityseer/config.py | 3 +- pysrc/cityseer/metrics/layers.py | 1 + pysrc/cityseer/metrics/networks.py | 1 + pysrc/cityseer/metrics/observe.py | 3 +- pysrc/cityseer/tools/graphs.py | 211 +++++++++++++++--- pysrc/cityseer/tools/io.py | 7 +- tests/tools/test_graphs.py | 138 +++++++++++- 14 files changed, 340 insertions(+), 69 deletions(-) create mode 100644 .vscode/extensions.json diff --git a/.github/workflows/firebase-hosting-merge.yml b/.github/workflows/firebase-hosting-merge.yml index f7ec9e1f..f7368bbd 100644 --- a/.github/workflows/firebase-hosting-merge.yml +++ b/.github/workflows/firebase-hosting-merge.yml @@ -16,7 +16,7 @@ jobs: - name: Build docs run: | uv sync --dev - python docs/generate_docs.py + poe generate_docs - run: cd docs && npm install && npm run build && cd .. - uses: FirebaseExtended/action-hosting-deploy@v0 with: diff --git a/.github/workflows/firebase-hosting-pull-request.yml b/.github/workflows/firebase-hosting-pull-request.yml index 28149456..3816d46f 100644 --- a/.github/workflows/firebase-hosting-pull-request.yml +++ b/.github/workflows/firebase-hosting-pull-request.yml @@ -13,7 +13,7 @@ jobs: - name: Build docs run: | uv sync --dev - python docs/generate_docs.py + poe generate_docs - run: cd docs && npm install && npm run build && cd .. - uses: FirebaseExtended/action-hosting-deploy@v0 with: diff --git a/.github/workflows/publish_package.yml b/.github/workflows/publish_package.yml index b222ff4f..29255b65 100644 --- a/.github/workflows/publish_package.yml +++ b/.github/workflows/publish_package.yml @@ -23,11 +23,7 @@ jobs: - name: Verify project run: | uv sync - ruff format - ruff check - pyright . - uv build - pytest ./tests + poe verify_project linux: runs-on: ubuntu-latest needs: check diff --git a/.github/workflows/publish_package_dev.yml b/.github/workflows/publish_package_dev.yml index 534789b3..216cc863 100644 --- a/.github/workflows/publish_package_dev.yml +++ b/.github/workflows/publish_package_dev.yml @@ -24,11 +24,7 @@ jobs: - name: Verify project run: | uv sync - ruff format - ruff check - pyright . 
- uv build - pytest ./tests + poe verify_project linux: runs-on: ubuntu-latest needs: check diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 00000000..e69de29b diff --git a/.vscode/settings.json b/.vscode/settings.json index c04f53c9..22ee0d62 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -10,7 +10,6 @@ "scss.validate": false, "stylelint.enable": true, "stylelint.validate": ["css", "postcss", "vue"], - "stylelint.packageManager": "pnpm", "stylelint.reportInvalidScopeDisables": true, "stylelint.reportNeedlessDisables": true, "prettier.documentSelectors": ["**/*.astro"], @@ -20,14 +19,6 @@ "[markdown]": { "editor.defaultFormatter": "esbenp.prettier-vscode" }, - "notebook.lineNumbers": "on", - "notebook.diff.ignoreMetadata": true, - "notebook.diff.ignoreOutputs": true, - "notebook.formatOnSave.enabled": true, - "notebook.codeActionsOnSave": { - "notebook.source.fixAll": "explicit", - "notebook.source.organizeImports": "explicit" - }, "[python]": { "editor.formatOnSave": true, "editor.codeActionsOnSave": { @@ -36,11 +27,8 @@ }, "editor.defaultFormatter": "charliermarsh.ruff" }, - "python.testing.pytestEnabled": true, - "python.analysis.completeFunctionParens": true, - "python.analysis.indexing": true, - "python.analysis.autoImportCompletions": true, - "python.languageServer": "Pylance", - "python.testing.pytestArgs": ["."], - "python.testing.unittestEnabled": false + "[rust]": { + "editor.defaultFormatter": "rust-lang.rust-analyzer", + "editor.formatOnSave": true + } } diff --git a/pyproject.toml b/pyproject.toml index 494302f9..c995667a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,11 +62,11 @@ features = ["pyo3/extension-module"] # uv pending script support # https://github.com/astral-sh/uv/issues/5903#issuecomment-2360856896 -[tool.scripts] +[tool.poe.tasks] generate_docs = "python docs/generate_docs.py" -docs_dev = { shell = "uv run docstyle && uv run generate_docs && npm run dev --prefix docs" } -docs_build = { shell = "uv run docstyle && uv run generate_docs && npm run build --prefix docs" } -docs_preview = { shell = "uv run docstyle && uv run generate_docs && npm run preview --prefix docs" } +docs_dev = { shell = "poe generate_docs && npm run dev --prefix docs" } +docs_build = { shell = "poe generate_docs && npm run build --prefix docs" } +docs_preview = { shell = "poe generate_docs && npm run preview --prefix docs" } verify_project = { shell = "ruff format && ruff check && pyright . 
&& pytest ./tests" } [tool.uv] @@ -87,6 +87,7 @@ dev-dependencies = [ "utm>=0.7.0", "ruff>=0.5.1", "pyright>=1.1.380", + "poethepoet>=0.29.0", ] [build-system] diff --git a/pysrc/cityseer/config.py b/pysrc/cityseer/config.py index f4e98104..3b079b8f 100644 --- a/pysrc/cityseer/config.py +++ b/pysrc/cityseer/config.py @@ -7,9 +7,10 @@ from queue import Queue import numpy as np -from cityseer import rustalgos from tqdm import tqdm +from cityseer import rustalgos + np.seterr(invalid="ignore") diff --git a/pysrc/cityseer/metrics/layers.py b/pysrc/cityseer/metrics/layers.py index a5b99d6b..a8347feb 100644 --- a/pysrc/cityseer/metrics/layers.py +++ b/pysrc/cityseer/metrics/layers.py @@ -4,6 +4,7 @@ from functools import partial import geopandas as gpd + from cityseer import config, rustalgos logging.basicConfig(level=logging.INFO) diff --git a/pysrc/cityseer/metrics/networks.py b/pysrc/cityseer/metrics/networks.py index 482ff685..d5dcb104 100644 --- a/pysrc/cityseer/metrics/networks.py +++ b/pysrc/cityseer/metrics/networks.py @@ -59,6 +59,7 @@ from typing import Any import geopandas as gpd + from cityseer import config, rustalgos logging.basicConfig(level=logging.INFO) diff --git a/pysrc/cityseer/metrics/observe.py b/pysrc/cityseer/metrics/observe.py index 3fe26d97..ca942926 100644 --- a/pysrc/cityseer/metrics/observe.py +++ b/pysrc/cityseer/metrics/observe.py @@ -18,9 +18,10 @@ import networkx as nx import numpy as np +from tqdm import tqdm + from cityseer import config from cityseer.tools import graphs -from tqdm import tqdm logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) diff --git a/pysrc/cityseer/tools/graphs.py b/pysrc/cityseer/tools/graphs.py index 3586720c..27fa8afb 100644 --- a/pysrc/cityseer/tools/graphs.py +++ b/pysrc/cityseer/tools/graphs.py @@ -15,11 +15,12 @@ import networkx as nx import numpy as np +from shapely import BufferCapStyle, geometry, ops +from tqdm import tqdm + from cityseer import config from cityseer.tools import util from cityseer.tools.util import EdgeData, ListCoordsType, MultiGraph, NodeData, NodeKey -from shapely import geometry, ops -from tqdm import tqdm logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) @@ -118,7 +119,7 @@ def nx_remove_filler_nodes(nx_multigraph: MultiGraph) -> MultiGraph: removed_nodes: set[NodeKey] = set() # iterates the original graph, but changes are written to the copied version (to avoid in-place snafus) nd_key: NodeKey - for nd_key in tqdm(nx_multigraph.nodes(), disable=config.QUIET_MODE): + for nd_key in tqdm(sorted(nx_multigraph.nodes()), disable=config.QUIET_MODE): # some nodes will already have been removed if nd_key in removed_nodes: continue @@ -126,7 +127,7 @@ def nx_remove_filler_nodes(nx_multigraph: MultiGraph) -> MultiGraph: if nx.degree(nx_multigraph, nd_key) == 2: # pick the first neighbour and follow the chain until a non-simple node is encountered # this will become the starting point of the chain of simple nodes to be consolidated - nbs: list[NodeKey] = list(nx.neighbors(nx_multigraph, nd_key)) + nbs: list[NodeKey] = sorted(list(nx.neighbors(nx_multigraph, nd_key))) # catch the edge case where a single dead-end node has two out-edges to a single neighbour if len(nbs) == 1: continue @@ -146,7 +147,7 @@ def nx_remove_filler_nodes(nx_multigraph: MultiGraph) -> MultiGraph: # probe neighbours in one-direction only - i.e. 
don't backtrack nb_a: NodeKey nb_b: NodeKey - nb_a, nb_b = list(nx.neighbors(nx_multigraph, nb_nd_key)) + nb_a, nb_b = sorted(list(nx.neighbors(nx_multigraph, nb_nd_key))) if nb_a == next_link_nd: next_link_nd = nb_nd_key nb_nd_key = nb_b @@ -194,13 +195,13 @@ def nx_remove_filler_nodes(nx_multigraph: MultiGraph) -> MultiGraph: # get the next set of neighbours # in the above-mentioned edge-case, a single dead-end node with two edges back to a start node # will only have one neighbour - new_nbs: list[NodeKey] = list(nx.neighbors(nx_multigraph, next_link_nd)) + new_nbs: list[NodeKey] = sorted(list(nx.neighbors(nx_multigraph, next_link_nd))) if len(new_nbs) == 1: trailing_nd = next_link_nd next_link_nd = new_nbs[0] # but in almost all cases there will be two neighbours, one of which will be the previous node else: - nb_a, nb_b = list(nx.neighbors(nx_multigraph, next_link_nd)) + nb_a, nb_b = sorted(list(nx.neighbors(nx_multigraph, next_link_nd))) # proceed to the new_next node if nb_a == trailing_nd: trailing_nd = next_link_nd @@ -280,7 +281,7 @@ def nx_remove_dangling_nodes( g_multi_copy.add_nodes_from(subgraph.nodes(data=True)) g_multi_copy.add_edges_from(subgraph.edges(data=True)) - # remove dangleres + # remove danglers if despine > 0: remove_nodes = [] nd_key: NodeKey @@ -928,6 +929,114 @@ def recursive_squash( return _multi_graph +def nx_snap_gapped_endings( + nx_multigraph: nx.MultiGraph, + buffer_dist: float = 12, + osm_hwy_target_tags: list[str] | None = None, + osm_matched_tags_only: bool = False, +) -> nx.MultiGraph: + """ + Snap gapped dead-end street endings to nearby dead-end endings within the buffer distance. + + Joins pairs of degree-1 nodes with a new straight-line edge where this would not cross existing edges. If + osm_hwy_target_tags is provided, only nodes adjoining edges with matching OSM highway tags are considered; if + osm_matched_tags_only is True, candidate node pairs must also share a name tag. + """ + if not isinstance(nx_multigraph, nx.MultiGraph): + raise TypeError("This method requires an undirected networkX MultiGraph.") + _multi_graph = nx_multigraph.copy() + # if using OSM tags heuristic + hwy_tags = _extract_tags_to_set(osm_hwy_target_tags) + # create a nodes STRtree + nodes_tree, node_lookups = util.create_nodes_strtree(_multi_graph) + # create an edges STRtree + edges_tree, edge_lookups = util.create_edges_strtree(_multi_graph) + # iter + logger.info("Snapping gapped endings.") + # iterate origin graph + for nd_key, nd_data in tqdm(nx_multigraph.nodes(data=True), disable=config.QUIET_MODE): + # only consider dead-end nodes of degree 1 + nd_degree = nx.degree(nx_multigraph, nd_key) + if nd_degree != 1: + continue + # check tags + if osm_hwy_target_tags: + nb_hwy_tags = _gather_nb_tags(nx_multigraph, nd_key, "highways") + if not hwy_tags.intersection(nb_hwy_tags): + continue + # get name tags for matching against potential gapped edges + nb_name_tags = _gather_nb_name_tags(nx_multigraph, nd_key) + # get all other nodes within the buffer distance + # the spatial index uses bounding boxes, so further filtering is required (see further down) + n_point = geometry.Point(nd_data["x"], nd_data["y"]) + # spatial query from the buffered point returns all candidate nodes within buffer_dist + node_hits: list[int] = nodes_tree.query(n_point.buffer(buffer_dist)) # type: ignore + # gather other degree-1 nodes within the buffer + node_keys: list = [] + for node_hit_idx in node_hits: + j_nd_key = node_lookups[node_hit_idx]["nd_key"] + if j_nd_key == nd_key: + continue + j_nd_degree = node_lookups[node_hit_idx]["nd_degree"] + if j_nd_degree == 1: + node_keys.append(j_nd_key) + # abort if no gapped nodes + if not node_keys: + continue + # prepare the root node's point geom + n_geom = geometry.Point(nd_data["x"], nd_data["y"]) + # iter gapped nodes + for j_nd_key in node_keys: + # check distance + j_nd_data = nx_multigraph.nodes[j_nd_key] + j_geom = 
geometry.Point(j_nd_data["x"], j_nd_data["y"]) + if n_geom.distance(j_geom) > buffer_dist: + continue + # hwy tags + if osm_hwy_target_tags: + edge_hwy_tags = _gather_nb_tags(nx_multigraph, j_nd_key, "highways") + if not hwy_tags.intersection(edge_hwy_tags): + continue + # name tags + if osm_matched_tags_only is True: + edge_name_tags = _gather_nb_name_tags(nx_multigraph, j_nd_key) + if not nb_name_tags.intersection(edge_name_tags): + continue + # create new geom + new_geom = geometry.LineString( + [ + [nd_data["x"], nd_data["y"]], + [j_nd_data["x"], j_nd_data["y"]], + ] + ) + # don't add new edges that would criss-cross existing + bail = False + edge_hits = edges_tree.query(new_geom) + for edge_hit_idx in edge_hits: + edge_lookup = edge_lookups[edge_hit_idx] + start_nd_key = edge_lookup["start_nd_key"] + end_nd_key = edge_lookup["end_nd_key"] + edge_idx = edge_lookup["edge_idx"] + edge_geom: geometry.LineString = nx_multigraph[start_nd_key][end_nd_key][edge_idx]["geom"] + if edge_geom.crosses(new_geom): + bail = True + break + if bail: + continue + # add new edge + if not _multi_graph.has_edge(nd_key, j_nd_key): + _multi_graph.add_edge( + nd_key, + j_nd_key, + names=[], + routes=[], + highways=[], + geom=new_geom, + ) + + return _multi_graph + + def nx_split_opposing_geoms( nx_multigraph: nx.MultiGraph, buffer_dist: float = 12, @@ -1032,15 +1141,16 @@ def recurse_child_keys( # create an edges STRtree (nodes and edges) edges_tree, edge_lookups = util.create_edges_strtree(_multi_graph) # node groups - node_groups: list[list] = [] + node_groups: list[set] = [] # iter logger.info("Splitting opposing edges.") # iterate origin graph (else node structure changes in place) nd_key: NodeKey for nd_key, nd_data in tqdm(nx_multigraph.nodes(data=True), disable=config.QUIET_MODE): - # don't split opposing geoms from nodes of degree 1 nd_degree = nx.degree(_multi_graph, nd_key) - if nd_degree < min_node_degree or nd_degree > max_node_degree: + if nd_degree < min_node_degree: + continue + if max_node_degree is not None and nd_degree > max_node_degree: continue # check tags if osm_hwy_target_tags: @@ -1049,9 +1159,12 @@ def recurse_child_keys( continue # get name tags for matching against potential gapped edges nb_name_tags = _gather_nb_name_tags(nx_multigraph, nd_key) + # neighbours for filtering out + neighbours = list(nx.neighbors(nx_multigraph, nd_key)) # get all other edges within the buffer distance # the spatial index uses bounding boxes, so further filtering is required (see further down) - # furthermore, successive iterations may remove old edges, so keep track of removed parent vs new child edges + # furthermore, successive iterations may remove old edges + # so keep track of removed parent vs new child edges n_point = geometry.Point(nd_data["x"], nd_data["y"]) # spatial query from the buffered point returns all edges within buffer_dist edge_hits: list[int] = edges_tree.query(n_point.buffer(buffer_dist)) # type: ignore @@ -1063,9 +1176,12 @@ def recurse_child_keys( end_nd_key = edge_lookup["end_nd_key"] edge_idx = edge_lookup["edge_idx"] edge_data: dict = nx_multigraph[start_nd_key][end_nd_key][edge_idx] - # don't add attached edge + # don't add attached edge if nd_key in (start_nd_key, end_nd_key): continue + # don't add neighbouring edges + if start_nd_key in neighbours or end_nd_key in neighbours: + continue edges.append((start_nd_key, end_nd_key, edge_idx, edge_data)) # review gapped edges # if already removed, get the new child edges @@ -1087,6 +1203,17 @@ def recurse_child_keys( n_geom = 
geometry.Point(nd_data["x"], nd_data["y"]) # nodes for squashing node_group = [nd_key] + # sort gapped edges by distance + gapped_edges = sorted(gapped_edges, key=lambda edge: n_point.distance(edge[3]["geom"])) + # unique edges not sharing a node - i.e. only pierce nearest rather than in multiple directions + shared_nodes = set() + distinct_edges = [] + for start_nd_key, end_nd_key, edge_idx, edge_data in gapped_edges: + if start_nd_key in shared_nodes or end_nd_key in shared_nodes: + continue + shared_nodes.add(start_nd_key) + shared_nodes.add(end_nd_key) + distinct_edges.append((start_nd_key, end_nd_key, edge_idx, edge_data)) # iter gapped edges - for start_nd_key, end_nd_key, edge_idx, edge_data in gapped_edges: + for start_nd_key, end_nd_key, edge_idx, edge_data in distinct_edges: edge_geom = edge_data["geom"] @@ -1196,8 +1323,8 @@ def recurse_child_keys( ), ] # drop the old edge from _multi_graph - if _multi_graph.has_edge(start_nd_key, end_nd_key, edge_idx): # type: ignore - _multi_graph.remove_edge(start_nd_key, end_nd_key, edge_idx) # type: ignore + if _multi_graph.has_edge(start_nd_key, end_nd_key, edge_idx): + _multi_graph.remove_edge(start_nd_key, end_nd_key, edge_idx) node_groups.append(list(node_group)) # iter and squash if squash_nodes is True: @@ -1205,30 +1332,54 @@ def recurse_child_keys( for node_group in node_groups: _multi_graph = _squash_adjacent( _multi_graph, - node_group, # type: ignore + node_group, centroid_by_itx=True, prioritise_by_hwy_tag=prioritise_by_hwy_tag, ) else: for node_group in node_groups: origin_nd_key = node_group.pop(0) + template = None for new_nd_key in node_group: origin_nd_data = _multi_graph.nodes[origin_nd_key] new_nd_data = _multi_graph.nodes[new_nd_key] - _multi_graph.add_edge( - origin_nd_key, - new_nd_key, - names=[], - routes=[], - highways=[], - geom=geometry.LineString( - [ - [origin_nd_data["x"], origin_nd_data["y"]], - [new_nd_data["x"], new_nd_data["y"]], - ] - ), + new_geom = geometry.LineString( + [ + [origin_nd_data["x"], origin_nd_data["y"]], + [new_nd_data["x"], new_nd_data["y"]], + ] ) - _multi_graph = nx_remove_filler_nodes(_multi_graph) + # don't add overly similar new edges + if template is None: + template = new_geom.buffer(5, cap_style=BufferCapStyle.flat) + elif template.contains(new_geom): + continue + else: + template = template.union(new_geom.buffer(10, cap_style=BufferCapStyle.flat)) + # don't add new edges that would criss-cross existing + bail = False + edge_hits = edges_tree.query(new_geom) + for edge_hit_idx in edge_hits: + edge_lookup = edge_lookups[edge_hit_idx] + start_nd_key = edge_lookup["start_nd_key"] + end_nd_key = edge_lookup["end_nd_key"] + edge_idx = edge_lookup["edge_idx"] + edge_geom: geometry.LineString = nx_multigraph[start_nd_key][end_nd_key][edge_idx]["geom"] + if edge_geom.crosses(new_geom): + bail = True + break + if bail: + continue + # add new edge + if not _multi_graph.has_edge(origin_nd_key, new_nd_key): + _multi_graph.add_edge( + origin_nd_key, + new_nd_key, + names=[], + routes=[], + highways=[], + geom=new_geom, + ) # squashing nodes can result in edge duplicates deduped_graph = nx_merge_parallel_edges( _multi_graph, diff --git a/pysrc/cityseer/tools/io.py b/pysrc/cityseer/tools/io.py index e46ef8c7..17881659 100644 --- a/pysrc/cityseer/tools/io.py +++ b/pysrc/cityseer/tools/io.py @@ -18,13 +18,14 @@ import numpy.typing as npt import pandas as pd import requests -from cityseer import config, rustalgos -from cityseer.tools import graphs, util -from cityseer.tools.util import EdgeData, ListCoordsType, MultiDiGraph, NodeData, NodeKey from pyproj import CRS, Transformer from shapely 
import geometry from tqdm import tqdm +from cityseer import config, rustalgos +from cityseer.tools import graphs, util +from cityseer.tools.util import EdgeData, ListCoordsType, MultiDiGraph, NodeData, NodeKey + logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) diff --git a/tests/tools/test_graphs.py b/tests/tools/test_graphs.py index 50f26fb3..0c274c53 100644 --- a/tests/tools/test_graphs.py +++ b/tests/tools/test_graphs.py @@ -268,10 +268,144 @@ def test_nx_remove_filler_nodes(primal_graph): def test_nx_remove_dangling_nodes(primal_graph): G_messy = make_messy_graph(primal_graph) + # plot.plot_nx(G_messy) # no despining or disconnected components removal G_post = graphs.nx_remove_dangling_nodes(G_messy, despine=0, remove_disconnected=0) - assert G_post.nodes == G_messy.nodes - assert G_post.edges == G_messy.edges + # plot.plot_nx(G_post, plot_geoms=True) + print(list(G_post.nodes)) + print(list(G_post.edges(keys=True))) + assert list(G_post.nodes) == [ "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "40", "41", "42", "43", "44", "45", "46", "47", "48", "49", "50", "51", "55", ] + assert list(G_post.edges(keys=True)) == [ ("0", "16", 0), ("0", "1", 0), ("0", "31", 0), ("1", "2", 0), ("1", "4", 0), ("2", "3", 0), ("2", "5", 0), ("3", "4", 0), ("3", "7", 0), ("4", "9", 0), ("5", "10", 0), ("5", "6", 0), ("6", "11", 0), ("6", "7", 0), ("7", "8", 0), ("8", "9", 0), ("8", "12", 0), ("9", "13", 0), ("10", "14", 0), ("10", "43", 0), ("11", "14", 0), ("11", "12", 0), ("12", "13", 0), ("13", "15", 0), ("14", "15", 0), ("15", "28", 0), ("16", "19", 0), ("16", "17", 0), ("17", "20", 0), ("17", "18", 0), ("18", "21", 0), ("18", "19", 0), ("19", "22", 0), ("20", "24", 0), ("20", "28", 0), ("21", "23", 0), ("21", "24", 0), ("22", "23", 0), ("22", "46", 0), ("22", "27", 0), ("23", "26", 0), ("24", "25", 0), ("25", "29", 0), ("25", "26", 0), ("26", "27", 0), ("27", "30", 0), ("28", "29", 0), ("29", "30", 0), ("30", "45", 0), ("30", "45", 1), ("31", "33", 0), ("31", "32", 0), ("32", "35", 0), ("32", "34", 0), ("33", "38", 0), ("33", "34", 0), ("34", "37", 0), ("35", "36", 0), ("35", "42", 0), ("36", "37", 0), ("36", "41", 0), ("37", "39", 0), ("38", "45", 0), ("38", "39", 0), ("39", "40", 0), ("40", "41", 0), ("40", "44", 0), ("41", "42", 0), ("42", "43", 0), ("43", "44", 0), ("44", "45", 0), ("46", "48", 0), ("46", "47", 0), ("50", "51", 0), ("55", "55", 0), ] # check that all single neighbour nodes have been removed if geom less than despine distance G_post = graphs.nx_remove_dangling_nodes(G_messy, despine=100, remove_disconnected=0) for n in G_messy.nodes():