build(pre-commit.ci): pre-commit autoupdate #429

Closed · wants to merge 2 commits
16 changes: 8 additions & 8 deletions .pre-commit-config.yaml
@@ -3,17 +3,17 @@
default_stages: [commit]
repos:
- repo: https://github.com/compilerla/conventional-pre-commit
rev: v3.0.0
rev: v3.2.0
hooks:
- id: conventional-pre-commit
stages: [commit-msg]
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: 'v0.1.9'
rev: 'v0.3.5'
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- repo: https://github.com/psf/black
rev: 23.12.1
rev: 24.3.0
hooks:
- id: black-jupyter
- repo: https://github.com/PyCQA/docformatter
@@ -23,20 +23,20 @@ repos:
additional_dependencies: [tomli]
args: ["--in-place", "--config", "./pyproject.toml"]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.8.0
rev: v1.9.0
hooks:
- id: mypy
additional_dependencies: ['types-requests', 'types-six']
- repo: https://github.com/pdm-project/pdm
rev: 2.11.2
rev: 2.13.2
hooks:
- id: pdm-lock-check
- id: pdm-export
args: ["-o", "requirements.txt", "--without-hashes", "-d", "-G", "all"]
files: ^pdm.lock$
stages: [manual]
- repo: https://github.com/kynan/nbstripout
rev: 0.6.1
rev: 0.7.1
hooks:
- id: nbstripout
- repo: https://github.com/pre-commit/pre-commit-hooks
@@ -51,15 +51,15 @@ repos:
args: ['--maxkb=1000']
- id: detect-private-key
- repo: https://github.com/dosisod/refurb
rev: v1.26.0
rev: v2.0.0
hooks:
- id: refurb
args: ["--python-version", "3.9", "--disable", "FURB184", "--format", "github"]
language: python
language_version: python3.10
stages: [manual]
- repo: https://github.com/FHPythonUtils/LicenseCheck
rev: "2023.5.2"
rev: "2024.1.4"
hooks:
- id: licensecheck
stages: [manual]
16 changes: 9 additions & 7 deletions examples/embedders/count_embedder.ipynb
@@ -47,13 +47,15 @@
"metadata": {},
"outputs": [],
"source": [
"bbox_polygon = geometry.Polygon([\n",
" [17.0198822, 51.1191217],\n",
" [17.017436, 51.105004],\n",
" [17.0485067, 51.1027944],\n",
" [17.0511246, 51.1175054],\n",
" [17.0198822, 51.1191217],\n",
"])\n",
"bbox_polygon = geometry.Polygon(\n",
" [\n",
" [17.0198822, 51.1191217],\n",
" [17.017436, 51.105004],\n",
" [17.0485067, 51.1027944],\n",
" [17.0511246, 51.1175054],\n",
" [17.0198822, 51.1191217],\n",
" ]\n",
")\n",
"bbox_gdf = gpd.GeoDataFrame(geometry=[bbox_polygon], crs=WGS84_CRS)\n",
"bbox_gdf"
]
30 changes: 17 additions & 13 deletions examples/loaders/osm_way_loader.ipynb
@@ -37,19 +37,23 @@
"metadata": {},
"outputs": [],
"source": [
"polygon1 = shpg.Polygon([\n",
" (17.1005309, 51.1100158),\n",
" (17.1020436, 51.1100427),\n",
" (17.1021938, 51.1082509),\n",
" (17.1006274, 51.1081027),\n",
" (17.1005201, 51.1099956),\n",
"])\n",
"polygon2 = shpg.Polygon([\n",
" (17.0994473, 51.1084126),\n",
" (17.1023226, 51.1086551),\n",
" (17.1023333, 51.1076312),\n",
" (17.0994473, 51.1083722),\n",
"])\n",
"polygon1 = shpg.Polygon(\n",
" [\n",
" (17.1005309, 51.1100158),\n",
" (17.1020436, 51.1100427),\n",
" (17.1021938, 51.1082509),\n",
" (17.1006274, 51.1081027),\n",
" (17.1005201, 51.1099956),\n",
" ]\n",
")\n",
"polygon2 = shpg.Polygon(\n",
" [\n",
" (17.0994473, 51.1084126),\n",
" (17.1023226, 51.1086551),\n",
" (17.1023333, 51.1076312),\n",
" (17.0994473, 51.1083722),\n",
" ]\n",
")\n",
"gdf_place = gpd.GeoDataFrame(\n",
" {\"geometry\": [polygon1, polygon2]},\n",
" crs=WGS84_CRS,\n",
14 changes: 8 additions & 6 deletions examples/regionalizers/h3_regionalizer.ipynb
@@ -44,12 +44,14 @@
" (1, 1),\n",
" (0, 1),\n",
" ],\n",
" holes=[[\n",
" (0.8, 0.9),\n",
" (0.9, 0.55),\n",
" (0.8, 0.3),\n",
" (0.5, 0.4),\n",
" ]],\n",
" holes=[\n",
" [\n",
" (0.8, 0.9),\n",
" (0.9, 0.55),\n",
" (0.8, 0.3),\n",
" (0.5, 0.4),\n",
" ]\n",
" ],\n",
" ),\n",
" geometry.Polygon(shell=[(-0.25, 0), (0.25, 0), (0, 0.2)]),\n",
" ],\n",
14 changes: 8 additions & 6 deletions examples/regionalizers/s2_regionalizer.ipynb
@@ -45,12 +45,14 @@
" (1, 1),\n",
" (0, 1),\n",
" ],\n",
" holes=[[\n",
" (0.8, 0.9),\n",
" (0.9, 0.55),\n",
" (0.8, 0.3),\n",
" (0.5, 0.4),\n",
" ]],\n",
" holes=[\n",
" [\n",
" (0.8, 0.9),\n",
" (0.9, 0.55),\n",
" (0.8, 0.3),\n",
" (0.5, 0.4),\n",
" ]\n",
" ],\n",
" ),\n",
" geometry.Polygon(shell=[(-0.25, 0), (0.25, 0), (0, 0.2)]),\n",
" ],\n",
14 changes: 8 additions & 6 deletions examples/regionalizers/slippy_map_regionalizer.ipynb
@@ -45,12 +45,14 @@
" (1, 1),\n",
" (0, 1),\n",
" ],\n",
" holes=[[\n",
" (0.8, 0.9),\n",
" (0.9, 0.55),\n",
" (0.8, 0.3),\n",
" (0.5, 0.4),\n",
" ]],\n",
" holes=[\n",
" [\n",
" (0.8, 0.9),\n",
" (0.9, 0.55),\n",
" (0.8, 0.3),\n",
" (0.5, 0.4),\n",
" ]\n",
" ],\n",
" ),\n",
" geometry.Polygon(shell=[(-0.25, 0), (0.25, 0), (0, 0.2)]),\n",
" ],\n",
10 changes: 0 additions & 10 deletions srai/embedders/_pytorch_stubs.py
@@ -6,31 +6,21 @@
class Dataset(Generic[T_co]):
"""Dataset class stub."""

...


class DataLoader(Generic[T_co]):
"""DataLoader class stub."""

...


class LightningModule: # pragma: no cover
"""LightningModule class stub."""

...


class nn: # pragma: no cover
"""Pytorch nn class stub."""

class Module:
"""Pytorch nn.Module class stub."""

...


class torch: # pragma: no cover
"""Pytorch class stub."""

...
32 changes: 18 additions & 14 deletions srai/embedders/geovex/dataset.py
@@ -95,15 +95,17 @@ def _seperate_valid_invalid_cells(
# check if all the neighbors are in the dataset
if len(neighbors.intersection(all_indices)) == len(neighbors):
# add the h3_index to the valid h3 indices, with the ring of neighbors
valid_h3s.append((
h3_index,
data.index.get_loc(h3_index),
[
# get the index of the h3 in the dataset
(data.index.get_loc(_h), get_local_ij_index(h3_index, _h))
for _h in neighbors
],
))
valid_h3s.append(
(
h3_index,
data.index.get_loc(h3_index),
[
# get the index of the h3 in the dataset
(data.index.get_loc(_h), get_local_ij_index(h3_index, _h))
for _h in neighbors
],
)
)
else:
# some of the neighbors are not in the dataset, add the h3_index to the invalid h3s
invalid_h3s.add(h3_index)
@@ -140,11 +142,13 @@ def _build_tensor(self, target_idx: int, neighbors_idxs: Neighbors) -> "torch.Te
# the target h3 is in the center of the tensor
# the tensor is 2*neighbor_k_ring + 1 x 2*neighbor_k_ring + 1 x 2*neighbor_k_ring + 1
# make a tensor of zeros, padded by 1 zero all around to make it even for the convolutions
tensor: torch.Tensor = torch.zeros((
self._N,
2 * self._k + 2,
2 * self._k + 2,
))
tensor: torch.Tensor = torch.zeros(
(
self._N,
2 * self._k + 2,
2 * self._k + 2,
)
)

# set the target h3 to the center of the tensor
tensor[
2 changes: 1 addition & 1 deletion srai/h3.py
@@ -103,7 +103,7 @@ def h3_to_shapely_geometry(h3_index: Iterable[Union[int, str]]) -> list[Polygon]

# TODO: write tests (#322)
def h3_to_shapely_geometry(
h3_index: Union[int, str, Iterable[Union[int, str]]]
h3_index: Union[int, str, Iterable[Union[int, str]]],
) -> Union[Polygon, list[Polygon]]:
"""
Convert H3 index to Shapely polygon.
2 changes: 1 addition & 1 deletion srai/loaders/osm_loaders/_base.py
@@ -17,7 +17,7 @@


def prepare_area_gdf_for_loader(
area: Union[BaseGeometry, Iterable[BaseGeometry], gpd.GeoSeries, gpd.GeoDataFrame]
area: Union[BaseGeometry, Iterable[BaseGeometry], gpd.GeoSeries, gpd.GeoDataFrame],
) -> gpd.GeoDataFrame:
"""
Prepare an area for the loader.
16 changes: 9 additions & 7 deletions srai/loaders/osm_loaders/filters/_typing.py
@@ -29,7 +29,7 @@ def merge_osm_tags_filter(osm_tags_filter: Iterable[GroupedOsmTagsFilter]) -> Os
def merge_osm_tags_filter(
osm_tags_filter: Union[
OsmTagsFilter, GroupedOsmTagsFilter, Iterable[OsmTagsFilter], Iterable[GroupedOsmTagsFilter]
]
],
) -> OsmTagsFilter:
"""
Merge OSM tags filter into `OsmTagsFilter` type.
@@ -52,12 +52,14 @@ def merge_osm_tags_filter(
elif is_expected_type(osm_tags_filter, GroupedOsmTagsFilter):
return _merge_grouped_osm_tags_filter(cast(GroupedOsmTagsFilter, osm_tags_filter))
elif is_expected_type(osm_tags_filter, Iterable):
return _merge_multiple_osm_tags_filters([
merge_osm_tags_filter(
cast(Union[OsmTagsFilter, GroupedOsmTagsFilter], sub_osm_tags_filter)
)
for sub_osm_tags_filter in osm_tags_filter
])
return _merge_multiple_osm_tags_filters(
[
merge_osm_tags_filter(
cast(Union[OsmTagsFilter, GroupedOsmTagsFilter], sub_osm_tags_filter)
)
for sub_osm_tags_filter in osm_tags_filter
]
)

raise AttributeError(
"Provided tags don't match required type definitions"
10 changes: 6 additions & 4 deletions srai/regionalizers/_spherical_voronoi.py
@@ -214,10 +214,12 @@ def generate_voronoi_regions(
end_hash,
start_hash,
) not in hashed_edges:
hashed_edges.add((
start_hash,
end_hash,
))
hashed_edges.add(
(
start_hash,
end_hash,
)
)

regions_parts[region_id].append((sphere_part_id, polygon_edges))

18 changes: 10 additions & 8 deletions tests/embedders/geovex/test_dataset.py
@@ -116,12 +116,14 @@ def test_dataset_item(regions_data_df: pd.DataFrame) -> None:
# commpare to the transposed image in the paper
# specifically fig. 3
# the bottom and right are padded by 0s for even #
desired = np.array([
[(0, 0), (0, 0), (0, 2), (1, 2), (2, 2), (0, 0)],
[(0, 0), (-1, 1), (0, 1), (1, 1), (2, 1), (0, 0)],
[(-2, 0), (-1, 0), (0, 0), (1, 0), (2, 0), (0, 0)],
[(-2, -1), (-1, -1), (0, -1), (1, -1), (0, 0), (0, 0)],
[(-2, -2), (-1, -2), (0, -2), (0, 0), (0, 0), (0, 0)],
[(0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0)],
])
desired = np.array(
[
[(0, 0), (0, 0), (0, 2), (1, 2), (2, 2), (0, 0)],
[(0, 0), (-1, 1), (0, 1), (1, 1), (2, 1), (0, 0)],
[(-2, 0), (-1, 0), (0, 0), (1, 0), (2, 0), (0, 0)],
[(-2, -1), (-1, -1), (0, -1), (1, -1), (0, 0), (0, 0)],
[(-2, -2), (-1, -2), (0, -2), (0, 0), (0, 0), (0, 0)],
[(0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0)],
]
)
assert np.all(ijs.transpose(1, 0, -1) == desired)