Commit

chore: change linter to Ruff (#216)
* chore: add ruff linter; remove flake8, isort, pydocstyle

* docs: edit changelog

* chore: add rules to ruff

* fix: missing import

* chore: remove unnecessary exclude
Calychas authored Mar 26, 2023
1 parent 8264d96 commit 1eddc42
Showing 27 changed files with 152 additions and 190 deletions.
11 changes: 0 additions & 11 deletions .flake8

This file was deleted.

3 changes: 3 additions & 0 deletions .gitignore
@@ -146,3 +146,6 @@ lightning_logs/

# files_cache
files/

# ruff
.ruff_cache
27 changes: 8 additions & 19 deletions .pre-commit-config.yaml
@@ -7,39 +7,28 @@ repos:
hooks:
- id: conventional-pre-commit
stages: [commit-msg]
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: 'v0.0.259'
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: black-jupyter
- repo: https://github.com/pycqa/isort
rev: 5.12.0
hooks:
- id: isort
name: isort (python)
- repo: https://github.com/PyCQA/docformatter
rev: v1.5.1
hooks:
- id: docformatter
additional_dependencies: [tomli]
args: ["--in-place", "--config", "./pyproject.toml"]
- repo: https://github.com/pycqa/pydocstyle
rev: 6.3.0
hooks:
- id: pydocstyle
additional_dependencies: ["toml", "tomli"]
- repo: https://github.com/PyCQA/flake8
rev: 6.0.0
hooks:
- id: flake8
additional_dependencies:
- flake8-bugbear
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.0.1
rev: v1.1.1
hooks:
- id: mypy
additional_dependencies: ["types-requests"]
- repo: https://github.com/pdm-project/pdm
rev: 2.4.7
rev: 2.4.9
hooks:
- id: pdm-lock-check
- id: pdm-export
@@ -61,7 +50,7 @@ repos:
- id: check-added-large-files
- id: detect-private-key
- repo: https://github.com/dosisod/refurb
rev: v1.13.0
rev: v1.15.0
hooks:
- id: refurb
language: python
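
The single Ruff hook above takes over the roles of the removed isort, pydocstyle, and flake8 hooks; `--fix` applies auto-fixes and `--exit-non-zero-on-fix` still fails the run so the fixed files have to be re-staged. A minimal sketch of the kind of change the hook can apply on its own (hypothetical module, not taken from this repository):

# Before the hook runs: `os` is imported but never used (F401) and the
# imports are not sorted (I001) -- both are auto-fixable findings.
import sys
import json
import os

print(json.dumps({"platform": sys.platform}))

# After `ruff --fix`, the unused `os` import is removed and the remaining
# imports are sorted, leaving `import json` followed by `import sys`.
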
2 changes: 1 addition & 1 deletion .vscode/extensions.json
@@ -1,3 +1,3 @@
{
"recommendations": ["njpwerner.autodocstring"]
"recommendations": ["njpwerner.autodocstring", "charliermarsh.ruff"]
}
12 changes: 9 additions & 3 deletions .vscode/settings.json.default
@@ -1,13 +1,19 @@
{
"editor.formatOnSave": true,
"python.formatting.provider": "black",
"python.linting.flake8Enabled": true,
"python.linting.enabled": true,
"python.linting.mypyEnabled": true,
"[python]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll": true,
"source.organizeImports": true
}
},
"python.formatting.provider": "black",
"autoDocstring.docstringFormat": "google",
"python.testing.pytestArgs": [
"tests"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true,
"python.analysis.typeCheckingMode": "off",
}
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -25,6 +25,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Changed

- Change embedders and joiners interface to have `.transform` method
- Change linter to Ruff and remove flake8, isort, pydocstyle

### Deprecated

2 changes: 1 addition & 1 deletion examples/regionizers/h3_regionizer.ipynb
@@ -149,7 +149,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.14"
"version": "3.11.1"
},
"vscode": {
"interpreter": {
129 changes: 26 additions & 103 deletions pdm.lock

Large diffs are not rendered by default.

61 changes: 41 additions & 20 deletions pyproject.toml
@@ -67,12 +67,9 @@ dev = ["bumpver>=2022.1118"]
lint = [
"pre-commit~=2.20",
"black[jupyter]>=22.10.0",
"flake8~=5.0",
"pydocstyle[toml]>=6.1.1",
"flake8-docstrings>=1.6.0",
"isort~=5.10",
"mypy~=0.982",
"docformatter[tomli]>=1.5.0",
"ruff>=0.0.259",
]
# pdm add -dG test <library>
test = [
@@ -112,19 +109,37 @@ line-length = 100
target-version = ["py38", "py39", "py310", "py311"]
preview = true

[tool.isort]
profile = "black"
[tool.ruff]
line-length = 100
target-version = "py38"
select = [
"E", "W", # pycodestyle
"F", # pyflakes
"UP", # pyupgrade
"D", # pydocstyle
"I", # isort
"B", # flake8-bugbear
"NPY", # NumPy
"YTT", # flake8-2020
"Q", # flake8-quotes
"PLE", "PLW", # pylint (add "PLR" in the future)
"PIE", # misc lints
"TID", # tidy imports
"ISC", # implicit string concatenation
"TCH", # type-checking imports
# "N", # pep8-naming
# "ANN", # flake8-annotations
]
ignore = ["D212"]
extend-exclude = [
"old",
]

[tool.bumpver]
current_version = "0.0.1"
version_pattern = "MAJOR.MINOR.PATCH[PYTAGNUM]"
commit_message = "chore: bump version {old_version} -> {new_version}"
commit = true
tag = true
push = false
[tool.ruff.pydocstyle]
convention = "google"

[tool.bumpver.file_patterns]
"pyproject.toml" = ['current_version = "{version}"', 'version = "{version}"']
[tool.ruff.pycodestyle]
max-doc-length = 100

[tool.mypy]
strict = true
@@ -134,11 +149,6 @@ no_implicit_optional = true
check_untyped_defs = true
warn_return_any = true

[tool.pydocstyle]
convention = "google"
add-select = ["D213"]
add-ignore = ["D202", "D212"]

[tool.docformatter]
syntax = 'google'
black = true
@@ -152,6 +162,17 @@ pre-summary-newline = true
close-quotes-on-newline = true
wrap-one-line = true

[tool.bumpver]
current_version = "0.0.1"
version_pattern = "MAJOR.MINOR.PATCH[PYTAGNUM]"
commit_message = "chore: bump version {old_version} -> {new_version}"
commit = true
tag = true
push = false

[tool.bumpver.file_patterns]
"pyproject.toml" = ['current_version = "{version}"', 'version = "{version}"']

[tool.pytest.ini_options]
addopts = ["--import-mode=importlib"]
markers = ["slow: marks tests as slow (deselect with '-m \"not slow\"')"]
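
The `select` list above folds several previously separate tools into one config: pycodestyle/pyflakes (`E`, `W`, `F`), isort (`I`), pydocstyle with the Google convention (`D` plus `[tool.ruff.pydocstyle]`), flake8-bugbear (`B`), pyupgrade (`UP`), and type-checking imports (`TCH`), among others. A rough, hypothetical example of code such a configuration would likely flag (not taken from this repository):

def lookup(key: str, default: str, cache={}) -> str:  # B006: mutable default argument
    """Look up a key in a shared cache.

    Args:
        key: The key to search for.
    """
    # Under the Google docstring convention, the undocumented `default` and
    # `cache` arguments would likely also be reported (pydocstyle D417).
    return cache.get(key, default)
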
4 changes: 0 additions & 4 deletions srai/exceptions.py
@@ -4,14 +4,10 @@
class SRAIException(Exception):
"""Base class for all SRAI exceptions."""

pass


class ModelNotFitException(SRAIException):
"""Exception raised when a model is not fit."""

pass


class LoadedDataIsEmptyException(SRAIException):
"""Exception when the loaded data returned by the loader is empty."""
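
The removed `pass` statements were redundant: a docstring is already a valid class body, which is the pattern Ruff's flake8-pie rules (unnecessary `pass`) report. A tiny hypothetical illustration:

class LoaderError(Exception):
    """Raised when a loader fails."""
    # The docstring alone is a complete class body; adding `pass` below it
    # changes nothing and gets flagged as unnecessary.
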
2 changes: 1 addition & 1 deletion srai/loaders/gtfs_loader.py
@@ -154,7 +154,7 @@ def _validate_feed(self, feed: "Feed", fail: bool = True) -> None:
if (validation_result["type"] == "error").sum() > 0:
import warnings

warnings.warn(f"Invalid GTFS feed: \n{validation_result}", RuntimeWarning)
warnings.warn(f"Invalid GTFS feed: \n{validation_result}", RuntimeWarning, stacklevel=2)
if fail:
raise ValueError("Invalid GTFS feed.")

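
Passing `stacklevel=2` attributes the warning to the caller of `_validate_feed` rather than to the `warnings.warn` line itself, which matches flake8-bugbear's explicit-stacklevel check enabled here through the `B` selector. A minimal sketch with a hypothetical `validate` helper:

import warnings


def validate(feed: object) -> None:
    # stacklevel=2 makes the reported location the caller's line,
    # which is usually the code the user actually needs to fix.
    warnings.warn("Invalid GTFS feed.", RuntimeWarning, stacklevel=2)


validate(None)  # the warning is reported against this call site
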
1 change: 1 addition & 0 deletions srai/loaders/osm_loaders/filters/popular.py
@@ -27,6 +27,7 @@ def get_popular_tags(
This is a wrapper around the `popular` taginfo api endpoint [1].
It queries the API, and optionally filters the results
according to argument values.
Args:
in_wiki_only (bool, optional): If True, only return results tags
that have at least one wiki page. Defaults to False.
2 changes: 1 addition & 1 deletion srai/loaders/osm_loaders/pbf_file_downloader.py
@@ -159,7 +159,7 @@ def download_pbf_file_for_polygon(self, polygon: Polygon) -> Path:
extraction_uuid = start_extract_result["uuid"]
status_check_url = start_extract_result["url"]
except KeyError:
warnings.warn(json.dumps(start_extract_result))
warnings.warn(json.dumps(start_extract_result), stacklevel=2)
raise

with tqdm() as pbar:
10 changes: 6 additions & 4 deletions srai/loaders/osm_loaders/pbf_file_handler.py
@@ -3,9 +3,8 @@
This module contains a handler capable of parsing a PBF file into a GeoDataFrame.
"""
import os
import warnings
from typing import Any, Callable, Dict, Optional, Sequence, Union
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Sequence, Union

import geopandas as gpd
import osmium
@@ -20,6 +19,9 @@
from srai.loaders.osm_loaders.filters.hex2vec import HEX2VEC_FILTER
from srai.loaders.osm_loaders.filters.osm_tags_type import osm_tags_type

if TYPE_CHECKING:
import os


class PbfFileHandler(osmium.SimpleHandler): # type: ignore
"""
@@ -66,7 +68,7 @@ def __init__(
region_geometry (BaseGeometry, optional): Region which can be used to filter only
intersecting OSM objects. Defaults to None.
"""
super(PbfFileHandler, self).__init__()
super().__init__()
self.filter_tags = tags
if self.filter_tags:
self.filter_tags_keys = set(self.filter_tags.keys())
@@ -234,7 +236,7 @@ def _get_osm_geometry(
geometry = wkblib.loads(wkb, hex=True)
except RuntimeError as ex:
message = str(ex)
warnings.warn(message, RuntimeWarning)
warnings.warn(message, RuntimeWarning, stacklevel=2)

return geometry

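
The `os` import is now needed only for annotations, so it moves under `typing.TYPE_CHECKING` (the pattern Ruff's `TCH` rules encourage) and is skipped at runtime; the `super(PbfFileHandler, self)` call likewise becomes the plain Python 3 `super()` promoted by pyupgrade. A small hedged sketch of the import pattern, using hypothetical names:

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Imported only while type checking; at runtime this block is skipped.
    import os


def open_pbf(path: "os.PathLike[str]") -> None:
    # The annotation is a string, so it never needs `os` at runtime.
    print(f"would open {path!r}")
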
4 changes: 2 additions & 2 deletions srai/loaders/osm_way_loader/osm_way_loader.py
@@ -143,7 +143,7 @@ def _graph_from_gdf(self, gdf: gpd.GeoDataFrame) -> Tuple[gpd.GeoDataFrame, gpd.
Obtain the raw road infrastructure data from OSM.
Args:
gss (gpd.GeoDataFrame): (Multi)Polygons for which to download road infrastructure data.
gdf (gpd.GeoDataFrame): (Multi)Polygons for which to download road infrastructure data.
Returns:
Tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]: Road infrastructure as (intersections, roads)
@@ -324,7 +324,7 @@ def _sanitize(self, x: Any, column_name: str) -> Any:
x = x * constants.MPH_TO_KMH
x = float(x)
elif column_name == "width":
if x.endswith("m") or x.endswith("meter"):
if x.endswith(("m", "meter")):
x = x.split("m")[0].strip()
elif "'" in x:
x = float(x.split("'")[0].strip())
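
`str.endswith` accepts a tuple of suffixes, so the two chained checks collapse into a single call; this is the kind of simplification the flake8-pie selector reports. For example:

width = "12 m"
# One call checks every suffix in the tuple -- equivalent to
# width.endswith("m") or width.endswith("meter"), just shorter.
print(width.endswith(("m", "meter")))  # True
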
1 change: 0 additions & 1 deletion srai/neighbourhoods/_base.py
@@ -37,7 +37,6 @@ def get_neighbours(self, index: IndexType) -> Set[IndexType]:
Returns:
Set[IndexType]: Indexes of the neighbours.
"""
pass # pragma: no cover

def get_neighbours_up_to_distance(self, index: IndexType, distance: int) -> Set[IndexType]:
"""
4 changes: 3 additions & 1 deletion srai/regionizers/_spherical_voronoi.py
@@ -25,7 +25,7 @@


def _generate_sphere_parts() -> None:
global SPHERE_PARTS, SPHERE_PARTS_BOUNDING_BOXES
global SPHERE_PARTS, SPHERE_PARTS_BOUNDING_BOXES # noqa: PLW0603

if not SPHERE_PARTS:
# LON: 0; LAT: 0
@@ -85,6 +85,7 @@ def map_to_geocentric(lon: float, lat: float, ell: Ellipsoid) -> Tuple[float, fl
Args:
lon (float): longitude of a point in a wgs84 crs.
lat (float): latitude of a point in a wgs84 crs.
ell (Ellipsoid): an ellipsoid.
Returns:
Tuple[float, float, float]: (x, y, z) coordinates tuple.
@@ -101,6 +102,7 @@ def map_from_geocentric(x: float, y: float, z: float, ell: Ellipsoid) -> Tuple[f
x (float): X cartesian coordinate.
y (float): Y cartesian coordinate.
z (float): Z cartesian coordinate.
ell (Ellipsoid): an ellipsoid.
Returns:
Tuple[float, float]: longitude and latitude coordinates in a wgs84 crs.
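
`PLW0603` is Pylint's global-statement warning, enabled here through the `PLW` selector; the added `# noqa: PLW0603` suppresses it on the one line where mutating module-level state is intentional. A minimal hypothetical example of the same suppression:

_CACHE_READY = False


def warm_cache() -> None:
    global _CACHE_READY  # noqa: PLW0603 -- deliberate module-level cache flag
    _CACHE_READY = True
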
7 changes: 3 additions & 4 deletions srai/utils/_optional.py
@@ -45,17 +45,17 @@ def import_optional_dependency(
try:
imported_module = importlib.import_module(module)
return imported_module if name is None else getattr(imported_module, name)
except ImportError:
except ImportError as exc:
error_msg = (
f'Missing optional dependency "{module}". Please install required packages using '
f"`pip install srai[{dependency_group}]`."
)
if error == ImportErrorHandle.RAISE:
raise ImportError(error_msg)
raise ImportError(error_msg) from exc
if error == ImportErrorHandle.WARN:
import warnings

warnings.warn(f"{error_msg} Skipping import.", ImportWarning)
warnings.warn(f"{error_msg} Skipping import.", ImportWarning, stacklevel=2)
return None


@@ -71,6 +71,5 @@ def import_optional_dependencies(
error (ErrorHandle, {'raise', 'warn', 'ignore'}): Information what to do when any of
the modules hasn't been found. Defaults to ErrorHandle.RAISE.
"""

for module in modules:
import_optional_dependency(dependency_group=dependency_group, module=module, error=error)
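
Re-raising with `from exc` chains the original `ImportError` as the new exception's `__cause__`, so the traceback shows both the failed import and the install hint; flake8-bugbear's raise-without-from-inside-except check (under the `B` selector) flags the bare re-raise. A hedged sketch with a hypothetical helper, not the repository's API:

import importlib


def require(module_name: str, extra: str):
    try:
        return importlib.import_module(module_name)
    except ImportError as exc:
        # `from exc` keeps the original error attached, so both tracebacks
        # stay visible instead of only the friendlier message below.
        raise ImportError(
            f'Missing optional dependency "{module_name}". '
            f"Install it with `pip install srai[{extra}]`."
        ) from exc
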
2 changes: 1 addition & 1 deletion tests/embedders/test_gtfs2vec_embedder.py
@@ -180,7 +180,7 @@ def test_transform_with_mismatched_features_count(
"""Test GTFS2VecEmbedder transform with mismatched features count."""
embedder = GTFS2VecEmbedder(skip_autoencoder=False)
mock_model = mocker.MagicMock()
mock_model.configure_mock(**{"n_fetures": 42})
mock_model.configure_mock(n_features=42)
embedder._model = mock_model

with pytest.raises(ValueError):