refactor: update pre-commit and use ruff (#388)
* update pre-commit with ruff

* fix reuse
lkstrp authored Oct 28, 2024
1 parent e908167 commit 1e42c17
Showing 46 changed files with 579 additions and 5,850 deletions.
4 changes: 4 additions & 0 deletions .github/workflows/codeql.yml
@@ -1,3 +1,7 @@
# SPDX-FileCopyrightText: 2021 The Atlite Authors
#
# SPDX-License-Identifier: CC0-1.0

# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
7 changes: 5 additions & 2 deletions .github/workflows/test.yaml
@@ -1,3 +1,6 @@
# SPDX-FileCopyrightText: 2016 - 2023 The Atlite Authors
#
# SPDX-License-Identifier: MIT
name: Tests

on:
@@ -80,7 +83,7 @@ jobs:
key: retrieved-cutouts-${{ env.today }}
enableCrossOsArchive: true
id: cache-env

- name: Download package
uses: actions/download-artifact@v4
with:
@@ -99,4 +102,4 @@ jobs:
- name: Upload code coverage report
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
token: ${{ secrets.CODECOV_TOKEN }}
59 changes: 24 additions & 35 deletions .pre-commit-config.yaml
@@ -1,31 +1,27 @@
# SPDX-FileCopyrightText: 2021 The Atlite Authors
#
# SPDX-License-Identifier: CC0-1.0
ci:
autoupdate_schedule: monthly

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
rev: v5.0.0
hooks:
- id: check-merge-conflict
- id: end-of-file-fixer
- id: fix-encoding-pragma
- id: mixed-line-ending
- id: trailing-whitespace
- id: check-added-large-files
args: ['--maxkb=2000']

# Sort package imports alphabetically
- repo: https://github.com/PyCQA/isort
rev: 5.13.2
# Run ruff to lint and format
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.6.9
hooks:
- id: isort
args: ["--profile", "black", "--filter-files"]

# Convert relative imports to absolute imports
- repo: https://github.com/MarcoGorelli/absolufy-imports
rev: v0.3.1
hooks:
- id: absolufy-imports
# Run the linter.
- id: ruff
args: [--fix]
# Run the formatter.
- id: ruff-format

# Find common spelling mistakes in comments and docstrings
- repo: https://github.com/codespell-project/codespell
@@ -36,30 +32,23 @@ repos:
types_or: [python, rst, markdown]
files: ^(scripts|doc)/

# Make docstrings PEP 257 compliant
- repo: https://github.com/PyCQA/docformatter
rev: v1.7.5
# Remove output from Jupyter notebooks
- repo: https://github.com/aflc/pre-commit-jupyter
rev: v1.2.1
hooks:
- id: docformatter
args: ['--in-place', '--make-summary-multi-line', '--pre-summary-newline']
- id: jupyter-notebook-cleanup
args: ['--remove-kernel-metadata']

# Formatting with "black" coding style
- repo: https://github.com/psf/black
rev: 24.8.0
hooks:
# Format Python files
- id: black
# Format Jupyter Python notebooks
- id: black-jupyter

- repo: https://github.com/fsfe/reuse-tool
rev: v4.0.3
hooks:
- id: reuse

# Do YAML formatting (before the linter checks it for misses)
# Do YAML formatting
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.14.0
hooks:
- id: pretty-format-yaml
args: [--autofix, --indent, '2', --preserve-quotes]

# Reuse compliance
- repo: https://github.com/fsfe/reuse-tool
rev: v4.0.3
hooks:
- id: reuse
28 changes: 19 additions & 9 deletions atlite/__init__.py
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-

# SPDX-FileCopyrightText: 2016 - 2023 The Atlite Authors
#
# SPDX-License-Identifier: MIT
@@ -12,13 +10,6 @@
resource requirements especially on CPU and RAM resources low.
"""

from importlib.metadata import version
import re

from atlite.cutout import Cutout
from atlite.gis import ExclusionContainer, compute_indicatormatrix, regrid
from atlite.resource import cspinstallations, solarpanels, windturbines

__author__ = (
"The Atlite Authors: Gorm Andresen (Aarhus University), "
"Jonas Hoersch (FIAS/KIT/RLI), "
@@ -28,12 +19,31 @@
"Markus Schlott (FIAS), "
"David Schlachtberger (FIAS), "
)

__copyright__ = "Copyright 2016 - 2021 The Atlite Authors"

import re
from importlib.metadata import version

from atlite.cutout import Cutout
from atlite.gis import ExclusionContainer, compute_indicatormatrix, regrid
from atlite.resource import cspinstallations, solarpanels, windturbines

# e.g. "0.17.1" or "0.17.1.dev4+ga3890dc0" (if installed from git)
__version__ = version("atlite")
# e.g. "0.17.0" # TODO, in the network structure it should use the dev version
match = re.match(r"(\d+\.\d+(\.\d+)?)", __version__)
assert match, f"Could not determine release_version of pypsa: {__version__}"
release_version = match.group(0)
assert not __version__.startswith("0.0"), "Could not determine version of atlite."

__all__ = [
Cutout,
ExclusionContainer,
compute_indicatormatrix,
regrid,
cspinstallations,
solarpanels,
windturbines,
__version__,
]
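The reordered version-handling block above derives a plain release number from the installed package metadata. A standalone sketch of the same regex logic, using the example version strings from the comment in the diff:

import re

# Mirrors the parsing in atlite/__init__.py: keep only the numeric release
# part and drop any dev/commit suffix such as ".dev4+ga3890dc0".
for version in ["0.17.1", "0.17.1.dev4+ga3890dc0"]:
    match = re.match(r"(\d+\.\d+(\.\d+)?)", version)
    assert match, f"Could not determine release_version: {version}"
    print(version, "->", match.group(0))  # both yield "0.17.1"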
2 changes: 0 additions & 2 deletions atlite/aggregate.py
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-

# SPDX-FileCopyrightText: 2016 - 2023 The Atlite Authors
#
# SPDX-License-Identifier: MIT
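The only change in this module, and in several modules below, is dropping the "# -*- coding: utf-8 -*-" line that the removed fix-encoding-pragma hook previously kept in place. A minimal illustration of why the pragma is redundant, assuming Python 3 (PEP 3120 makes UTF-8 the default source encoding):

# No encoding declaration needed: Python 3 source files are read as UTF-8
# by default, so non-ASCII literals work as-is.
label = "Søndergaard, µ-scale"
print(label)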
25 changes: 15 additions & 10 deletions atlite/convert.py
@@ -1,11 +1,10 @@
# -*- coding: utf-8 -*-

# SPDX-FileCopyrightText: 2016 - 2023 The Atlite Authors
#
# SPDX-License-Identifier: MIT
"""
All functions for converting weather data into energy system model data.
"""

import datetime as dt
import logging
from collections import namedtuple
@@ -22,8 +21,6 @@
from numpy import pi
from scipy.sparse import csr_matrix

logger = logging.getLogger(__name__)

from atlite import csp as cspm
from atlite import hydro as hydrom
from atlite import wind as windm
@@ -40,6 +37,8 @@
windturbine_smooth,
)

logger = logging.getLogger(__name__)


def convert_and_aggregate(
cutout,
@@ -66,7 +65,7 @@ def convert_and_aggregate(
available from these.
Parameters
-----------
----------
matrix : N x S - xr.DataArray or sp.sparse.csr_matrix or None
If given, it is used to aggregate the grid cells to buses.
N is the number of buses, S the number of spatial coordinates, in the
@@ -99,7 +98,7 @@ def convert_and_aggregate(
Dict with keyword arguments passed to `dask.compute`.
Other Parameters
-----------------
----------------
convert_func : Function
Callback like convert_wind, convert_pv
@@ -113,8 +112,8 @@
units : xr.DataArray (optional)
The installed units per bus in MW corresponding to `layout`
(only if `return_capacity` is True).
"""
"""
func_name = convert_func.__name__.replace("convert_", "")
logger.info(f"Convert and aggregate '{func_name}'.")
da = convert_func(cutout.data, **convert_kwds)
@@ -313,7 +312,6 @@ def coefficient_of_performance(
Energy & Environmental Science (2012), 5, 9291-9306,
https://doi.org/10.1039/C2EE22653G.
"""

return cutout.convert_and_aggregate(
convert_func=convert_coefficient_of_performance,
source=source,
@@ -383,6 +381,7 @@ def heat_demand(cutout, threshold=15.0, a=1.0, constant=0.0, hour_shift=0.0, **p
----
You can also specify all of the general conversion arguments
documented in the `convert_and_aggregate` function.
"""
return cutout.convert_and_aggregate(
convert_func=convert_heat_demand,
@@ -461,8 +460,8 @@ def solar_thermal(
----------
[1] Henning and Palzer, Renewable and Sustainable Energy Reviews 30
(2014) 1003-1018
"""
"""
if not callable(orientation):
orientation = get_orientation(orientation)

@@ -540,6 +539,7 @@ def wind(cutout, turbine, smooth=False, add_cutout_windspeed=False, **params):
----------
[1] Andresen G B, Søndergaard A A and Greiner M 2015 Energy 93, Part 1
1074 – 1088. doi:10.1016/j.energy.2015.09.071
"""
if isinstance(turbine, (str, Path, dict)):
turbine = get_windturbineconfig(
@@ -631,6 +631,7 @@ def irradiation(
----------
[1] D.T. Reindl, W.A. Beckman, and J.A. Duffie. Diffuse fraction correla-
tions. Solar Energy, 45(1):1 – 7, 1990.
"""
if not callable(orientation):
orientation = get_orientation(orientation)
@@ -715,6 +716,7 @@ def pv(cutout, panel, orientation, tracking=None, clearsky_model=None, **params)
for the MPP Performance of Different Types of PV-Modules Applied for
the Performance Check of Grid Connected Systems, Freiburg, June 2004.
Eurosun (ISES Europe Solar Congress).
"""
if isinstance(panel, (str, Path)):
panel = get_solarpanelconfig(panel)
@@ -803,6 +805,7 @@ def csp(cutout, installation, technology=None, **params):
[2] Tobias Hirsch (ed.). CSPBankability Project Report, DLR, 2017.
URL: https://www.dlr.de/sf/en/desktopdefault.aspx/tabid-11126/19467_read-48251/
"""
if isinstance(installation, (str, Path)):
installation = get_cspinstallationconfig(installation)
@@ -916,6 +919,7 @@ def hydro(
routing: baseline data and new approaches to study the world’s large river
systems. Hydrological Processes, 27(15): 2171–2186. Data is available at
www.hydrosheds.org.
"""
basins = hydrom.determine_basins(plants, hydrobasins, show_progress=show_progress)

@@ -986,8 +990,8 @@ def convert_line_rating(
-------
Imax
xr.DataArray giving the maximal current capacity per timestep in Ampere.
"""
"""
Ta = ds["temperature"]
Tfilm = (Ta + Ts) / 2
T0 = 273.15
@@ -1115,6 +1119,7 @@ def line_rating(
>>> i = cutout.line_rating(shapes, n.lines.r/n.lines.length)
>>> v = xr.DataArray(n.lines.v_nom, dims='name')
>>> s = np.sqrt(3) * i * v / 1e3 # in MW
"""
if not isinstance(shapes, gpd.GeoSeries):
shapes = gpd.GeoSeries(shapes).rename_axis("dim_0")
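The docstring fixes above touch the public conversion API (wind, pv, csp, hydro, line_rating, ...). A minimal usage sketch, assuming an already prepared cutout file and resource names shipped with atlite (the file path is a placeholder):

import atlite

# Open an existing cutout (preparation of the NetCDF file is assumed).
cutout = atlite.Cutout("western-europe-2013-06.nc")

# Per-grid-cell capacity factors for a turbine config bundled with atlite;
# capacity_factor is forwarded to convert_and_aggregate via **params.
cf = cutout.wind(turbine="Vestas_V112_3MW", capacity_factor=True)

# Aggregation to regions works by passing `shapes` or a `matrix`/`layout`,
# as described in the convert_and_aggregate docstring, e.g.:
# pv = cutout.pv(panel="CSi", orientation="latitude_optimal", shapes=regions)
print(cf)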
3 changes: 1 addition & 2 deletions atlite/csp.py
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-

# SPDX-FileCopyrightText: 2016 - 2023 The Atlite Authors
#
# SPDX-License-Identifier: MIT
@@ -38,6 +36,7 @@ def calculate_dni(ds, solar_position=None, altitude_threshold=3.75):
The default values '3.75 deg' corresponds to
the solar altitude traversed by the sun within about 15 minutes in a location with
maximum solar altitude of 60 deg and 10h day time.
"""
if solar_position is None:
solar_position = SolarPosition(ds)