diff --git a/benchmarks/interpolation.py b/benchmarks/interpolation.py index 95566feeb..ee277ccd6 100644 --- a/benchmarks/interpolation.py +++ b/benchmarks/interpolation.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import datetime as dt @@ -48,7 +47,12 @@ class Data: def request_weather_data( - parameter: str, lat: float, lon: float, distance: float, start_date: dt.datetime, end_date: dt.datetime + parameter: str, + lat: float, + lon: float, + distance: float, + start_date: dt.datetime, + end_date: dt.datetime, ): stations = DwdObservationRequest( parameter=parameter, diff --git a/benchmarks/interpolation_over_time.py b/benchmarks/interpolation_over_time.py index 97e5ea638..40a263238 100644 --- a/benchmarks/interpolation_over_time.py +++ b/benchmarks/interpolation_over_time.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import os @@ -45,13 +44,15 @@ def get_regular_df(parameter: str, start_date: datetime, end_date: datetime, exc def get_rmse(regular_values: pl.Series, interpolated_values: pl.Series) -> float: return root_mean_squared_error( - regular_values.reshape((-1, 1)).to_list(), interpolated_values.reshape((-1, 1)).to_list() + regular_values.reshape((-1, 1)).to_list(), + interpolated_values.reshape((-1, 1)).to_list(), ) def get_corr(regular_values: pl.Series, interpolated_values: pl.Series) -> float: return r_regression( - regular_values.reshape((-1, 1)).to_list(), interpolated_values.reshape((-1, 1)).to_list() + regular_values.reshape((-1, 1)).to_list(), + interpolated_values.reshape((-1, 1)).to_list(), ).item() @@ -62,7 +63,10 @@ def visualize(parameter: str, unit: str, regular_df: pl.DataFrame, interpolated_ plt.figure(figsize=(factor * 19.2, factor * 10.8)) plt.plot(regular_df.get_column("date"), regular_df.get_column("value"), color="red", label="regular") plt.plot( - interpolated_df.get_column("date"), interpolated_df.get_column("value"), color="black", label="interpolated" + interpolated_df.get_column("date"), + interpolated_df.get_column("value"), + color="black", + label="interpolated", ) ylabel = f"{parameter.lower()} [{unit}]" plt.ylabel(ylabel) diff --git a/benchmarks/interpolation_precipitation_difference.py b/benchmarks/interpolation_precipitation_difference.py index 3b6f18ebb..4d9e3e123 100644 --- a/benchmarks/interpolation_precipitation_difference.py +++ b/benchmarks/interpolation_precipitation_difference.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
from datetime import datetime diff --git a/benchmarks/summary_over_time.py b/benchmarks/summary_over_time.py index ae87ad00d..ac8e43117 100644 --- a/benchmarks/summary_over_time.py +++ b/benchmarks/summary_over_time.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import os @@ -48,7 +47,7 @@ def main(): summarized_df = summarized_df.with_columns( pl.col("taken_station_id") .replace({"01050": "yellow", "01048": "green", "01051": "blue", "05282": "violet"}) - .alias("color") + .alias("color"), ) regular_df_01050 = get_regular_df(start_date, end_date, "01050") diff --git a/examples/dwd_climate_summary_xarray_dump.py b/examples/dwd_climate_summary_xarray_dump.py index 8a5b01b9e..6330b7700 100644 --- a/examples/dwd_climate_summary_xarray_dump.py +++ b/examples/dwd_climate_summary_xarray_dump.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import os diff --git a/examples/dwd_describe_fields.py b/examples/dwd_describe_fields.py index c1be7f606..741475d83 100644 --- a/examples/dwd_describe_fields.py +++ b/examples/dwd_describe_fields.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
""" @@ -37,7 +36,7 @@ def fields_example(): resolution=DwdObservationResolution.DAILY, period=DwdObservationPeriod.RECENT, language="en", - ) + ), ) pprint( @@ -46,7 +45,7 @@ def fields_example(): resolution=DwdObservationResolution.DAILY, period=DwdObservationPeriod.RECENT, language="de", - ) + ), ) diff --git a/examples/dwd_road_weather.py b/examples/dwd_road_weather.py index a9e825012..3a0782af2 100644 --- a/examples/dwd_road_weather.py +++ b/examples/dwd_road_weather.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. """ @@ -29,14 +28,19 @@ def dwd_road_weather_example(): end_date = dt.datetime.now(ZoneInfo("UTC")).replace(tzinfo=None) start_date = end_date - dt.timedelta(days=1) drw_request = DwdRoadRequest( - parameter="airTemperature", start_date=start_date, end_date=end_date + parameter="airTemperature", + start_date=start_date, + end_date=end_date, ).filter_by_station_id("A006") print(drw_request) df_drw = drw_request.values.all().df.drop_nulls(subset="value") print(df_drw) dobs_request = DwdObservationRequest( - parameter="temperature_air_mean_200", resolution="10_minutes", start_date=start_date, end_date=end_date + parameter="temperature_air_mean_200", + resolution="10_minutes", + start_date=start_date, + end_date=end_date, ).summarize(latlon=(54.8892, 8.9087)) print(dobs_request.stations) df_dobs = dobs_request.df.drop_nulls(subset="value") diff --git a/examples/mosmix_forecasts.py b/examples/mosmix_forecasts.py index 8e0bf3315..8aeb70677 100644 --- a/examples/mosmix_forecasts.py +++ b/examples/mosmix_forecasts.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
""" diff --git a/examples/observations_sql.py b/examples/observations_sql.py index 69edd81db..0a6d3e401 100644 --- a/examples/observations_sql.py +++ b/examples/observations_sql.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. """ diff --git a/examples/observations_station_gaussian_model.py b/examples/observations_station_gaussian_model.py index 51ee8762d..6aafdd672 100644 --- a/examples/observations_station_gaussian_model.py +++ b/examples/observations_station_gaussian_model.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. """ @@ -14,24 +13,29 @@ """ # Noqa:D205,D400 +from __future__ import annotations + import logging import os from pathlib import Path -from typing import Tuple +from typing import TYPE_CHECKING import matplotlib.pyplot as plt import polars as pl -from lmfit import Parameters -from lmfit.model import ModelResult from wetterdienst import Settings -from wetterdienst.core.timeseries.result import StationsResult from wetterdienst.provider.dwd.observation import ( DwdObservationParameter, DwdObservationRequest, DwdObservationResolution, ) +if TYPE_CHECKING: + from lmfit import Parameters + from lmfit.model import ModelResult + + from wetterdienst.core.timeseries.result import StationsResult + HERE = Path(__file__).parent log = logging.getLogger() @@ -103,7 +107,7 @@ def validate_yearly_data(df: pl.DataFrame) -> bool: return False return True - def make_composite_yearly_model(self, valid_data: pl.DataFrame) -> Tuple[GaussianModel, Parameters]: + def make_composite_yearly_model(self, valid_data: pl.DataFrame) -> tuple[GaussianModel, Parameters]: """makes a composite model https://lmfit.github.io/lmfit-py/model.html#composite-models-adding-or-multiplying-models""" number_of_years = 
valid_data.get_column("date").dt.year().n_unique() @@ -129,7 +133,11 @@ def make_composite_yearly_model(self, valid_data: pl.DataFrame) -> Tuple[Gaussia @staticmethod def model_pars_update( - year: int, group: pl.DataFrame, pars: Parameters, index_per_year: float, y_max: float + year: int, + group: pl.DataFrame, + pars: Parameters, + index_per_year: float, + y_max: float, ) -> Parameters: """updates the initial values of the model parameters""" idx = group.get_column("rc").to_numpy() @@ -150,7 +158,7 @@ def plot_data_and_model(self, valid_data: pl.DataFrame, out: ModelResult, savefi "year": valid_data.get_column("date"), "value": valid_data.get_column("value").to_numpy(), "model": out.best_fit, - } + }, ) title = valid_data.get_column("parameter").unique()[0] df.to_pandas().plot(x="year", y=["value", "model"], title=title) diff --git a/examples/observations_stations.py b/examples/observations_stations.py index 2613f4d89..e873befa2 100644 --- a/examples/observations_stations.py +++ b/examples/observations_stations.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
""" diff --git a/examples/plot_german_weather_stations.py b/examples/plot_german_weather_stations.py index bc5a3e3b9..be3b8960a 100644 --- a/examples/plot_german_weather_stations.py +++ b/examples/plot_german_weather_stations.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (c) 2018-2023 earthobservations import os from pathlib import Path diff --git a/examples/plot_hohenpeissenberg_warming_stripes.py b/examples/plot_hohenpeissenberg_warming_stripes.py index 5086b8b2d..017245a7a 100644 --- a/examples/plot_hohenpeissenberg_warming_stripes.py +++ b/examples/plot_hohenpeissenberg_warming_stripes.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (c) 2018-2023 earthobservations import os from pathlib import Path @@ -30,7 +29,7 @@ def plot_hohenpeissenberg_warming_stripes(): Source: https://matplotlib.org/matplotblog/posts/warming-stripes/ """ request = DwdObservationRequest("temperature_air_mean_200", "annual", "historical").filter_by_name( - "Hohenpeissenberg" + "Hohenpeissenberg", ) df_values = request.values.all().df # definition of years @@ -65,7 +64,7 @@ def plot_hohenpeissenberg_warming_stripes(): "#cb181d", "#a50f15", "#67000d", - ] + ], ) ax.set_axis_off() col = PatchCollection([Rectangle((y, 0), 1, 1) for y in range(first_year, last_year + 1)]) diff --git a/examples/plot_temperature_timeseries.py b/examples/plot_temperature_timeseries.py index 443087b67..aca4a414d 100644 --- a/examples/plot_temperature_timeseries.py +++ b/examples/plot_temperature_timeseries.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (c) 2018-2023 earthobservations import os from pathlib import Path @@ -21,12 +20,15 @@ def plot_temperature_timeseries(): """Create plot for README sketch""" stations = DwdObservationRequest( - parameter="temperature_air_mean_200", resolution="daily", period="historical" + parameter="temperature_air_mean_200", + resolution="daily", + period="historical", ).filter_by_name("Hohenpeissenberg") df = stations.values.all().df df_annual = 
df.group_by([pl.col("date").dt.year()], maintain_order=True).agg(pl.col("value").mean().alias("value")) df_annual = df_annual.with_columns( - pl.col("date").cast(str).str.to_datetime("%Y"), pl.col("value").mean().alias("mean") + pl.col("date").cast(str).str.to_datetime("%Y"), + pl.col("value").mean().alias("mean"), ) fig, ax = plt.subplots(tight_layout=True) df.to_pandas().plot("date", "value", ax=ax, color="blue", label="Tmean,daily", legend=False) diff --git a/examples/radar/radar_composite_rw.py b/examples/radar/radar_composite_rw.py index 0404bd5b8..9a427a23a 100644 --- a/examples/radar/radar_composite_rw.py +++ b/examples/radar/radar_composite_rw.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. """ diff --git a/examples/radar/radar_radolan_cdc.py b/examples/radar/radar_radolan_cdc.py index d3cb130a5..f7c650bdb 100644 --- a/examples/radar/radar_radolan_cdc.py +++ b/examples/radar/radar_radolan_cdc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. """ diff --git a/examples/radar/radar_radolan_rw.py b/examples/radar/radar_radolan_rw.py index 3686569c2..c274a9fc2 100644 --- a/examples/radar/radar_radolan_rw.py +++ b/examples/radar/radar_radolan_rw.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. """ diff --git a/examples/radar/radar_scan_precip.py b/examples/radar/radar_scan_precip.py index 1b42d139b..8294f40f2 100644 --- a/examples/radar/radar_scan_precip.py +++ b/examples/radar/radar_scan_precip.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
""" diff --git a/examples/radar/radar_scan_volume.py b/examples/radar/radar_scan_volume.py index 1e864752d..c2304f048 100644 --- a/examples/radar/radar_scan_volume.py +++ b/examples/radar/radar_scan_volume.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. """ diff --git a/examples/radar/radar_site_dx.py b/examples/radar/radar_site_dx.py index b64e0ffaa..2585651bf 100644 --- a/examples/radar/radar_site_dx.py +++ b/examples/radar/radar_site_dx.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. """ diff --git a/examples/radar/radar_sweep_hdf5.py b/examples/radar/radar_sweep_hdf5.py index 33c979631..edbc2abb1 100644 --- a/examples/radar/radar_sweep_hdf5.py +++ b/examples/radar/radar_sweep_hdf5.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
""" diff --git a/examples/wetterdienst_notebook.ipynb b/examples/wetterdienst_notebook.ipynb index 7db43c988..96e8ac504 100644 --- a/examples/wetterdienst_notebook.ipynb +++ b/examples/wetterdienst_notebook.ipynb @@ -681,7 +681,9 @@ "]\n", "values = (\n", " DwdObservationRequest(\n", - " parameter=parameters, resolution=DwdObservationResolution.DAILY, period=DwdObservationPeriod.HISTORICAL\n", + " parameter=parameters,\n", + " resolution=DwdObservationResolution.DAILY,\n", + " period=DwdObservationPeriod.HISTORICAL,\n", " )\n", " .filter_by_station_id(station_id=(1048,))\n", " .values.all()\n", @@ -762,7 +764,7 @@ "for (parameter,), group in values.df.groupby([pl.col(\"parameter\")], maintain_order=True):\n", " if parameter == \"precipitation_height\":\n", " agg_df = group.groupby(pl.col(\"date\").dt.year(), maintain_order=True).agg(\n", - " pl.when(pl.col(\"value\").is_not_null().sum() > 330).then(pl.col(\"value\").sum())\n", + " pl.when(pl.col(\"value\").is_not_null().sum() > 330).then(pl.col(\"value\").sum()),\n", " )\n", " else:\n", " agg_df = group.groupby(pl.col(\"date\").dt.year(), maintain_order=True).agg([pl.col(\"value\").mean()])\n", @@ -835,7 +837,14 @@ " ax2 = ax\n", " daily.to_pandas().plot(x=\"date\", y=\"value\", label=parameter, alpha=0.75, ax=ax, c=color, legend=False)\n", " annual.to_pandas().plot(\n", - " x=\"date\", y=\"value\", kind=\"line\", label=f\"annual({parameter})\", alpha=0.75, ax=ax2, c=\"black\", legend=False\n", + " x=\"date\",\n", + " y=\"value\",\n", + " kind=\"line\",\n", + " label=f\"annual({parameter})\",\n", + " alpha=0.75,\n", + " ax=ax2,\n", + " c=\"black\",\n", + " legend=False,\n", " )\n", " ax.legend(loc=0)\n", " if ax != ax2:\n", diff --git a/poetry.lock b/poetry.lock index 950aa9443..5beece9d3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -289,13 +289,13 @@ files = [ [[package]] name = "asteval" -version = "0.9.31" +version = "0.9.32" description = "Safe, minimalistic evaluator of python expression using ast module" 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "asteval-0.9.31-py3-none-any.whl", hash = "sha256:2761750c184d97707c292b62df3b10e330a809a2201721acc435a2b89a114263"}, - {file = "asteval-0.9.31.tar.gz", hash = "sha256:a2da066b6696dba9835c5f7dec63e0ffb5bd2b4e3bb5f0b9a604aeafb17d833d"}, + {file = "asteval-0.9.32-py3-none-any.whl", hash = "sha256:4d0da45a15f15eeb88bb53cf4c352591ccb00f00f81f74649fd7084519adc3fe"}, + {file = "asteval-0.9.32.tar.gz", hash = "sha256:3bef25a973d378fda21c83a38c6292c4d0d94773f49f42073e69dbb19932bb74"}, ] [package.extras] @@ -1572,6 +1572,20 @@ django = ["dj-database-url", "dj-email-url", "django-cache-url"] lint = ["flake8 (==7.0.0)", "flake8-bugbear (==23.11.28)", "mypy (==1.8.0)", "pre-commit (>=3.6,<4.0)"] tests = ["environs[django]", "pytest"] +[[package]] +name = "eval-type-backport" +version = "0.1.3" +description = "Like `typing._eval_type`, but lets older Python versions use newer typing features." +optional = false +python-versions = ">=3.7" +files = [ + {file = "eval_type_backport-0.1.3-py3-none-any.whl", hash = "sha256:519d2a993b3da286df9f90e17f503f66435106ad870cf26620c5720e2158ddf2"}, + {file = "eval_type_backport-0.1.3.tar.gz", hash = "sha256:d83ee225331dfa009493cec1f3608a71550b515ee4749abe78da14e3c5e314f5"}, +] + +[package.extras] +tests = ["pytest"] + [[package]] name = "exceptiongroup" version = "1.2.0" @@ -2197,13 +2211,13 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs [[package]] name = "importlib-resources" -version = "6.1.1" +version = "6.1.3" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.1.1-py3-none-any.whl", hash = "sha256:e8bf90d8213b486f428c9c39714b920041cb02c184686a3dee24905aaa8105d6"}, - {file = "importlib_resources-6.1.1.tar.gz", hash = "sha256:3893a00122eafde6894c59914446a512f728a0c1a45f9bb9b63721b6bacf0b4a"}, + {file = 
"importlib_resources-6.1.3-py3-none-any.whl", hash = "sha256:4c0269e3580fe2634d364b39b38b961540a7738c02cb984e98add8b4221d793d"}, + {file = "importlib_resources-6.1.3.tar.gz", hash = "sha256:56fb4525197b78544a3354ea27793952ab93f935bb4bf746b846bb1015020f2b"}, ] [package.dependencies] @@ -2211,7 +2225,7 @@ zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] +testing = ["jaraco.collections", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] [[package]] name = "influxdb" @@ -2295,13 +2309,13 @@ files = [ [[package]] name = "ipykernel" -version = "6.29.2" +version = "6.29.3" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.29.2-py3-none-any.whl", hash = "sha256:50384f5c577a260a1d53f1f59a828c7266d321c9b7d00d345693783f66616055"}, - {file = "ipykernel-6.29.2.tar.gz", hash = "sha256:3bade28004e3ff624ed57974948116670604ac5f676d12339693f3142176d3f0"}, + {file = "ipykernel-6.29.3-py3-none-any.whl", hash = "sha256:5aa086a4175b0229d4eca211e181fb473ea78ffd9869af36ba7694c947302a21"}, + {file = "ipykernel-6.29.3.tar.gz", hash = "sha256:e14c250d1f9ea3989490225cc1a542781b095a18a19447fcf2b5eaf7d0ac5bd2"}, ] [package.dependencies] @@ -2324,7 +2338,7 @@ cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", 
"pytest-asyncio (==0.23.4)", "pytest-cov", "pytest-timeout"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" @@ -2458,13 +2472,13 @@ files = [ [[package]] name = "json5" -version = "0.9.17" +version = "0.9.22" description = "A Python implementation of the JSON5 data format." optional = false python-versions = ">=3.8" files = [ - {file = "json5-0.9.17-py2.py3-none-any.whl", hash = "sha256:f8ec1ecf985951d70f780f6f877c4baca6a47b6e61e02c4cd190138d10a7805a"}, - {file = "json5-0.9.17.tar.gz", hash = "sha256:717d99d657fa71b7094877b1d921b1cce40ab444389f6d770302563bb7dfd9ae"}, + {file = "json5-0.9.22-py3-none-any.whl", hash = "sha256:6621007c70897652f8b5d03885f732771c48d1925591ad989aa80c7e0e5ad32f"}, + {file = "json5-0.9.22.tar.gz", hash = "sha256:b729bde7650b2196a35903a597d2b704b8fdf8648bfb67368cfb79f1174a17bd"}, ] [package.extras] @@ -2478,7 +2492,6 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, ] [[package]] @@ -2638,13 +2651,13 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p [[package]] name = "jupyter-lsp" -version = "2.2.2" +version = "2.2.4" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter-lsp-2.2.2.tar.gz", hash = "sha256:256d24620542ae4bba04a50fc1f6ffe208093a07d8e697fea0a8d1b8ca1b7e5b"}, - {file = "jupyter_lsp-2.2.2-py3-none-any.whl", hash = "sha256:3b95229e4168355a8c91928057c1621ac3510ba98b2a925e82ebd77f078b1aa5"}, + {file = "jupyter-lsp-2.2.4.tar.gz", hash = 
"sha256:5e50033149344065348e688608f3c6d654ef06d9856b67655bd7b6bac9ee2d59"}, + {file = "jupyter_lsp-2.2.4-py3-none-any.whl", hash = "sha256:da61cb63a16b6dff5eac55c2699cc36eac975645adee02c41bdfc03bf4802e77"}, ] [package.dependencies] @@ -2653,13 +2666,13 @@ jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.12.5" +version = "2.13.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server-2.12.5-py3-none-any.whl", hash = "sha256:184a0f82809a8522777cfb6b760ab6f4b1bb398664c5860a27cec696cb884923"}, - {file = "jupyter_server-2.12.5.tar.gz", hash = "sha256:0edb626c94baa22809be1323f9770cf1c00a952b17097592e40d03e6a3951689"}, + {file = "jupyter_server-2.13.0-py3-none-any.whl", hash = "sha256:77b2b49c3831fbbfbdb5048cef4350d12946191f833a24e5f83e5f8f4803e97b"}, + {file = "jupyter_server-2.13.0.tar.gz", hash = "sha256:c80bfb049ea20053c3d9641c2add4848b38073bf79f1729cea1faed32fc1c78e"}, ] [package.dependencies] @@ -2685,7 +2698,7 @@ websocket-client = "*" [package.extras] docs = ["ipykernel", "jinja2", "jupyter-client", "jupyter-server", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] -test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] +test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"] [[package]] name = "jupyter-server-mathjax" @@ -2725,13 +2738,13 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.1.2" +version = "4.1.4" description = "JupyterLab 
computational environment" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.1.2-py3-none-any.whl", hash = "sha256:aa88193f03cf4d3555f6712f04d74112b5eb85edd7d222c588c7603a26d33c5b"}, - {file = "jupyterlab-4.1.2.tar.gz", hash = "sha256:5d6348b3ed4085181499f621b7dfb6eb0b1f57f3586857aadfc8e3bf4c4885f9"}, + {file = "jupyterlab-4.1.4-py3-none-any.whl", hash = "sha256:f92c3f2b12b88efcf767205f49be9b2f86b85544f9c4f342bb5e9904a16cf931"}, + {file = "jupyterlab-4.1.4.tar.gz", hash = "sha256:e03c82c124ad8a0892e498b9dde79c50868b2c267819aca3f55ce47c57ebeb1d"}, ] [package.dependencies] @@ -2769,13 +2782,13 @@ files = [ [[package]] name = "jupyterlab-server" -version = "2.25.3" +version = "2.25.4" description = "A set of server components for JupyterLab and JupyterLab like applications." optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab_server-2.25.3-py3-none-any.whl", hash = "sha256:c48862519fded9b418c71645d85a49b2f0ec50d032ba8316738e9276046088c1"}, - {file = "jupyterlab_server-2.25.3.tar.gz", hash = "sha256:846f125a8a19656611df5b03e5912c8393cea6900859baa64fa515eb64a8dc40"}, + {file = "jupyterlab_server-2.25.4-py3-none-any.whl", hash = "sha256:eb645ecc8f9b24bac5decc7803b6d5363250e16ec5af814e516bc2c54dd88081"}, + {file = "jupyterlab_server-2.25.4.tar.gz", hash = "sha256:2098198e1e82e0db982440f9b5136175d73bea2cd42a6480aa6fd502cb23c4f9"}, ] [package.dependencies] @@ -2791,7 +2804,7 @@ requests = ">=2.31" [package.extras] docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] -test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", 
"werkzeug"] +test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] [[package]] name = "jupyterlab-widgets" @@ -3165,22 +3178,21 @@ files = [ [[package]] name = "marshmallow" -version = "3.20.2" +version = "3.21.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.20.2-py3-none-any.whl", hash = "sha256:c21d4b98fee747c130e6bc8f45c4b3199ea66bc00c12ee1f639f0aeca034d5e9"}, - {file = "marshmallow-3.20.2.tar.gz", hash = "sha256:4c1daff273513dc5eb24b219a8035559dc573c8f322558ef85f5438ddd1236dd"}, + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, ] [package.dependencies] packaging = ">=17.0" [package.extras] -dev = ["pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.15)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["pre-commit (>=2.4,<4.0)"] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -3305,67 +3317,68 @@ tests = ["pytest (>=4.6)"] [[package]] name = "msgpack" -version = "1.0.7" +version = "1.0.8" description = "MessagePack serializer" optional = true python-versions = ">=3.8" files = [ - {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:04ad6069c86e531682f9e1e71b71c1c3937d6014a7c3e9edd2aa81ad58842862"}, - {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cca1b62fe70d761a282496b96a5e51c44c213e410a964bdffe0928e611368329"}, - {file = "msgpack-1.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e50ebce52f41370707f1e21a59514e3375e3edd6e1832f5e5235237db933c98b"}, - {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b4f35de6a304b5533c238bee86b670b75b03d31b7797929caa7a624b5dda6"}, - {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28efb066cde83c479dfe5a48141a53bc7e5f13f785b92ddde336c716663039ee"}, - {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cb14ce54d9b857be9591ac364cb08dc2d6a5c4318c1182cb1d02274029d590d"}, - {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b573a43ef7c368ba4ea06050a957c2a7550f729c31f11dd616d2ac4aba99888d"}, - {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ccf9a39706b604d884d2cb1e27fe973bc55f2890c52f38df742bc1d79ab9f5e1"}, - {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb70766519500281815dfd7a87d3a178acf7ce95390544b8c90587d76b227681"}, - {file = "msgpack-1.0.7-cp310-cp310-win32.whl", hash = "sha256:b610ff0f24e9f11c9ae653c67ff8cc03c075131401b3e5ef4b82570d1728f8a9"}, - {file = "msgpack-1.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:a40821a89dc373d6427e2b44b572efc36a2778d3f543299e2f24eb1a5de65415"}, - {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:576eb384292b139821c41995523654ad82d1916da6a60cff129c715a6223ea84"}, - {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:730076207cb816138cf1af7f7237b208340a2c5e749707457d70705715c93b93"}, - {file = "msgpack-1.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:85765fdf4b27eb5086f05ac0491090fc76f4f2b28e09d9350c31aac25a5aaff8"}, - {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3476fae43db72bd11f29a5147ae2f3cb22e2f1a91d575ef130d2bf49afd21c46"}, - {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d4c80667de2e36970ebf74f42d1088cc9ee7ef5f4e8c35eee1b40eafd33ca5b"}, - {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b0bf0effb196ed76b7ad883848143427a73c355ae8e569fa538365064188b8e"}, - {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f9a7c509542db4eceed3dcf21ee5267ab565a83555c9b88a8109dcecc4709002"}, - {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:84b0daf226913133f899ea9b30618722d45feffa67e4fe867b0b5ae83a34060c"}, - {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ec79ff6159dffcc30853b2ad612ed572af86c92b5168aa3fc01a67b0fa40665e"}, - {file = "msgpack-1.0.7-cp311-cp311-win32.whl", hash = "sha256:3e7bf4442b310ff154b7bb9d81eb2c016b7d597e364f97d72b1acc3817a0fdc1"}, - {file = "msgpack-1.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:3f0c8c6dfa6605ab8ff0611995ee30d4f9fcff89966cf562733b4008a3d60d82"}, - {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f0936e08e0003f66bfd97e74ee530427707297b0d0361247e9b4f59ab78ddc8b"}, - {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98bbd754a422a0b123c66a4c341de0474cad4a5c10c164ceed6ea090f3563db4"}, - {file = "msgpack-1.0.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b291f0ee7961a597cbbcc77709374087fa2a9afe7bdb6a40dbbd9b127e79afee"}, - {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebbbba226f0a108a7366bf4b59bf0f30a12fd5e75100c630267d94d7f0ad20e5"}, - {file = 
"msgpack-1.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e2d69948e4132813b8d1131f29f9101bc2c915f26089a6d632001a5c1349672"}, - {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdf38ba2d393c7911ae989c3bbba510ebbcdf4ecbdbfec36272abe350c454075"}, - {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:993584fc821c58d5993521bfdcd31a4adf025c7d745bbd4d12ccfecf695af5ba"}, - {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:52700dc63a4676669b341ba33520f4d6e43d3ca58d422e22ba66d1736b0a6e4c"}, - {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e45ae4927759289c30ccba8d9fdce62bb414977ba158286b5ddaf8df2cddb5c5"}, - {file = "msgpack-1.0.7-cp312-cp312-win32.whl", hash = "sha256:27dcd6f46a21c18fa5e5deed92a43d4554e3df8d8ca5a47bf0615d6a5f39dbc9"}, - {file = "msgpack-1.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:7687e22a31e976a0e7fc99c2f4d11ca45eff652a81eb8c8085e9609298916dcf"}, - {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5b6ccc0c85916998d788b295765ea0e9cb9aac7e4a8ed71d12e7d8ac31c23c95"}, - {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:235a31ec7db685f5c82233bddf9858748b89b8119bf4538d514536c485c15fe0"}, - {file = "msgpack-1.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab3db8bab4b7e635c1c97270d7a4b2a90c070b33cbc00c99ef3f9be03d3e1f7"}, - {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bfdd914e55e0d2c9e1526de210f6fe8ffe9705f2b1dfcc4aecc92a4cb4b533d"}, - {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36e17c4592231a7dbd2ed09027823ab295d2791b3b1efb2aee874b10548b7524"}, - {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:38949d30b11ae5f95c3c91917ee7a6b239f5ec276f271f28638dec9156f82cfc"}, - {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ff1d0899f104f3921d94579a5638847f783c9b04f2d5f229392ca77fba5b82fc"}, - {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dc43f1ec66eb8440567186ae2f8c447d91e0372d793dfe8c222aec857b81a8cf"}, - {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dd632777ff3beaaf629f1ab4396caf7ba0bdd075d948a69460d13d44357aca4c"}, - {file = "msgpack-1.0.7-cp38-cp38-win32.whl", hash = "sha256:4e71bc4416de195d6e9b4ee93ad3f2f6b2ce11d042b4d7a7ee00bbe0358bd0c2"}, - {file = "msgpack-1.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:8f5b234f567cf76ee489502ceb7165c2a5cecec081db2b37e35332b537f8157c"}, - {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfef2bb6ef068827bbd021017a107194956918ab43ce4d6dc945ffa13efbc25f"}, - {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484ae3240666ad34cfa31eea7b8c6cd2f1fdaae21d73ce2974211df099a95d81"}, - {file = "msgpack-1.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3967e4ad1aa9da62fd53e346ed17d7b2e922cba5ab93bdd46febcac39be636fc"}, - {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dd178c4c80706546702c59529ffc005681bd6dc2ea234c450661b205445a34d"}, - {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ffbc252eb0d229aeb2f9ad051200668fc3a9aaa8994e49f0cb2ffe2b7867e7"}, - {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:822ea70dc4018c7e6223f13affd1c5c30c0f5c12ac1f96cd8e9949acddb48a61"}, - {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:384d779f0d6f1b110eae74cb0659d9aa6ff35aaf547b3955abf2ab4c901c4819"}, - {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:f64e376cd20d3f030190e8c32e1c64582eba56ac6dc7d5b0b49a9d44021b52fd"}, - {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ed82f5a7af3697b1c4786053736f24a0efd0a1b8a130d4c7bfee4b9ded0f08f"}, - {file = "msgpack-1.0.7-cp39-cp39-win32.whl", hash = "sha256:f26a07a6e877c76a88e3cecac8531908d980d3d5067ff69213653649ec0f60ad"}, - {file = "msgpack-1.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:1dc93e8e4653bdb5910aed79f11e165c85732067614f180f70534f056da97db3"}, - {file = "msgpack-1.0.7.tar.gz", hash = "sha256:572efc93db7a4d27e404501975ca6d2d9775705c2d922390d878fcf768d92c87"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, + {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, + {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, + {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, + {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = 
"sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, + {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, + {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ceea77719d45c839fd73abcb190b8390412a890df2f83fb8cf49b2a4b5c2f40"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1ab0bbcd4d1f7b6991ee7c753655b481c50084294218de69365f8f1970d4c151"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cce488457370ffd1f953846f82323cb6b2ad2190987cd4d70b2713e17268d24"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3923a1778f7e5ef31865893fdca12a8d7dc03a44b33e2a5f3295416314c09f5d"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22e47578b30a3e199ab067a4d43d790249b3c0587d9a771921f86250c8435db"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd739c9251d01e0279ce729e37b39d49a08c0420d3fee7f2a4968c0576678f77"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3420522057ebab1728b21ad473aa950026d07cb09da41103f8e597dfbfaeb13"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5845fdf5e5d5b78a49b826fcdc0eb2e2aa7191980e3d2cfd2a30303a74f212e2"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a0e76621f6e1f908ae52860bdcb58e1ca85231a9b0545e64509c931dd34275a"}, + {file = "msgpack-1.0.8-cp38-cp38-win32.whl", hash = "sha256:374a8e88ddab84b9ada695d255679fb99c53513c0a51778796fcf0944d6c789c"}, + {file = "msgpack-1.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3709997b228685fe53e8c433e2df9f0cdb5f4542bd5114ed17ac3c0129b0480"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73ee792784d48aa338bba28063e19a27e8d989344f34aad14ea6e1b9bd83f596"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e75753aeda0ddc4c28dce4c32ba2f6ec30b1b02f6c0b14e547841ba5b24f753f"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dbf059fb4b7c240c873c1245ee112505be27497e90f7c6591261c7d3c3a8228"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4916727e31c28be8beaf11cf117d6f6f188dcc36daae4e851fee88646f5b6b18"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7938111ed1358f536daf311be244f34df7bf3cdedb3ed883787aca97778b28d8"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:493c5c5e44b06d6c9268ce21b302c9ca055c1fd3484c25ba41d34476c76ee746"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, + {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, + {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, + {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, + {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] [[package]] @@ -3547,13 +3560,13 @@ test = ["black", "check-manifest", "flake8", "ipykernel", "ipython (<8.0.0)", "i [[package]] name = "nbconvert" -version = "7.16.1" +version = "7.16.2" description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." 
optional = false python-versions = ">=3.8" files = [ - {file = "nbconvert-7.16.1-py3-none-any.whl", hash = "sha256:3188727dffadfdc9c6a1c7250729063d7bc78b355ad7aa023138afa030d1cd07"}, - {file = "nbconvert-7.16.1.tar.gz", hash = "sha256:e79e6a074f49ba3ed29428ed86487bf51509d9aab613bd8522ac08f6d28fd7fd"}, + {file = "nbconvert-7.16.2-py3-none-any.whl", hash = "sha256:0c01c23981a8de0220255706822c40b751438e32467d6a686e26be08ba784382"}, + {file = "nbconvert-7.16.2.tar.gz", hash = "sha256:8310edd41e1c43947e4ecf16614c61469ebc024898eb808cce0999860fc9fb16"}, ] [package.dependencies] @@ -3687,13 +3700,13 @@ tests = ["Cython", "packaging", "pytest"] [[package]] name = "notebook" -version = "7.1.0" +version = "7.1.1" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" optional = false python-versions = ">=3.8" files = [ - {file = "notebook-7.1.0-py3-none-any.whl", hash = "sha256:a8fa4ccb5e5fe220f29d9900337efd7752bc6f2efe004d6f320db01f7743adc9"}, - {file = "notebook-7.1.0.tar.gz", hash = "sha256:99caf01ff166b1cc86355c9b37c1ba9bf566c1d7fc4ab57bb6f8f24e36c4260e"}, + {file = "notebook-7.1.1-py3-none-any.whl", hash = "sha256:197d8e0595acabf4005851c8716e952a81b405f7aefb648067a761fbde267ce7"}, + {file = "notebook-7.1.1.tar.gz", hash = "sha256:818e7420fa21f402e726afb9f02df7f3c10f294c02e383ed19852866c316108b"}, ] [package.dependencies] @@ -3807,7 +3820,6 @@ files = [ {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] @@ 
-4196,25 +4208,26 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] [[package]] name = "polars" -version = "0.20.10" +version = "0.20.15" description = "Blazingly fast DataFrame library" optional = false python-versions = ">=3.8" files = [ - {file = "polars-0.20.10-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:14b126dbe626c8df34a9cc1449dea270dbafd64deff88fc3620046e69e06f84c"}, - {file = "polars-0.20.10-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6d5f485dba006aa1ce443980b351a5cb8ff481cbbc51343debfbf66fb9594269"}, - {file = "polars-0.20.10-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff934fe816856db7b72565b35abf1656db485772cd3bc5631071cef7ec1d10c7"}, - {file = "polars-0.20.10-cp38-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:f5b7222ca39a4cbd286d9927d4924d2bc2ce6d7fc83a256bfd20b4199482722f"}, - {file = "polars-0.20.10-cp38-abi3-win_amd64.whl", hash = "sha256:082a22c0c1bfa1fe0c24198e646ffb19478b893f594ecf8e330c7cdc136f6e6b"}, - {file = "polars-0.20.10.tar.gz", hash = "sha256:ab32a232916df61c9377edcb5893d0b1624d810444d8fa627f9885d33819a8b7"}, + {file = "polars-0.20.15-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:d528acc0b0900cb8363f065cbf65325571eeb4b245e4b68679beae75287451c9"}, + {file = "polars-0.20.15-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:3adc68bd1400c651da826e66ad735c07dafd5f1811f369f394f8d8fb71f1178b"}, + {file = "polars-0.20.15-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be613e4640a607040e3361622a254f88ac99bd92b212d6f580a3f4b74b6617ed"}, + {file = "polars-0.20.15-cp38-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:a1936ec8de4262ce68dd5c4f43b74c996184a36012bdd0ff9454c33132bd4d28"}, + {file = "polars-0.20.15-cp38-abi3-win_amd64.whl", hash = "sha256:00b5687d1fdcb09f7c2babdf88f63b3238284bf9f6cddd2ea60aea07b711172e"}, + {file = "polars-0.20.15.tar.gz", hash = "sha256:88ad0c3e1f92185b86041d68783f9862ec21adc92a33001818697644dd0794ee"}, ] [package.extras] -adbc = ["adbc_driver_sqlite"] 
-all = ["polars[adbc,cloudpickle,connectorx,deltalake,fsspec,gevent,numpy,pandas,plot,pyarrow,pydantic,pyiceberg,sqlalchemy,timezone,xlsx2csv,xlsxwriter]"] +adbc = ["adbc-driver-manager", "adbc-driver-sqlite"] +all = ["polars[adbc,cloudpickle,connectorx,deltalake,fastexcel,fsspec,gevent,numpy,pandas,plot,pyarrow,pydantic,pyiceberg,sqlalchemy,timezone,xlsx2csv,xlsxwriter]"] cloudpickle = ["cloudpickle"] connectorx = ["connectorx (>=0.3.2)"] deltalake = ["deltalake (>=0.14.0)"] +fastexcel = ["fastexcel (>=0.9)"] fsspec = ["fsspec"] gevent = ["gevent"] matplotlib = ["matplotlib"] @@ -4227,7 +4240,7 @@ pydantic = ["pydantic"] pyiceberg = ["pyiceberg (>=0.5.0)"] pyxlsb = ["pyxlsb (>=1.0)"] sqlalchemy = ["pandas", "sqlalchemy"] -timezone = ["backports.zoneinfo", "tzdata"] +timezone = ["backports-zoneinfo", "tzdata"] xlsx2csv = ["xlsx2csv (>=0.8.0)"] xlsxwriter = ["xlsxwriter"] @@ -4357,7 +4370,6 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -4366,8 +4378,6 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -4432,47 +4442,47 @@ tests = ["pytest"] [[package]] name = "pyarrow" -version = "15.0.0" +version = "15.0.1" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" files = [ - {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"}, - {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:66958fd1771a4d4b754cd385835e66a3ef6b12611e001d4e5edfcef5f30391e2"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f500956a49aadd907eaa21d4fff75f73954605eaa41f61cb94fb008cf2e00c6"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6f87d9c4f09e049c2cade559643424da84c43a35068f2a1c4653dc5b1408a929"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85239b9f93278e130d86c0e6bb455dcb66fc3fd891398b9d45ace8799a871a1e"}, - {file = "pyarrow-15.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b8d43e31ca16aa6e12402fcb1e14352d0d809de70edd185c7650fe80e0769e3"}, - {file = "pyarrow-15.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fa7cd198280dbd0c988df525e50e35b5d16873e2cdae2aaaa6363cdb64e3eec5"}, - {file = "pyarrow-15.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8780b1a29d3c8b21ba6b191305a2a607de2e30dab399776ff0aa09131e266340"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0ec198ccc680f6c92723fadcb97b74f07c45ff3fdec9dd765deb04955ccf19"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036a7209c235588c2f07477fe75c07e6caced9b7b61bb897c8d4e52c4b5f9555"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2bd8a0e5296797faf9a3294e9fa2dc67aa7f10ae2207920dbebb785c77e9dbe5"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e8ebed6053dbe76883a822d4e8da36860f479d55a762bd9e70d8494aed87113e"}, - {file = "pyarrow-15.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d53a9d1b2b5bd7d5e4cd84d018e2a45bc9baaa68f7e6e3ebed45649900ba99"}, - {file = "pyarrow-15.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9950a9c9df24090d3d558b43b97753b8f5867fb8e521f29876aa021c52fda351"}, - {file = "pyarrow-15.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:003d680b5e422d0204e7287bb3fa775b332b3fce2996aa69e9adea23f5c8f970"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f75fce89dad10c95f4bf590b765e3ae98bcc5ba9f6ce75adb828a334e26a3d40"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca9cb0039923bec49b4fe23803807e4ef39576a2bec59c32b11296464623dc2"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ed5a78ed29d171d0acc26a305a4b7f83c122d54ff5270810ac23c75813585e4"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6eda9e117f0402dfcd3cd6ec9bfee89ac5071c48fc83a84f3075b60efa96747f"}, - {file = "pyarrow-15.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a3a6180c0e8f2727e6f1b1c87c72d3254cac909e609f35f22532e4115461177"}, - {file = "pyarrow-15.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:19a8918045993349b207de72d4576af0191beef03ea655d8bdb13762f0cd6eac"}, - {file = "pyarrow-15.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0ec076b32bacb6666e8813a22e6e5a7ef1314c8069d4ff345efa6246bc38593"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db1769e5d0a77eb92344c7382d6543bea1164cca3704f84aa44e26c67e320fb"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2617e3bf9df2a00020dd1c1c6dce5cc343d979efe10bc401c0632b0eef6ef5b"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:d31c1d45060180131caf10f0f698e3a782db333a422038bf7fe01dace18b3a31"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c8c287d1d479de8269398b34282e206844abb3208224dbdd7166d580804674b7"}, - {file = "pyarrow-15.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:07eb7f07dc9ecbb8dace0f58f009d3a29ee58682fcdc91337dfeb51ea618a75b"}, - {file = "pyarrow-15.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = 
"sha256:47af7036f64fce990bb8a5948c04722e4e3ea3e13b1007ef52dfe0aa8f23cf7f"}, - {file = "pyarrow-15.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93768ccfff85cf044c418bfeeafce9a8bb0cee091bd8fd19011aff91e58de540"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ee87fd6892700960d90abb7b17a72a5abb3b64ee0fe8db6c782bcc2d0dc0b4"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001fca027738c5f6be0b7a3159cc7ba16a5c52486db18160909a0831b063c4e4"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:d1c48648f64aec09accf44140dccb92f4f94394b8d79976c426a5b79b11d4fa7"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:972a0141be402bb18e3201448c8ae62958c9c7923dfaa3b3d4530c835ac81aed"}, - {file = "pyarrow-15.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f01fc5cf49081426429127aa2d427d9d98e1cb94a32cb961d583a70b7c4504e6"}, - {file = "pyarrow-15.0.0.tar.gz", hash = "sha256:876858f549d540898f927eba4ef77cd549ad8d24baa3207cf1b72e5788b50e83"}, + {file = "pyarrow-15.0.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:c2ddb3be5ea938c329a84171694fc230b241ce1b6b0ff1a0280509af51c375fa"}, + {file = "pyarrow-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7543ea88a0ff72f8e6baaf9bfdbec2c62aeabdbede9e4a571c71cc3bc43b6302"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1519e218a6941fc074e4501088d891afcb2adf77c236e03c34babcf3d6a0d1c7"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28cafa86e1944761970d3b3fc0411b14ff9b5c2b73cd22aaf470d7a3976335f5"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:be5c3d463e33d03eab496e1af7916b1d44001c08f0f458ad27dc16093a020638"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = 
"sha256:47b1eda15d3aa3f49a07b1808648e1397e5dc6a80a30bf87faa8e2d02dad7ac3"}, + {file = "pyarrow-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e524a31be7db22deebbbcf242b189063ab9a7652c62471d296b31bc6e3cae77b"}, + {file = "pyarrow-15.0.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:a476fefe8bdd56122fb0d4881b785413e025858803cc1302d0d788d3522b374d"}, + {file = "pyarrow-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:309e6191be385f2e220586bfdb643f9bb21d7e1bc6dd0a6963dc538e347b2431"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83bc586903dbeb4365cbc72b602f99f70b96c5882e5dfac5278813c7d624ca3c"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07e652daac6d8b05280cd2af31c0fb61a4490ec6a53dc01588014d9fa3fdbee9"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:abad2e08652df153a72177ce20c897d083b0c4ebeec051239e2654ddf4d3c996"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cde663352bc83ad75ba7b3206e049ca1a69809223942362a8649e37bd22f9e3b"}, + {file = "pyarrow-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:1b6e237dd7a08482a8b8f3f6512d258d2460f182931832a8c6ef3953203d31e1"}, + {file = "pyarrow-15.0.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:7bd167536ee23192760b8c731d39b7cfd37914c27fd4582335ffd08450ff799d"}, + {file = "pyarrow-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c08bb31eb2984ba5c3747d375bb522e7e536b8b25b149c9cb5e1c49b0ccb736"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0f9c1d630ed2524bd1ddf28ec92780a7b599fd54704cd653519f7ff5aec177a"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5186048493395220550bca7b524420471aac2d77af831f584ce132680f55c3df"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = 
"sha256:31dc30c7ec8958da3a3d9f31d6c3630429b2091ede0ecd0d989fd6bec129f0e4"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3f111a014fb8ac2297b43a74bf4495cc479a332908f7ee49cb7cbd50714cb0c1"}, + {file = "pyarrow-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:a6d1f7c15d7f68f08490d0cb34611497c74285b8a6bbeab4ef3fc20117310983"}, + {file = "pyarrow-15.0.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:9ad931b996f51c2f978ed517b55cb3c6078272fb4ec579e3da5a8c14873b698d"}, + {file = "pyarrow-15.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:738f6b53ab1c2f66b2bde8a1d77e186aeaab702d849e0dfa1158c9e2c030add3"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c1c3fc16bc74e33bf8f1e5a212938ed8d88e902f372c4dac6b5bad328567d2f"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1fa92512128f6c1b8dde0468c1454dd70f3bff623970e370d52efd4d24fd0be"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b4157f307c202cbbdac147d9b07447a281fa8e63494f7fc85081da351ec6ace9"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:b75e7da26f383787f80ad76143b44844ffa28648fcc7099a83df1538c078d2f2"}, + {file = "pyarrow-15.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:3a99eac76ae14096c209850935057b9e8ce97a78397c5cde8724674774f34e5d"}, + {file = "pyarrow-15.0.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:dd532d3177e031e9b2d2df19fd003d0cc0520d1747659fcabbd4d9bb87de508c"}, + {file = "pyarrow-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce8c89848fd37e5313fc2ce601483038ee5566db96ba0808d5883b2e2e55dc53"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:862eac5e5f3b6477f7a92b2f27e560e1f4e5e9edfca9ea9da8a7478bb4abd5ce"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8f0ea3a29cd5cb99bf14c1c4533eceaa00ea8fb580950fb5a89a5c771a994a4e"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:bb902f780cfd624b2e8fd8501fadab17618fdb548532620ef3d91312aaf0888a"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:4f87757f02735a6bb4ad2e1b98279ac45d53b748d5baf52401516413007c6999"}, + {file = "pyarrow-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:efd3816c7fbfcbd406ac0f69873cebb052effd7cdc153ae5836d1b00845845d7"}, + {file = "pyarrow-15.0.1.tar.gz", hash = "sha256:21d812548d39d490e0c6928a7c663f37b96bf764034123d4b4ab4530ecc757a9"}, ] [package.dependencies] @@ -4505,13 +4515,13 @@ files = [ [[package]] name = "pydantic" -version = "2.6.2" +version = "2.6.3" description = "Data validation using Python type hints" optional = true python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.2-py3-none-any.whl", hash = "sha256:37a5432e54b12fecaa1049c5195f3d860a10e01bdfd24f1840ef14bd0d3aeab3"}, - {file = "pydantic-2.6.2.tar.gz", hash = "sha256:a09be1c3d28f3abe37f8a78af58284b236a92ce520105ddc91a6d29ea1176ba7"}, + {file = "pydantic-2.6.3-py3-none-any.whl", hash = "sha256:72c6034df47f46ccdf81869fddb81aade68056003900a8724a4f160700016a2a"}, + {file = "pydantic-2.6.3.tar.gz", hash = "sha256:e07805c4c7f5c6826e33a1d4c9d47950d7eaf34868e2690f8594d2e30241f11f"}, ] [package.dependencies] @@ -4649,13 +4659,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyopenssl" -version = "24.0.0" +version = "24.1.0" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.7" files = [ - {file = "pyOpenSSL-24.0.0-py3-none-any.whl", hash = "sha256:ba07553fb6fd6a7a2259adb9b84e12302a9a8a75c44046e8bb5d3e5ee887e3c3"}, - {file = "pyOpenSSL-24.0.0.tar.gz", hash = "sha256:6aa33039a93fffa4563e655b61d11364d01264be8ccb49906101e02a334530bf"}, + {file = "pyOpenSSL-24.1.0-py3-none-any.whl", hash = 
"sha256:17ed5be5936449c5418d1cd269a1a9e9081bc54c17aed272b45856a3d3dc86ad"}, + {file = "pyOpenSSL-24.1.0.tar.gz", hash = "sha256:cabed4bfaa5df9f1a16c0ef64a0cb65318b5cd077a7eda7d6970131ca2f41a6f"}, ] [package.dependencies] @@ -4663,17 +4673,17 @@ cryptography = ">=41.0.5,<43" [package.extras] docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] -test = ["flaky", "pretend", "pytest (>=3.0.1)"] +test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"] [[package]] name = "pyparsing" -version = "3.1.1" +version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, - {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, ] [package.extras] @@ -4773,13 +4783,13 @@ files = [ [[package]] name = "pytest" -version = "8.0.2" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.0.2-py3-none-any.whl", hash = "sha256:edfaaef32ce5172d5466b5127b42e0d6d35ebbe4453f0e3505d96afd93f6b096"}, - {file = "pytest-8.0.2.tar.gz", hash = "sha256:d4051d623a2e0b7e51960ba963193b09ce6daeb9759a451844a21e4ddedfc1bd"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] @@ -4787,11 +4797,11 @@ colorama = {version = "*", markers = 
"sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.3.0,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" @@ -4887,13 +4897,13 @@ testing = ["filelock"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -4960,17 +4970,17 @@ files = [ [[package]] name = "pywinpty" -version = "2.0.12" +version = "2.0.13" description = "Pseudo terminal support for Windows from Python." 
optional = false python-versions = ">=3.8" files = [ - {file = "pywinpty-2.0.12-cp310-none-win_amd64.whl", hash = "sha256:21319cd1d7c8844fb2c970fb3a55a3db5543f112ff9cfcd623746b9c47501575"}, - {file = "pywinpty-2.0.12-cp311-none-win_amd64.whl", hash = "sha256:853985a8f48f4731a716653170cd735da36ffbdc79dcb4c7b7140bce11d8c722"}, - {file = "pywinpty-2.0.12-cp312-none-win_amd64.whl", hash = "sha256:1617b729999eb6713590e17665052b1a6ae0ad76ee31e60b444147c5b6a35dca"}, - {file = "pywinpty-2.0.12-cp38-none-win_amd64.whl", hash = "sha256:189380469ca143d06e19e19ff3fba0fcefe8b4a8cc942140a6b863aed7eebb2d"}, - {file = "pywinpty-2.0.12-cp39-none-win_amd64.whl", hash = "sha256:7520575b6546db23e693cbd865db2764097bd6d4ef5dc18c92555904cd62c3d4"}, - {file = "pywinpty-2.0.12.tar.gz", hash = "sha256:8197de460ae8ebb7f5d1701dfa1b5df45b157bb832e92acba316305e18ca00dd"}, + {file = "pywinpty-2.0.13-cp310-none-win_amd64.whl", hash = "sha256:697bff211fb5a6508fee2dc6ff174ce03f34a9a233df9d8b5fe9c8ce4d5eaf56"}, + {file = "pywinpty-2.0.13-cp311-none-win_amd64.whl", hash = "sha256:b96fb14698db1284db84ca38c79f15b4cfdc3172065b5137383910567591fa99"}, + {file = "pywinpty-2.0.13-cp312-none-win_amd64.whl", hash = "sha256:2fd876b82ca750bb1333236ce98488c1be96b08f4f7647cfdf4129dfad83c2d4"}, + {file = "pywinpty-2.0.13-cp38-none-win_amd64.whl", hash = "sha256:61d420c2116c0212808d31625611b51caf621fe67f8a6377e2e8b617ea1c1f7d"}, + {file = "pywinpty-2.0.13-cp39-none-win_amd64.whl", hash = "sha256:71cb613a9ee24174730ac7ae439fd179ca34ccb8c5349e8d7b72ab5dea2c6f4b"}, + {file = "pywinpty-2.0.13.tar.gz", hash = "sha256:c34e32351a3313ddd0d7da23d27f835c860d32fe4ac814d372a3ea9594f41dde"}, ] [[package]] @@ -4985,7 +4995,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -4993,16 +5002,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -5019,7 +5020,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = 
"PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -5027,7 +5027,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -5182,101 +5181,101 @@ test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] [[package]] name = "rapidfuzz" -version = "3.6.1" +version = "3.6.2" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.8" files = [ - {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ac434fc71edda30d45db4a92ba5e7a42c7405e1a54cb4ec01d03cc668c6dcd40"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a791168e119cfddf4b5a40470620c872812042f0621e6a293983a2d52372db0"}, - 
{file = "rapidfuzz-3.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5a2f3e9df346145c2be94e4d9eeffb82fab0cbfee85bd4a06810e834fe7c03fa"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23de71e7f05518b0bbeef55d67b5dbce3bcd3e2c81e7e533051a2e9401354eb0"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d056e342989248d2bdd67f1955bb7c3b0ecfa239d8f67a8dfe6477b30872c607"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01835d02acd5d95c1071e1da1bb27fe213c84a013b899aba96380ca9962364bc"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed0f712e0bb5fea327e92aec8a937afd07ba8de4c529735d82e4c4124c10d5a0"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96cd19934f76a1264e8ecfed9d9f5291fde04ecb667faef5f33bdbfd95fe2d1f"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e06c4242a1354cf9d48ee01f6f4e6e19c511d50bb1e8d7d20bcadbb83a2aea90"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d73dcfe789d37c6c8b108bf1e203e027714a239e50ad55572ced3c004424ed3b"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:06e98ff000e2619e7cfe552d086815671ed09b6899408c2c1b5103658261f6f3"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:08b6fb47dd889c69fbc0b915d782aaed43e025df6979b6b7f92084ba55edd526"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1788ebb5f5b655a15777e654ea433d198f593230277e74d51a2a1e29a986283"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-win32.whl", hash = "sha256:c65f92881753aa1098c77818e2b04a95048f30edbe9c3094dc3707d67df4598b"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:4243a9c35667a349788461aae6471efde8d8800175b7db5148a6ab929628047f"}, - {file = "rapidfuzz-3.6.1-cp310-cp310-win_arm64.whl", hash = "sha256:f59d19078cc332dbdf3b7b210852ba1f5db8c0a2cd8cc4c0ed84cc00c76e6802"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fbc07e2e4ac696497c5f66ec35c21ddab3fc7a406640bffed64c26ab2f7ce6d6"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cced1a8852652813f30fb5d4b8f9b237112a0bbaeebb0f4cc3611502556764"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82300e5f8945d601c2daaaac139d5524d7c1fdf719aa799a9439927739917460"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf97c321fd641fea2793abce0e48fa4f91f3c202092672f8b5b4e781960b891"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7420e801b00dee4a344ae2ee10e837d603461eb180e41d063699fb7efe08faf0"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060bd7277dc794279fa95522af355034a29c90b42adcb7aa1da358fc839cdb11"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7e3375e4f2bfec77f907680328e4cd16cc64e137c84b1886d547ab340ba6928"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a490cd645ef9d8524090551016f05f052e416c8adb2d8b85d35c9baa9d0428ab"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2e03038bfa66d2d7cffa05d81c2f18fd6acbb25e7e3c068d52bb7469e07ff382"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b19795b26b979c845dba407fe79d66975d520947b74a8ab6cee1d22686f7967"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:064c1d66c40b3a0f488db1f319a6e75616b2e5fe5430a59f93a9a5e40a656d15"}, - {file = 
"rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3c772d04fb0ebeece3109d91f6122b1503023086a9591a0b63d6ee7326bd73d9"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:841eafba6913c4dfd53045835545ba01a41e9644e60920c65b89c8f7e60c00a9"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-win32.whl", hash = "sha256:266dd630f12696ea7119f31d8b8e4959ef45ee2cbedae54417d71ae6f47b9848"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:d79aec8aeee02ab55d0ddb33cea3ecd7b69813a48e423c966a26d7aab025cdfe"}, - {file = "rapidfuzz-3.6.1-cp311-cp311-win_arm64.whl", hash = "sha256:484759b5dbc5559e76fefaa9170147d1254468f555fd9649aea3bad46162a88b"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b2ef4c0fd3256e357b70591ffb9e8ed1d439fb1f481ba03016e751a55261d7c1"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:588c4b20fa2fae79d60a4e438cf7133d6773915df3cc0a7f1351da19eb90f720"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7142ee354e9c06e29a2636b9bbcb592bb00600a88f02aa5e70e4f230347b373e"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dfc557c0454ad22382373ec1b7df530b4bbd974335efe97a04caec936f2956a"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03f73b381bdeccb331a12c3c60f1e41943931461cdb52987f2ecf46bfc22f50d"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b0ccc2ec1781c7e5370d96aef0573dd1f97335343e4982bdb3a44c133e27786"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da3e8c9f7e64bb17faefda085ff6862ecb3ad8b79b0f618a6cf4452028aa2222"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fde9b14302a31af7bdafbf5cfbb100201ba21519be2b9dedcf4f1048e4fbe65d"}, - 
{file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1a23eee225dfb21c07f25c9fcf23eb055d0056b48e740fe241cbb4b22284379"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e49b9575d16c56c696bc7b06a06bf0c3d4ef01e89137b3ddd4e2ce709af9fe06"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:0a9fc714b8c290261669f22808913aad49553b686115ad0ee999d1cb3df0cd66"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a3ee4f8f076aa92184e80308fc1a079ac356b99c39408fa422bbd00145be9854"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f056ba42fd2f32e06b2c2ba2443594873cfccc0c90c8b6327904fc2ddf6d5799"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-win32.whl", hash = "sha256:5d82b9651e3d34b23e4e8e201ecd3477c2baa17b638979deeabbb585bcb8ba74"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:dad55a514868dae4543ca48c4e1fc0fac704ead038dafedf8f1fc0cc263746c1"}, - {file = "rapidfuzz-3.6.1-cp312-cp312-win_arm64.whl", hash = "sha256:3c84294f4470fcabd7830795d754d808133329e0a81d62fcc2e65886164be83b"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e19d519386e9db4a5335a4b29f25b8183a1c3f78cecb4c9c3112e7f86470e37f"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01eb03cd880a294d1bf1a583fdd00b87169b9cc9c9f52587411506658c864d73"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:be368573255f8fbb0125a78330a1a40c65e9ba3c5ad129a426ff4289099bfb41"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3e5af946f419c30f5cb98b69d40997fe8580efe78fc83c2f0f25b60d0e56efb"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f382f7ffe384ce34345e1c0b2065451267d3453cadde78946fbd99a59f0cc23c"}, - {file = 
"rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be156f51f3a4f369e758505ed4ae64ea88900dcb2f89d5aabb5752676d3f3d7e"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1936d134b6c513fbe934aeb668b0fee1ffd4729a3c9d8d373f3e404fbb0ce8a0"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ff8eaf4a9399eb2bebd838f16e2d1ded0955230283b07376d68947bbc2d33d"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae598a172e3a95df3383634589660d6b170cc1336fe7578115c584a99e0ba64d"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cd4ba4c18b149da11e7f1b3584813159f189dc20833709de5f3df8b1342a9759"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:0402f1629e91a4b2e4aee68043a30191e5e1b7cd2aa8dacf50b1a1bcf6b7d3ab"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:1e12319c6b304cd4c32d5db00b7a1e36bdc66179c44c5707f6faa5a889a317c0"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0bbfae35ce4de4c574b386c43c78a0be176eeddfdae148cb2136f4605bebab89"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-win32.whl", hash = "sha256:7fec74c234d3097612ea80f2a80c60720eec34947066d33d34dc07a3092e8105"}, - {file = "rapidfuzz-3.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:a553cc1a80d97459d587529cc43a4c7c5ecf835f572b671107692fe9eddf3e24"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:757dfd7392ec6346bd004f8826afb3bf01d18a723c97cbe9958c733ab1a51791"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2963f4a3f763870a16ee076796be31a4a0958fbae133dbc43fc55c3968564cf5"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d2f0274595cc5b2b929c80d4e71b35041104b577e118cf789b3fe0a77b37a4c5"}, - {file = 
"rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f211e366e026de110a4246801d43a907cd1a10948082f47e8a4e6da76fef52"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a59472b43879012b90989603aa5a6937a869a72723b1bf2ff1a0d1edee2cc8e6"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a03863714fa6936f90caa7b4b50ea59ea32bb498cc91f74dc25485b3f8fccfe9"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd95b6b7bfb1584f806db89e1e0c8dbb9d25a30a4683880c195cc7f197eaf0c"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7183157edf0c982c0b8592686535c8b3e107f13904b36d85219c77be5cefd0d8"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ad9d74ef7c619b5b0577e909582a1928d93e07d271af18ba43e428dc3512c2a1"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b53137d81e770c82189e07a8f32722d9e4260f13a0aec9914029206ead38cac3"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:49b9ed2472394d306d5dc967a7de48b0aab599016aa4477127b20c2ed982dbf9"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:dec307b57ec2d5054d77d03ee4f654afcd2c18aee00c48014cb70bfed79597d6"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4381023fa1ff32fd5076f5d8321249a9aa62128eb3f21d7ee6a55373e672b261"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-win32.whl", hash = "sha256:8d7a072f10ee57c8413c8ab9593086d42aaff6ee65df4aa6663eecdb7c398dca"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:ebcfb5bfd0a733514352cfc94224faad8791e576a80ffe2fd40b2177bf0e7198"}, - {file = "rapidfuzz-3.6.1-cp39-cp39-win_arm64.whl", hash = "sha256:1c47d592e447738744905c18dda47ed155620204714e6df20eb1941bb1ba315e"}, - {file = 
"rapidfuzz-3.6.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:eef8b346ab331bec12bbc83ac75641249e6167fab3d84d8f5ca37fd8e6c7a08c"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53251e256017e2b87f7000aee0353ba42392c442ae0bafd0f6b948593d3f68c6"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6dede83a6b903e3ebcd7e8137e7ff46907ce9316e9d7e7f917d7e7cdc570ee05"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e4da90e4c2b444d0a171d7444ea10152e07e95972bb40b834a13bdd6de1110c"}, - {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ca3dfcf74f2b6962f411c33dd95b0adf3901266e770da6281bc96bb5a8b20de9"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bcc957c0a8bde8007f1a8a413a632a1a409890f31f73fe764ef4eac55f59ca87"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:692c9a50bea7a8537442834f9bc6b7d29d8729a5b6379df17c31b6ab4df948c2"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c23ceaea27e790ddd35ef88b84cf9d721806ca366199a76fd47cfc0457a81b"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b155e67fff215c09f130555002e42f7517d0ea72cbd58050abb83cb7c880cec"}, - {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3028ee8ecc48250607fa8a0adce37b56275ec3b1acaccd84aee1f68487c8557b"}, - {file = "rapidfuzz-3.6.1.tar.gz", hash = "sha256:35660bee3ce1204872574fa041c7ad7ec5175b3053a4cb6e181463fc07013de7"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a5637e6bf11b15b5aff6ee818c76bdec99ad208511b78985e6209ba648a6e3ee"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:380586664f2f63807050ddb95e7702888b4f0b425abf17655940c411f39287ad"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3168ff565d4b8c239cf11fb604dd2507d30e9bcaac76a4077c0ac23cf2c866ed"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be69f7fd46b5c6467fe5e2fd4cff3816b0c03048eed8a4becb9a73e6000960e7"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cbd5894f23fdf5697499cf759523639838ac822bd1600e343fdce7313baa02ae"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85a5b6e026393fe39fb61146b9c17c5af66fffbe1410e992c4bb06d9ec327bd3"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab269adfc64480f209e99f253391a10735edd5c09046e04899adab5fb132f20e"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35aeac852bca06023d6bbd50c1fc504ca5a9a3613d5e75a140f0be7601fa34ef"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e706f302c6a3ae0d74edd0d6ace46aee1ae07c563b436ccf5ff04db2b3571e60"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bec353f022011e6e5cd28ccb8700fbd2a33918197af0d4e0abb3c3f4845cc864"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ef3925daaa93eed20401012e219f569ff0c039ed5bf4ce2d3737b4f75d441622"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6ee98d88ae9ccc77ff61992ed33b2496478def5dc0da55c9a9aa06fcb725a352"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:423c7c588b09d618601097b7a0017dfcb91132a2076bef29023c5f3cd2dc3de1"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-win32.whl", hash = "sha256:c17c5efee347a40a6f4c1eec59e3d7d1e22f7613a97f8b8a07733ef723483a04"}, + {file = 
"rapidfuzz-3.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:4209816626d8d6ff8ae7dc248061c6059e618b70c6e6f6e4d7444ae3740b2b85"}, + {file = "rapidfuzz-3.6.2-cp310-cp310-win_arm64.whl", hash = "sha256:1c54d3c85e522d3ac9ee39415f183c8fa184c4f87e7e5a37938f15a6d50e853a"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e06f6d270112f5db001f1cba5a97e1a48aee3d3dbdcbea3ec027c230462dbf9b"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:080cb71b50cb6aff11d1c6aeb157f273e2da0b2bdb3f9d7b01257e49e69a8576"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7895e04a22d6515bc91a850e0831f2405547605aa311d1ffec51e4818abc3c1"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82f9838519136b7083dd1e3149ee80344521f3dc37f744f227505ff0883efb"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a945567c2b0b6e069454c9782d5234b0b6795718adf7a9f868bd3144afa6a023"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:673ba2c343644805acdae1cb949c6a4de71aa2f62a998978551ebea59603af3f"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d457c89bac1471442002e70551e8268e639b3870b4a4521eae363c07253be87"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:495c0d8e14e6f12520eb7fc71b9ba9fcaafb47fc23a654e6e89b6c7985ec0020"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d67b649bf3e1b1722d04eca44d37919aef88305ce7ad05564502d013cf550fd"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e48dde8ca83d11daa00900cf6a5d281a1297aef9b7bfa73801af6e8822be5019"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:824cc381cf81cbf8d158f6935664ec2a69e6ac3b1d39fa201988bf81a257f775"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1dfe4c24957474ce0ac75d886387e30e292b4be39228a6d71f76de414dc187db"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d57b98013b802621bbc8b12a46bfc9d36ac552ab51ca207f7ce167ad46adabeb"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-win32.whl", hash = "sha256:9a07dffac439223b4f1025dbfc68f4445a3460a859309c9858c2a3fa29617cdc"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:95a49c6b8bf1229743ae585dd5b7d57f0d15a7eb6e826866d5c9965ba958503c"}, + {file = "rapidfuzz-3.6.2-cp311-cp311-win_arm64.whl", hash = "sha256:af7c19ec86e11488539380d3db1755be5d561a3c0e7b04ff9d07abd7f9a8e9d8"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:de8adc12161bf282c60f12dc9233bb31632f71d446a010fe7469a69b8153427f"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:337e357f693130c4c6be740652542b260e36f622c59e01fa33d58f1d2750c930"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6468f8bc8c3c50604f43631550ef9cfec873515dba5023ca34d461be94669fc8"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74c6773b11445b5e5cf93ca383171cd0ac0cdeafea11a7b2a5688f8bf8d813e6"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1507fc5769aa109dda4de3a15f822a0f6a03e18d627bd0ba3ddbb253cf70e07"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:617949a70150e6fffdaed19253dd49f7a53528411dc8bf7663d499ba21e0f61e"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8b77779174b1b40aa70827692571ab457061897846255ad7d5d559e2edb1932"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:80e51b22a7da83f9c87a97e92df07ed0612c74c35496590255f4b5d5b4212dfe"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3ae7c86914cb6673e97e187ba431b9c4cf4177d9ae77f8a1e5b2ba9a5628839e"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ddc380ffaa90f204cc9ddcb779114b9ab6f015246d549de9d47871a97ef9f18a"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3c1dc078ef371fce09f9f3eec2ca4eaa2a8cd412ec53941015b4f39f14d34407"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:9a74102fc5a2534fe91f7507838623e1f3a149d8e05648389c42bb42e14b1c3f"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:48e1eaea8fcd522fca7f04f0480663f0f0cfb77957092cce60a93f4462864996"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-win32.whl", hash = "sha256:66b008bf2972740cd2dda5d382eb8bdb87265cd88198e71c7797bdc0d1f79d20"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:87ac3a87f2251ae2e95fc9478ca5c759de6d141d04c84d3fec9f9cdcfc167b33"}, + {file = "rapidfuzz-3.6.2-cp312-cp312-win_arm64.whl", hash = "sha256:b593cc51aed887e93b78c2f94dfae9008be2b23d17afd3b1f1d3eb3913b58f26"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d830bc7a9b586a374147ec60b08b1f9ae5996b43f75cc514f37faef3866b519"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dbee7f5ff11872b76505cbd87c814abc823e8757f11c69062eb3b25130a283da"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c011fb31f2c3f82f503aedd6097d3d3854e574e327a119a3b7eb2cf90b79ca"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cda81d0e0ce0c13abfa46b24e10c1e85f9c6acb628f0a9a948f5779f9c2076a2"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4c279928651ce0e9e5220dcb25a00cc53b65e592a0861336a38299bcdca3a596"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35bd4bc9c40e6994c5d6edea4b9319388b4d9711c13c66d543bb4c37624b4184"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d07899506a5a8760448d9df036d528b55a554bf571714173635c79eef4a86e58"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb2e51d01b9c6d6954a3e055c57a80d4685b4fc82719db5519fc153566bcd6bb"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:153d065e353371cc0aeff32b99999a5758266a64e958d1364189367c1c9f6814"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4edcceebb85ebfa49a3ddcde20ad891d36c08dc0fd592efdab0e7d313a4e36af"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3549123fca5bb817341025f98e8e49ca99f84596c7c4f92b658f8e5836040d4a"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:84c1032ae42628465b7a5cc35249906061e18a8193c9c27cbd2db54e9823a9a6"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9bcc91ebd8fc69a6bd3b5711c8250f5f4e70606b4da75ef415f57ad209978205"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-win32.whl", hash = "sha256:f3a70f341c4c111bad910d2df69c78577a98af140319a996af24c9385939335d"}, + {file = "rapidfuzz-3.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:354ad5fe655beb7b279390cb58334903931c5452ecbad1b1666ffb06786498e2"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1b86b93d93020c2b3edc1665d75c8855784845fc0a739b312c26c3a4bf0c80d5"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28243086ed0e50808bb56632e5442c457241646aeafafd501ac87901f40a3237"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:ed52461ae5a9ea4c400d38e2649c74a413f1a6d8fb8308b66f1fbd122514732f"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a46220f86a5f9cb016af31525e0d0865cad437d02239aa0d8aed2ab8bff1f1c"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81a630ed2fc3ec5fc7400eb66bab1f87e282b4d47f0abe3e48c6634dfa13b5e4"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8e5a437b9089df6242a718d9c31ab1742989e9400a0977af012ef483b63b4c2"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16270b5529de83b7bae7457e952e4d9cf3fbf029a837dd32d415bb9e0eb8e599"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5378c04102c7f084cde30a100154fa6d7e2baf0d51a6bdd2f912545559c1fb35"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7f18397c8d6a65fc0b288d2fc29bc7baeea6ba91eeb95163a3cd98f23cd3bc85"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2acd2514defce81e6ff4bbff50252d5e7df8e85a731442c4b83e44c86cf1c916"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:1df2faf80201952e252413b6fac6f3e146080dcebb87bb1bb722508e67558ed8"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6440ed0b3007c1c9286b0b88fe2ab2d9e83edd60cd62293b3dfabb732b4e8a30"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4fcfa23b5553b27f4016df77c53172ea743454cf12c28cfa7c35a309a2be93b3"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-win32.whl", hash = "sha256:2d580d937146e803c8e5e1b87916cab8d6f84013b6392713e201efcda335c7d8"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:fe2a68be734e8e88af23385c68d6467e15818b6b1df1cbfebf7bff577226c957"}, + {file = "rapidfuzz-3.6.2-cp39-cp39-win_arm64.whl", hash = 
"sha256:6478f7803efebf5f644d0b758439c5b25728550fdfbb19783d150004c46a75a9"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:36ce7b68a7b90b787cdd73480a68d2f1ca63c31a3a9d5a79a8736f978e1e9344"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53597fd72a9340bcdd80d3620f4957c2b92f9b569313b969a3abdaffd193aae6"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4f6de745fe6ce46a422d353ee10599013631d7d714a36d025f164b2d4e8c000"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62df2136068e2515ed8beb01756381ff62c29384d785e3bf46e3111d4ea3ba1e"}, + {file = "rapidfuzz-3.6.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7382c90170f60c846c81a07ddd80bb2e8c43c8383754486fa37f67391a571897"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f31314fd2e2f3dc3e519e6f93669462ce7953df2def1c344aa8f5345976d0eb2"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:012221629d54d3bee954148247f711eb86d4d390b589ebfe03172ea0b37a7531"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d41dd59a70decfce6595315367a2fea2af660d92a9d144acc6479030501014d7"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f9fa14136a5b0cba1ec42531f7c3e0b0d3edb7fd6bc5e5ae7b498541f3855ab"}, + {file = "rapidfuzz-3.6.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:259364199cbfeca33b1af369fc7951f71717aa285184a3fa5a7b1772da1b89db"}, + {file = "rapidfuzz-3.6.2.tar.gz", hash = "sha256:cf911e792ab0c431694c9bf2648afabfd92099103f2e31492893e078ddca5e1a"}, ] [package.extras] @@ -5373,13 +5372,13 @@ files = [ [[package]] name = "rich" -version = "13.7.0" +version = "13.7.1" description = "Render rich text, tables, 
progress bars, syntax highlighting, markdown and more to the terminal" optional = true python-versions = ">=3.7.0" files = [ - {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, - {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, ] [package.dependencies] @@ -5735,13 +5734,13 @@ files = [ [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -6003,60 +6002,60 @@ cairosvg = ["cairosvg (>=1.0)"] [[package]] name = "sqlalchemy" -version = "2.0.27" +version = "2.0.28" description = "Database Abstraction Library" optional = true python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d04e579e911562f1055d26dab1868d3e0bb905db3bccf664ee8ad109f035618a"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa67d821c1fd268a5a87922ef4940442513b4e6c377553506b9db3b83beebbd8"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6c7a596d0be71b7baa037f4ac10d5e057d276f65a9a611c46970f012752ebf2d"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:954d9735ee9c3fa74874c830d089a815b7b48df6f6b6e357a74130e478dbd951"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5cd20f58c29bbf2680039ff9f569fa6d21453fbd2fa84dbdb4092f006424c2e6"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:03f448ffb731b48323bda68bcc93152f751436ad6037f18a42b7e16af9e91c07"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-win32.whl", hash = "sha256:d997c5938a08b5e172c30583ba6b8aad657ed9901fc24caf3a7152eeccb2f1b4"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-win_amd64.whl", hash = "sha256:eb15ef40b833f5b2f19eeae65d65e191f039e71790dd565c2af2a3783f72262f"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c5bad7c60a392850d2f0fee8f355953abaec878c483dd7c3836e0089f046bf6"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3012ab65ea42de1be81fff5fb28d6db893ef978950afc8130ba707179b4284a"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbcd77c4d94b23e0753c5ed8deba8c69f331d4fd83f68bfc9db58bc8983f49cd"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d177b7e82f6dd5e1aebd24d9c3297c70ce09cd1d5d37b43e53f39514379c029c"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:680b9a36029b30cf063698755d277885d4a0eab70a2c7c6e71aab601323cba45"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1306102f6d9e625cebaca3d4c9c8f10588735ef877f0360b5cdb4fdfd3fd7131"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-win32.whl", hash = "sha256:5b78aa9f4f68212248aaf8943d84c0ff0f74efc65a661c2fc68b82d498311fd5"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-win_amd64.whl", hash = 
"sha256:15e19a84b84528f52a68143439d0c7a3a69befcd4f50b8ef9b7b69d2628ae7c4"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0de1263aac858f288a80b2071990f02082c51d88335a1db0d589237a3435fe71"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce850db091bf7d2a1f2fdb615220b968aeff3849007b1204bf6e3e50a57b3d32"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dfc936870507da96aebb43e664ae3a71a7b96278382bcfe84d277b88e379b18"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4fbe6a766301f2e8a4519f4500fe74ef0a8509a59e07a4085458f26228cd7cc"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4535c49d961fe9a77392e3a630a626af5baa967172d42732b7a43496c8b28876"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fb3bffc0ced37e5aa4ac2416f56d6d858f46d4da70c09bb731a246e70bff4d5"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-win32.whl", hash = "sha256:7f470327d06400a0aa7926b375b8e8c3c31d335e0884f509fe272b3c700a7254"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-win_amd64.whl", hash = "sha256:f9374e270e2553653d710ece397df67db9d19c60d2647bcd35bfc616f1622dcd"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e97cf143d74a7a5a0f143aa34039b4fecf11343eed66538610debc438685db4a"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7b5a3e2120982b8b6bd1d5d99e3025339f7fb8b8267551c679afb39e9c7c7f1"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e36aa62b765cf9f43a003233a8c2d7ffdeb55bc62eaa0a0380475b228663a38f"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5ada0438f5b74c3952d916c199367c29ee4d6858edff18eab783b3978d0db16d"}, - {file = 
"SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b1d9d1bfd96eef3c3faedb73f486c89e44e64e40e5bfec304ee163de01cf996f"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-win32.whl", hash = "sha256:ca891af9f3289d24a490a5fde664ea04fe2f4984cd97e26de7442a4251bd4b7c"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-win_amd64.whl", hash = "sha256:fd8aafda7cdff03b905d4426b714601c0978725a19efc39f5f207b86d188ba01"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec1f5a328464daf7a1e4e385e4f5652dd9b1d12405075ccba1df842f7774b4fc"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad862295ad3f644e3c2c0d8b10a988e1600d3123ecb48702d2c0f26771f1c396"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48217be1de7d29a5600b5c513f3f7664b21d32e596d69582be0a94e36b8309cb"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e56afce6431450442f3ab5973156289bd5ec33dd618941283847c9fd5ff06bf"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:611068511b5531304137bcd7fe8117c985d1b828eb86043bd944cebb7fae3910"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b86abba762ecfeea359112b2bb4490802b340850bbee1948f785141a5e020de8"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-win32.whl", hash = "sha256:30d81cc1192dc693d49d5671cd40cdec596b885b0ce3b72f323888ab1c3863d5"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-win_amd64.whl", hash = "sha256:120af1e49d614d2525ac247f6123841589b029c318b9afbfc9e2b70e22e1827d"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d07ee7793f2aeb9b80ec8ceb96bc8cc08a2aec8a1b152da1955d64e4825fcbac"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb0845e934647232b6ff5150df37ceffd0b67b754b9fdbb095233deebcddbd4a"}, - {file = 
"SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc19ae2e07a067663dd24fca55f8ed06a288384f0e6e3910420bf4b1270cc51"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b90053be91973a6fb6020a6e44382c97739736a5a9d74e08cc29b196639eb979"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f5c9dfb0b9ab5e3a8a00249534bdd838d943ec4cfb9abe176a6c33408430230"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33e8bde8fff203de50399b9039c4e14e42d4d227759155c21f8da4a47fc8053c"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-win32.whl", hash = "sha256:d873c21b356bfaf1589b89090a4011e6532582b3a8ea568a00e0c3aab09399dd"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-win_amd64.whl", hash = "sha256:ff2f1b7c963961d41403b650842dc2039175b906ab2093635d8319bef0b7d620"}, - {file = "SQLAlchemy-2.0.27-py3-none-any.whl", hash = "sha256:1ab4e0448018d01b142c916cc7119ca573803a4745cfe341b8f95657812700ac"}, - {file = "SQLAlchemy-2.0.27.tar.gz", hash = "sha256:86a6ed69a71fe6b88bf9331594fa390a2adda4a49b5c06f98e47bf0d392534f8"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0b148ab0438f72ad21cb004ce3bdaafd28465c4276af66df3b9ecd2037bf252"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbda76961eb8f27e6ad3c84d1dc56d5bc61ba8f02bd20fcf3450bd421c2fcc9c"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feea693c452d85ea0015ebe3bb9cd15b6f49acc1a31c28b3c50f4db0f8fb1e71"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5da98815f82dce0cb31fd1e873a0cb30934971d15b74e0d78cf21f9e1b05953f"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a5adf383c73f2d49ad15ff363a8748319ff84c371eed59ffd0127355d6ea1da"}, + {file = 
"SQLAlchemy-2.0.28-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56856b871146bfead25fbcaed098269d90b744eea5cb32a952df00d542cdd368"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-win32.whl", hash = "sha256:943aa74a11f5806ab68278284a4ddd282d3fb348a0e96db9b42cb81bf731acdc"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-win_amd64.whl", hash = "sha256:c6c4da4843e0dabde41b8f2e8147438330924114f541949e6318358a56d1875a"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46a3d4e7a472bfff2d28db838669fc437964e8af8df8ee1e4548e92710929adc"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3dd67b5d69794cfe82862c002512683b3db038b99002171f624712fa71aeaa"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61e2e41656a673b777e2f0cbbe545323dbe0d32312f590b1bc09da1de6c2a02"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0315d9125a38026227f559488fe7f7cee1bd2fbc19f9fd637739dc50bb6380b2"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af8ce2d31679006e7b747d30a89cd3ac1ec304c3d4c20973f0f4ad58e2d1c4c9"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:81ba314a08c7ab701e621b7ad079c0c933c58cdef88593c59b90b996e8b58fa5"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-win32.whl", hash = "sha256:1ee8bd6d68578e517943f5ebff3afbd93fc65f7ef8f23becab9fa8fb315afb1d"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-win_amd64.whl", hash = "sha256:ad7acbe95bac70e4e687a4dc9ae3f7a2f467aa6597049eeb6d4a662ecd990bb6"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d3499008ddec83127ab286c6f6ec82a34f39c9817f020f75eca96155f9765097"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9b66fcd38659cab5d29e8de5409cdf91e9986817703e1078b2fdaad731ea66f5"}, + {file = 
"SQLAlchemy-2.0.28-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea30da1e76cb1acc5b72e204a920a3a7678d9d52f688f087dc08e54e2754c67"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:124202b4e0edea7f08a4db8c81cc7859012f90a0d14ba2bf07c099aff6e96462"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e23b88c69497a6322b5796c0781400692eca1ae5532821b39ce81a48c395aae9"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b6303bfd78fb3221847723104d152e5972c22367ff66edf09120fcde5ddc2e2"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-win32.whl", hash = "sha256:a921002be69ac3ab2cf0c3017c4e6a3377f800f1fca7f254c13b5f1a2f10022c"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-win_amd64.whl", hash = "sha256:b4a2cf92995635b64876dc141af0ef089c6eea7e05898d8d8865e71a326c0385"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e91b5e341f8c7f1e5020db8e5602f3ed045a29f8e27f7f565e0bdee3338f2c7"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45c7b78dfc7278329f27be02c44abc0d69fe235495bb8e16ec7ef1b1a17952db"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3eba73ef2c30695cb7eabcdb33bb3d0b878595737479e152468f3ba97a9c22a4"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5df5d1dafb8eee89384fb7a1f79128118bc0ba50ce0db27a40750f6f91aa99d5"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2858bbab1681ee5406650202950dc8f00e83b06a198741b7c656e63818633526"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-win32.whl", hash = "sha256:9461802f2e965de5cff80c5a13bc945abea7edaa1d29360b485c3d2b56cdb075"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-win_amd64.whl", hash = "sha256:a6bec1c010a6d65b3ed88c863d56b9ea5eeefdf62b5e39cafd08c65f5ce5198b"}, + {file = 
"SQLAlchemy-2.0.28-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:843a882cadebecc655a68bd9a5b8aa39b3c52f4a9a5572a3036fb1bb2ccdc197"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dbb990612c36163c6072723523d2be7c3eb1517bbdd63fe50449f56afafd1133"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7e4baf9161d076b9a7e432fce06217b9bd90cfb8f1d543d6e8c4595627edb9"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0a5354cb4de9b64bccb6ea33162cb83e03dbefa0d892db88a672f5aad638a75"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fffcc8edc508801ed2e6a4e7b0d150a62196fd28b4e16ab9f65192e8186102b6"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aca7b6d99a4541b2ebab4494f6c8c2f947e0df4ac859ced575238e1d6ca5716b"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-win32.whl", hash = "sha256:8c7f10720fc34d14abad5b647bc8202202f4948498927d9f1b4df0fb1cf391b7"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-win_amd64.whl", hash = "sha256:243feb6882b06a2af68ecf4bec8813d99452a1b62ba2be917ce6283852cf701b"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc4974d3684f28b61b9a90fcb4c41fb340fd4b6a50c04365704a4da5a9603b05"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87724e7ed2a936fdda2c05dbd99d395c91ea3c96f029a033a4a20e008dd876bf"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68722e6a550f5de2e3cfe9da6afb9a7dd15ef7032afa5651b0f0c6b3adb8815d"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:328529f7c7f90adcd65aed06a161851f83f475c2f664a898af574893f55d9e53"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:df40c16a7e8be7413b885c9bf900d402918cc848be08a59b022478804ea076b8"}, + {file = 
"SQLAlchemy-2.0.28-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:426f2fa71331a64f5132369ede5171c52fd1df1bd9727ce621f38b5b24f48750"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-win32.whl", hash = "sha256:33157920b233bc542ce497a81a2e1452e685a11834c5763933b440fedd1d8e2d"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-win_amd64.whl", hash = "sha256:2f60843068e432311c886c5f03c4664acaef507cf716f6c60d5fde7265be9d7b"}, + {file = "SQLAlchemy-2.0.28-py3-none-any.whl", hash = "sha256:78bb7e8da0183a8301352d569900d9d3594c48ac21dc1c2ec6b3121ed8b6c986"}, + {file = "SQLAlchemy-2.0.28.tar.gz", hash = "sha256:dd53b6c4e6d960600fd6532b79ee28e2da489322fcf6648738134587faf767b6"}, ] [package.dependencies] @@ -6148,13 +6147,13 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7 [[package]] name = "streamlit" -version = "1.31.1" +version = "1.32.0" description = "A faster way to build and share data apps" optional = true python-versions = ">=3.8, !=3.9.7" files = [ - {file = "streamlit-1.31.1-py2.py3-none-any.whl", hash = "sha256:a1a84249f7a9b854fe356db06c85dc03c3f9da4df06a33aa5a922647b955e8c8"}, - {file = "streamlit-1.31.1.tar.gz", hash = "sha256:dfc43ca85b4b4c31d097c27b983b8ccc960222ad907862b2b2fb4ddf04c50fdc"}, + {file = "streamlit-1.32.0-py2.py3-none-any.whl", hash = "sha256:d8458d6d00fd9d7e965e8125ec97d0ef7ed3d82844bf5663e71231fdb623ebb1"}, + {file = "streamlit-1.32.0.tar.gz", hash = "sha256:65bf22e190de973b910c0b127b33d80d31c5275ed891c6fde740de46e5e05323"}, ] [package.dependencies] @@ -6163,7 +6162,6 @@ blinker = ">=1.0.0,<2" cachetools = ">=4.0,<6" click = ">=7.0,<9" gitpython = ">=3.0.7,<3.1.19 || >3.1.19,<4" -importlib-metadata = ">=1.4,<8" numpy = ">=1.19.3,<2" packaging = ">=16.8,<24" pandas = ">=1.3.0,<3" @@ -6171,15 +6169,12 @@ pillow = ">=7.1.0,<11" protobuf = ">=3.20,<5" pyarrow = ">=7.0" pydeck = ">=0.8.0b4,<1" -python-dateutil = ">=2.7.3,<3" requests = ">=2.27,<3" rich = ">=10.14.0,<14" tenacity = ">=8.1.0,<9" toml = ">=0.10.1,<2" tornado = 
">=6.0.3,<7" typing-extensions = ">=4.3.0,<5" -tzlocal = ">=1.1,<6" -validators = ">=0.2,<1" watchdog = {version = ">=2.1.5", markers = "platform_system != \"Darwin\""} [package.extras] @@ -6424,24 +6419,24 @@ wsproto = ">=0.14" [[package]] name = "types-python-dateutil" -version = "2.8.19.20240106" +version = "2.8.19.20240311" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" files = [ - {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"}, - {file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = "sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"}, + {file = "types-python-dateutil-2.8.19.20240311.tar.gz", hash = "sha256:51178227bbd4cbec35dc9adffbf59d832f20e09842d7dcb8c73b169b8780b7cb"}, + {file = "types_python_dateutil-2.8.19.20240311-py3-none-any.whl", hash = "sha256:ef813da0809aca76472ca88807addbeea98b19339aebe56159ae2f4b4f70857a"}, ] [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -6519,23 +6514,6 @@ files = [ [package.extras] pytz = ["pytz (>=2023.3)"] -[[package]] -name = "tzlocal" -version = "5.2" -description = "tzinfo object for the local timezone" -optional = true -python-versions = ">=3.8" -files = [ - 
{file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, - {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, -] - -[package.dependencies] -tzdata = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] - [[package]] name = "uncertainties" version = "3.1.7" @@ -6627,13 +6605,13 @@ files = [ [[package]] name = "uvicorn" -version = "0.27.1" +version = "0.28.0" description = "The lightning-fast ASGI server." optional = true python-versions = ">=3.8" files = [ - {file = "uvicorn-0.27.1-py3-none-any.whl", hash = "sha256:5c89da2f3895767472a35556e539fd59f7edbe9b1e9c0e1c99eebeadc61838e4"}, - {file = "uvicorn-0.27.1.tar.gz", hash = "sha256:3d9a267296243532db80c83a959a3400502165ade2c1338dea4e67915fd4745a"}, + {file = "uvicorn-0.28.0-py3-none-any.whl", hash = "sha256:6623abbbe6176204a4226e67607b4d52cc60ff62cda0ff177613645cefa2ece1"}, + {file = "uvicorn-0.28.0.tar.gz", hash = "sha256:cab4473b5d1eaeb5a0f6375ac4bc85007ffc75c3cc1768816d9e5d589857b067"}, ] [package.dependencies] @@ -7156,13 +7134,13 @@ multidict = ">=4.0" [[package]] name = "zarr" -version = "2.17.0" +version = "2.17.1" description = "An implementation of chunked, compressed, N-dimensional arrays for Python" optional = true python-versions = ">=3.9" files = [ - {file = "zarr-2.17.0-py3-none-any.whl", hash = "sha256:d287cb61019c4a0a0f386f76eeaa7f0b1160b1cb90cf96173a4b6cbc135df6e1"}, - {file = "zarr-2.17.0.tar.gz", hash = "sha256:6390a2b8af31babaab4c963efc45bf1da7f9500c9aafac193f84cf019a7c66b0"}, + {file = "zarr-2.17.1-py3-none-any.whl", hash = "sha256:e25df2741a6e92645f3890f30f3136d5b57a0f8f831094b024bbcab5f2797bc7"}, + {file = "zarr-2.17.1.tar.gz", hash = "sha256:564b3aa072122546fe69a0fa21736f466b20fad41754334b62619f088ce46261"}, ] [package.dependencies] @@ 
-7214,4 +7192,4 @@ streamlit = ["streamlit"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "2113b8d664ed7203c6ca323715ae935123133bfe17847d15d3b713faa51c94ff" +content-hash = "144dda68488c1b95f503819d173bb4bb4873e0d2c64f0a818c270cc0ea31125d" diff --git a/pyproject.toml b/pyproject.toml index 161dc2bcb..6f0ea66de 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -106,6 +106,7 @@ cloup = ">=3.0.1,<4" deprecation = ">=2.1,<3" diskcache = ">=5.4.0,<6" environs = ">=9.4.0,<11" +eval-type-backport = { version = "^0.1.3", markers = "python_version < '3.10'" } fsspec = ">=2023.1,<2024.3" lxml = ">=4.9.1,<6" measurement = ">=3.2,<4" @@ -233,18 +234,13 @@ wddump = 'wetterdienst.provider.dwd.radar.cli:wddump' [tool.ruff] line-length = 120 +include = ["wetterdienst/**/*.py", "tests/**/*.py", "benchmarks/**/*.py", "examples/**/*.py"] extend-include = ["*.ipynb"] [tool.ruff.lint] select = [ # Bandit "S", - # Bugbear - "B", - # Builtins - "A", - # comprehensions - "C4", # eradicate "ERA", # flake8-2020 @@ -260,12 +256,25 @@ select = [ "F", # return "RET", + # pyupgrade + "UP", + # flake8-commas + "COM", + # future-annotations + "FA", + # flake8-type-checking + "TCH", + # flake8-unused-arguments + "ARG", + # flake8-use-pathlib + "PTH" ] extend-ignore = [ # zip() without an explicit strict= parameter. 
"B905", # Unnecessary `elif` after `return` statement "RET505", + "COM812" ] unfixable = ["ERA", "F401", "F841", "T20", "ERA001"] @@ -277,7 +286,7 @@ unfixable = ["ERA", "F401", "F841", "T20", "ERA001"] "tests/*" = ["S101"] "tests/provider/dwd/observation/test_available_datasets.py" = ["E402"] "wetterdienst/__init__.py" = ["E402"] -"wetterdienst/ui/restapi.py" = ["B008"] +"wetterdienst/ui/restapi.py" = ["B008", "UP007"] [tool.pytest.ini_options] addopts = "-rsfEX -p pytester --strict-markers --verbosity=3 --webdriver=Firefox --headless" @@ -320,15 +329,15 @@ install_dev = "poetry install --with=test,dev,docs -E mpl -E ipython -E sql -E e format = [ # Fix all things with ruff, but skip a few: # unused imports (F401), unused variables (F841), `print` statements (T201), and commented-out code (ERA001). - { cmd = "ruff format wetterdienst tests benchmarks examples"}, - { cmd = "ruff check --fix wetterdienst tests benchmarks examples" }, - { cmd = "pyproject-fmt ."} + { cmd = "ruff format" }, + { cmd = "ruff check --fix" }, + { cmd = "pyproject-fmt ." } ] lint = [ - { cmd = "ruff format --check wetterdienst tests benchmarks examples" }, - { cmd = "ruff check wetterdienst tests benchmarks examples" }, - { cmd = "pyproject-fmt --check ."} + { cmd = "ruff format --check" }, + { cmd = "ruff check" }, + { cmd = "pyproject-fmt --check ." } ] docs = { shell = "cd docs && poetry run make html" } test-cflakes = "pytest -m cflake" diff --git a/tests/__init__.py b/tests/__init__.py index 36979cf71..57a368863 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
diff --git a/tests/benchmarks/__init__.py b/tests/benchmarks/__init__.py index 1f0020ad8..2c7e8b50c 100644 --- a/tests/benchmarks/__init__.py +++ b/tests/benchmarks/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/tests/benchmarks/test_benchmarks.py b/tests/benchmarks/test_benchmarks.py index d7de94c97..8b2407f0e 100644 --- a/tests/benchmarks/test_benchmarks.py +++ b/tests/benchmarks/test_benchmarks.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/tests/core/timeseries/test_api.py b/tests/core/timeseries/test_api.py index d54e4bd32..23a6277ea 100644 --- a/tests/core/timeseries/test_api.py +++ b/tests/core/timeseries/test_api.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import pytest @@ -116,5 +115,5 @@ def test_api_partly_valid_parameters(default_settings, caplog): ( DwdObservationDataset.SOLAR, DwdObservationDataset.SOLAR, - ) + ), ] diff --git a/tests/core/timeseries/test_interpolation.py b/tests/core/timeseries/test_interpolation.py index a018731e6..ff50568ca 100644 --- a/tests/core/timeseries/test_interpolation.py +++ b/tests/core/timeseries/test_interpolation.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import datetime as dt @@ -57,7 +56,7 @@ def test_interpolation_temperature_air_mean_200_hourly_by_coords(default_setting "value": [277.71], "distance_mean": [13.37], "taken_station_ids": [["02480", "04411", "07341", "00917"]], - } + }, ) assert_frame_equal(given_df, expected_df) @@ -82,7 +81,7 @@ def test_interpolation_temperature_air_mean_200_daily_by_station_id(default_sett "value": [279.52, 281.85], "distance_mean": [16.99, 0.0], "taken_station_ids": [["00072", "02074", "02638", "04703"], ["00071"]], - } + }, ) for result in ( request.interpolate(latlon=(48.2156, 8.9784)), @@ -115,7 +114,7 @@ def test_interpolation_precipitation_height_minute_10(default_settings): "value": [0.03], "distance_mean": [9.38], "taken_station_ids": [["04230", "02480", "04411", "07341"]], - } + }, ) assert_frame_equal(given_df, expected_df) diff --git a/tests/core/timeseries/test_io.py b/tests/core/timeseries/test_io.py index 23caab0d9..5d548e8ba 100644 --- a/tests/core/timeseries/test_io.py +++ b/tests/core/timeseries/test_io.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import datetime as dt @@ -96,7 +95,7 @@ def df_stations(): "longitude": [13.5528], "name": ["Freyung vorm Wald"], "state": ["Bayern"], - } + }, ) @@ -384,14 +383,14 @@ def test_values_to_ogc_feature_collection(df_values, stations_result_mock): "parameter": "temperature_air_max_200", "quality": None, "value": 1.3, - } + }, ], } def test_values_to_ogc_feature_collection_with_metadata(df_values, stations_result_mock, dwd_metadata): data = ValuesResult(stations=stations_result_mock, values=None, df=df_values[0, :]).to_ogc_feature_collection( - with_metadata=True + with_metadata=True, ) assert data.keys() == {"data", "metadata"} assert data["metadata"] == dwd_metadata @@ -447,13 +446,13 @@ def test_interpolated_values_to_dict(df_interpolated_values): "value": 1.3, "distance_mean": 0.0, "taken_station_ids": ["01048"], - } + }, ] def test_interpolated_values_to_dict_with_metadata(df_interpolated_values, stations_result_mock, dwd_metadata): data = InterpolatedValuesResult(stations=stations_result_mock, df=df_interpolated_values, latlon=(1, 2)).to_dict( - with_metadata=True + with_metadata=True, ) assert data.keys() == {"values", "metadata"} assert data["metadata"] == dwd_metadata @@ -461,7 +460,9 @@ def test_interpolated_values_to_dict_with_metadata(df_interpolated_values, stati def test_interpolated_values_to_ogc_feature_collection(df_interpolated_values, stations_result_mock): data = InterpolatedValuesResult( - stations=stations_result_mock, df=df_interpolated_values, latlon=(1.2345, 2.3456) + stations=stations_result_mock, + df=df_interpolated_values, + latlon=(1.2345, 2.3456), ).to_ogc_feature_collection() assert data.keys() == {"data"} assert data["data"]["features"][0] == { @@ -477,7 +478,7 @@ def test_interpolated_values_to_ogc_feature_collection(df_interpolated_values, s "height": 645.0, "name": "Freyung vorm Wald", "state": "Bayern", - } + }, ], "type": "Feature", "values": [ @@ -488,16 +489,20 @@ def 
test_interpolated_values_to_ogc_feature_collection(df_interpolated_values, s "value": 1.3, "distance_mean": 0.0, "taken_station_ids": ["01048"], - } + }, ], } def test_interpolated_values_to_ogc_feature_collection_with_metadata( - df_interpolated_values, stations_result_mock, dwd_metadata + df_interpolated_values, + stations_result_mock, + dwd_metadata, ): data = InterpolatedValuesResult( - stations=stations_result_mock, df=df_interpolated_values, latlon=(1.2345, 2.3456) + stations=stations_result_mock, + df=df_interpolated_values, + latlon=(1.2345, 2.3456), ).to_ogc_feature_collection(with_metadata=True) assert data.keys() == {"data", "metadata"} assert data["metadata"] == dwd_metadata @@ -514,13 +519,15 @@ def test_summarized_values_to_dict(df_summarized_values): "value": 1.3, "distance": 0.0, "taken_station_id": "01048", - } + }, ] def test_summarized_values_to_dict_with_metadata(df_summarized_values, stations_result_mock, dwd_metadata): data = SummarizedValuesResult( - stations=stations_result_mock, df=df_summarized_values, latlon=(1.2345, 2.3456) + stations=stations_result_mock, + df=df_summarized_values, + latlon=(1.2345, 2.3456), ).to_dict(with_metadata=True) assert data.keys() == {"values", "metadata"} assert data["metadata"] == dwd_metadata @@ -528,7 +535,9 @@ def test_summarized_values_to_dict_with_metadata(df_summarized_values, stations_ def test_summarized_values_to_ogc_feature_collection(df_summarized_values, stations_result_mock): data = SummarizedValuesResult( - stations=stations_result_mock, df=df_summarized_values, latlon=(1.2345, 2.3456) + stations=stations_result_mock, + df=df_summarized_values, + latlon=(1.2345, 2.3456), ).to_ogc_feature_collection() assert data.keys() == {"data"} assert data["data"]["features"][0] == { @@ -544,7 +553,7 @@ def test_summarized_values_to_ogc_feature_collection(df_summarized_values, stati "height": 645.0, "name": "Freyung vorm Wald", "state": "Bayern", - } + }, ], "type": "Feature", "values": [ @@ -555,16 +564,20 
@@ def test_summarized_values_to_ogc_feature_collection(df_summarized_values, stati "value": 1.3, "distance": 0.0, "taken_station_id": "01048", - } + }, ], } def test_summarized_values_to_ogc_feature_collection_with_metadata( - df_summarized_values, stations_result_mock, dwd_metadata + df_summarized_values, + stations_result_mock, + dwd_metadata, ): data = SummarizedValuesResult( - stations=stations_result_mock, df=df_summarized_values, latlon=(1.2345, 2.3456) + stations=stations_result_mock, + df=df_summarized_values, + latlon=(1.2345, 2.3456), ).to_ogc_feature_collection(with_metadata=True) assert data.keys() == {"data", "metadata"} assert data["metadata"] == dwd_metadata @@ -594,11 +607,11 @@ def test_filter_by_date_interval(df_values): def test_filter_by_sql(df_values): """Test filter by sql statement""" df = ExportMixin(df=df_values).filter_by_sql( - sql="SELECT * FROM data WHERE parameter='temperature_air_max_200' AND value < 1.5" + sql="SELECT * FROM data WHERE parameter='temperature_air_max_200' AND value < 1.5", ) assert not df.is_empty() df = ExportMixin(df=df_values).filter_by_sql( - sql="SELECT * FROM data WHERE parameter='temperature_air_max_200' AND value > 4" + sql="SELECT * FROM data WHERE parameter='temperature_air_max_200' AND value > 4", ) assert df.is_empty() diff --git a/tests/core/timeseries/test_parameter.py b/tests/core/timeseries/test_parameter.py index 5a122f0c7..498cf83fe 100644 --- a/tests/core/timeseries/test_parameter.py +++ b/tests/core/timeseries/test_parameter.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import pytest diff --git a/tests/core/timeseries/test_request.py b/tests/core/timeseries/test_request.py index db7ecda87..44c9b1320 100644 --- a/tests/core/timeseries/test_request.py +++ b/tests/core/timeseries/test_request.py @@ -96,7 +96,7 @@ def test_dwd_observation_data_api(default_settings): ( DwdObservationParameter.DAILY.CLIMATE_SUMMARY.PRECIPITATION_HEIGHT, DwdObservationDataset.CLIMATE_SUMMARY, - ) + ), ] @@ -104,7 +104,10 @@ def test_dwd_observation_data_api(default_settings): def test_dwd_observation_data_dataset(default_settings): """Request a parameter set""" given = DwdObservationRequest( - parameter=["kl"], resolution="daily", period=["recent", "historical"], settings=default_settings + parameter=["kl"], + resolution="daily", + period=["recent", "historical"], + settings=default_settings, ).filter_by_station_id(station_id=(1,)) expected = DwdObservationRequest( parameter=[DwdObservationDataset.CLIMATE_SUMMARY], @@ -137,7 +140,7 @@ def test_dwd_observation_data_dataset(default_settings): ( DwdObservationDataset.CLIMATE_SUMMARY, DwdObservationDataset.CLIMATE_SUMMARY, - ) + ), ] @@ -153,10 +156,13 @@ def test_dwd_observation_data_parameter(default_settings): ( DwdObservationParameter.DAILY.CLIMATE_SUMMARY.PRECIPITATION_HEIGHT, DwdObservationDataset.CLIMATE_SUMMARY, - ) + ), ] given = DwdObservationRequest( - parameter=["climate_summary"], resolution="daily", period=["recent", "historical"], settings=default_settings + parameter=["climate_summary"], + resolution="daily", + period=["recent", "historical"], + settings=default_settings, ) assert given.parameter == [(DwdObservationDataset.CLIMATE_SUMMARY, DwdObservationDataset.CLIMATE_SUMMARY)] @@ -180,7 +186,7 @@ def test_dwd_observation_data_parameter_dataset_pairs(default_settings): ( DwdObservationParameter.DAILY.PRECIPITATION_MORE.PRECIPITATION_HEIGHT, DwdObservationDataset.PRECIPITATION_MORE, - ) + ), ] diff --git a/tests/core/timeseries/test_summary.py b/tests/core/timeseries/test_summary.py index 
6c181f740..a1d5b40f3 100644 --- a/tests/core/timeseries/test_summary.py +++ b/tests/core/timeseries/test_summary.py @@ -36,7 +36,7 @@ def test_summary_temperature_air_mean_200_daily(default_settings): "value": [273.65, 267.65, 270.45], "distance": [13.42, 5.05, 0.0], "taken_station_id": ["01048", "01051", "01050"], - } + }, ) for result in (request.summarize(latlon=(51.0221, 13.8470)), request.summarize_by_station_id(station_id="1050")): given_df = result.df diff --git a/tests/examples/__init__.py b/tests/examples/__init__.py index 36979cf71..57a368863 100644 --- a/tests/examples/__init__.py +++ b/tests/examples/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/tests/examples/test_examples.py b/tests/examples/test_examples.py index 56fd03d0b..e2278faec 100644 --- a/tests/examples/test_examples.py +++ b/tests/examples/test_examples.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import sys diff --git a/tests/examples/test_notebook_examples.py b/tests/examples/test_notebook_examples.py index 3ca867ad3..9884f5a86 100644 --- a/tests/examples/test_notebook_examples.py +++ b/tests/examples/test_notebook_examples.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from pathlib import Path diff --git a/tests/provider/__init__.py b/tests/provider/__init__.py index 36979cf71..57a368863 100644 --- a/tests/provider/__init__.py +++ b/tests/provider/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
diff --git a/tests/provider/dwd/__init__.py b/tests/provider/dwd/__init__.py index 36979cf71..57a368863 100644 --- a/tests/provider/dwd/__init__.py +++ b/tests/provider/dwd/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/tests/provider/dwd/dmo/test_api.py b/tests/provider/dwd/dmo/test_api.py index 202c2edd5..7f1259666 100644 --- a/tests/provider/dwd/dmo/test_api.py +++ b/tests/provider/dwd/dmo/test_api.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import datetime as dt @@ -17,8 +16,8 @@ def df_files_january(): "date_str": [ "310000", "311200", - ] - } + ], + }, ) @@ -31,8 +30,8 @@ def df_files_two_month(): "010000", "011200", "020000", - ] - } + ], + }, ) @@ -43,8 +42,8 @@ def df_files_end_of_month(): "date_str": [ "310000", "311200", - ] - } + ], + }, ) @@ -118,8 +117,8 @@ def test_add_date_from_filename_too_few_dates(): { "date_str": [ "311200", - ] - } + ], + }, ) with pytest.raises(ValueError): add_date_from_filename(df, dt.datetime(2021, 1, 1, 1, 1, 1)) diff --git a/tests/provider/dwd/mosmix/__init__.py b/tests/provider/dwd/mosmix/__init__.py index 36979cf71..57a368863 100644 --- a/tests/provider/dwd/mosmix/__init__.py +++ b/tests/provider/dwd/mosmix/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/tests/provider/dwd/mosmix/test_api_data.py b/tests/provider/dwd/mosmix/test_api_data.py index 02c23f58d..6de09f89f 100644 --- a/tests/provider/dwd/mosmix/test_api_data.py +++ b/tests/provider/dwd/mosmix/test_api_data.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. 
See LICENSE for more info. import datetime as dt @@ -15,7 +14,9 @@ def test_dwd_mosmix_l(settings_humanize_false): Test some details of a typical MOSMIX-L response. """ request = DwdMosmixRequest( - parameter="large", mosmix_type="large", settings=settings_humanize_false + parameter="large", + mosmix_type="large", + settings=settings_humanize_false, ).filter_by_station_id( station_id=["01001"], ) @@ -158,7 +159,7 @@ def test_dwd_mosmix_l(settings_humanize_false): "wpc61", "wpch1", "wpcd1", - ] + ], ) @@ -167,7 +168,9 @@ def test_dwd_mosmix_l(settings_humanize_false): def test_dwd_mosmix_s(settings_humanize_false): """Test some details of a typical MOSMIX-S response.""" request = DwdMosmixRequest( - parameter="small", mosmix_type="small", settings=settings_humanize_false + parameter="small", + mosmix_type="small", + settings=settings_humanize_false, ).filter_by_station_id( station_id=["01028"], ) @@ -230,7 +233,7 @@ def test_dwd_mosmix_s(settings_humanize_false): "rd50", "rad1h", "sund1", - ] + ], ) @@ -255,7 +258,9 @@ def test_mosmix_l_parameters(settings_humanize_false): Test some details of a MOSMIX-L response when queried for specific parameters. """ request = DwdMosmixRequest( - parameter=["dd", "ww"], mosmix_type="large", settings=settings_humanize_false + parameter=["dd", "ww"], + mosmix_type="large", + settings=settings_humanize_false, ).filter_by_station_id( station_id=("01001", "123"), ) diff --git a/tests/provider/dwd/mosmix/test_api_stations.py b/tests/provider/dwd/mosmix/test_api_stations.py index 8c8e77ad9..200afcaa5 100644 --- a/tests/provider/dwd/mosmix/test_api_stations.py +++ b/tests/provider/dwd/mosmix/test_api_stations.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
""" diff --git a/tests/provider/dwd/observation/__init__.py b/tests/provider/dwd/observation/__init__.py index 36979cf71..57a368863 100644 --- a/tests/provider/dwd/observation/__init__.py +++ b/tests/provider/dwd/observation/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/tests/provider/dwd/observation/test_api_data.py b/tests/provider/dwd/observation/test_api_data.py index 337fdc508..038a27491 100644 --- a/tests/provider/dwd/observation/test_api_data.py +++ b/tests/provider/dwd/observation/test_api_data.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import datetime as dt @@ -199,7 +198,7 @@ def test_dwd_observation_data_result_missing_data(default_settings): ) given_df = request.values.all().df.drop("quality") assert not given_df.filter( - pl.col("date").dt.year().is_in((1933, 1934)) & ~pl.fold(True, lambda acc, s: acc & s.is_null(), pl.all()) + pl.col("date").dt.year().is_in((1933, 1934)) & ~pl.fold(True, lambda acc, s: acc & s.is_null(), pl.all()), ).is_empty() request = DwdObservationRequest( parameter=DwdObservationParameter.HOURLY.TEMPERATURE_AIR_MEAN_200, @@ -250,7 +249,8 @@ def test_dwd_observation_data_result_all_missing_data(default_settings): @pytest.mark.remote def test_dwd_observation_data_result_tabular( - settings_humanize_si_false_wide_shape, dwd_climate_summary_tabular_columns + settings_humanize_si_false_wide_shape, + dwd_climate_summary_tabular_columns, ): """Test for actual values (tabular)""" request = DwdObservationRequest( @@ -343,7 +343,8 @@ def test_dwd_observation_data_result_tabular( @pytest.mark.remote def test_dwd_observation_data_result_tabular_si( - settings_humanize_false_wide_shape, dwd_climate_summary_tabular_columns + settings_humanize_false_wide_shape, + 
dwd_climate_summary_tabular_columns, ): """Test for actual values (tabular) in metric units""" request = DwdObservationRequest( @@ -722,7 +723,7 @@ def test_dwd_observations_urban_values(default_settings): 286.54999999999995, ], "quality": [3.0, 3.0], - } + }, ) assert_frame_equal(given_df, expected_df) @@ -1023,7 +1024,7 @@ def test_tidy_up_data(settings_humanize_false): "txk": [-1.7], "tnk": [-7.9], "tgk": [-11.4], - } + }, ) given_df = request.values._tidy_up_df(df, request.parameter[0][1]) given_df = request.values._organize_df_columns(given_df, "01048", DwdObservationDataset.CLIMATE_SUMMARY) @@ -1065,7 +1066,7 @@ def test_tidy_up_data(settings_humanize_false): -11.4, ], "quality": [10, 10, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3], - } + }, ) assert_frame_equal(given_df, expected_df) @@ -1125,7 +1126,7 @@ def test_dwd_observation_solar_daily(default_settings): @pytest.mark.remote -def test_dwd_observation_solar_hourly(default_settings): +def test_dwd_observation_solar_hourly(): """Test DWD observation solar hourly data""" # Snippet provided by @lasinludwig settings = Settings( @@ -1137,7 +1138,7 @@ def test_dwd_observation_solar_hourly(default_settings): ts_dropna=True, ignore_env=True, ) - latlon_bremen: tuple[float, float] = 53.0980433, 8.7747248 + latlon_bremen = 53.0980433, 8.7747248 # request for radiation request = DwdObservationRequest( parameter="radiation_global", @@ -1206,6 +1207,7 @@ def test_dwd_observation_data_5minute_precipitation_data_tidy(default_settings): resolution=DwdObservationResolution.MINUTE_5, start_date="2023-08-25 00:00", end_date="2023-08-27 00:00", + settings=default_settings, ).filter_by_rank( latlon=(49.853706, 8.66311), rank=1, @@ -1220,6 +1222,7 @@ def test_dwd_observation_data_5minute_precipitation_data_recent(default_settings parameter=["precipitation_height_rocker", "precipitation_height_droplet"], resolution=DwdObservationResolution.MINUTE_5, period=[DwdObservationPeriod.RECENT, DwdObservationPeriod.NOW], + 
settings=default_settings, ).filter_by_rank( latlon=(49.853706, 8.66311), rank=1, @@ -1235,6 +1238,7 @@ def test_dwd_observation_data_1minute_precipitation_data_tidy(default_settings): resolution=DwdObservationResolution.MINUTE_1, start_date="1990-01-01 00:00", end_date="1995-01-01 00:10", + settings=default_settings, ).filter_by_station_id(1048) values = request.values.all().df assert values.get_column("value").sum() == 2681.8 diff --git a/tests/provider/dwd/observation/test_api_metadata.py b/tests/provider/dwd/observation/test_api_metadata.py index 4168cb51d..91f7e3c6f 100644 --- a/tests/provider/dwd/observation/test_api_metadata.py +++ b/tests/provider/dwd/observation/test_api_metadata.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import json @@ -27,7 +26,7 @@ def test_dwd_observation_metadata_discover_parameters(): "si": "kg / m ** 2", }, "precipitation_index": {"origin": "-", "si": "-"}, - } + }, } assert json.dumps(expected) in json.dumps(metadata) @@ -84,7 +83,7 @@ def test_dwd_observation_metadata_describe_fields_kl_daily_german(): resolution=DwdObservationResolution.DAILY, period=DwdObservationPeriod.RECENT, language="de", - )["parameters"].keys() + )["parameters"].keys(), ) == [ "stations_id", "mess_datum", diff --git a/tests/provider/dwd/observation/test_api_stations.py b/tests/provider/dwd/observation/test_api_stations.py index d8b812b3a..0c51cc5bf 100644 --- a/tests/provider/dwd/observation/test_api_stations.py +++ b/tests/provider/dwd/observation/test_api_stations.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import datetime as dt @@ -28,7 +27,7 @@ def expected_df(): "height": [478.0], "name": ["Aach"], "state": ["Baden-Württemberg"], - } + }, ) @@ -49,7 +48,10 @@ def test_dwd_observations_stations_filter(default_settings, expected_df): def test_dwd_observations_urban_stations(default_settings): """Test DWD Observation urban stations""" request = DwdObservationRequest( - parameter="urban_air_temperature", resolution="hourly", period="historical", settings=default_settings + parameter="urban_air_temperature", + resolution="hourly", + period="historical", + settings=default_settings, ).all() assert request.station_id.to_list() == ["00399", "13667", "15811", "15818", "19711"] @@ -115,6 +117,6 @@ def test_dwd_observations_stations_minute_1(default_settings): "height": 202.0, "name": "Aachen", "state": "Nordrhein-Westfalen", - } + }, ) assert_frame_equal(given_df, expected_df) diff --git a/tests/provider/dwd/observation/test_available_datasets.py b/tests/provider/dwd/observation/test_available_datasets.py index 470113318..3b8124eae 100644 --- a/tests/provider/dwd/observation/test_available_datasets.py +++ b/tests/provider/dwd/observation/test_available_datasets.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import polars as pl diff --git a/tests/provider/dwd/observation/test_fileindex.py b/tests/provider/dwd/observation/test_fileindex.py index 321a62ed1..31eba6337 100644 --- a/tests/provider/dwd/observation/test_fileindex.py +++ b/tests/provider/dwd/observation/test_fileindex.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
"""tests for file index creation""" @@ -31,7 +30,7 @@ def test_file_index_creation_success(default_settings): assert not file_index.is_empty() assert file_index.filter(pl.col("station_id").eq("01048")).get_column("filename").to_list() == [ "https://opendata.dwd.de/climate_environment/CDC/observations_germany/" - "climate/daily/kl/recent/tageswerte_KL_01048_akt.zip" + "climate/daily/kl/recent/tageswerte_KL_01048_akt.zip", ] @@ -52,7 +51,10 @@ def test_file_index_creation_precipitation_minute_1(default_settings): def test_file_index_creation_failure(default_settings): with pytest.raises(FileNotFoundError): create_file_index_for_climate_observations( - DwdObservationDataset.CLIMATE_SUMMARY, Resolution.MINUTE_1, Period.HISTORICAL, settings=default_settings + DwdObservationDataset.CLIMATE_SUMMARY, + Resolution.MINUTE_1, + Period.HISTORICAL, + settings=default_settings, ) @@ -67,7 +69,7 @@ def test_create_file_list_for_dwd_server(default_settings): ).to_list() assert remote_file_path == [ "https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/" - "daily/kl/recent/tageswerte_KL_01048_akt.zip" + "daily/kl/recent/tageswerte_KL_01048_akt.zip", ] # with date range remote_file_path = create_file_list_for_climate_observations( @@ -81,5 +83,5 @@ def test_create_file_list_for_dwd_server(default_settings): assert remote_file_path == [ "https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/" "10_minutes/air_temperature/historical/" - "10minutenwerte_TU_00003_19930428_19991231_hist.zip" + "10minutenwerte_TU_00003_19930428_19991231_hist.zip", ] diff --git a/tests/provider/dwd/observation/test_meta_index.py b/tests/provider/dwd/observation/test_meta_index.py index 661ddaae0..080fd77b8 100644 --- a/tests/provider/dwd/observation/test_meta_index.py +++ b/tests/provider/dwd/observation/test_meta_index.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. 
See LICENSE for more info. """tests for file index creation""" @@ -21,7 +20,10 @@ def test_meta_index_creation_success(default_settings): # Existing combination of parameters meta_index = create_meta_index_for_climate_observations( - DwdObservationDataset.CLIMATE_SUMMARY, Resolution.DAILY, Period.HISTORICAL, settings=default_settings + DwdObservationDataset.CLIMATE_SUMMARY, + Resolution.DAILY, + Period.HISTORICAL, + settings=default_settings, ).collect() assert not meta_index.is_empty() @@ -30,14 +32,20 @@ def test_meta_index_creation_success(default_settings): def test_meta_index_creation_failure(default_settings): with pytest.raises(FileNotFoundError): create_meta_index_for_climate_observations( - DwdObservationDataset.CLIMATE_SUMMARY, Resolution.MINUTE_1, Period.HISTORICAL, settings=default_settings + DwdObservationDataset.CLIMATE_SUMMARY, + Resolution.MINUTE_1, + Period.HISTORICAL, + settings=default_settings, ) @pytest.mark.remote def test_meta_index_1mph_creation(default_settings): meta_index_1mph = create_meta_index_for_climate_observations( - DwdObservationDataset.PRECIPITATION, Resolution.MINUTE_1, Period.HISTORICAL, settings=default_settings + DwdObservationDataset.PRECIPITATION, + Resolution.MINUTE_1, + Period.HISTORICAL, + settings=default_settings, ).collect() assert meta_index_1mph.filter(pl.col(Columns.STATION_ID.value).eq("00003")).row(0) == ( ( diff --git a/tests/provider/dwd/observation/test_parameters.py b/tests/provider/dwd/observation/test_parameters.py index 0f1920c22..f051394f8 100644 --- a/tests/provider/dwd/observation/test_parameters.py +++ b/tests/provider/dwd/observation/test_parameters.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import pytest diff --git a/tests/provider/dwd/observation/test_parser.py b/tests/provider/dwd/observation/test_parser.py index 57199b2ac..f3aae9205 100644 --- a/tests/provider/dwd/observation/test_parser.py +++ b/tests/provider/dwd/observation/test_parser.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import datetime as dt @@ -56,7 +55,7 @@ def test_parse_dwd_data(): "txk": ["2.5", "24.8"], "tnk": ["-1.6", "14.4"], "tgk": pl.Series(values=[None, None], dtype=pl.Utf8), - } + }, ) assert_frame_equal( given_df[[0, -1], :], diff --git a/tests/provider/dwd/observation/test_util.py b/tests/provider/dwd/observation/test_util.py index bcbf60662..b66c5c16a 100644 --- a/tests/provider/dwd/observation/test_util.py +++ b/tests/provider/dwd/observation/test_util.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst import Period, Resolution diff --git a/tests/provider/dwd/radar/__init__.py b/tests/provider/dwd/radar/__init__.py index 36979cf71..57a368863 100644 --- a/tests/provider/dwd/radar/__init__.py +++ b/tests/provider/dwd/radar/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/tests/provider/dwd/radar/conftest.py b/tests/provider/dwd/radar/conftest.py index 213caf8be..251d0abe5 100644 --- a/tests/provider/dwd/radar/conftest.py +++ b/tests/provider/dwd/radar/conftest.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import pytest diff --git a/tests/provider/dwd/radar/test_api_current.py b/tests/provider/dwd/radar/test_api_current.py index c872be4f9..e6ae0c28a 100644 --- a/tests/provider/dwd/radar/test_api_current.py +++ b/tests/provider/dwd/radar/test_api_current.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import pytest diff --git a/tests/provider/dwd/radar/test_api_historic.py b/tests/provider/dwd/radar/test_api_historic.py index aae71fcd7..0ede60f3e 100644 --- a/tests/provider/dwd/radar/test_api_historic.py +++ b/tests/provider/dwd/radar/test_api_historic.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import datetime as dt @@ -107,7 +106,7 @@ def test_radar_request_radolan_cdc_historic_hourly_data(default_settings, radar_ "radarid": "10000", "radarlocations": IsList(IsStr(regex="|".join(radar_locations)), length=(10, len(radar_locations))), "radolanversion": "2.21.0", - } + }, ) assert requested_attrs == attrs @@ -160,7 +159,7 @@ def test_radar_request_radolan_cdc_historic_daily_data(default_settings, radar_l "radardays": IsList(IsStr(regex=radardays_pattern), length=(10, len(radar_locations))), "radarlocations": IsList(IsStr(regex=radar_locations_pattern), length=(10, len(radar_locations))), "radolanversion": "2.21.0", - } + }, ) assert requested_attrs == attrs @@ -174,7 +173,9 @@ def test_radar_request_composite_historic_hg_yesterday(prefixed_radar_locations, timestamp = dt.datetime.now(ZoneInfo("UTC")).replace(tzinfo=None) - dt.timedelta(days=1) request = DwdRadarValues( - parameter=DwdRadarParameter.HG_REFLECTIVITY, start_date=timestamp, settings=default_settings + parameter=DwdRadarParameter.HG_REFLECTIVITY, + start_date=timestamp, + settings=default_settings, ) results = list(request.query()) @@ -208,10 +209,11 @@ def 
test_radar_request_composite_historic_hg_yesterday(prefixed_radar_locations, "producttype": "HG", "radarid": "10000", "radarlocations": IsList( - IsStr(regex=prefixed_radar_locations_pattern), length=(10, len(prefixed_radar_locations)) + IsStr(regex=prefixed_radar_locations_pattern), + length=(10, len(prefixed_radar_locations)), ), "radolanversion": IsStr(regex="P4000.H"), - } + }, ) assert requested_attrs == attrs @@ -256,7 +258,9 @@ def test_radar_request_composite_historic_radolan_rw_yesterday(radar_locations, timestamp = dt.datetime.now(ZoneInfo("UTC")).replace(tzinfo=None) - dt.timedelta(days=1) request = DwdRadarValues( - parameter=DwdRadarParameter.RW_REFLECTIVITY, start_date=timestamp, settings=default_settings + parameter=DwdRadarParameter.RW_REFLECTIVITY, + start_date=timestamp, + settings=default_settings, ) results = list(request.query()) @@ -287,7 +291,7 @@ def test_radar_request_composite_historic_radolan_rw_yesterday(radar_locations, "radarid": "10000", "radarlocations": IsList(IsStr(regex="|".join(radar_locations)), length=(10, len(radar_locations))), "radolanversion": "2.29.1", - } + }, ) assert requested_attrs == attrs @@ -336,7 +340,7 @@ def test_radar_request_composite_historic_radolan_rw_timerange(radar_locations, "radarid": "10000", "radarlocations": IsList(IsStr(regex=radarlocations_pattern), length=(10, len(radar_locations))), "radolanversion": "2.29.1", - } + }, ) assert requested_attrs == attrs @@ -381,7 +385,7 @@ def test_radar_request_site_historic_dx_yesterday(default_settings): "radarid": "10132", "statfilter": 0, "version": " 2", - } + }, ) assert requested_attrs == attrs @@ -430,7 +434,7 @@ def test_radar_request_site_historic_dx_timerange(default_settings): "radarid": "10132", "statfilter": 0, "version": " 2", - } + }, ) assert requested_attrs == attrs @@ -808,7 +812,9 @@ def test_radar_request_radvor_re_yesterday(prefixed_radar_locations, default_set timestamp = dt.datetime.now(ZoneInfo("UTC")).replace(tzinfo=None) - 
dt.timedelta(days=1) request = DwdRadarValues( - parameter=DwdRadarParameter.RE_REFLECTIVITY, start_date=timestamp, settings=default_settings + parameter=DwdRadarParameter.RE_REFLECTIVITY, + start_date=timestamp, + settings=default_settings, ) results = list(request.query()) @@ -840,10 +846,11 @@ def test_radar_request_radvor_re_yesterday(prefixed_radar_locations, default_set "quantification": 16, "radarid": "10000", "radarlocations": IsList( - IsStr(regex="|".join(prefixed_radar_locations)), length=(10, len(prefixed_radar_locations)) + IsStr(regex="|".join(prefixed_radar_locations)), + length=(10, len(prefixed_radar_locations)), ), "radolanversion": IsStr(regex="P4000.H"), - } + }, ) assert requested_attrs == attrs, str(requested_attrs) @@ -901,7 +908,9 @@ def test_radar_request_radvor_rq_yesterday(radar_locations, default_settings): timestamp = dt.datetime.now(ZoneInfo("UTC")).replace(tzinfo=None) - dt.timedelta(days=1) request = DwdRadarValues( - parameter=DwdRadarParameter.RQ_REFLECTIVITY, start_date=timestamp, settings=default_settings + parameter=DwdRadarParameter.RQ_REFLECTIVITY, + start_date=timestamp, + settings=default_settings, ) results = list(request.query()) @@ -934,7 +943,7 @@ def test_radar_request_radvor_rq_yesterday(radar_locations, default_settings): "radarid": "10000", "radarlocations": IsList(IsStr(regex="|".join(radar_locations)), length=(10, len(radar_locations))), "radolanversion": "2.29.1", - } + }, ) assert requested_attrs == attrs @@ -987,7 +996,7 @@ def test_radar_request_radvor_rq_timerange(radar_locations, default_settings): "radarid": "10000", "radarlocations": IsList(IsStr(regex="|".join(radar_locations)), length=(10, len(radar_locations))), "radolanversion": "2.29.1", - } + }, ) assert requested_attrs == attrs diff --git a/tests/provider/dwd/radar/test_api_invalid.py b/tests/provider/dwd/radar/test_api_invalid.py index 84743865f..bfbfe28a5 100644 --- a/tests/provider/dwd/radar/test_api_invalid.py +++ 
b/tests/provider/dwd/radar/test_api_invalid.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import datetime as dt @@ -96,7 +95,9 @@ def test_radar_request_site_without_site(default_settings): """ with pytest.raises(ValueError) as exec_info: request = DwdRadarValues( - parameter=DwdRadarParameter.SWEEP_PCP_VELOCITY_H, start_date=DwdRadarDate.LATEST, settings=default_settings + parameter=DwdRadarParameter.SWEEP_PCP_VELOCITY_H, + start_date=DwdRadarDate.LATEST, + settings=default_settings, ) list(request.query()) assert exec_info.match("Argument 'site' is missing") diff --git a/tests/provider/dwd/radar/test_api_latest.py b/tests/provider/dwd/radar/test_api_latest.py index 7cacd4acc..1cf2a4147 100644 --- a/tests/provider/dwd/radar/test_api_latest.py +++ b/tests/provider/dwd/radar/test_api_latest.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import datetime as dt @@ -20,7 +19,9 @@ def test_radar_request_composite_latest_rv_reflectivity(default_settings, statio """ request = DwdRadarValues( - parameter=DwdRadarParameter.RV_REFLECTIVITY, start_date=DwdRadarDate.LATEST, settings=default_settings + parameter=DwdRadarParameter.RV_REFLECTIVITY, + start_date=DwdRadarDate.LATEST, + settings=default_settings, ) buffer = next(request.query())[1] @@ -43,7 +44,9 @@ def test_radar_request_composite_latest_rw_reflectivity(default_settings, radar_ wrl = pytest.importorskip("wradlib", reason="wradlib not installed") request = DwdRadarValues( - parameter=DwdRadarParameter.RW_REFLECTIVITY, start_date=DwdRadarDate.LATEST, settings=default_settings + parameter=DwdRadarParameter.RW_REFLECTIVITY, + start_date=DwdRadarDate.LATEST, + settings=default_settings, ) results = list(request.query()) @@ -60,7 +63,8 @@ def test_radar_request_composite_latest_rw_reflectivity(default_settings, radar_ { "datasize": 1620000, "datetime": IsDatetime( - approx=dt.datetime.now(ZoneInfo("UTC")).replace(tzinfo=None), delta=dt.timedelta(minutes=90) + approx=dt.datetime.now(ZoneInfo("UTC")).replace(tzinfo=None), + delta=dt.timedelta(minutes=90), ), "formatversion": 3, "intervalseconds": 3600, @@ -73,7 +77,7 @@ def test_radar_request_composite_latest_rw_reflectivity(default_settings, radar_ "radarid": "10000", "radarlocations": IsList(IsStr(regex="|".join(radar_locations)), length=(10, len(radar_locations))), "radolanversion": "2.29.1", - } + }, ) assert requested_attrs == attrs @@ -105,7 +109,8 @@ def test_radar_request_site_latest_dx_reflectivity(default_settings): "bytes": IsInt(gt=0), "cluttermap": 0, "datetime": IsDatetime( - approx=dt.datetime.now(ZoneInfo("UTC")).replace(tzinfo=None), delta=dt.timedelta(minutes=65) + approx=dt.datetime.now(ZoneInfo("UTC")).replace(tzinfo=None), + delta=dt.timedelta(minutes=65), ), "dopplerfilter": 4, "elevprofile": IsList(IsNumeric(ge=0.8, le=0.9), length=8), @@ -114,7 +119,7 @@ def 
test_radar_request_site_latest_dx_reflectivity(default_settings): "radarid": "10132", "statfilter": 0, "version": " 2", - } + }, ) assert requested_attrs == attrs diff --git a/tests/provider/dwd/radar/test_api_most_recent.py b/tests/provider/dwd/radar/test_api_most_recent.py index 488330525..6da364eac 100644 --- a/tests/provider/dwd/radar/test_api_most_recent.py +++ b/tests/provider/dwd/radar/test_api_most_recent.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import pytest @@ -159,7 +158,7 @@ def test_radar_request_radolan_cdc_most_recent(default_settings, radar_locations "radarid": "10000", "radarlocations": IsList(IsStr(regex="|".join(radar_locations)), length=(10, len(radar_locations))), "radolanversion": "2.29.1", - } + }, ) assert requested_attrs == attrs diff --git a/tests/provider/dwd/radar/test_api_recent.py b/tests/provider/dwd/radar/test_api_recent.py index c5089ab96..8ed3a3241 100644 --- a/tests/provider/dwd/radar/test_api_recent.py +++ b/tests/provider/dwd/radar/test_api_recent.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import datetime as dt diff --git a/tests/provider/dwd/radar/test_index.py b/tests/provider/dwd/radar/test_index.py index 5d12a3f2d..73335aa73 100644 --- a/tests/provider/dwd/radar/test_index.py +++ b/tests/provider/dwd/radar/test_index.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
from pathlib import PurePath @@ -18,7 +17,9 @@ @pytest.mark.remote def test_radar_fileindex_composite_pg_reflectivity_bin(default_settings): file_index = create_fileindex_radar( - parameter=DwdRadarParameter.PG_REFLECTIVITY, fmt=DwdRadarDataFormat.BINARY, settings=default_settings + parameter=DwdRadarParameter.PG_REFLECTIVITY, + fmt=DwdRadarDataFormat.BINARY, + settings=default_settings, ) assert not file_index.is_empty() urls = file_index.get_column("filename").to_list() @@ -28,7 +29,9 @@ def test_radar_fileindex_composite_pg_reflectivity_bin(default_settings): @pytest.mark.remote def test_radar_fileindex_composite_pg_reflectivity_bufr(default_settings): file_index = create_fileindex_radar( - parameter=DwdRadarParameter.PG_REFLECTIVITY, fmt=DwdRadarDataFormat.BUFR, settings=default_settings + parameter=DwdRadarParameter.PG_REFLECTIVITY, + fmt=DwdRadarDataFormat.BUFR, + settings=default_settings, ) assert not file_index.is_empty() urls = file_index.get_column("filename").to_list() @@ -92,7 +95,9 @@ def test_radar_fileindex_sites_px_reflectivity_bufr(default_settings): @pytest.mark.remote def test_radar_fileindex_sites_px250_reflectivity_bufr(default_settings): file_index = create_fileindex_radar( - parameter=DwdRadarParameter.PX250_REFLECTIVITY, site=DwdRadarSite.BOO, settings=default_settings + parameter=DwdRadarParameter.PX250_REFLECTIVITY, + site=DwdRadarSite.BOO, + settings=default_settings, ) assert not file_index.is_empty() urls = file_index.get_column("filename").to_list() @@ -206,7 +211,7 @@ def test_radar_fileindex_radolan_cdc_5minutes(default_settings): urls = file_index.get_column("filename").to_list() assert all( PurePath(url).match( - "*/climate_environment/CDC/grids_germany/5_minutes/radolan/reproc/2017_002/bin/*/YW2017*.tar" + "*/climate_environment/CDC/grids_germany/5_minutes/radolan/reproc/2017_002/bin/*/YW2017*.tar", ) for url in urls if not url.endswith(".tar.gz") diff --git a/tests/provider/dwd/radar/test_sites.py 
b/tests/provider/dwd/radar/test_sites.py index 8afccd124..d48dc8d7f 100644 --- a/tests/provider/dwd/radar/test_sites.py +++ b/tests/provider/dwd/radar/test_sites.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.provider.dwd.radar.api import DwdRadarSites diff --git a/tests/provider/dwd/radar/test_util.py b/tests/provider/dwd/radar/test_util.py index 8d05c8f3d..848072b13 100644 --- a/tests/provider/dwd/radar/test_util.py +++ b/tests/provider/dwd/radar/test_util.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import datetime @@ -13,7 +12,9 @@ def test_radar_get_date_from_filename(): date = get_date_from_filename( - "sweep_pcp_v_0-20200926143033_10132--buf.bz2", pattern=RADAR_DT_PATTERN, formats=[DatetimeFormat.YMDHM.value] + "sweep_pcp_v_0-20200926143033_10132--buf.bz2", + pattern=RADAR_DT_PATTERN, + formats=[DatetimeFormat.YMDHM.value], ) assert date == datetime.datetime(2020, 9, 26, 14, 30) @@ -32,12 +33,16 @@ def test_radar_get_date_from_filename(): assert date == datetime.datetime(2020, 9, 26, 14, 30) date = get_date_from_filename( - "rab02-tt_10132-20200926161533-boo---buf", pattern=RADAR_DT_PATTERN, formats=[DatetimeFormat.YMDHM.value] + "rab02-tt_10132-20200926161533-boo---buf", + pattern=RADAR_DT_PATTERN, + formats=[DatetimeFormat.YMDHM.value], ) assert date == datetime.datetime(2020, 9, 26, 16, 15) date = get_date_from_filename( - "rab02-tt_10132-2301010000-boo---buf", pattern=RADAR_DT_PATTERN, formats=[DatetimeFormat.ymdhm.value] + "rab02-tt_10132-2301010000-boo---buf", + pattern=RADAR_DT_PATTERN, + formats=[DatetimeFormat.ymdhm.value], ) assert date == datetime.datetime(2023, 1, 1, 0, 0) diff --git a/tests/provider/dwd/test_date.py b/tests/provider/dwd/test_date.py index d571f70ea..654b36fcb 100644 --- 
a/tests/provider/dwd/test_date.py +++ b/tests/provider/dwd/test_date.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import datetime as dt diff --git a/tests/provider/dwd/test_index.py b/tests/provider/dwd/test_index.py index d1fb0a799..db9287f23 100644 --- a/tests/provider/dwd/test_index.py +++ b/tests/provider/dwd/test_index.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import pytest diff --git a/tests/provider/eccc/__init__.py b/tests/provider/eccc/__init__.py index 36979cf71..57a368863 100644 --- a/tests/provider/eccc/__init__.py +++ b/tests/provider/eccc/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/tests/provider/eccc/test_api.py b/tests/provider/eccc/test_api.py index b4e6f197e..674ba58c3 100644 --- a/tests/provider/eccc/test_api.py +++ b/tests/provider/eccc/test_api.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import datetime as dt @@ -32,7 +31,7 @@ def test_eccc_api_stations(settings_si_false): "height": [4.0], "name": ["ACTIVE PASS"], "state": ["BRITISH COLUMBIA"], - } + }, ) assert_frame_equal(given_df, expected_df) diff --git a/tests/provider/eumetnet/opera/test_sites.py b/tests/provider/eumetnet/opera/test_sites.py index 1cfe62048..23fdf4018 100644 --- a/tests/provider/eumetnet/opera/test_sites.py +++ b/tests/provider/eumetnet/opera/test_sites.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import pytest diff --git a/tests/provider/imgw/meteorology/test_api.py b/tests/provider/imgw/meteorology/test_api.py index 3b3365a83..9ca8c4442 100644 --- a/tests/provider/imgw/meteorology/test_api.py +++ b/tests/provider/imgw/meteorology/test_api.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import datetime as dt diff --git a/tests/provider/noaa/__init__.py b/tests/provider/noaa/__init__.py index 81cd921b5..c79af6a95 100644 --- a/tests/provider/noaa/__init__.py +++ b/tests/provider/noaa/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/tests/provider/noaa/ghcn/__init__.py b/tests/provider/noaa/ghcn/__init__.py index 81cd921b5..c79af6a95 100644 --- a/tests/provider/noaa/ghcn/__init__.py +++ b/tests/provider/noaa/ghcn/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/tests/provider/noaa/ghcn/test_api_stations.py b/tests/provider/noaa/ghcn/test_api_stations.py index 60aba76d9..8e35d91bf 100644 --- a/tests/provider/noaa/ghcn/test_api_stations.py +++ b/tests/provider/noaa/ghcn/test_api_stations.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import datetime as dt diff --git a/tests/test_api.py b/tests/test_api.py index 987d9984d..db3cb290f 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import pytest @@ -33,7 +32,11 @@ ), # Environment and Climate Change Canada pytest.param( - "eccc", "observation", {"parameter": "daily", "resolution": "daily"}, None, marks=pytest.mark.xfail + "eccc", + "observation", + {"parameter": "daily", "resolution": "daily"}, + None, + marks=pytest.mark.xfail, ), # noqa: E800, ERA001 # IMGW Hydrology ("imgw", "hydrology", {"parameter": "hydrology", "resolution": "daily"}, None), @@ -46,7 +49,11 @@ ("wsv", "pegel", {"parameter": "stage"}, None), # EA Hydrology pytest.param( - "ea", "hydrology", {"parameter": "discharge", "resolution": "daily"}, None, marks=pytest.mark.xfail + "ea", + "hydrology", + {"parameter": "discharge", "resolution": "daily"}, + None, + marks=pytest.mark.xfail, ), # NWS Observation ("nws", "observation", {"parameter": "temperature_air_mean_200"}, "KBHM"), @@ -88,7 +95,7 @@ def test_api(provider, network, kwargs, si_units, station_id): "height", "name", "state", - } + }, ) # Check that there are actually stations_result assert not stations.is_empty() diff --git a/tests/test_docs.py b/tests/test_docs.py index 34e907dc7..089d142f4 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import doctest @@ -50,7 +49,8 @@ def _check_startswith(name, startswith): provider_coverage = Path(COVERAGE / f"{provider.name}.rst").read_text() for network in Path(PROVIDER / provider.name).glob("*"): if _check_startswith( - network.name, EXCLUDE_PROVIDER_NETWORKS_STARTSWITH + network.name, + EXCLUDE_PROVIDER_NETWORKS_STARTSWITH, ) or network.name in EXCLUDE_PROVIDER_NETWORKS.get(provider.name, []): continue assert f"{provider.name}/{network.name}" in provider_coverage diff --git a/tests/test_settings.py b/tests/test_settings.py index 22c45f396..354b13324 100644 --- a/tests/test_settings.py +++ b/tests/test_settings.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import inspect @@ -90,7 +89,9 @@ def test_settings_mixed(caplog): os.environ["WD_TS_INTERPOLATION_STATION_DISTANCE"] = "precipitation_height=40.0,other=42" caplog.set_level(logging.INFO) settings = Settings( - ts_skip_threshold=0.81, ts_si_units=False, ts_interpolation_station_distance={"just_another": 43} + ts_skip_threshold=0.81, + ts_si_units=False, + ts_interpolation_station_distance={"just_another": 43}, ) log_message = caplog.messages[0] assert settings.cache_disable diff --git a/tests/ui/cli/test_cli.py b/tests/ui/cli/test_cli.py index d5a63dfd9..d4fd58125 100644 --- a/tests/ui/cli/test_cli.py +++ b/tests/ui/cli/test_cli.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import json @@ -54,7 +53,7 @@ def test_cli_about_resolutions(): assert "annual" in result.output -def test_cli_about_coverage(capsys): +def test_cli_about_coverage(): runner = CliRunner() result = runner.invoke(cli, "about coverage --provider=dwd --network=observation") assert "minute_1" in result.output @@ -73,7 +72,8 @@ def test_no_provider(): def test_no_network(caplog): runner = CliRunner() runner.invoke( - cli, "stations --provider=dwd --network=abc --parameter=precipitation_height --resolution=daily --all" + cli, + "stations --provider=dwd --network=abc --parameter=precipitation_height --resolution=daily --all", ) assert "No API available for provider DWD and network abc" in caplog.text @@ -269,7 +269,7 @@ def test_cli_interpolate_geojson(): "taken_station_ids": ["00071"], }, ], - } + }, ], } @@ -367,7 +367,7 @@ def test_cli_summarize_geojson(): "taken_station_id": "00071", }, ], - } + }, ], } diff --git a/tests/ui/cli/test_cli_stations.py b/tests/ui/cli/test_cli_stations.py index f821bc85c..42db68723 100644 --- a/tests/ui/cli/test_cli_stations.py +++ b/tests/ui/cli/test_cli_stations.py @@ -111,9 +111,20 @@ def invoke_wetterdienst_stations_filter_by_rank(provider, network, setting, fmt= "provider,network,setting,station_id,expected_dict,coordinates", SETTINGS_STATIONS, ) -def test_cli_stations_json(provider, network, setting, station_id, expected_dict, coordinates): +def test_cli_stations_json( + provider, + network, + setting, + station_id, + expected_dict, # noqa: ARG001 + coordinates, # noqa: ARG001 +): result = invoke_wetterdienst_stations_static( - provider=provider, network=network, setting=setting, station=station_id, fmt="json" + provider=provider, + network=network, + setting=setting, + station=station_id, + fmt="json", ) response = json.loads(result.output) assert response.keys() == {"stations"} @@ -140,7 +151,15 @@ def test_cli_stations_json_with_metadata(metadata): @pytest.mark.remote 
@pytest.mark.parametrize("provider,network,setting,station_id,expected_dict,coordinates", SETTINGS_STATIONS) -def test_cli_stations_empty(provider, network, setting, station_id, expected_dict, coordinates, caplog): +def test_cli_stations_empty( + provider, + network, + setting, + station_id, # noqa: ARG001 + expected_dict, # noqa: ARG001 + coordinates, # noqa: ARG001 + caplog, +): result = invoke_wetterdienst_stations_empty(provider=provider, network=network, setting=setting, fmt="json") assert isinstance(result.exception, SystemExit) assert "ERROR" in caplog.text @@ -151,7 +170,11 @@ def test_cli_stations_empty(provider, network, setting, station_id, expected_dic @pytest.mark.parametrize("provider,network,setting,station_id,expected_dict,coordinates", SETTINGS_STATIONS) def test_cli_stations_geojson(provider, network, setting, station_id, expected_dict, coordinates): result = invoke_wetterdienst_stations_static( - provider=provider, network=network, setting=setting, station=station_id, fmt="geojson" + provider=provider, + network=network, + setting=setting, + station=station_id, + fmt="geojson", ) response = json.loads(result.output) assert response.keys() == {"data"} @@ -184,9 +207,20 @@ def test_cli_stations_geojson_with_metadata(metadata): "provider,network,setting,station_id,expected_dict,coordinates", SETTINGS_STATIONS, ) -def test_cli_stations_csv(provider, network, setting, station_id, expected_dict, coordinates): +def test_cli_stations_csv( + provider, + network, + setting, + station_id, + expected_dict, + coordinates, # noqa: ARG001 +): result = invoke_wetterdienst_stations_static( - provider=provider, network=network, setting=setting, station=station_id, fmt="csv" + provider=provider, + network=network, + setting=setting, + station=station_id, + fmt="csv", ) assert expected_dict["name"] in result.output @@ -196,7 +230,15 @@ def test_cli_stations_csv(provider, network, setting, station_id, expected_dict, 
"provider,network,setting,station_id,expected_dict,coordinates", SETTINGS_STATIONS, ) -def test_cli_stations_excel(provider, network, setting, station_id, expected_dict, coordinates, tmp_path): +def test_cli_stations_excel( + provider, + network, + setting, + station_id, + expected_dict, + coordinates, # noqa: ARG001 + tmp_path, +): filename = Path("stations.xlsx") if not IS_WINDOWS: filename = tmp_path.joinpath(filename) @@ -219,9 +261,19 @@ def test_cli_stations_excel(provider, network, setting, station_id, expected_dic "provider,network,setting,station_id,expected_dict,coordinates", SETTINGS_STATIONS, ) -def test_cli_stations_geospatial(provider, network, setting, station_id, expected_dict, coordinates): +def test_cli_stations_geospatial( + provider, + network, + setting, + station_id, + expected_dict, + coordinates, # noqa: ARG001 +): result = invoke_wetterdienst_stations_filter_by_rank( - provider=provider, network=network, setting=setting, fmt="json" + provider=provider, + network=network, + setting=setting, + fmt="json", ) response = json.loads(result.output) station = [item for item in response["stations"] if item["station_id"] == station_id][0] diff --git a/tests/ui/cli/test_cli_values.py b/tests/ui/cli/test_cli_values.py index af4b87ef5..639e6c755 100644 --- a/tests/ui/cli/test_cli_values.py +++ b/tests/ui/cli/test_cli_values.py @@ -90,7 +90,11 @@ def invoke_wetterdienst_values_filter_by_rank(provider, network, setting, fmt="j def test_cli_values_json_wide(setting): provider, network, setting, station_id, station_name = setting result = invoke_wetterdienst_values_static_wide( - provider=provider, network=network, setting=setting, station=station_id, fmt="json" + provider=provider, + network=network, + setting=setting, + station=station_id, + fmt="json", ) response = json.loads(result.stdout) station_ids = {reading["station_id"] for reading in response["values"]} @@ -137,9 +141,19 @@ def test_cli_values_json_multiple_datasets(): @pytest.mark.remote 
@pytest.mark.parametrize("provider,network,setting,station_id,station_name", SETTINGS_VALUES) -def test_cli_values_json(provider, network, setting, station_id, station_name): +def test_cli_values_json( + provider, + network, + setting, + station_id, + station_name, # noqa: ARG001 +): result = invoke_wetterdienst_values_static( - provider=provider, network=network, setting=setting, station=station_id, fmt="json" + provider=provider, + network=network, + setting=setting, + station=station_id, + fmt="json", ) response = json.loads(result.output) first = response["values"][0] @@ -151,7 +165,7 @@ def test_cli_values_json(provider, network, setting, station_id, station_name): "parameter", "value", "quality", - } + }, ) @@ -246,7 +260,7 @@ def test_cli_values_geojson(): }, "geometry": {"type": "Point", "coordinates": [13.7543, 51.1278, 228.0]}, "values": IsInstance(list), - } + }, ], } @@ -299,9 +313,19 @@ def test_cli_values_geojson_pretty_true(json_dumps_mock): "provider,network,setting,station_id,station_name", SETTINGS_VALUES, ) -def test_cli_values_csv(provider, network, setting, station_id, station_name): +def test_cli_values_csv( + provider, + network, + setting, + station_id, + station_name, # noqa: ARG001 +): result = invoke_wetterdienst_values_static_wide( - provider=provider, network=network, setting=setting, station=station_id, fmt="csv" + provider=provider, + network=network, + setting=setting, + station=station_id, + fmt="csv", ) assert station_id in result.output @@ -311,7 +335,14 @@ def test_cli_values_csv(provider, network, setting, station_id, station_name): "provider,network,setting,station_id,station_name", SETTINGS_VALUES, ) -def test_cli_values_excel(provider, network, setting, station_id, station_name, tmp_path): +def test_cli_values_excel( + provider, + network, + setting, + station_id, + station_name, # noqa: ARG001 + tmp_path, +): filename = Path("values.xlsx") if not IS_WINDOWS: filename = tmp_path.joinpath(filename) @@ -333,9 +364,19 @@ def 
test_cli_values_excel(provider, network, setting, station_id, station_name, "provider,network,setting,station_id,station_name", SETTINGS_VALUES, ) -def test_cli_values_format_unknown(provider, network, setting, station_id, station_name): +def test_cli_values_format_unknown( + provider, + network, + setting, + station_id, + station_name, # noqa: ARG001 +): result = invoke_wetterdienst_values_static_wide( - provider=provider, network=network, setting=setting, station=station_id, fmt="foobar" + provider=provider, + network=network, + setting=setting, + station=station_id, + fmt="foobar", ) assert "Error: Invalid value for '--format': 'foobar' is not one of 'json', 'geojson', 'csv'" in result.output @@ -345,7 +386,13 @@ def test_cli_values_format_unknown(provider, network, setting, station_id, stati "provider,network,setting,station_id,station_name", SETTINGS_VALUES, ) -def test_cli_values_filter_by_rank(provider, network, setting, station_id, station_name): +def test_cli_values_filter_by_rank( + provider, + network, + setting, + station_id, + station_name, # noqa: ARG001 +): result = invoke_wetterdienst_values_filter_by_rank(provider=provider, network=network, setting=setting, fmt="json") response = json.loads(result.output) station_ids = {reading["station_id"] for reading in response["values"]} diff --git a/tests/ui/explorer/conftest.py b/tests/ui/explorer/conftest.py index de95144fb..be6503a1c 100644 --- a/tests/ui/explorer/conftest.py +++ b/tests/ui/explorer/conftest.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import pytest @@ -42,7 +41,7 @@ def wait_for_element_by_id_clickable(self, element_id, timeout=None): EC.element_to_be_clickable, ((By.ID, element_id),), timeout, - "timeout {}s => waiting for element id {}".format(timeout if timeout else self._wait_timeout, element_id), + f"timeout {timeout if timeout else self._wait_timeout}s => waiting for element id {element_id}", ) diff --git a/tests/ui/explorer/test_explorer.py b/tests/ui/explorer/test_explorer.py index 8b68c6c16..b18ecfdf4 100644 --- a/tests/ui/explorer/test_explorer.py +++ b/tests/ui/explorer/test_explorer.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. """ @@ -20,18 +19,24 @@ import json import platform import time +from typing import TYPE_CHECKING import pytest -from bs4 import BeautifulSoup from tests.conftest import IS_CI +if TYPE_CHECKING: + from bs4 import BeautifulSoup + @pytest.mark.skipif(platform.system() == "Darwin" and platform.machine() == "arm64", reason="problem with geckodriver") @pytest.mark.slow @pytest.mark.cflake @pytest.mark.explorer -def test_app_layout(wetterdienst_ui, dash_tre): +def test_app_layout( + wetterdienst_ui, # noqa: ARG001 + dash_tre, +): # Sanity check if we are on the right page. assert dash_tre.find_element("h1").text == "Wetterdienst Explorer" # Roughly verify the application elements. @@ -44,7 +49,10 @@ def test_app_layout(wetterdienst_ui, dash_tre): @pytest.mark.slow @pytest.mark.cflake @pytest.mark.explorer -def test_app_data_stations_success(wetterdienst_ui, dash_tre): +def test_app_data_stations_success( + wetterdienst_ui, # noqa: ARG001 + dash_tre, +): """ Verify if data for "stations_result" has been correctly propagated. 
""" @@ -97,7 +105,10 @@ def test_app_data_stations_success(wetterdienst_ui, dash_tre): @pytest.mark.slow @pytest.mark.cflake @pytest.mark.explorer -def test_app_data_stations_failed(wetterdienst_ui, dash_tre): +def test_app_data_stations_failed( + wetterdienst_ui, # noqa: ARG001 + dash_tre, +): """ Verify if data for "stations_result" has been correctly propagated. """ @@ -136,7 +147,10 @@ def test_app_data_stations_failed(wetterdienst_ui, dash_tre): @pytest.mark.slow @pytest.mark.cflake @pytest.mark.explorer -def test_options_reset(wetterdienst_ui, dash_tre): +def test_options_reset( + wetterdienst_ui, # noqa: ARG001 + dash_tre, +): """ Verify if data for "stations_result" has been correctly propagated. """ @@ -181,7 +195,10 @@ def test_options_reset(wetterdienst_ui, dash_tre): @pytest.mark.slow @pytest.mark.cflake @pytest.mark.explorer -def test_app_data_values(wetterdienst_ui, dash_tre): +def test_app_data_values( + wetterdienst_ui, # noqa: ARG001 + dash_tre, +): """ Verify if data for "values" has been correctly propagated. """ @@ -233,7 +250,11 @@ def test_app_data_values(wetterdienst_ui, dash_tre): @pytest.mark.slow @pytest.mark.cflake @pytest.mark.explorer -def test_dwd_mosmix_options(wetterdienst_ui, dash_tre, is_ci): +def test_dwd_mosmix_options( + wetterdienst_ui, # noqa: ARG001 + dash_tre, + is_ci, +): """ Verify if data for "values" has been correctly propagated. """ diff --git a/tests/ui/test_core.py b/tests/ui/test_core.py index 03df9abfb..41120a4d6 100644 --- a/tests/ui/test_core.py +++ b/tests/ui/test_core.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
from wetterdienst.ui.core import unpack_parameters diff --git a/tests/ui/test_restapi.py b/tests/ui/test_restapi.py index 04f57043f..a34d641ce 100644 --- a/tests/ui/test_restapi.py +++ b/tests/ui/test_restapi.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import pytest diff --git a/tests/util/__init__.py b/tests/util/__init__.py index 36979cf71..57a368863 100644 --- a/tests/util/__init__.py +++ b/tests/util/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/tests/util/test_datetime.py b/tests/util/test_datetime.py index ee6650905..58164d961 100644 --- a/tests/util/test_datetime.py +++ b/tests/util/test_datetime.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import datetime as dt diff --git a/tests/util/test_geo.py b/tests/util/test_geo.py index f2b9b1d08..f48ea8757 100644 --- a/tests/util/test_geo.py +++ b/tests/util/test_geo.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import numpy as np @@ -23,7 +22,7 @@ def test_get_coordinates_in_radians(): [0.0349066, 0.1047198], [0.0523599, 0.122173], [0.0698132, 0.1396263], - ] + ], ), ) @@ -51,7 +50,7 @@ def test_derive_nearest_neighbours(): "station_id": [4371, 4373, 4411, 13904, 13965, 15207], "latitude": [52.1042, 52.8568, 49.9195, 55.0, 48.2639, 51.2835], "longitude": [8.7521, 11.1319, 8.9671, 6.3333, 8.8134, 9.359], - } + }, ) distances, indices_nearest_neighbours = derive_nearest_neighbours( latitudes=metadata.get_column("latitude"), diff --git a/tests/util/test_network.py b/tests/util/test_network.py index 1278e7e53..42b0b42dd 100644 --- a/tests/util/test_network.py +++ b/tests/util/test_network.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.settings import Settings diff --git a/tests/util/test_url.py b/tests/util/test_url.py index eced0a93e..92135dedd 100644 --- a/tests/util/test_url.py +++ b/tests/util/test_url.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.util.url import ConnectionString diff --git a/wetterdienst/__init__.py b/wetterdienst/__init__.py index 3bf874de2..15e2848a8 100644 --- a/wetterdienst/__init__.py +++ b/wetterdienst/__init__.py @@ -1,5 +1,4 @@ # """Wetterdienst - Open weather data for humans""" -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from dataclasses import asdict, dataclass diff --git a/wetterdienst/api.py b/wetterdienst/api.py index 5d369c28d..a51e27495 100644 --- a/wetterdienst/api.py +++ b/wetterdienst/api.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
from wetterdienst.exceptions import InvalidEnumerationError, ProviderNotFoundError diff --git a/wetterdienst/boot.py b/wetterdienst/boot.py index f08e26b53..295ad6d94 100644 --- a/wetterdienst/boot.py +++ b/wetterdienst/boot.py @@ -1,5 +1,4 @@ # """Wetterdienst - Open weather data for humans""" -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/core/__init__.py b/wetterdienst/core/__init__.py index 36979cf71..57a368863 100644 --- a/wetterdienst/core/__init__.py +++ b/wetterdienst/core/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/core/core.py b/wetterdienst/core/core.py index caf2cfef6..de4804daf 100644 --- a/wetterdienst/core/core.py +++ b/wetterdienst/core/core.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import datetime as dt diff --git a/wetterdienst/core/process.py b/wetterdienst/core/process.py index 194fed1ab..89b6a77ba 100644 --- a/wetterdienst/core/process.py +++ b/wetterdienst/core/process.py @@ -1,8 +1,8 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
-import datetime as dt -from typing import Optional, Tuple +from __future__ import annotations + +from typing import TYPE_CHECKING import polars as pl @@ -11,6 +11,9 @@ from wetterdienst.metadata.resolution import Resolution from wetterdienst.util.datetime import mktimerange, parse_date +if TYPE_CHECKING: + import datetime as dt + try: from backports.datetime_fromisoformat import MonkeyPatch except ImportError: @@ -19,7 +22,7 @@ MonkeyPatch.patch_fromisoformat() -def create_date_range(date: str, resolution: Resolution) -> Tuple[Optional[dt.datetime], Optional[dt.datetime]]: +def create_date_range(date: str, resolution: Resolution) -> tuple[dt.datetime | None, dt.datetime | None]: if "/" in date: if date.count("/") >= 2: raise InvalidTimeIntervalError("Invalid ISO 8601 time interval") diff --git a/wetterdienst/core/timeseries/__init__.py b/wetterdienst/core/timeseries/__init__.py index 36979cf71..57a368863 100644 --- a/wetterdienst/core/timeseries/__init__.py +++ b/wetterdienst/core/timeseries/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/core/timeseries/export.py b/wetterdienst/core/timeseries/export.py index 7fd6ab458..7e0a81f19 100644 --- a/wetterdienst/core/timeseries/export.py +++ b/wetterdienst/core/timeseries/export.py @@ -1,11 +1,11 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
+from __future__ import annotations + import json import logging from abc import abstractmethod from dataclasses import dataclass -from typing import Optional, Union from urllib.parse import urlunparse import polars as pl @@ -43,7 +43,7 @@ def to_json(self, *args, **kwargs) -> str: def to_ogc_feature_collection(self, *args, **kwargs) -> dict: pass - def to_geojson(self, with_metadata: bool = False, indent: Optional[Union[int, bool]] = 4, **_kwargs) -> str: + def to_geojson(self, with_metadata: bool = False, indent: int | bool | None = 4, **_kwargs) -> str: """ Convert station information into GeoJSON format :param with_metadata: Include metadata in GeoJSON output @@ -192,7 +192,7 @@ def to_target(self, target: str): pl.col("date") .dt.convert_time_zone("UTC") .dt.replace_time_zone(None) - .map_elements(lambda date: date.isoformat()) + .map_elements(lambda date: date.isoformat()), ) group = df.get_column("dataset").gather(0).item() df = df.to_pandas() @@ -531,6 +531,6 @@ def convert_datetimes(df: pl.DataFrame) -> pl.DataFrame: for date_column in date_columns: if date_column in df: df = df.with_columns( - pl.col(date_column).map_elements(lambda v: v.isoformat() if v else None, return_dtype=pl.Utf8) + pl.col(date_column).map_elements(lambda v: v.isoformat() if v else None, return_dtype=pl.Utf8), ) return df diff --git a/wetterdienst/core/timeseries/interpolate.py b/wetterdienst/core/timeseries/interpolate.py index 381a7e5c2..294e8db65 100644 --- a/wetterdienst/core/timeseries/interpolate.py +++ b/wetterdienst/core/timeseries/interpolate.py @@ -1,13 +1,13 @@ -# -*- coding: utf-8 -*- # Copyright (c) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
+from __future__ import annotations + import logging from datetime import datetime -from enum import Enum from functools import lru_cache from itertools import combinations from queue import Queue -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING import polars as pl import utm @@ -19,21 +19,27 @@ from wetterdienst.metadata.parameter import Parameter if TYPE_CHECKING: + from enum import Enum + from wetterdienst.core.timeseries.request import TimeseriesRequest from wetterdienst.core.timeseries.result import StationsResult log = logging.getLogger(__name__) -def get_interpolated_df(request: "TimeseriesRequest", latitude: float, longitude: float) -> pl.DataFrame: +def get_interpolated_df(request: TimeseriesRequest, latitude: float, longitude: float) -> pl.DataFrame: utm_x, utm_y, _, _ = utm.from_latlon(latitude, longitude) stations_dict, param_dict = request_stations(request, latitude, longitude, utm_x, utm_y) return calculate_interpolation(utm_x, utm_y, stations_dict, param_dict, request.interp_use_nearby_station_until_km) def request_stations( - request: "TimeseriesRequest", latitude: float, longitude: float, utm_x: float, utm_y: float -) -> Tuple[dict, dict]: + request: TimeseriesRequest, + latitude: float, + longitude: float, + utm_x: float, + utm_y: float, +) -> tuple[dict, dict]: param_dict = {} stations_dict = {} stations_ranked = request.filter_by_rank(latlon=(latitude, longitude), rank=20) @@ -60,7 +66,7 @@ def request_stations( def apply_station_values_per_parameter( result_df: pl.DataFrame, - stations_ranked: "StationsResult", + stations_ranked: StationsResult, param_dict: dict, station: dict, valid_station_groups_exists: bool, @@ -76,7 +82,8 @@ def apply_station_values_per_parameter( ts_interpolation_station_distance = stations_ranked.stations.settings.ts_interpolation_station_distance if station["distance"] > ts_interpolation_station_distance.get( - parameter.name.lower(), ts_interpolation_station_distance["default"] 
+ parameter.name.lower(), + ts_interpolation_station_distance["default"], ): log.info(f"Station for parameter {parameter.name} is too far away") continue @@ -99,8 +106,8 @@ def apply_station_values_per_parameter( interval=stations_ranked.frequency.value, time_zone="UTC", eager=True, - ).dt.round(stations_ranked.frequency.value) - } + ).dt.round(stations_ranked.frequency.value), + }, ) param_dict[parameter_name] = _ParameterData(df) @@ -128,8 +135,8 @@ def calculate_interpolation( Columns.VALUE.value: pl.Float64, Columns.DISTANCE_MEAN.value: pl.Float64, Columns.TAKEN_STATION_IDS.value: pl.List(inner=pl.Utf8), - } - ) + }, + ), ] valid_station_groups = get_valid_station_groups(stations_dict, utm_x, utm_y) @@ -144,7 +151,7 @@ def calculate_interpolation( results = [] for row in param_data.values.select(pl.all().exclude("date")).iter_rows(named=True): results.append( - apply_interpolation(row, stations_dict, valid_station_groups, parameter, utm_x, utm_y, nearby_stations) + apply_interpolation(row, stations_dict, valid_station_groups, parameter, utm_x, utm_y, nearby_stations), ) results = pl.DataFrame( results, @@ -165,7 +172,7 @@ def calculate_interpolation( by=[ Columns.PARAMETER.value, Columns.DATE.value, - ] + ], ) @@ -198,8 +205,8 @@ def apply_interpolation( parameter: Enum, utm_x: float, utm_y: float, - nearby_stations: List[str], -) -> Tuple[Enum, Optional[float], Optional[float], List[str]]: + nearby_stations: list[str], +) -> tuple[Enum, float | None, float | None, list[str]]: """ Interpolation function that is being applied over each row of the accumulated data of different stations. :param row: row with values of each station diff --git a/wetterdienst/core/timeseries/request.py b/wetterdienst/core/timeseries/request.py index 2ad827de1..5a78a7ace 100644 --- a/wetterdienst/core/timeseries/request.py +++ b/wetterdienst/core/timeseries/request.py @@ -1,13 +1,13 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. 
# Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import datetime as dt import logging from abc import abstractmethod -from datetime import datetime from enum import Enum from hashlib import sha256 -from typing import List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Tuple, Union import numpy as np import polars as pl @@ -31,16 +31,18 @@ StationNotFoundError, ) from wetterdienst.metadata.columns import Columns -from wetterdienst.metadata.datarange import DataRange -from wetterdienst.metadata.kind import Kind from wetterdienst.metadata.parameter import Parameter from wetterdienst.metadata.period import Period, PeriodType -from wetterdienst.metadata.provider import Provider from wetterdienst.metadata.resolution import Frequency, Resolution, ResolutionType from wetterdienst.settings import Settings from wetterdienst.util.enumeration import parse_enumeration_from_template from wetterdienst.util.python import to_list +if TYPE_CHECKING: + from wetterdienst.metadata.datarange import DataRange + from wetterdienst.metadata.kind import Kind + from wetterdienst.metadata.provider import Provider + try: from backports.datetime_fromisoformat import MonkeyPatch except ImportError: @@ -72,7 +74,7 @@ def _kind(self) -> Kind: @property @abstractmethod - def _resolution_base(self) -> Optional[Resolution]: + def _resolution_base(self) -> Resolution | None: """Optional enumeration for multiple resolutions""" pass @@ -91,7 +93,7 @@ def frequency(self) -> Frequency: return Frequency[self.resolution.name] @property - def dynamic_frequency(self) -> Optional[Frequency]: + def dynamic_frequency(self) -> Frequency | None: return self._dynamic_frequency @dynamic_frequency.setter @@ -107,7 +109,7 @@ def _period_type(self) -> PeriodType: @property @abstractmethod - def _period_base(self) -> Optional[Period]: + def _period_base(self) -> Period | None: """Period base enumeration from which a period string can be parsed""" pass @@ 
-138,7 +140,7 @@ def _has_datasets(self) -> bool: pass @property - def _dataset_base(self) -> Optional[Enum]: + def _dataset_base(self) -> Enum | None: """Dataset base that is used to differ between different datasets""" if self._has_datasets: raise NotImplementedError("implement _dataset_base enumeration that contains available datasets") @@ -200,7 +202,7 @@ def _values(self): Parameter.PRECIPITATION_HEIGHT.name, ] - def _parse_period(self, period: Period) -> Optional[List[Period]]: + def _parse_period(self, period: Period) -> list[Period] | None: """ Method to parse period(s) @@ -216,10 +218,10 @@ def _parse_period(self, period: Period) -> Optional[List[Period]]: [ parse_enumeration_from_template(p, intermediate=self._period_base, base=Period) for p in to_list(period) - ] + ], ) - def _parse_parameter(self, parameter: List[Union[str, Enum]]) -> List[Tuple[Enum, Enum]]: + def _parse_parameter(self, parameter: list[str | Enum | None]) -> list[tuple[Enum, Enum]]: """ Method to parse parameters, either from string or enum. Case independent for strings. @@ -272,7 +274,7 @@ def _parse_parameter(self, parameter: List[Union[str, Enum]]) -> List[Tuple[Enum return parameters - def _parse_dataset_and_parameter(self, parameter, dataset) -> Tuple[Optional[Enum], Optional[Enum]]: + def _parse_dataset_and_parameter(self, parameter, dataset) -> tuple[Enum | None, Enum | None]: """ Parse parameters for cases like - parameter=("climate_summary", ) or @@ -304,14 +306,15 @@ def _parse_dataset_and_parameter(self, parameter, dataset) -> Tuple[Optional[Enu # Case 2: dataset and parameter e.g. 
(precipitation_height, climate_summary) try: parameter_ = parse_enumeration_from_template( - parameter, self._parameter_base[self._dataset_accessor][dataset_.name] + parameter, + self._parameter_base[self._dataset_accessor][dataset_.name], ) except (InvalidEnumerationError, TypeError): pass return parameter_, dataset_ - def _parse_parameter_and_dataset(self, parameter) -> Tuple[Enum, Enum]: + def _parse_parameter_and_dataset(self, parameter) -> tuple[Enum, Enum]: """Try to parse dataset first e.g. when "climate_summary" or "precipitation_height", "climate_summary" is requested @@ -347,12 +350,12 @@ def _parse_station_id(series: pl.Series) -> pl.Series: def __init__( self, - parameter: Union[_PARAMETER_TYPE, Tuple[_PARAMETER_TYPE], List[_PARAMETER_TYPE]], - resolution: Union[str, Resolution], - period: Union[str, Period], - start_date: Optional[Union[str, datetime]] = None, - end_date: Optional[Union[str, datetime]] = None, - settings: Optional[Settings] = None, + parameter: _PARAMETER_TYPE | tuple[_PARAMETER_TYPE] | list[_PARAMETER_TYPE], + resolution: str | Resolution, + period: str | Period, + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None = None, + settings: Settings | None = None, ) -> None: """ @@ -398,12 +401,12 @@ def __init__( if not self.tidy and settings.ts_skip_empty: log.info( "option 'ts_skip_empty' is only available with option 'ts_shape' " - "and is thus ignored in this request." + "and is thus ignored in this request.", ) if not self.tidy and settings.ts_dropna: log.info( - "option 'ts_dropna' is only available with option 'ts_shape' " "and is thus ignored in this request." 
+ "option 'ts_dropna' is only available with option 'ts_shape' " "and is thus ignored in this request.", ) # optional attribute for dynamic resolutions @@ -441,9 +444,9 @@ def __eq__(self, other) -> bool: @staticmethod def convert_timestamps( - start_date: Optional[Union[str, datetime]] = None, - end_date: Optional[Union[str, datetime]] = None, - ) -> Union[Tuple[None, None], Tuple[datetime, datetime]]: + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None = None, + ) -> tuple[None, None] | tuple[dt.datetime, dt.datetime]: """ Sort out start_date vs. end_date, parse strings to datetime objects and finally convert both to pd.Timestamp types. @@ -638,7 +641,7 @@ def all(self) -> StationsResult: # noqa: A003 stations_filter=StationsFilter.ALL, ) - def filter_by_station_id(self, station_id: Union[str, Tuple[str, ...], List[str]]) -> StationsResult: + def filter_by_station_id(self, station_id: str | tuple[str, ...] | list[str]) -> StationsResult: """ Method to filter stations_result by station ids @@ -705,7 +708,7 @@ def filter_by_name(self, name: str, rank: int = 1, threshold: int = 90) -> Stati def filter_by_rank( self, - latlon: Tuple[float, float], + latlon: tuple[float, float], rank: int, ) -> StationsResult: """ @@ -748,7 +751,7 @@ def filter_by_rank( rank=rank, ) - def filter_by_distance(self, latlon: Tuple[float, float], distance: float, unit: str = "km") -> StationsResult: + def filter_by_distance(self, latlon: tuple[float, float], distance: float, unit: str = "km") -> StationsResult: """ Wrapper for get_nearby_stations_by_distance using the given parameter set. Returns the nearest stations_result defined by distance (km). 
@@ -776,7 +779,7 @@ def filter_by_distance(self, latlon: Tuple[float, float], distance: float, unit: lat, lon = latlon log.info( f"No weather stations were found for coordinates {lat}/{lon} (lat/lon) " - f"and distance {distance_in_km}km" + f"and distance {distance_in_km}km", ) return StationsResult( @@ -808,7 +811,7 @@ def filter_by_bbox(self, left: float, bottom: float, right: float, top: float) - df = df.filter( pl.col(Columns.LATITUDE.value).is_between(bottom, top, closed="both") - & pl.col(Columns.LONGITUDE.value).is_between(left, right, closed="both") + & pl.col(Columns.LONGITUDE.value).is_between(left, right, closed="both"), ) if df.is_empty(): @@ -839,7 +842,7 @@ def filter_by_sql(self, sql: str) -> StationsResult: log.info(f"No stations were found for sql {sql}") return StationsResult(stations=self, df=df, df_all=self.all().df, stations_filter=StationsFilter.BY_SQL) - def interpolate(self, latlon: Tuple[float, float]) -> InterpolatedValuesResult: + def interpolate(self, latlon: tuple[float, float]) -> InterpolatedValuesResult: """ Method to interpolate values @@ -896,7 +899,7 @@ def interpolate_by_station_id(self, station_id: str) -> InterpolatedValuesResult latlon = self._get_latlon_by_station_id(station_id) return self.interpolate(latlon=latlon) - def summarize(self, latlon: Tuple[float, float]) -> SummarizedValuesResult: + def summarize(self, latlon: tuple[float, float]) -> SummarizedValuesResult: """ Method to interpolate values @@ -957,7 +960,7 @@ def summarize_by_station_id(self, station_id: str) -> SummarizedValuesResult: latlon = self._get_latlon_by_station_id(station_id) return self.summarize(latlon=latlon) - def _get_latlon_by_station_id(self, station_id: str) -> Tuple[float, float]: + def _get_latlon_by_station_id(self, station_id: str) -> tuple[float, float]: """ Method to parse latlon for methods .summary/.interpolate. Typically, we expect a latlon tuple of floats, but we want users to be able to request for a station id as well. 
diff --git a/wetterdienst/core/timeseries/result.py b/wetterdienst/core/timeseries/result.py index 5e9eb097f..085900509 100644 --- a/wetterdienst/core/timeseries/result.py +++ b/wetterdienst/core/timeseries/result.py @@ -1,12 +1,11 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import json import typing from dataclasses import dataclass -from datetime import datetime -from enum import Enum -from typing import TYPE_CHECKING, List, Literal, Optional, Tuple, Union +from typing import TYPE_CHECKING, Literal import polars as pl from typing_extensions import NotRequired, TypedDict @@ -14,12 +13,15 @@ from wetterdienst.core.process import filter_by_date from wetterdienst.core.timeseries.export import ExportMixin from wetterdienst.metadata.columns import Columns -from wetterdienst.metadata.period import Period -from wetterdienst.metadata.resolution import Frequency, Resolution if TYPE_CHECKING: + from datetime import datetime + from enum import Enum + from wetterdienst.core.timeseries.request import TimeseriesRequest from wetterdienst.core.timeseries.values import TimeseriesValues + from wetterdienst.metadata.period import Period + from wetterdienst.metadata.resolution import Frequency, Resolution from wetterdienst.provider.dwd.dmo import DwdDmoRequest from wetterdienst.provider.dwd.mosmix import DwdMosmixRequest @@ -59,8 +61,8 @@ class _Metadata(TypedDict): class _Station(TypedDict): station_id: str - start_date: Optional[str] - end_date: Optional[str] + start_date: str | None + end_date: str | None latitude: float longitude: float height: float @@ -70,20 +72,20 @@ class _Station(TypedDict): class _StationsDict(TypedDict): metadata: NotRequired[_Metadata] - stations: List[_Station] + stations: list[_Station] class _OgcFeatureProperties(TypedDict): id: str name: str state: str - start_date: Optional[str] - end_date: Optional[str] + 
start_date: str | None + end_date: str | None class _OgcFeatureGeometry(TypedDict): type: Literal["Point"] - coordinates: List[float] + coordinates: list[float] class _StationsOgcFeature(TypedDict): @@ -94,7 +96,7 @@ class _StationsOgcFeature(TypedDict): class _StationsOgcFeatureCollectionData(TypedDict): type: Literal["FeatureCollection"] - features: List[_StationsOgcFeature] + features: list[_StationsOgcFeature] class _StationsOgcFeatureCollection(TypedDict): @@ -105,11 +107,11 @@ class _StationsOgcFeatureCollection(TypedDict): class StationsResult(ExportMixin): def __init__( self, - stations: Union["TimeseriesRequest", "DwdMosmixRequest", "DwdDmoRequest"], + stations: TimeseriesRequest | DwdMosmixRequest | DwdDmoRequest, df: pl.DataFrame, df_all: pl.DataFrame, stations_filter: StationsFilter, - rank: Optional[int] = None, + rank: int | None = None, **kwargs, ) -> None: # TODO: add more attributes from ScalarStations class @@ -144,7 +146,7 @@ def _resolution_type(self): return self.stations._resolution_type @property - def values(self) -> "TimeseriesValues": + def values(self) -> TimeseriesValues: return self.stations._values.from_stations(self) @property @@ -255,17 +257,19 @@ def to_dict(self, with_metadata: bool = False) -> _StationsDict: df = df.with_columns( [ pl.col("start_date").map_elements( - lambda date: date.isoformat() if date else None, return_dtype=pl.Utf8 + lambda date: date.isoformat() if date else None, + return_dtype=pl.Utf8, ), pl.col("end_date").map_elements( - lambda date: date.isoformat() if date else None, return_dtype=pl.Utf8 + lambda date: date.isoformat() if date else None, + return_dtype=pl.Utf8, ), - ] + ], ) data["stations"] = df.to_dicts() return data - def to_json(self, with_metadata: bool = False, indent: Optional[Union[int, bool]] = 4) -> str: + def to_json(self, with_metadata: bool = False, indent: int | bool | None = 4) -> str: """ Format station information as JSON. 
:param with_metadata: bool whether to include metadata @@ -313,7 +317,7 @@ def to_ogc_feature_collection(self, with_metadata: bool = False) -> _StationsOgc station["height"], ], }, - } + }, ) data["data"] = { "type": "FeatureCollection", @@ -333,8 +337,8 @@ class _ValuesItemDict(TypedDict): class _ValuesDict(TypedDict): metadata: NotRequired[_Metadata] - stations: NotRequired[List[_Station]] - values: List[_ValuesItemDict] + stations: NotRequired[list[_Station]] + values: list[_ValuesItemDict] @dataclass @@ -343,7 +347,7 @@ class _ValuesResult(ExportMixin): df: pl.DataFrame @staticmethod - def _to_dict(df: pl.DataFrame) -> List[_ValuesItemDict]: + def _to_dict(df: pl.DataFrame) -> list[_ValuesItemDict]: """ Format values as dictionary. This method is used both by ``to_dict()`` and ``to_ogc_feature_collection()``, however the latter one splits the DataFrame into multiple DataFrames by station and calls this method @@ -373,7 +377,10 @@ def to_dict(self, with_metadata: bool = False, with_stations: bool = False) -> _ return data def to_json( - self, with_metadata: bool = False, with_stations: bool = False, indent: Optional[Union[int, bool]] = 4 + self, + with_metadata: bool = False, + with_stations: bool = False, + indent: int | bool | None = 4, ) -> str: """ Format values as JSON. 
@@ -397,12 +404,12 @@ class _ValuesOgcFeature(TypedDict): type: Literal["Feature"] properties: _OgcFeatureProperties geometry: _OgcFeatureGeometry - values: List[_ValuesItemDict] + values: list[_ValuesItemDict] class _ValuesOgcFeatureCollectionData(TypedDict): type: Literal["FeatureCollection"] - features: List[_ValuesOgcFeature] + features: list[_ValuesOgcFeature] class _ValuesOgcFeatureCollection(TypedDict): @@ -413,7 +420,7 @@ class _ValuesOgcFeatureCollection(TypedDict): @dataclass class ValuesResult(_ValuesResult): stations: StationsResult - values: "TimeseriesValues" + values: TimeseriesValues df: pl.DataFrame @property @@ -433,7 +440,7 @@ def to_ogc_feature_collection(self, with_metadata: bool = False) -> _ValuesOgcFe features = [] for station in df_stations.iter_rows(named=True): df_values = self.df.filter(pl.col("station_id") == station["station_id"]).select( - pl.all().exclude("station_id") + pl.all().exclude("station_id"), ) features.append( { @@ -458,7 +465,7 @@ def to_ogc_feature_collection(self, with_metadata: bool = False) -> _ValuesOgcFe ], }, "values": self._to_dict(df_values), - } + }, ) data["data"] = { "type": "FeatureCollection", @@ -478,26 +485,26 @@ class _InterpolatedValuesItemDict(TypedDict): date: str value: float distance_mean: float - taken_station_ids: List[str] + taken_station_ids: list[str] class _InterpolatedValuesDict(TypedDict): metadata: NotRequired[_Metadata] - stations: NotRequired[List[_Station]] - values: List[_InterpolatedValuesItemDict] + stations: NotRequired[list[_Station]] + values: list[_InterpolatedValuesItemDict] class _InterpolatedValuesOgcFeature(TypedDict): type: Literal["Feature"] properties: _InterpolatedOrSummarizedOgcFeatureProperties geometry: _OgcFeatureGeometry - stations: List[_Station] - values: List[_InterpolatedValuesItemDict] + stations: list[_Station] + values: list[_InterpolatedValuesItemDict] class _InterpolatedValuesOgcFeatureCollectionData(TypedDict): type: Literal["FeatureCollection"] - features: 
List[_InterpolatedValuesOgcFeature] + features: list[_InterpolatedValuesOgcFeature] class _InterpolatedValuesOgcFeatureCollection(TypedDict): @@ -509,14 +516,14 @@ class _InterpolatedValuesOgcFeatureCollection(TypedDict): class InterpolatedValuesResult(_ValuesResult): stations: StationsResult df: pl.DataFrame - latlon: Optional[Tuple[float, float]] + latlon: tuple[float, float] | None if typing.TYPE_CHECKING: # We need to override the signature of the method to_dict() from ValuesResult here # because we want to return a slightly different type with columns related to interpolation. # Those are distance_mean and station_ids. # https://github.com/python/typing/discussions/1015 - def _to_dict(self, df: pl.DataFrame) -> List[_InterpolatedValuesItemDict]: ... + def _to_dict(self, df: pl.DataFrame) -> list[_InterpolatedValuesItemDict]: ... def to_dict(self, with_metadata: bool = False, with_stations: bool = False) -> _InterpolatedValuesDict: ... @@ -569,21 +576,21 @@ class _SummarizedValuesItemDict(TypedDict): class _SummarizedValuesDict(TypedDict): metadata: NotRequired[_Metadata] - stations: NotRequired[List[_Station]] - values: List[_SummarizedValuesItemDict] + stations: NotRequired[list[_Station]] + values: list[_SummarizedValuesItemDict] class _SummarizedValuesOgcFeature(TypedDict): type: Literal["Feature"] properties: _InterpolatedOrSummarizedOgcFeatureProperties geometry: _OgcFeatureGeometry - stations: List[_Station] - values: List[_SummarizedValuesItemDict] + stations: list[_Station] + values: list[_SummarizedValuesItemDict] class _SummarizedValuesOgcFeatureCollectionData(TypedDict): type: Literal["FeatureCollection"] - features: List[_SummarizedValuesOgcFeature] + features: list[_SummarizedValuesOgcFeature] class _SummarizedValuesOgcFeatureCollection(TypedDict): @@ -595,14 +602,14 @@ class _SummarizedValuesOgcFeatureCollection(TypedDict): class SummarizedValuesResult(_ValuesResult): stations: StationsResult df: pl.DataFrame - latlon: Tuple[float, float] + 
latlon: tuple[float, float] if typing.TYPE_CHECKING: # We need to override the signature of the method to_dict() from ValuesResult here # because we want to return a slightly different type with columns related to interpolation. # Those are distance and station_id. # https://github.com/python/typing/discussions/1015 - def _to_dict(self, df: pl.DataFrame) -> List[_SummarizedValuesItemDict]: ... + def _to_dict(self, df: pl.DataFrame) -> list[_SummarizedValuesItemDict]: ... def to_dict(self, with_metadata: bool = False, with_stations: bool = False) -> _SummarizedValuesDict: ... diff --git a/wetterdienst/core/timeseries/summarize.py b/wetterdienst/core/timeseries/summarize.py index 390e1fe4e..66011f3a4 100644 --- a/wetterdienst/core/timeseries/summarize.py +++ b/wetterdienst/core/timeseries/summarize.py @@ -1,17 +1,22 @@ +from __future__ import annotations + import logging from collections import defaultdict from datetime import datetime -from enum import Enum -from typing import Optional, Tuple +from typing import TYPE_CHECKING import polars as pl from wetterdienst import Parameter -from wetterdienst.core.timeseries.request import TimeseriesRequest -from wetterdienst.core.timeseries.result import StationsResult from wetterdienst.core.timeseries.tools import _ParameterData, extract_station_values from wetterdienst.metadata.columns import Columns +if TYPE_CHECKING: + from enum import Enum + + from wetterdienst.core.timeseries.request import TimeseriesRequest + from wetterdienst.core.timeseries.result import StationsResult + log = logging.getLogger(__name__) SUMMARY_STATION_KM_LIMIT = defaultdict( @@ -26,12 +31,12 @@ ) -def get_summarized_df(request: "TimeseriesRequest", latitude: float, longitude: float) -> pl.DataFrame: +def get_summarized_df(request: TimeseriesRequest, latitude: float, longitude: float) -> pl.DataFrame: stations_dict, param_dict = request_stations(request, latitude, longitude) return calculate_summary(stations_dict, param_dict) -def 
request_stations(request: "TimeseriesRequest", latitude: float, longitude: float) -> Tuple[dict, dict]: +def request_stations(request: TimeseriesRequest, latitude: float, longitude: float) -> tuple[dict, dict]: param_dict = {} stations_dict = {} @@ -57,7 +62,7 @@ def request_stations(request: "TimeseriesRequest", latitude: float, longitude: f def apply_station_values_per_parameter( result_df: pl.DataFrame, - stations_ranked: "StationsResult", + stations_ranked: StationsResult, param_dict: dict, station: dict, ) -> None: @@ -92,8 +97,8 @@ def apply_station_values_per_parameter( interval=stations_ranked.frequency.value, time_zone="UTC", eager=True, - ).dt.round(stations_ranked.frequency.value) - } + ).dt.round(stations_ranked.frequency.value), + }, ) param_dict[parameter_name] = _ParameterData(df) @@ -114,8 +119,8 @@ def calculate_summary(stations_dict: dict, param_dict: dict) -> pl.DataFrame: Columns.VALUE.value: pl.Float64, Columns.DISTANCE.value: pl.Float64, Columns.TAKEN_STATION_ID.value: pl.Utf8, - } - ) + }, + ), ] for parameter, param_data in param_dict.items(): @@ -142,7 +147,7 @@ def calculate_summary(stations_dict: dict, param_dict: dict) -> pl.DataFrame: by=[ Columns.PARAMETER.value, Columns.DATE.value, - ] + ], ) @@ -150,7 +155,7 @@ def apply_summary( row: dict, stations_dict: dict, parameter: Enum, -) -> Tuple[Enum, Optional[float], Optional[float], Optional[str]]: +) -> tuple[Enum, float | None, float | None, str | None]: vals = {s: v for s, v in row.items() if v is not None} if not vals: diff --git a/wetterdienst/core/timeseries/tools.py b/wetterdienst/core/timeseries/tools.py index 1224e6a9a..8deda9c5e 100644 --- a/wetterdienst/core/timeseries/tools.py +++ b/wetterdienst/core/timeseries/tools.py @@ -1,10 +1,10 @@ -from typing import List +from __future__ import annotations import polars as pl class _ParameterData: - def __init__(self, values: pl.DataFrame, station_ids: List[str] = None, extra_station_counter: int = 0): + def __init__(self, values: 
pl.DataFrame, station_ids: list[str] | None = None, extra_station_counter: int = 0): self.station_ids = station_ids or [] self.extra_station_counter = extra_station_counter self.values = values @@ -12,7 +12,9 @@ def __init__(self, values: pl.DataFrame, station_ids: List[str] = None, extra_st def extract_station_values( - param_data: _ParameterData, result_series_param: pl.Series, valid_station_groups_exists: bool + param_data: _ParameterData, + result_series_param: pl.Series, + valid_station_groups_exists: bool, ) -> None: # Three rules: # 1. only add further stations if not a minimum of 4 stations is reached OR @@ -28,7 +30,7 @@ def extract_station_values( # "S" is added to station id titles to prevent bug with pandas that somehow doesn't allow column name "02000" # under certain circumstances param_data.values = param_data.values.with_columns( - pl.lit(result_series_param).alias(f"S{result_series_param.name}") + pl.lit(result_series_param).alias(f"S{result_series_param.name}"), ) else: param_data.finished = True diff --git a/wetterdienst/core/timeseries/values.py b/wetterdienst/core/timeseries/values.py index fe9dd57e7..8d57b5514 100644 --- a/wetterdienst/core/timeseries/values.py +++ b/wetterdienst/core/timeseries/values.py @@ -1,12 +1,11 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
-import datetime as dt +from __future__ import annotations + import logging import operator from abc import ABCMeta, abstractmethod -from enum import Enum -from typing import Dict, Generator, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Iterator import polars as pl from dateutil.relativedelta import relativedelta @@ -22,6 +21,10 @@ from wetterdienst.metadata.unit import REGISTRY, OriginUnit, SIUnit from wetterdienst.util.logging import TqdmToLogger +if TYPE_CHECKING: + import datetime as dt + from enum import Enum + try: from backports.datetime_fromisoformat import MonkeyPatch except ImportError: @@ -52,7 +55,7 @@ def __repr__(self): """Representation of values object""" station_ids_joined = ", ".join(self.sr.station_id.to_list()) parameters_joined = ", ".join( - [f"({parameter.value}/{dataset.value})" for parameter, dataset in self.sr.stations.parameter] + [f"({parameter.value}/{dataset.value})" for parameter, dataset in self.sr.stations.parameter], ) periods_joined = self.sr.stations.period and ", ".join([period.value for period in self.sr.stations.period]) @@ -68,7 +71,7 @@ def __repr__(self): # Fields for type coercion, needed for separation from fields with actual data # that have to be parsed differently when having data in tabular form @property - def _meta_fields(self) -> Dict[str, str]: + def _meta_fields(self) -> dict[str, str]: """ Metadata fields that are independent of actual values and should be parsed differently @@ -107,7 +110,12 @@ def _data_tz(self) -> Timezone: """Timezone enumeration of published data.""" pass - def _fetch_frequency(self, station_id, parameter: Enum, dataset: Enum) -> str: + def _fetch_frequency( + self, + station_id, # noqa: ARG002 + parameter: Enum, # noqa: ARG002 + dataset: Enum, # noqa: ARG002 + ) -> str: """ Method used to fetch the dynamic frequency string from somewhere and then set it after download the corresponding dataset. 
The fetch may either be an arbitrary algorithm that parses the frequency from the @@ -122,8 +130,11 @@ def _fetch_frequency(self, station_id, parameter: Enum, dataset: Enum) -> str: raise NotImplementedError("implement this method if the service has a dynamic resolution") def _adjust_start_end_date( - self, start_date: dt.datetime, end_date: dt.datetime, tzinfo: ZoneInfo - ) -> Tuple[dt.datetime, dt.datetime]: + self, + start_date: dt.datetime, + end_date: dt.datetime, + tzinfo: ZoneInfo, + ) -> tuple[dt.datetime, dt.datetime]: """Adjust start and end date to the resolution of the service. This is necessary for building a complete date range that matches the resolution. """ @@ -225,7 +236,8 @@ def _convert_values_to_si(self, df: pl.DataFrame, dataset) -> pl.DataFrame: data = [] for (dataset, parameter), group in df.group_by( - by=[Columns.DATASET.value, Columns.PARAMETER.value], maintain_order=True + by=[Columns.DATASET.value, Columns.PARAMETER.value], + maintain_order=True, ): op, factor = conversion_factors.get(dataset).get(parameter, (None, None)) if op: @@ -235,8 +247,9 @@ def _convert_values_to_si(self, df: pl.DataFrame, dataset) -> pl.DataFrame: return pl.concat(data) def _create_conversion_factors( - self, datasets: List[str] - ) -> Dict[str, Dict[str, Tuple[Union[operator.add, operator.mul], float]]]: + self, + datasets: list[str], + ) -> dict[str, dict[str, tuple[operator.add | operator.mul, float]]]: """ Function to create conversion factors based on a given dataset @@ -257,8 +270,9 @@ def _create_conversion_factors( @staticmethod def _get_conversion_factor( - origin_unit: Enum, si_unit: Enum - ) -> Tuple[Optional[Union[operator.mul, operator.add]], Optional[float]]: + origin_unit: Enum, + si_unit: Enum, + ) -> tuple[operator.mul | operator.add | None, float | None]: """ Method to get the conversion factor (flaot) for a specific parameter :param origin_unit: origin unit enumeration of parameter @@ -354,7 +368,8 @@ def _build_complete_df(self, df: 
pl.DataFrame, station_id: str) -> pl.DataFrame: base_df = self._get_base_df(start_date, end_date) data = [] for (station_id, parameter), group in df.group_by( - [Columns.STATION_ID.value, Columns.PARAMETER.value], maintain_order=True + [Columns.STATION_ID.value, Columns.PARAMETER.value], + maintain_order=True, ): par_df = base_df.join( other=group, @@ -362,7 +377,8 @@ def _build_complete_df(self, df: pl.DataFrame, station_id: str) -> pl.DataFrame: how="left", ) par_df = par_df.with_columns( - pl.lit(station_id).alias(Columns.STATION_ID.value), pl.lit(parameter).alias(Columns.PARAMETER.value) + pl.lit(station_id).alias(Columns.STATION_ID.value), + pl.lit(parameter).alias(Columns.PARAMETER.value), ) data.append(par_df) return pl.concat(data) @@ -382,7 +398,7 @@ def _organize_df_columns(self, df: pl.DataFrame, station_id: str, dataset: Enum) ) return df.select(pl.col(col) if col in df.columns else pl.lit(None).alias(col) for col in columns) - def query(self) -> Generator[ValuesResult, None, None]: + def query(self) -> Iterator[ValuesResult]: """ Core method for data collection, iterating of station ids and yielding a DataFrame for each station with all found parameters. Takes care of type @@ -409,7 +425,9 @@ def query(self) -> Generator[ValuesResult, None, None]: for parameter, dataset in self.sr.parameter: parameter_df = self._collect_station_parameter( - station_id=station_id, parameter=parameter, dataset=dataset + station_id=station_id, + parameter=parameter, + dataset=dataset, ) if parameter_df.is_empty(): @@ -442,7 +460,7 @@ def query(self) -> Generator[ValuesResult, None, None]: self.sr.start_date, self.sr.end_date, closed="both", - ) + ), ) if self.sr.skip_empty: @@ -450,7 +468,7 @@ def query(self) -> Generator[ValuesResult, None, None]: if percentage < self.sr.skip_threshold: log.info( f"station {station_id} is skipped as percentage of actual values ({percentage}) " - f"is below threshold ({self.sr.skip_threshold})." 
+ f"is below threshold ({self.sr.skip_threshold}).", ) continue @@ -517,14 +535,14 @@ def _tabulate_df(df: pl.DataFrame) -> pl.DataFrame: and quality flags """ df_tabulated = df.select( - [pl.col(Columns.STATION_ID.value), pl.col(Columns.DATASET.value), pl.col(Columns.DATE.value)] + [pl.col(Columns.STATION_ID.value), pl.col(Columns.DATASET.value), pl.col(Columns.DATE.value)], ).unique() for (parameter,), parameter_df in df.group_by([Columns.PARAMETER.value], maintain_order=True): # Build quality column name parameter_quality = f"{Columns.QUALITY_PREFIX.value}_{parameter}" parameter_df = parameter_df.select([Columns.DATE.value, Columns.VALUE.value, Columns.QUALITY.value]).rename( - mapping={Columns.VALUE.value: parameter, Columns.QUALITY.value: parameter_quality} + mapping={Columns.VALUE.value: parameter, Columns.QUALITY.value: parameter_quality}, ) df_tabulated = df_tabulated.join(parameter_df, on=[Columns.DATE.value]) @@ -552,7 +570,7 @@ def all(self) -> ValuesResult: # noqa: A003 return ValuesResult(stations=self.sr, values=self, df=df) @staticmethod - def _humanize(df: pl.DataFrame, humanized_parameters_mapping: Dict[str, str]) -> pl.DataFrame: + def _humanize(df: pl.DataFrame, humanized_parameters_mapping: dict[str, str]) -> pl.DataFrame: """ Method for humanizing parameters. @@ -562,7 +580,7 @@ def _humanize(df: pl.DataFrame, humanized_parameters_mapping: Dict[str, str]) -> """ return df.with_columns(pl.col(Columns.PARAMETER.value).replace(humanized_parameters_mapping)) - def _create_humanized_parameters_mapping(self) -> Dict[str, str]: + def _create_humanized_parameters_mapping(self) -> dict[str, str]: """ Reduce the creation of parameter mapping of the massive amount of parameters by specifying the resolution. 
@@ -598,7 +616,7 @@ def _get_actual_percentage(self, df: pl.DataFrame) -> float: dataset_enum = self.sr.stations._parameter_base[self.sr.resolution.name][dataset.name] parameters.extend([par.value for par in dataset_enum if not par.name.lower().startswith("quality")]) percentage = df.group_by(["parameter"]).agg( - (pl.col("value").drop_nulls().len() / pl.col("value").len()).cast(pl.Float64).alias("perc") + (pl.col("value").drop_nulls().len() / pl.col("value").len()).cast(pl.Float64).alias("perc"), ) missing = pl.DataFrame( [{"parameter": par, "perc": 0.0} for par in parameters if par not in percentage.get_column("parameter")], diff --git a/wetterdienst/exceptions.py b/wetterdienst/exceptions.py index 05e657093..fc5a71ef0 100644 --- a/wetterdienst/exceptions.py +++ b/wetterdienst/exceptions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. class InvalidEnumerationError(ValueError): diff --git a/wetterdienst/metadata/__init__.py b/wetterdienst/metadata/__init__.py index 36979cf71..57a368863 100644 --- a/wetterdienst/metadata/__init__.py +++ b/wetterdienst/metadata/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/metadata/columns.py b/wetterdienst/metadata/columns.py index 35305b2dd..4d044b2f3 100644 --- a/wetterdienst/metadata/columns.py +++ b/wetterdienst/metadata/columns.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
from enum import Enum diff --git a/wetterdienst/metadata/datarange.py b/wetterdienst/metadata/datarange.py index 60460ec15..8c0544cb8 100644 --- a/wetterdienst/metadata/datarange.py +++ b/wetterdienst/metadata/datarange.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/metadata/extension.py b/wetterdienst/metadata/extension.py index 2c9e6b431..f87f5ff1a 100644 --- a/wetterdienst/metadata/extension.py +++ b/wetterdienst/metadata/extension.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/metadata/kind.py b/wetterdienst/metadata/kind.py index ce31ce7bd..82ec52d4a 100644 --- a/wetterdienst/metadata/kind.py +++ b/wetterdienst/metadata/kind.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/metadata/parameter.py b/wetterdienst/metadata/parameter.py index f8eff76a0..50969274c 100644 --- a/wetterdienst/metadata/parameter.py +++ b/wetterdienst/metadata/parameter.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/metadata/period.py b/wetterdienst/metadata/period.py index 6cbf63629..9c75ed016 100644 --- a/wetterdienst/metadata/period.py +++ b/wetterdienst/metadata/period.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
diff --git a/wetterdienst/metadata/provider.py b/wetterdienst/metadata/provider.py index fef497d71..dcb884e3c 100644 --- a/wetterdienst/metadata/provider.py +++ b/wetterdienst/metadata/provider.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/metadata/resolution.py b/wetterdienst/metadata/resolution.py index ebfb35fbb..496cca11e 100644 --- a/wetterdienst/metadata/resolution.py +++ b/wetterdienst/metadata/resolution.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/metadata/timezone.py b/wetterdienst/metadata/timezone.py index 2b3c269f6..fdb96c5e4 100644 --- a/wetterdienst/metadata/timezone.py +++ b/wetterdienst/metadata/timezone.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/metadata/unit.py b/wetterdienst/metadata/unit.py index 365d104e5..f497b0af2 100644 --- a/wetterdienst/metadata/unit.py +++ b/wetterdienst/metadata/unit.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import pint diff --git a/wetterdienst/provider/__init__.py b/wetterdienst/provider/__init__.py index 36979cf71..57a368863 100644 --- a/wetterdienst/provider/__init__.py +++ b/wetterdienst/provider/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
diff --git a/wetterdienst/provider/dwd/__init__.py b/wetterdienst/provider/dwd/__init__.py index 36979cf71..57a368863 100644 --- a/wetterdienst/provider/dwd/__init__.py +++ b/wetterdienst/provider/dwd/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/provider/dwd/dmo/__init__.py b/wetterdienst/provider/dwd/dmo/__init__.py index d5d785361..389c06fa7 100644 --- a/wetterdienst/provider/dwd/dmo/__init__.py +++ b/wetterdienst/provider/dwd/dmo/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.provider.dwd.dmo.api import DwdDmoRequest, DwdDmoStationGroup, DwdDmoType, DwdForecastDate diff --git a/wetterdienst/provider/dwd/dmo/api.py b/wetterdienst/provider/dwd/dmo/api.py index a29fc57bd..b656c0516 100644 --- a/wetterdienst/provider/dwd/dmo/api.py +++ b/wetterdienst/provider/dwd/dmo/api.py @@ -1,11 +1,12 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
+from __future__ import annotations + import datetime as dt import logging from enum import Enum from io import StringIO -from typing import Dict, Iterator, List, Optional, Union +from typing import Iterator from urllib.error import HTTPError from urllib.parse import urljoin @@ -1016,12 +1017,12 @@ def add_date_from_filename(df: pl.DataFrame, current_date: dt.datetime) -> pl.Da pl.lit(year).alias("year"), pl.col("date_str").map_elements(lambda s: s[:2]).cast(int).alias("day"), pl.col("date_str").map_elements(lambda s: s[2:4]).cast(int).alias("hour"), - ] + ], ) days_difference = df.get_column("day").max() - df.get_column("day").min() if days_difference > 20: df = df.with_columns( - pl.when(pl.col("day") > 25).then(month - 1 if month > 1 else 12).otherwise(month).alias("month") + pl.when(pl.col("day") > 25).then(month - 1 if month > 1 else 12).otherwise(month).alias("month"), ) else: df = df.with_columns(pl.lit(month).alias("month")) @@ -1036,7 +1037,7 @@ def add_date_from_filename(df: pl.DataFrame, current_date: dt.datetime) -> pl.Da pl.struct(["year", "month", "day", "hour"]) .map_elements(lambda s: dt.datetime(s["year"], s["month"], s["day"], s["hour"])) .alias("date"), - ] + ], ) @@ -1059,7 +1060,7 @@ class DwdDmoValues(TimeseriesValues): _tz = Timezone.GERMANY _data_tz = Timezone.UTC - def _create_humanized_parameters_mapping(self) -> Dict[str, str]: + def _create_humanized_parameters_mapping(self) -> dict[str, str]: """ Method for creation of parameter name mappings based on self._parameter_base @@ -1078,7 +1079,7 @@ def __init__(self, stations_result: StationsResult) -> None: :param stations_result: """ - super(DwdDmoValues, self).__init__(stations_result=stations_result) + super().__init__(stations_result=stations_result) parameter_base = self.sr.stations._parameter_base dataset_accessor = self.sr.stations._dataset_accessor @@ -1092,10 +1093,12 @@ def __init__(self, stations_result: StationsResult) -> None: parameter_.append(parameter.value) self.kml = 
KMLReader( - station_ids=self.sr.station_id.to_list(), parameters=parameter_, settings=self.sr.stations.settings + station_ids=self.sr.station_id.to_list(), + parameters=parameter_, + settings=self.sr.stations.settings, ) - def get_dwd_dmo_path(self, dataset: Enum, station_id: Optional[str] = None) -> str: + def get_dwd_dmo_path(self, dataset: Enum, station_id: str | None = None) -> str: path = f"weather/local_forecasts/dmo/{dataset.value}/{self.sr.stations.station_group.value}" if self.sr.stations.station_group == DwdDmoStationGroup.ALL_STATIONS: return f"{path}/kmz" @@ -1161,7 +1164,10 @@ def _collect_station_parameter(self) -> Iterator[pl.DataFrame]: yield df else: for date in pl.datetime_range( - self.sr.stations.start_issue, self.sr.stations.end_issue, interval=self.sr.frequency.value, eager=True + self.sr.stations.start_issue, + self.sr.stations.end_issue, + interval=self.sr.frequency.value, + eager=True, ): try: for df in self.read_dmo(date): @@ -1189,7 +1195,7 @@ def _tidy_up_df(df: pl.DataFrame) -> pl.DataFrame: return df.with_columns(pl.lit(value=None, dtype=pl.Float64).alias(Columns.QUALITY.value)) - def read_dmo(self, date: Union[dt.datetime, DwdForecastDate]) -> Iterator[pl.DataFrame]: + def read_dmo(self, date: dt.datetime | DwdForecastDate) -> Iterator[pl.DataFrame]: """ Manage data acquisition for a given date that is used to filter the found files on the DMO path of the DWD server. 
@@ -1201,12 +1207,12 @@ def read_dmo(self, date: Union[dt.datetime, DwdForecastDate]) -> Iterator[pl.Dat df_forecast = df_forecast.rename( mapping={ "datetime": Columns.DATE.value, - } + }, ) yield df_forecast - def _read_dmo(self, date: Union[DwdForecastDate, dt.datetime]) -> Iterator[pl.DataFrame]: + def _read_dmo(self, date: DwdForecastDate | dt.datetime) -> Iterator[pl.DataFrame]: """ Wrapper that either calls read_icon_eu or read_icon depending on defined period type @@ -1219,7 +1225,7 @@ def _read_dmo(self, date: Union[DwdForecastDate, dt.datetime]) -> Iterator[pl.Da else: yield from self.read_icon(date) - def read_icon_eu(self, date: Union[DwdForecastDate, dt.datetime]) -> Iterator[pl.DataFrame]: + def read_icon_eu(self, date: DwdForecastDate | dt.datetime) -> Iterator[pl.DataFrame]: """ Reads single icon_eu file with all stations_result and returns every chunk that matches with one of the defined station ids. @@ -1232,8 +1238,7 @@ def read_icon_eu(self, date: Union[DwdForecastDate, dt.datetime]) -> Iterator[pl url = urljoin("https://opendata.dwd.de", dmo_path) file_url = self.get_url_for_date(url, date) self.kml.read(file_url) - for forecast in self.kml.get_forecasts(): - yield forecast + yield from self.kml.get_forecasts() else: for station_id in self.sr.station_id: dmo_path = self.get_dwd_dmo_path(DwdDmoDataset.ICON_EU, station_id=station_id) @@ -1246,7 +1251,7 @@ def read_icon_eu(self, date: Union[DwdForecastDate, dt.datetime]) -> Iterator[pl self.kml.read(file_url) yield next(self.kml.get_forecasts()) - def read_icon(self, date: Union[DwdForecastDate, dt.datetime]) -> Iterator[pl.DataFrame]: + def read_icon(self, date: DwdForecastDate | dt.datetime) -> Iterator[pl.DataFrame]: """ Reads multiple icon files with one per each station and returns a chunk per file. 
@@ -1259,8 +1264,7 @@ def read_icon(self, date: Union[DwdForecastDate, dt.datetime]) -> Iterator[pl.Da url = urljoin("https://opendata.dwd.de", dmo_path) file_url = self.get_url_for_date(url, date) self.kml.read(file_url) - for forecast in self.kml.get_forecasts(): - yield forecast + yield from self.kml.get_forecasts() else: for station_id in self.sr.station_id: dmo_path = self.get_dwd_dmo_path(DwdDmoDataset.ICON, station_id=station_id) @@ -1273,7 +1277,7 @@ def read_icon(self, date: Union[DwdForecastDate, dt.datetime]) -> Iterator[pl.Da self.kml.read(file_url) yield next(self.kml.get_forecasts()) - def get_url_for_date(self, url: str, date: Union[dt.datetime, DwdForecastDate]) -> str: + def get_url_for_date(self, url: str, date: dt.datetime | DwdForecastDate) -> str: """ Method to get a file url based on the dmo url and the date that is used for filtering. @@ -1292,7 +1296,7 @@ def get_url_for_date(self, url: str, date: Union[dt.datetime, DwdForecastDate]) .str.split("_") .list.last() .map_elements(lambda s: s[:-4]) - .alias("date_str") + .alias("date_str"), ) df = add_date_from_filename(df, dt.datetime.now(ZoneInfo("UTC")).replace(tzinfo=None)) if date == DwdForecastDate.LATEST: @@ -1378,15 +1382,15 @@ def adjust_datetime(datetime_: dt.datetime) -> dt.datetime: def __init__( self, - parameter: Optional[List[Union[str, DwdDmoParameter, Parameter]]], - dmo_type: Union[str, DwdDmoType], - start_issue: Union[str, dt.datetime, DwdForecastDate] = DwdForecastDate.LATEST, - end_issue: Optional[Union[str, dt.datetime]] = None, - start_date: Optional[Union[str, dt.datetime]] = None, - end_date: Optional[Union[str, dt.datetime]] = None, - station_group: Optional[Union[str, DwdDmoStationGroup]] = None, - lead_time: Optional[Union[str, DwdDmoLeadTime]] = None, - settings: Optional[Settings] = None, + parameter: list[str | DwdDmoParameter | Parameter] | None, + dmo_type: str | DwdDmoType, + start_issue: str | dt.datetime | DwdForecastDate = DwdForecastDate.LATEST, + end_issue: 
str | dt.datetime | None = None, + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None = None, + station_group: str | DwdDmoStationGroup | None = None, + lead_time: str | DwdDmoLeadTime | None = None, + settings: Settings | None = None, ) -> None: """ @@ -1528,7 +1532,7 @@ def issue_end(self): "longitude": "0.12", "height": "5", }, - ] + ], ) def _all(self) -> pl.LazyFrame: diff --git a/wetterdienst/provider/dwd/metadata/__init__.py b/wetterdienst/provider/dwd/metadata/__init__.py index 36979cf71..57a368863 100644 --- a/wetterdienst/provider/dwd/metadata/__init__.py +++ b/wetterdienst/provider/dwd/metadata/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/provider/dwd/metadata/datetime.py b/wetterdienst/provider/dwd/metadata/datetime.py index 58bfb8ac0..dbfa74e2d 100644 --- a/wetterdienst/provider/dwd/metadata/datetime.py +++ b/wetterdienst/provider/dwd/metadata/datetime.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/provider/dwd/mosmix/__init__.py b/wetterdienst/provider/dwd/mosmix/__init__.py index ee955738e..e4706f391 100644 --- a/wetterdienst/provider/dwd/mosmix/__init__.py +++ b/wetterdienst/provider/dwd/mosmix/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
from wetterdienst.provider.dwd.mosmix.api import ( diff --git a/wetterdienst/provider/dwd/mosmix/access.py b/wetterdienst/provider/dwd/mosmix/access.py index 1f3adf357..1aedcff9a 100644 --- a/wetterdienst/provider/dwd/mosmix/access.py +++ b/wetterdienst/provider/dwd/mosmix/access.py @@ -1,25 +1,28 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. # Source: # https://github.com/jlewis91/dwdbulk/blob/master/dwdbulk/api/forecasts.py +from __future__ import annotations + import datetime as dt import logging from io import BytesIO -from os.path import basename -from typing import Iterator, List +from pathlib import Path +from typing import TYPE_CHECKING, Iterator import polars as pl from fsspec.implementations.zip import ZipFileSystem from lxml.etree import iterparse # noqa: S410 from tqdm import tqdm -from wetterdienst.settings import Settings from wetterdienst.util.cache import CacheExpiry from wetterdienst.util.io import read_in_chunks from wetterdienst.util.logging import TqdmToLogger from wetterdienst.util.network import NetworkFilesystemManager +if TYPE_CHECKING: + from wetterdienst.settings import Settings + try: from backports.datetime_fromisoformat import MonkeyPatch except ImportError: @@ -33,7 +36,7 @@ class KMLReader: """Read DWD XML Weather Forecast File of Type KML.""" - def __init__(self, station_ids: List[str], parameters: List[str], settings: Settings) -> None: + def __init__(self, station_ids: list[str], parameters: list[str], settings: Settings) -> None: self.station_ids = station_ids self.parameters = parameters self.metadata = {} @@ -89,7 +92,7 @@ def read(self, url: str): Download and read DWD XML Weather Forecast File of Type KML. 
""" - log.info(f"Downloading KMZ file {basename(url)}") + log.info(f"Downloading KMZ file {Path(url).name}") kml = self.fetch(url) log.info("Parsing KML data") @@ -169,7 +172,7 @@ def get_forecasts(self) -> Iterator[pl.DataFrame]: measurement_values = " ".join(measurement_string.split()).split(" ") measurement_values = [None if i == "-" else float(i) for i in measurement_values] assert len(measurement_values) == len( # noqa:S101 - self.timesteps + self.timesteps, ), "Number of time steps does not match number of measurement values" data_dict[measurement_parameter.lower()] = measurement_values diff --git a/wetterdienst/provider/dwd/mosmix/api.py b/wetterdienst/provider/dwd/mosmix/api.py index e997dd48c..335a0754f 100644 --- a/wetterdienst/provider/dwd/mosmix/api.py +++ b/wetterdienst/provider/dwd/mosmix/api.py @@ -1,11 +1,12 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import datetime as dt import logging from enum import Enum from io import StringIO -from typing import Dict, Iterator, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Iterator from urllib.parse import urljoin import polars as pl @@ -18,7 +19,6 @@ from wetterdienst.metadata.columns import Columns from wetterdienst.metadata.datarange import DataRange from wetterdienst.metadata.kind import Kind -from wetterdienst.metadata.parameter import Parameter from wetterdienst.metadata.period import Period, PeriodType from wetterdienst.metadata.provider import Provider from wetterdienst.metadata.resolution import Resolution, ResolutionType @@ -26,7 +26,6 @@ from wetterdienst.metadata.unit import OriginUnit, SIUnit, UnitEnum from wetterdienst.provider.dwd.metadata.datetime import DatetimeFormat from wetterdienst.provider.dwd.mosmix.access import KMLReader -from wetterdienst.settings import Settings from wetterdienst.util.cache import CacheExpiry from 
wetterdienst.util.enumeration import parse_enumeration_from_template from wetterdienst.util.geo import convert_dm_to_dd @@ -35,6 +34,10 @@ from wetterdienst.util.polars_util import read_fwf_from_df from wetterdienst.util.python import to_list +if TYPE_CHECKING: + from wetterdienst.metadata.parameter import Parameter + from wetterdienst.settings import Settings + try: from backports.datetime_fromisoformat import MonkeyPatch except ImportError: @@ -999,7 +1002,7 @@ class DwdMosmixValues(TimeseriesValues): _tz = Timezone.GERMANY _data_tz = Timezone.UTC - def _create_humanized_parameters_mapping(self) -> Dict[str, str]: + def _create_humanized_parameters_mapping(self) -> dict[str, str]: """ Method for creation of parameter name mappings based on self._parameter_base @@ -1018,7 +1021,7 @@ def __init__(self, stations_result: StationsResult) -> None: :param stations_result: """ - super(DwdMosmixValues, self).__init__(stations_result=stations_result) + super().__init__(stations_result=stations_result) parameter_base = self.sr.stations._parameter_base dataset_accessor = self.sr.stations._dataset_accessor @@ -1032,7 +1035,9 @@ def __init__(self, stations_result: StationsResult) -> None: parameter_.append(parameter.value) self.kml = KMLReader( - station_ids=self.sr.station_id.to_list(), parameters=parameter_, settings=self.sr.stations.settings + station_ids=self.sr.station_id.to_list(), + parameters=parameter_, + settings=self.sr.stations.settings, ) @property @@ -1095,7 +1100,10 @@ def _collect_station_parameter(self) -> Iterator[pl.DataFrame]: yield df else: for date in pl.datetime_range( - self.sr.stations.start_issue, self.sr.stations.end_issue, interval=self.sr.frequency.value, eager=True + self.sr.stations.start_issue, + self.sr.stations.end_issue, + interval=self.sr.frequency.value, + eager=True, ): try: for df in self.read_mosmix(date): @@ -1123,7 +1131,7 @@ def _tidy_up_df(df: pl.DataFrame) -> pl.DataFrame: return df.with_columns(pl.lit(value=None, 
dtype=pl.Float64).alias(Columns.QUALITY.value)) - def read_mosmix(self, date: Union[dt.datetime, DwdForecastDate]) -> Iterator[pl.DataFrame]: + def read_mosmix(self, date: dt.datetime | DwdForecastDate) -> Iterator[pl.DataFrame]: """ Manage data acquisition for a given date that is used to filter the found files on the MOSMIX path of the DWD server. @@ -1135,12 +1143,12 @@ def read_mosmix(self, date: Union[dt.datetime, DwdForecastDate]) -> Iterator[pl. df_forecast = df_forecast.rename( mapping={ "datetime": Columns.DATE.value, - } + }, ) yield df_forecast - def _read_mosmix(self, date: Union[DwdForecastDate, dt.datetime]) -> Iterator[pl.DataFrame]: + def _read_mosmix(self, date: DwdForecastDate | dt.datetime) -> Iterator[pl.DataFrame]: """ Wrapper that either calls read_mosmix_s or read_mosmix_l depending on defined period type @@ -1153,7 +1161,7 @@ def _read_mosmix(self, date: Union[DwdForecastDate, dt.datetime]) -> Iterator[pl else: yield from self.read_mosmix_large(date) - def read_mosmix_small(self, date: Union[DwdForecastDate, dt.datetime]) -> Iterator[pl.DataFrame]: + def read_mosmix_small(self, date: DwdForecastDate | dt.datetime) -> Iterator[pl.DataFrame]: """ Reads single MOSMIX-S file with all stations_result and returns every mosmix that matches with one of the defined station ids. @@ -1164,12 +1172,12 @@ def read_mosmix_small(self, date: Union[DwdForecastDate, dt.datetime]) -> Iterat url = urljoin("https://opendata.dwd.de", DWD_MOSMIX_S_PATH) file_url = self.get_url_for_date(url, date) self.kml.read(file_url) - for forecast in self.kml.get_forecasts(): - yield forecast + yield from self.kml.get_forecasts() def read_mosmix_large( - self, date: Union[DwdForecastDate, dt.datetime] - ) -> Iterator[Tuple[pl.DataFrame, pl.DataFrame]]: + self, + date: DwdForecastDate | dt.datetime, + ) -> Iterator[tuple[pl.DataFrame, pl.DataFrame]]: """ Reads multiple MOSMIX-L files with one per each station and returns a mosmix per file. 
@@ -1184,8 +1192,7 @@ def read_mosmix_large( self.kml.read(file_url) - for forecast in self.kml.get_forecasts(): - yield forecast + yield from self.kml.get_forecasts() else: for station_id in self.sr.station_id: station_url = urljoin("https://opendata.dwd.de", DWD_MOSMIX_L_SINGLE_PATH).format(station_id=station_id) @@ -1200,7 +1207,7 @@ def read_mosmix_large( yield next(self.kml.get_forecasts()) - def get_url_for_date(self, url: str, date: Union[dt.datetime, DwdForecastDate]) -> str: + def get_url_for_date(self, url: str, date: dt.datetime | DwdForecastDate) -> str: """ Method to get a file url based on the MOSMIX-S/MOSMIX-L url and the date that is used for filtering. @@ -1222,13 +1229,13 @@ def get_url_for_date(self, url: str, date: Union[dt.datetime, DwdForecastDate]) df = pl.DataFrame({"url": urls}) df = df.with_columns( - pl.col("url").str.split("/").list.last().str.split("_").list.gather(2).flatten().alias("date") + pl.col("url").str.split("/").list.last().str.split("_").list.gather(2).flatten().alias("date"), ) df = df.filter(pl.col("date").ne("LATEST")) df = df.with_columns( - pl.col("date").map_elements(lambda d: f"{d}00").str.to_datetime(DatetimeFormat.YMDHM.value) + pl.col("date").map_elements(lambda d: f"{d}00").str.to_datetime(DatetimeFormat.YMDHM.value), ) df = df.filter(pl.col("date").eq(date)) @@ -1314,14 +1321,14 @@ def adjust_datetime(datetime_: dt.datetime) -> dt.datetime: def __init__( self, - parameter: Optional[List[Union[str, DwdMosmixParameter, Parameter]]], - mosmix_type: Union[str, DwdMosmixType], - start_issue: Optional[Union[str, dt.datetime, DwdForecastDate]] = DwdForecastDate.LATEST, - end_issue: Optional[Union[str, dt.datetime]] = None, - start_date: Optional[Union[str, dt.datetime]] = None, - end_date: Optional[Union[str, dt.datetime]] = None, - station_group: Optional[DwdMosmixStationGroup] = None, - settings: Optional[Settings] = None, + parameter: list[str | DwdMosmixParameter | Parameter] | None, + mosmix_type: str | 
DwdMosmixType, + start_issue: str | dt.datetime | DwdForecastDate | None = DwdForecastDate.LATEST, + end_issue: str | dt.datetime | None = None, + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None = None, + station_group: DwdMosmixStationGroup | None = None, + settings: Settings | None = None, ) -> None: """ diff --git a/wetterdienst/provider/dwd/observation/__init__.py b/wetterdienst/provider/dwd/observation/__init__.py index 656e02da4..1b1524d62 100644 --- a/wetterdienst/provider/dwd/observation/__init__.py +++ b/wetterdienst/provider/dwd/observation/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.provider.dwd.observation.api import DwdObservationRequest diff --git a/wetterdienst/provider/dwd/observation/api.py b/wetterdienst/provider/dwd/observation/api.py index 731f6511a..01d3b171a 100644 --- a/wetterdienst/provider/dwd/observation/api.py +++ b/wetterdienst/provider/dwd/observation/api.py @@ -1,10 +1,10 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
+from __future__ import annotations + import datetime as dt import logging from itertools import repeat -from typing import List, Optional, Tuple, Union import polars as pl import portion as P @@ -83,7 +83,7 @@ def __eq__(self, other): :param other: :return: """ - return super(DwdObservationValues, self).__eq__(other) and ( + return super().__eq__(other) and ( self.sr.resolution == other.sr.resolution and self.sr.period == other.sr.period ) @@ -96,16 +96,16 @@ def __str__(self): return ", ".join( [ - super(DwdObservationValues, self).__str__(), + super().__str__(), f"resolution {self.sr.resolution.value}", f"periods {periods_joined}", - ] + ], ) def _collect_station_parameter( self, station_id: str, - parameter: Union[DwdObservationParameter, DwdObservationDataset], + parameter: DwdObservationParameter | DwdObservationDataset, # noqa: ARG002 dataset: DwdObservationDataset, ) -> pl.DataFrame: """ @@ -133,7 +133,11 @@ def _collect_station_parameter( for period, date_range in periods_and_date_ranges: parameter_identifier = build_parameter_set_identifier( - dataset, self.sr.resolution, period, station_id, date_range + dataset, + self.sr.resolution, + period, + station_id, + date_range, ) log.info(f"Acquiring observation data for {parameter_identifier}.") @@ -144,7 +148,12 @@ def _collect_station_parameter( continue remote_files = create_file_list_for_climate_observations( - station_id, dataset, self.sr.resolution, period, self.sr.stations.settings, date_range + station_id, + dataset, + self.sr.resolution, + period, + self.sr.stations.settings, + date_range, ) if remote_files.is_empty(): @@ -200,7 +209,7 @@ def _fix_timestamps(df: pl.DataFrame) -> pl.DataFrame: pl.when(pl.col(Columns.DATE.value).dt.year() < 2000) .then(pl.col(Columns.DATE.value) - pl.duration(hours=1)) .otherwise(pl.col(Columns.DATE.value)) - .alias(Columns.DATE.value) + .alias(Columns.DATE.value), ) def _tidy_up_df(self, df: pl.DataFrame, dataset) -> pl.DataFrame: @@ -230,7 +239,7 @@ def 
_tidy_up_df(self, df: pl.DataFrame, dataset) -> pl.DataFrame: ] # Drop string columns, can't be coerced to float - df = df.drop((col for col in droppable_columns if col in df.columns)) + df = df.drop(col for col in droppable_columns if col in df.columns) df = df.select( pl.col(Columns.STATION_ID.value), @@ -248,7 +257,7 @@ def _tidy_up_df(self, df: pl.DataFrame, dataset) -> pl.DataFrame: [ pl.Series(repeat(quality_wind, times=2)).list.explode(), pl.Series(repeat(quality_general, times=12)).list.explode(), - ] + ], ) df = df.drop( DwdObservationParameter.DAILY.CLIMATE_SUMMARY.QUALITY_WIND.value, @@ -257,7 +266,7 @@ def _tidy_up_df(self, df: pl.DataFrame, dataset) -> pl.DataFrame: elif resolution in (Resolution.MONTHLY, Resolution.ANNUAL): quality_general = df.get_column(DwdObservationParameter.MONTHLY.CLIMATE_SUMMARY.QUALITY_GENERAL.value) quality_precipitation = df.get_column( - DwdObservationParameter.MONTHLY.CLIMATE_SUMMARY.QUALITY_PRECIPITATION.value + DwdObservationParameter.MONTHLY.CLIMATE_SUMMARY.QUALITY_PRECIPITATION.value, ) quality = pl.concat( @@ -266,15 +275,15 @@ def _tidy_up_df(self, df: pl.DataFrame, dataset) -> pl.DataFrame: repeat( quality_general, times=9, - ) + ), ).list.explode(), pl.Series( repeat( quality_precipitation, times=2, - ) + ), ).list.explode(), - ] + ], ) df = df.drop( DwdObservationParameter.MONTHLY.CLIMATE_SUMMARY.QUALITY_GENERAL.value, @@ -315,8 +324,11 @@ def _tidy_up_df(self, df: pl.DataFrame, dataset) -> pl.DataFrame: return df.with_columns(pl.when(pl.col(Columns.VALUE.value).is_not_null()).then(pl.col(Columns.QUALITY.value))) def _get_historical_date_ranges( - self, station_id: str, dataset: DwdObservationDataset, settings: Settings - ) -> List[str]: + self, + station_id: str, + dataset: DwdObservationDataset, + settings: Settings, + ) -> list[str]: """ Get particular files for historical data which for high resolution is released in data chunks e.g. 
decades or monthly chunks @@ -326,7 +338,10 @@ def _get_historical_date_ranges( :return: """ file_index = create_file_index_for_climate_observations( - dataset, self.sr.resolution, Period.HISTORICAL, settings + dataset, + self.sr.resolution, + Period.HISTORICAL, + settings, ) file_index = file_index.filter(pl.col(Columns.STATION_ID.value).eq(station_id)) @@ -340,7 +355,7 @@ def _get_historical_date_ranges( file_index = file_index.filter( pl.col(Columns.STATION_ID.value).eq(station_id) & pl.col(Columns.START_DATE.value).ge(end_date_max).not_() - & pl.col(Columns.END_DATE.value).le(start_date_min).not_() + & pl.col(Columns.END_DATE.value).le(start_date_min).not_(), ) return file_index.collect().get_column(Columns.DATE_RANGE.value).to_list() @@ -368,7 +383,7 @@ class DwdObservationRequest(TimeseriesRequest): _values = DwdObservationValues @property - def _interval(self) -> Optional[Interval]: + def _interval(self) -> Interval | None: """ Interval of the request if date given @@ -417,7 +432,7 @@ def _now_interval(self) -> Interval: now_begin = now_end.replace(hour=0, minute=0, second=0) - dt.timedelta(days=1) return P.closed(now_begin, now_end) - def _get_periods(self) -> List[Period]: + def _get_periods(self) -> list[Period]: """ Set periods automatically depending on the given start date and end date. 
Overlapping of historical and recent interval will cause both periods to appear @@ -444,17 +459,15 @@ def _parse_station_id(series: pl.Series) -> pl.Series: def __init__( self, - parameter: Union[ - Union[str, DwdObservationDataset, DwdObservationParameter], - List[Union[str, DwdObservationDataset, DwdObservationParameter]], - ], - resolution: Union[str, Resolution, DwdObservationResolution], - period: Optional[ - Union[str, Period, DwdObservationPeriod, List[Union[str, Period, DwdObservationPeriod]]] - ] = None, - start_date: Optional[Union[str, dt.datetime]] = None, - end_date: Optional[Union[str, dt.datetime]] = None, - settings: Optional[Settings] = None, + parameter: str + | DwdObservationDataset + | DwdObservationParameter + | list[str | DwdObservationDataset | DwdObservationParameter], + resolution: str | Resolution | DwdObservationResolution, + period: str | Period | DwdObservationPeriod | list[str | Period | DwdObservationPeriod] | None = None, + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None = None, + settings: Settings | None = None, ): """ @@ -484,9 +497,9 @@ def __init__( else: self.period = self._parse_period([*self._period_base]) - def filter_by_station_id(self, station_id: Union[str, int, Tuple[str, ...], Tuple[int, ...], List[str], List[int]]): + def filter_by_station_id(self, station_id: str | int | tuple[str, ...] | tuple[int, ...] | list[str] | list[int]): return super().filter_by_station_id( - pl.Series(name=Columns.STATION_ID.value, values=to_list(station_id)).cast(str).str.pad_start(5, "0") + pl.Series(name=Columns.STATION_ID.value, values=to_list(station_id)).cast(str).str.pad_start(5, "0"), ) @classmethod @@ -544,7 +557,7 @@ def _all(self) -> pl.LazyFrame: for period in reversed(self.period): if not check_dwd_observations_dataset(dataset, self.resolution, period): log.warning( - f"The combination of {dataset.value}, " f"{self.resolution.value}, {period.value} is invalid."
+ f"The combination of {dataset.value}, " f"{self.resolution.value}, {period.value} is invalid.", ) continue diff --git a/wetterdienst/provider/dwd/observation/download.py b/wetterdienst/provider/dwd/observation/download.py index 3bcc15745..65d4d4e48 100644 --- a/wetterdienst/provider/dwd/observation/download.py +++ b/wetterdienst/provider/dwd/observation/download.py @@ -1,26 +1,31 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import logging from concurrent.futures import ThreadPoolExecutor from io import BytesIO -from typing import List, Tuple +from typing import TYPE_CHECKING from zipfile import BadZipFile -import polars as pl from fsspec.implementations.zip import ZipFileSystem from wetterdienst.exceptions import ProductFileNotFoundError -from wetterdienst.settings import Settings from wetterdienst.util.cache import CacheExpiry from wetterdienst.util.network import download_file +if TYPE_CHECKING: + import polars as pl + + from wetterdienst.settings import Settings + log = logging.getLogger(__name__) def download_climate_observations_data_parallel( - remote_files: pl.Series, settings: Settings -) -> List[Tuple[str, BytesIO]]: + remote_files: pl.Series, + settings: Settings, +) -> list[tuple[str, BytesIO]]: """ Wrapper for ``_download_dwd_data`` to provide a multiprocessing feature. 
@@ -29,7 +34,8 @@ def download_climate_observations_data_parallel( """ with ThreadPoolExecutor() as p: files_in_bytes = p.map( - lambda file: _download_climate_observations_data(remote_file=file, settings=settings), remote_files + lambda file: _download_climate_observations_data(remote_file=file, settings=settings), + remote_files, ) return list(zip(remote_files, files_in_bytes)) diff --git a/wetterdienst/provider/dwd/observation/fields.py b/wetterdienst/provider/dwd/observation/fields.py index 1659563fc..9be57b9f9 100644 --- a/wetterdienst/provider/dwd/observation/fields.py +++ b/wetterdienst/provider/dwd/observation/fields.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. """ diff --git a/wetterdienst/provider/dwd/observation/fileindex.py b/wetterdienst/provider/dwd/observation/fileindex.py index 255f61c06..8f007e10d 100644 --- a/wetterdienst/provider/dwd/observation/fileindex.py +++ b/wetterdienst/provider/dwd/observation/fileindex.py @@ -1,8 +1,9 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
+from __future__ import annotations + import datetime as dt -from typing import Optional +from typing import TYPE_CHECKING import polars as pl @@ -16,21 +17,23 @@ DwdObservationDataset, ) from wetterdienst.provider.dwd.observation.metadata.resolution import HIGH_RESOLUTIONS -from wetterdienst.settings import Settings from wetterdienst.util.cache import CacheExpiry from wetterdienst.util.network import list_remote_files_fsspec +if TYPE_CHECKING: + from wetterdienst.settings import Settings + STATION_ID_REGEX = r"_(\d{3,5})_" DATE_RANGE_REGEX = r"_(\d{8}_\d{8})_" def create_file_list_for_climate_observations( station_id: str, - dataset: "DwdObservationDataset", + dataset: DwdObservationDataset, resolution: Resolution, period: Period, settings: Settings, - date_range: Optional[str] = None, + date_range: str | None = None, ) -> pl.Series: """ Function for selecting datafiles (links to archives) for given @@ -59,7 +62,10 @@ def create_file_list_for_climate_observations( def create_file_index_for_climate_observations( - dataset: "DwdObservationDataset", resolution: Resolution, period: Period, settings: Settings + dataset: DwdObservationDataset, + resolution: Resolution, + period: Period, + settings: Settings, ) -> pl.LazyFrame: """ Function (cached) to create a file index of the DWD station data. 
The file index @@ -73,11 +79,19 @@ def create_file_index_for_climate_observations( """ if dataset in DWD_URBAN_DATASETS: file_index = _create_file_index_for_dwd_server( - dataset, resolution, Period.RECENT, "observations_germany/climate_urban", settings + dataset, + resolution, + Period.RECENT, + "observations_germany/climate_urban", + settings, ) else: file_index = _create_file_index_for_dwd_server( - dataset, resolution, period, "observations_germany/climate", settings + dataset, + resolution, + period, + "observations_germany/climate", + settings, ) file_index = file_index.filter(pl.col("filename").str.ends_with(Extension.ZIP.value)) @@ -88,7 +102,7 @@ def create_file_index_for_climate_observations( .list.last() .str.extract(STATION_ID_REGEX, 1) .str.pad_start(5, "0") - .alias("station_id") + .alias("station_id"), ) file_index = file_index.filter(pl.col("station_id").is_not_null() & pl.col("station_id").ne("00000")) @@ -125,7 +139,11 @@ def create_file_index_for_climate_observations( def _create_file_index_for_dwd_server( - dataset: DwdObservationDataset, resolution: Resolution, period: Period, cdc_base: str, settings: Settings + dataset: DwdObservationDataset, + resolution: Resolution, + period: Period, + cdc_base: str, + settings: Settings, ) -> pl.LazyFrame: """ Function to create a file index of the DWD station data, which usually is shipped as diff --git a/wetterdienst/provider/dwd/observation/metadata/__init__.py b/wetterdienst/provider/dwd/observation/metadata/__init__.py index 0212b6c5d..af2fb890b 100644 --- a/wetterdienst/provider/dwd/observation/metadata/__init__.py +++ b/wetterdienst/provider/dwd/observation/metadata/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
from wetterdienst.provider.dwd.observation.metadata.dataset import DwdObservationDataset diff --git a/wetterdienst/provider/dwd/observation/metadata/dataset.py b/wetterdienst/provider/dwd/observation/metadata/dataset.py index 63248d008..de98bae52 100644 --- a/wetterdienst/provider/dwd/observation/metadata/dataset.py +++ b/wetterdienst/provider/dwd/observation/metadata/dataset.py @@ -1,8 +1,8 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + from enum import Enum -from typing import Dict, List from wetterdienst.metadata.period import Period from wetterdienst.metadata.resolution import Resolution @@ -59,7 +59,7 @@ class DwdObservationDataset(Enum): DwdObservationDataset.URBAN_WIND, ) -RESOLUTION_DATASET_MAPPING: Dict[Resolution, Dict[DwdObservationDataset, List[Period]]] = { +RESOLUTION_DATASET_MAPPING: dict[Resolution, dict[DwdObservationDataset, list[Period]]] = { Resolution.MINUTE_1: { DwdObservationDataset.PRECIPITATION: [ Period.HISTORICAL, diff --git a/wetterdienst/provider/dwd/observation/metadata/parameter.py b/wetterdienst/provider/dwd/observation/metadata/parameter.py index ea57dc1f8..f8beecb8b 100644 --- a/wetterdienst/provider/dwd/observation/metadata/parameter.py +++ b/wetterdienst/provider/dwd/observation/metadata/parameter.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/provider/dwd/observation/metadata/period.py b/wetterdienst/provider/dwd/observation/metadata/period.py index 3f13d3522..b7b493ec7 100644 --- a/wetterdienst/provider/dwd/observation/metadata/period.py +++ b/wetterdienst/provider/dwd/observation/metadata/period.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. 
# Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/provider/dwd/observation/metadata/resolution.py b/wetterdienst/provider/dwd/observation/metadata/resolution.py index e8d6fea6c..b6cd19c32 100644 --- a/wetterdienst/provider/dwd/observation/metadata/resolution.py +++ b/wetterdienst/provider/dwd/observation/metadata/resolution.py @@ -1,8 +1,8 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + from enum import Enum -from typing import Dict from wetterdienst.metadata.resolution import Resolution from wetterdienst.provider.dwd.metadata.datetime import DatetimeFormat @@ -14,7 +14,7 @@ ) -RESOLUTION_TO_DATETIME_FORMAT_MAPPING: Dict[Resolution, str] = { +RESOLUTION_TO_DATETIME_FORMAT_MAPPING: dict[Resolution, str] = { Resolution.MINUTE_1: DatetimeFormat.YMDHM.value, Resolution.MINUTE_10: DatetimeFormat.YMDHM.value, Resolution.HOURLY: DatetimeFormat.YMDHM.value, diff --git a/wetterdienst/provider/dwd/observation/metadata/unit.py b/wetterdienst/provider/dwd/observation/metadata/unit.py index 9052bf9a3..e3971ae3d 100644 --- a/wetterdienst/provider/dwd/observation/metadata/unit.py +++ b/wetterdienst/provider/dwd/observation/metadata/unit.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.metadata.unit import OriginUnit, SIUnit, UnitEnum diff --git a/wetterdienst/provider/dwd/observation/metaindex.py b/wetterdienst/provider/dwd/observation/metaindex.py index 7a4582936..d27162edd 100644 --- a/wetterdienst/provider/dwd/observation/metaindex.py +++ b/wetterdienst/provider/dwd/observation/metaindex.py @@ -1,12 +1,13 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. 
See LICENSE for more info. +from __future__ import annotations + import datetime as dt import logging import re from concurrent.futures import ThreadPoolExecutor from io import BytesIO, StringIO -from typing import List, Tuple +from typing import TYPE_CHECKING import polars as pl from fsspec.implementations.zip import ZipFileSystem @@ -20,11 +21,13 @@ DWD_URBAN_DATASETS, DwdObservationDataset, ) -from wetterdienst.settings import Settings from wetterdienst.util.cache import CacheExpiry from wetterdienst.util.network import download_file, list_remote_files_fsspec from wetterdienst.util.polars_util import read_fwf_from_df +if TYPE_CHECKING: + from wetterdienst.settings import Settings + log = logging.getLogger(__name__) DWD_COLUMN_NAMES_MAPPING = { @@ -53,7 +56,10 @@ def create_meta_index_for_climate_observations( - dataset: DwdObservationDataset, resolution: Resolution, period: Period, settings: Settings + dataset: DwdObservationDataset, + resolution: Resolution, + period: Period, + settings: Settings, ) -> pl.LazyFrame: """ Wrapper function that either calls the regular meta index function for general @@ -89,7 +95,10 @@ def create_meta_index_for_climate_observations( # precipitation if cond1: mdp = _create_meta_index_for_climate_observations( - DwdObservationDataset.PRECIPITATION_MORE, Resolution.DAILY, Period.HISTORICAL, settings=settings + DwdObservationDataset.PRECIPITATION_MORE, + Resolution.DAILY, + Period.HISTORICAL, + settings=settings, ) meta_index = meta_index.join( @@ -110,7 +119,10 @@ def create_meta_index_for_climate_observations( def _create_meta_index_for_climate_observations( - dataset: DwdObservationDataset, resolution: Resolution, period: Period, settings: Settings + dataset: DwdObservationDataset, + resolution: Resolution, + period: Period, + settings: Settings, ) -> pl.LazyFrame: """Function used to create meta index DataFrame parsed from the text files that are located in each data section of the station data directory of the weather service. 
@@ -139,7 +151,7 @@ def _create_meta_index_for_climate_observations( return _read_meta_df(payload) -def _find_meta_file(files: List[str], url: str, strings: List[str]) -> str: +def _find_meta_file(files: list[str], url: str, strings: list[str]) -> str: """ Function used to find meta file based on predefined strings that are usually found in those files @@ -226,7 +238,8 @@ def _create_meta_index_for_1minute_historical_precipitation(settings: Settings) log.info(f"Downloading {len(metadata_file_paths)} files for 1minute precipitation historical metadata.") with ThreadPoolExecutor() as executor: metadata_files = executor.map( - lambda file: download_file(url=file, settings=settings, ttl=CacheExpiry.NO_CACHE), metadata_file_paths + lambda file: download_file(url=file, settings=settings, ttl=CacheExpiry.NO_CACHE), + metadata_file_paths, ) metadata_dfs = [_parse_geo_metadata((file, station_id)) for file, station_id in zip(metadata_files, station_ids)] @@ -237,7 +250,7 @@ def _create_meta_index_for_1minute_historical_precipitation(settings: Settings) pl.when(pl.col(Columns.END_DATE.value).str.strip_chars().eq("")) .then(pl.lit((dt.date.today() - dt.timedelta(days=1)).strftime("%Y%m%d"))) .otherwise(pl.col(Columns.END_DATE.value)) - .alias(Columns.END_DATE.value) + .alias(Columns.END_DATE.value), ) # Drop empty state column again as it will be merged later on @@ -249,7 +262,7 @@ def _create_meta_index_for_1minute_historical_precipitation(settings: Settings) return meta_index_df.with_columns(pl.col(Columns.STATION_ID.value).cast(str).str.pad_start(5, "0")) -def _parse_geo_metadata(metadata_file_and_station_id: Tuple[BytesIO, str]) -> pl.LazyFrame: +def _parse_geo_metadata(metadata_file_and_station_id: tuple[BytesIO, str]) -> pl.LazyFrame: """A function that analysis the given file (bytes) and extracts geography of 1minute metadata zip and catches the relevant information and create a similar file to those that can usually be found already prepared for other @@ -280,7 +293,7 
@@ def _parse_geo_metadata(metadata_file_and_station_id: Tuple[BytesIO, str]) -> pl "von_datum": Columns.START_DATE.value, "bis_datum": Columns.END_DATE.value, "Stationsname": Columns.NAME.value, - } + }, ) df = df.with_columns(pl.col(Columns.START_DATE.value).first().cast(str), pl.col(Columns.END_DATE.value).cast(str)) diff --git a/wetterdienst/provider/dwd/observation/parser.py b/wetterdienst/provider/dwd/observation/parser.py index 553c657bb..1f97632ef 100644 --- a/wetterdienst/provider/dwd/observation/parser.py +++ b/wetterdienst/provider/dwd/observation/parser.py @@ -1,9 +1,9 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import logging from io import BytesIO, StringIO -from typing import List, Tuple import polars as pl @@ -74,7 +74,7 @@ def parse_climate_observations_data( - filenames_and_files: List[Tuple[str, BytesIO]], + filenames_and_files: list[tuple[str, BytesIO]], dataset: DwdObservationDataset, resolution: Resolution, period: Period, @@ -111,7 +111,7 @@ def parse_climate_observations_data( def _parse_climate_observations_data( - filename_and_file: Tuple[str, BytesIO], + filename_and_file: tuple[str, BytesIO], dataset: DwdObservationDataset, resolution: Resolution, period: Period, @@ -148,7 +148,7 @@ def _parse_climate_observations_data( df = df.rename(mapping=lambda col: col.strip().lower()) # End of record (EOR) has no value, so drop it right away. 
- df = df.drop((col for col in DROPPABLE_PARAMETERS if col in df.columns)) + df = df.drop(col for col in DROPPABLE_PARAMETERS if col in df.columns) if resolution == Resolution.MINUTE_1: if dataset == DwdObservationDataset.PRECIPITATION: @@ -161,8 +161,8 @@ def _parse_climate_observations_data( ) df = df.with_columns( pl.datetime_ranges(pl.col("mess_datum_beginn"), pl.col("mess_datum_ende"), interval="1m").alias( - "mess_datum" - ) + "mess_datum", + ), ) df = df.drop( "mess_datum_beginn", @@ -172,10 +172,10 @@ def _parse_climate_observations_data( df = df.explode("mess_datum") else: df = df.with_columns( - [pl.all(), *[pl.lit(None, pl.Utf8).alias(par) for par in PRECIPITATION_PARAMETERS]] + [pl.all(), *[pl.lit(None, pl.Utf8).alias(par) for par in PRECIPITATION_PARAMETERS]], ) df = df.with_columns( - pl.col("mess_datum").cast(str).str.to_datetime(DatetimeFormat.YMDHM.value, time_zone="UTC") + pl.col("mess_datum").cast(str).str.to_datetime(DatetimeFormat.YMDHM.value, time_zone="UTC"), ) if resolution == Resolution.MINUTE_5 and dataset == DwdObservationDataset.PRECIPITATION: # apparently historical datasets differ from recent and now having all columns as described in the @@ -196,13 +196,14 @@ def _parse_climate_observations_data( df = df.with_columns(pl.col("mess_datum").map_elements(lambda date: date[:-3])) if resolution in (Resolution.MONTHLY, Resolution.ANNUAL): - df = df.drop((col for col in ["bis_datum", "mess_datum_ende"] if col in df.columns)).rename( - mapping={"mess_datum_beginn": "mess_datum"} + df = df.drop(col for col in ["bis_datum", "mess_datum_ende"] if col in df.columns).rename( + mapping={"mess_datum_beginn": "mess_datum"}, ) if resolution == Resolution.SUBDAILY and dataset is DwdObservationDataset.WIND_EXTREME: df = df.select( - pl.all().exclude("qn_8"), pl.col("qn_8").alias("qn_8_3" if "fx_911_3" in df.columns else "qn_8_6") + pl.all().exclude("qn_8"), + pl.col("qn_8").alias("qn_8_3" if "fx_911_3" in df.columns else "qn_8_6"), ) fmt = None diff 
--git a/wetterdienst/provider/dwd/observation/util/__init__.py b/wetterdienst/provider/dwd/observation/util/__init__.py index 36979cf71..57a368863 100644 --- a/wetterdienst/provider/dwd/observation/util/__init__.py +++ b/wetterdienst/provider/dwd/observation/util/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/provider/dwd/observation/util/parameter.py b/wetterdienst/provider/dwd/observation/util/parameter.py index 61e3f1776..022322301 100644 --- a/wetterdienst/provider/dwd/observation/util/parameter.py +++ b/wetterdienst/provider/dwd/observation/util/parameter.py @@ -1,15 +1,18 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. -from typing import Optional +from __future__ import annotations + +from typing import TYPE_CHECKING -from wetterdienst.metadata.period import Period -from wetterdienst.metadata.resolution import Resolution from wetterdienst.provider.dwd.observation.metadata.dataset import ( RESOLUTION_DATASET_MAPPING, DwdObservationDataset, ) +if TYPE_CHECKING: + from wetterdienst.metadata.period import Period + from wetterdienst.metadata.resolution import Resolution + def check_dwd_observations_dataset( dataset: DwdObservationDataset, @@ -33,7 +36,7 @@ def build_parameter_set_identifier( resolution: Resolution, period: Period, station_id: str, - date_range_string: Optional[str] = None, + date_range_string: str | None = None, ) -> str: """Create parameter set identifier that is used for storage interactions""" identifier = f"{dataset.value}/{resolution.value}/{period.value}/{station_id}" diff --git a/wetterdienst/provider/dwd/radar/__init__.py b/wetterdienst/provider/dwd/radar/__init__.py index ac80137ae..832a70f0e 100644 --- a/wetterdienst/provider/dwd/radar/__init__.py +++ 
b/wetterdienst/provider/dwd/radar/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.provider.dwd.radar.api import DwdRadarValues diff --git a/wetterdienst/provider/dwd/radar/api.py b/wetterdienst/provider/dwd/radar/api.py index dd80d64e9..98ce9e034 100644 --- a/wetterdienst/provider/dwd/radar/api.py +++ b/wetterdienst/provider/dwd/radar/api.py @@ -1,6 +1,7 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import bz2 import datetime as dt import gzip @@ -9,7 +10,7 @@ import tarfile from dataclasses import dataclass from io import BytesIO -from typing import Generator, Optional, Union +from typing import Generator import polars as pl from fsspec.implementations.tar import TarFileSystem @@ -97,16 +98,16 @@ class DwdRadarValues: def __init__( self, - parameter: Union[str, DwdRadarParameter], - site: Optional[DwdRadarSite] = None, - fmt: Optional[DwdRadarDataFormat] = None, - subset: Optional[DwdRadarDataSubset] = None, - elevation: Optional[int] = None, - start_date: Optional[Union[str, dt.datetime, DwdRadarDate]] = None, - end_date: Optional[Union[str, dt.datetime, dt.timedelta]] = None, - resolution: Optional[Union[str, Resolution, DwdRadarResolution]] = None, - period: Optional[Union[str, Period, DwdRadarPeriod]] = None, - settings: Optional[Settings] = None, + parameter: str | DwdRadarParameter, + site: DwdRadarSite | None = None, + fmt: DwdRadarDataFormat | None = None, + subset: DwdRadarDataSubset | None = None, + elevation: int | None = None, + start_date: str | dt.datetime | DwdRadarDate | None = None, + end_date: str | dt.datetime | dt.timedelta | None = None, + resolution: str | Resolution | DwdRadarResolution | None = None, + period: str | Period | DwdRadarPeriod | None = None, + 
settings: Settings | None = None, ) -> None: """ :param parameter: The radar moment to request @@ -306,7 +307,11 @@ def query(self) -> Generator[RadarResult, None, None]: # Find latest file. if self.start_date == DwdRadarDate.LATEST: file_index = create_fileindex_radar( - parameter=self.parameter, site=self.site, fmt=self.format, parse_datetime=False, settings=self.settings + parameter=self.parameter, + site=self.site, + fmt=self.format, + parse_datetime=False, + settings=self.settings, ) # Find "-latest-" or "LATEST" or similar file. @@ -333,20 +338,22 @@ def query(self) -> Generator[RadarResult, None, None]: results = [] for period in period_types: file_index = create_fileindex_radolan_cdc( - resolution=self.resolution, period=period, settings=self.settings + resolution=self.resolution, + period=period, + settings=self.settings, ) # Filter for dates range if start_date and end_date are defined. if period == Period.RECENT: file_index = file_index.filter( - pl.col("datetime").is_between(self.start_date, self.end_date, closed="both") + pl.col("datetime").is_between(self.start_date, self.end_date, closed="both"), ) # This is for matching historical data, e.g. "RW-200509.tar.gz". else: file_index = file_index.filter( pl.col("datetime").dt.year().eq(self.start_date.year) - & pl.col("datetime").dt.month().eq(self.start_date.month) + & pl.col("datetime").dt.month().eq(self.start_date.month), ) results.append(file_index) @@ -375,7 +382,7 @@ def query(self) -> Generator[RadarResult, None, None]: # Filter for dates range if start_date and end_date are defined. file_index = file_index.filter( - pl.col("datetime").is_between(self.start_date, self.end_date, closed="both") + pl.col("datetime").is_between(self.start_date, self.end_date, closed="both"), ) # Filter SWEEP_VOL_VELOCITY_H and SWEEP_VOL_REFLECTIVITY_H by elevation. 
@@ -384,7 +391,7 @@ def query(self) -> Generator[RadarResult, None, None]: pl.col("filename").str.contains(f"vradh_{self.elevation:02d}") | pl.col("filename").str.contains(f"sweep_vol_v_{self.elevation}") | pl.col("filename").str.contains(f"dbzh_{self.elevation:02d}") - | pl.col("filename").str.contains(f"sweep_vol_z_{self.elevation}") + | pl.col("filename").str.contains(f"sweep_vol_z_{self.elevation}"), ) if file_index.is_empty(): @@ -448,7 +455,9 @@ def _download_generic_data(self, url: str) -> Generator[RadarResult, None, None] yield RadarResult( data=BytesIO(tfs.open(file).read()), timestamp=get_date_from_filename( - file_name, pattern=RADAR_DT_PATTERN, formats=[DatetimeFormat.ymdhm.value] + file_name, + pattern=RADAR_DT_PATTERN, + formats=[DatetimeFormat.ymdhm.value], ), filename=file_name, ) @@ -461,7 +470,9 @@ def _download_generic_data(self, url: str) -> Generator[RadarResult, None, None] url=url, data=data, timestamp=get_date_from_filename( - url, pattern=RADAR_DT_PATTERN, formats=[DatetimeFormat.ymdhm.value] + url, + pattern=RADAR_DT_PATTERN, + formats=[DatetimeFormat.ymdhm.value], ), ) @@ -473,7 +484,9 @@ def _download_generic_data(self, url: str) -> Generator[RadarResult, None, None] url=url, data=data, timestamp=get_date_from_filename( - url, pattern=RADAR_DT_PATTERN, formats=[DatetimeFormat.ymdhm.value] + url, + pattern=RADAR_DT_PATTERN, + formats=[DatetimeFormat.ymdhm.value], ), ) diff --git a/wetterdienst/provider/dwd/radar/cli.py b/wetterdienst/provider/dwd/radar/cli.py index 15e574be4..82bd4c8d8 100644 --- a/wetterdienst/provider/dwd/radar/cli.py +++ b/wetterdienst/provider/dwd/radar/cli.py @@ -1,7 +1,7 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
import sys +from pathlib import Path import h5py @@ -24,16 +24,15 @@ def hdf5dump(thing, compact=False): "stopelA", ] - with open(thing, "rb") as buffer: - hdf = h5py.File(buffer, "r") - - def dumpattrs(item, indent=2): - for name, value in item.attrs.items(): - if compact: - if name in blocklist: - continue - print(" " * indent, "-", name, value) # noqa: T201 + def dumpattrs(item, indent=2): + for name, value in item.attrs.items(): + if compact: + if name in blocklist: + continue + print(" " * indent, "-", name, value) # noqa: T201 + with Path(thing).open("rb") as buffer: + hdf = h5py.File(buffer, "r") for group in hdf.keys(): print("name:", hdf[group].name) # noqa: T201 dumpattrs(hdf[group]) diff --git a/wetterdienst/provider/dwd/radar/index.py b/wetterdienst/provider/dwd/radar/index.py index 6137724c7..583c935ca 100644 --- a/wetterdienst/provider/dwd/radar/index.py +++ b/wetterdienst/provider/dwd/radar/index.py @@ -1,8 +1,9 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
+from __future__ import annotations + import os -from typing import Optional +from typing import TYPE_CHECKING import polars as pl @@ -20,16 +21,18 @@ DwdRadarDataSubset, DwdRadarParameter, ) -from wetterdienst.provider.dwd.radar.sites import DwdRadarSite from wetterdienst.provider.dwd.radar.util import ( RADAR_DT_PATTERN, RADOLAN_DT_PATTERN, get_date_from_filename, ) -from wetterdienst.settings import Settings from wetterdienst.util.cache import CacheExpiry from wetterdienst.util.network import list_remote_files_fsspec +if TYPE_CHECKING: + from wetterdienst.provider.dwd.radar.sites import DwdRadarSite + from wetterdienst.settings import Settings + def use_cache() -> int: # pragma: no cover """ @@ -51,11 +54,11 @@ def use_cache() -> int: # pragma: no cover def create_fileindex_radar( parameter: DwdRadarParameter, settings: Settings, - site: Optional[DwdRadarSite] = None, - fmt: Optional[DwdRadarDataFormat] = None, - subset: Optional[DwdRadarDataSubset] = None, - resolution: Optional[Resolution] = None, - period: Optional[Period] = None, + site: DwdRadarSite | None = None, + fmt: DwdRadarDataFormat | None = None, + subset: DwdRadarDataSubset | None = None, + resolution: Resolution | None = None, + period: Period | None = None, parse_datetime: bool = False, ) -> pl.DataFrame: """ @@ -117,7 +120,7 @@ def create_fileindex_radar( .map_elements( lambda fn: get_date_from_filename(filename=fn, pattern=RADAR_DT_PATTERN, formats=formats), ) - .alias("datetime") + .alias("datetime"), ) return df_fileindex.drop_nulls() @@ -137,12 +140,17 @@ def create_fileindex_radolan_cdc(resolution: Resolution, period: Period, setting :return: File index as DataFrame """ df_fileindex = create_fileindex_radar( - parameter=DwdRadarParameter.RADOLAN_CDC, resolution=resolution, period=period, settings=settings + parameter=DwdRadarParameter.RADOLAN_CDC, + resolution=resolution, + period=period, + settings=settings, ) df_fileindex = df_fileindex.filter( pl.col("filename").str.contains("/bin/", 
literal=True) - & (pl.col("filename").str.ends_with(Extension.GZ.value) | pl.col("filename").str.ends_with(Extension.TAR.value)) + & ( + pl.col("filename").str.ends_with(Extension.GZ.value) | pl.col("filename").str.ends_with(Extension.TAR.value) + ), ) if period == Period.HISTORICAL: @@ -155,7 +163,7 @@ def create_fileindex_radolan_cdc(resolution: Resolution, period: Period, setting .map_elements( lambda fn: get_date_from_filename(filename=fn, pattern=RADOLAN_DT_PATTERN, formats=formats), ) - .alias("datetime") + .alias("datetime"), ) return df_fileindex.drop_nulls() @@ -163,11 +171,11 @@ def create_fileindex_radolan_cdc(resolution: Resolution, period: Period, setting def build_path_to_parameter( parameter: DwdRadarParameter, - site: Optional[DwdRadarSite] = None, - fmt: Optional[DwdRadarDataFormat] = None, - subset: Optional[DwdRadarDataSubset] = None, - resolution: Optional[Resolution] = None, - period: Optional[Period] = None, + site: DwdRadarSite | None = None, + fmt: DwdRadarDataFormat | None = None, + subset: DwdRadarDataSubset | None = None, + resolution: Resolution | None = None, + period: Period | None = None, ) -> str: """ Compute URL path to data product. diff --git a/wetterdienst/provider/dwd/radar/metadata/__init__.py b/wetterdienst/provider/dwd/radar/metadata/__init__.py index 3d8adc299..3d6e625d0 100644 --- a/wetterdienst/provider/dwd/radar/metadata/__init__.py +++ b/wetterdienst/provider/dwd/radar/metadata/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
from wetterdienst.provider.dwd.radar.metadata.parameter import ( diff --git a/wetterdienst/provider/dwd/radar/metadata/parameter.py b/wetterdienst/provider/dwd/radar/metadata/parameter.py index bf9f294ac..199864770 100644 --- a/wetterdienst/provider/dwd/radar/metadata/parameter.py +++ b/wetterdienst/provider/dwd/radar/metadata/parameter.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/provider/dwd/radar/metadata/period.py b/wetterdienst/provider/dwd/radar/metadata/period.py index 948294be4..1ecc21c36 100644 --- a/wetterdienst/provider/dwd/radar/metadata/period.py +++ b/wetterdienst/provider/dwd/radar/metadata/period.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/provider/dwd/radar/metadata/resolution.py b/wetterdienst/provider/dwd/radar/metadata/resolution.py index 02f985b9f..0f8c8a049 100644 --- a/wetterdienst/provider/dwd/radar/metadata/resolution.py +++ b/wetterdienst/provider/dwd/radar/metadata/resolution.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/provider/dwd/radar/sites.py b/wetterdienst/provider/dwd/radar/sites.py index 301a6406b..cf0f36e37 100644 --- a/wetterdienst/provider/dwd/radar/sites.py +++ b/wetterdienst/provider/dwd/radar/sites.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
""" @@ -17,8 +16,9 @@ - https://github.com/wradlib/wradlib-notebooks/blob/v1.8.0/notebooks/radolan/radolan_network.ipynb """ # noqa:B950,E501 +from __future__ import annotations + from enum import Enum -from typing import Dict import polars as pl @@ -60,7 +60,7 @@ class DwdRadarSitesGenerator: # pragma: no cover "/HaeufigGesucht/koordinaten-radarverbund.pdf?__blob=publicationFile" ) - def all(self) -> Dict: # pragma: no cover # noqa: A003 + def all(self) -> dict: # pragma: no cover # noqa: A003 """ Build dictionary from DataFrame containing radar site information. """ diff --git a/wetterdienst/provider/dwd/radar/util.py b/wetterdienst/provider/dwd/radar/util.py index dc42a24c3..8c0c87bc4 100644 --- a/wetterdienst/provider/dwd/radar/util.py +++ b/wetterdienst/provider/dwd/radar/util.py @@ -1,10 +1,13 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import datetime as dt import re -from io import BytesIO -from typing import List, Optional +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from io import BytesIO # 6-character timestamps are used for data within "RADOLAN_CDC/historical". # Examples: @@ -33,7 +36,7 @@ RADOLAN_DT_PATTERN = re.compile(f"{RADAR_DT_REGEX_SHORT}|{RADAR_DT_REGEX_MEDIUM}") -def get_date_from_filename(filename: str, pattern: re.Pattern, formats: List[str]) -> Optional[dt.datetime]: +def get_date_from_filename(filename: str, pattern: re.Pattern, formats: list[str]) -> dt.datetime | None: try: date_string = pattern.findall(filename)[0] except IndexError: diff --git a/wetterdienst/provider/dwd/road/api.py b/wetterdienst/provider/dwd/road/api.py index 2ac887088..445525047 100644 --- a/wetterdienst/provider/dwd/road/api.py +++ b/wetterdienst/provider/dwd/road/api.py @@ -1,14 +1,13 @@ -# -*- coding: utf-8 -*- # Copyright (c) 2018-2022, earthobservations developers. # Distributed under the MIT License. 
See LICENSE for more info. -import datetime as dt +from __future__ import annotations + import logging from concurrent.futures import ThreadPoolExecutor from enum import Enum from functools import reduce -from io import BytesIO from tempfile import NamedTemporaryFile -from typing import TYPE_CHECKING, List, Optional, Tuple, Union +from typing import TYPE_CHECKING from urllib.parse import urljoin import polars as pl @@ -28,6 +27,9 @@ from wetterdienst.util.parameter import DatasetTreeCore if TYPE_CHECKING: + import datetime as dt + from io import BytesIO + from wetterdienst.core.timeseries.result import StationsResult log = logging.getLogger(__name__) @@ -154,7 +156,7 @@ class DwdRoadValues(TimeseriesValues): _data_tz = Timezone.UTC - def __init__(self, stations_result: "StationsResult") -> None: + def __init__(self, stations_result: StationsResult) -> None: check_pdbufr() super().__init__(stations_result) @@ -201,11 +203,13 @@ def _create_file_index_for_dwd_road_weather_station( .list.last() .str.extract(DATE_REGEX, 1) .str.to_datetime("%y%m%d%H%M", time_zone="UTC") - .alias("date") + .alias("date"), ) def _collect_data_by_station_group( - self, road_weather_station_group: DwdRoadStationGroup, parameters: List[str] + self, + road_weather_station_group: DwdRoadStationGroup, + parameters: list[str], ) -> pl.DataFrame: """ Method to collect data for one specified parameter. 
Manages restoring, @@ -221,14 +225,14 @@ def _collect_data_by_station_group( remote_files = self._create_file_index_for_dwd_road_weather_station(road_weather_station_group) if self.sr.start_date: remote_files = remote_files.filter( - pl.col(Columns.DATE.value).is_between(self.sr.start_date, self.sr.end_date) + pl.col(Columns.DATE.value).is_between(self.sr.start_date, self.sr.end_date), ) remote_files = remote_files.get_column(Columns.FILENAME.value).to_list() filenames_and_files = self._download_road_weather_observations(remote_files, self.sr.settings) return self._parse_dwd_road_weather_data(filenames_and_files, parameters) @staticmethod - def _download_road_weather_observations(remote_files: List[str], settings) -> List[Tuple[str, BytesIO]]: + def _download_road_weather_observations(remote_files: list[str], settings) -> list[tuple[str, BytesIO]]: """ :param remote_files: List of requested files :return: List of downloaded files @@ -236,13 +240,16 @@ def _download_road_weather_observations(remote_files: List[str], settings) -> Li log.info(f"Downloading {len(remote_files)} files from DWD Road Weather.") with ThreadPoolExecutor() as p: files_in_bytes = p.map( - lambda file: download_file(url=file, settings=settings, ttl=CacheExpiry.TWELVE_HOURS), remote_files + lambda file: download_file(url=file, settings=settings, ttl=CacheExpiry.TWELVE_HOURS), + remote_files, ) return list(zip(remote_files, files_in_bytes)) def _parse_dwd_road_weather_data( - self, filenames_and_files: List[Tuple[str, BytesIO]], parameters: List[str] + self, + filenames_and_files: list[tuple[str, BytesIO]], + parameters: list[str], ) -> pl.DataFrame: """ This function is used to read the road weather station data from given bytes object. 
@@ -260,11 +267,11 @@ def _parse_dwd_road_weather_data( [ self.__parse_dwd_road_weather_data(filename_and_file, parameters) for filename_and_file in filenames_and_files - ] + ], ) @staticmethod - def __parse_dwd_road_weather_data(filename_and_file: Tuple[str, BytesIO], parameters: List[str]) -> pl.DataFrame: + def __parse_dwd_road_weather_data(filename_and_file: tuple[str, BytesIO], parameters: list[str]) -> pl.DataFrame: """ A wrapping function that only handles data for one station id. The files passed to it are thus related to this id. This is important for storing the data locally as @@ -313,7 +320,7 @@ def __parse_dwd_road_weather_data(filename_and_file: Tuple[str, BytesIO], parame pl.col("day").cast(pl.Utf8).str.pad_start(2, "0"), pl.col("hour").cast(pl.Utf8).str.pad_start(2, "0"), pl.col("minute").cast(pl.Utf8).str.pad_start(2, "0"), - ] + ], ) .str.to_datetime("%Y%m%d%H%M", time_zone="UTC") .alias("timestamp"), @@ -326,7 +333,8 @@ def __parse_dwd_road_weather_data(filename_and_file: Tuple[str, BytesIO], parame value_name=Columns.VALUE.value, ) return df.with_columns( - pl.col("value").cast(pl.Float64), pl.lit(None, dtype=pl.Float64).alias(Columns.QUALITY.value) + pl.col("value").cast(pl.Float64), + pl.lit(None, dtype=pl.Float64).alias(Columns.QUALITY.value), ) @@ -354,7 +362,7 @@ class DwdRoadRequest(TimeseriesRequest): Columns.ROAD_TYPE.value, Columns.ROAD_SURFACE_TYPE.value, Columns.ROAD_SURROUNDINGS_TYPE.value, - ) + ), ) _endpoint = ( "https://www.dwd.de/DE/leistungen/opendata/help/stationen/sws_stations_xls.xlsx?__blob=publicationFile&v=11" @@ -392,12 +400,12 @@ class DwdRoadRequest(TimeseriesRequest): def __init__( self, - parameter: List[Union[str, Enum, Parameter]], - start_date: Optional[Union[str, dt.datetime]] = None, - end_date: Optional[Union[str, dt.datetime]] = None, - settings: Optional[Settings] = None, + parameter: list[str | Enum | Parameter], + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None = None, + 
settings: Settings | None = None, ): - super(DwdRoadRequest, self).__init__( + super().__init__( parameter=parameter, resolution=Resolution.MINUTE_10, period=Period.HISTORICAL, @@ -415,17 +423,17 @@ def _all(self) -> pl.LazyFrame: df = df.filter( pl.col(Columns.HAS_FILE.value).is_null() & pl.col(Columns.STATION_GROUP.value).ne("0") - & pl.col(Columns.STATION_ID.value).is_not_null() + & pl.col(Columns.STATION_ID.value).is_not_null(), ) df = df.with_columns( pl.col(Columns.LONGITUDE.value).str.replace(",", "."), pl.col(Columns.LATITUDE.value).str.replace(",", "."), pl.when(~pl.col(Columns.ROAD_TYPE.value).str.contains("x")).then(pl.col(Columns.ROAD_TYPE.value)), pl.when(~pl.col(Columns.ROAD_SURROUNDINGS_TYPE.value).str.contains("x")).then( - pl.col(Columns.ROAD_SURROUNDINGS_TYPE.value) + pl.col(Columns.ROAD_SURROUNDINGS_TYPE.value), ), pl.when(~pl.col(Columns.ROAD_SURFACE_TYPE.value).str.contains("x")).then( - pl.col(Columns.ROAD_SURFACE_TYPE.value) + pl.col(Columns.ROAD_SURFACE_TYPE.value), ), ) df = df.with_columns(pl.col(col).cast(dtype) for col, dtype in self._dtypes.items()) diff --git a/wetterdienst/provider/ea/__init__.py b/wetterdienst/provider/ea/__init__.py index 81cd921b5..c79af6a95 100644 --- a/wetterdienst/provider/ea/__init__.py +++ b/wetterdienst/provider/ea/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/provider/ea/hydrology/__init__.py b/wetterdienst/provider/ea/hydrology/__init__.py index 2d96527ed..81127793b 100644 --- a/wetterdienst/provider/ea/hydrology/__init__.py +++ b/wetterdienst/provider/ea/hydrology/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
from wetterdienst.provider.ea.hydrology.api import ( diff --git a/wetterdienst/provider/ea/hydrology/api.py b/wetterdienst/provider/ea/hydrology/api.py index 38200567a..0457b6ec0 100644 --- a/wetterdienst/provider/ea/hydrology/api.py +++ b/wetterdienst/provider/ea/hydrology/api.py @@ -1,11 +1,11 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. -import datetime as dt +from __future__ import annotations + import json import logging from enum import Enum -from typing import List, Optional, Union +from typing import TYPE_CHECKING import polars as pl @@ -14,17 +14,21 @@ from wetterdienst.metadata.columns import Columns from wetterdienst.metadata.datarange import DataRange from wetterdienst.metadata.kind import Kind -from wetterdienst.metadata.parameter import Parameter from wetterdienst.metadata.period import Period, PeriodType from wetterdienst.metadata.provider import Provider from wetterdienst.metadata.resolution import Resolution, ResolutionType from wetterdienst.metadata.timezone import Timezone from wetterdienst.metadata.unit import OriginUnit, SIUnit, UnitEnum -from wetterdienst.settings import Settings from wetterdienst.util.cache import CacheExpiry from wetterdienst.util.network import download_file from wetterdienst.util.parameter import DatasetTreeCore +if TYPE_CHECKING: + import datetime as dt + + from wetterdienst.metadata.parameter import Parameter + from wetterdienst.settings import Settings + log = logging.getLogger(__file__) @@ -88,7 +92,12 @@ class EaHydrologyValues(TimeseriesValues): _base_url = "https://environment.data.gov.uk/hydrology/id/stations/{station_id}.json" _data_tz = Timezone.UK - def _collect_station_parameter(self, station_id: str, parameter: Enum, dataset: Enum) -> pl.DataFrame: + def _collect_station_parameter( + self, + station_id: str, + parameter: Enum, + dataset: Enum, # noqa: ARG002 + ) -> pl.DataFrame: endpoint = 
self._base_url.format(station_id=station_id) log.info(f"Downloading file {endpoint}.") payload = download_file(endpoint, self.sr.stations.settings, CacheExpiry.NO_CACHE) @@ -99,7 +108,7 @@ def _collect_station_parameter(self, station_id: str, parameter: Enum, dataset: .map_elements(lambda measure: measure["parameterName"]) .str.to_lowercase() .str.replace(" ", "") - .eq(parameter.value.lower().replace("_", "")) + .eq(parameter.value.lower().replace("_", "")), ) try: measure_dict = measures_list.get_column("measure")[0] @@ -132,13 +141,13 @@ class EaHydrologyRequest(TimeseriesRequest): def __init__( self, - parameter: List[Union[str, EaHydrologyParameter, Parameter]], - resolution: Union[str, EaHydrologyResolution, Resolution], - start_date: Optional[Union[str, dt.datetime]] = None, - end_date: Optional[Union[str, dt.datetime]] = None, - settings: Optional[Settings] = None, + parameter: list[str | EaHydrologyParameter | Parameter], + resolution: str | EaHydrologyResolution | Resolution, + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None = None, + settings: Settings | None = None, ): - super(EaHydrologyRequest, self).__init__( + super().__init__( parameter=parameter, resolution=resolution, period=Period.HISTORICAL, @@ -180,5 +189,5 @@ def _all(self) -> pl.LazyFrame: "lat": Columns.LATITUDE.value, "long": Columns.LONGITUDE.value, "notation": Columns.STATION_ID.value, - } + }, ) diff --git a/wetterdienst/provider/eaufrance/__init__.py b/wetterdienst/provider/eaufrance/__init__.py index 36979cf71..57a368863 100644 --- a/wetterdienst/provider/eaufrance/__init__.py +++ b/wetterdienst/provider/eaufrance/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
diff --git a/wetterdienst/provider/eaufrance/hubeau/__init__.py b/wetterdienst/provider/eaufrance/hubeau/__init__.py index ec30cc3a4..5ae22ba20 100644 --- a/wetterdienst/provider/eaufrance/hubeau/__init__.py +++ b/wetterdienst/provider/eaufrance/hubeau/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.provider.eaufrance.hubeau.api import HubeauParameter, HubeauRequest diff --git a/wetterdienst/provider/eaufrance/hubeau/api.py b/wetterdienst/provider/eaufrance/hubeau/api.py index 7aaae37e6..1c30157d4 100644 --- a/wetterdienst/provider/eaufrance/hubeau/api.py +++ b/wetterdienst/provider/eaufrance/hubeau/api.py @@ -1,12 +1,13 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import datetime as dt import json import logging import math from enum import Enum -from typing import Iterator, List, Literal, Optional, Tuple, Union +from typing import TYPE_CHECKING, Iterator, Literal import polars as pl from zoneinfo import ZoneInfo @@ -16,17 +17,19 @@ from wetterdienst.metadata.columns import Columns from wetterdienst.metadata.datarange import DataRange from wetterdienst.metadata.kind import Kind -from wetterdienst.metadata.parameter import Parameter from wetterdienst.metadata.period import Period, PeriodType from wetterdienst.metadata.provider import Provider from wetterdienst.metadata.resolution import Resolution, ResolutionType from wetterdienst.metadata.timezone import Timezone from wetterdienst.metadata.unit import OriginUnit, SIUnit, UnitEnum -from wetterdienst.settings import Settings from wetterdienst.util.cache import CacheExpiry from wetterdienst.util.network import download_file from wetterdienst.util.parameter import DatasetTreeCore +if TYPE_CHECKING: + from wetterdienst.metadata.parameter 
import Parameter + from wetterdienst.settings import Settings + log = logging.getLogger(__name__) REQUIRED_ENTRIES = [ @@ -77,7 +80,7 @@ class HubeauValues(TimeseriesValues): "grandeur_hydro={grandeur_hydro}&sort=asc&size=2" ) - def _get_hubeau_dates(self, station_id, parameter, dataset) -> Iterator[Tuple[dt.datetime, dt.datetime]]: + def _get_hubeau_dates(self, station_id, parameter, dataset) -> Iterator[tuple[dt.datetime, dt.datetime]]: """ Method to get the Hubeau interval, which is roughly today - 30 days. We'll add another day on each end as buffer. @@ -98,7 +101,12 @@ def _get_hubeau_dates(self, station_id, parameter, dataset) -> Iterator[Tuple[dt request_date_range = pl.datetime_range(start=start, end=end, interval=delta / periods, eager=True) return zip(request_date_range[:-1], request_date_range[1:]) - def _get_dynamic_frequency(self, station_id, parameter, dataset) -> Tuple[int, Literal["min", "H"]]: + def _get_dynamic_frequency( + self, + station_id, + parameter, + dataset, # noqa: ARG002 + ) -> tuple[int, Literal["min", "H"]]: url = self._endpoint_freq.format(station_id=station_id, grandeur_hydro=parameter.value) log.info(f"Downloading file {url}.") response = download_file(url=url, settings=self.sr.stations.settings, ttl=CacheExpiry.METAINDEX) @@ -149,7 +157,7 @@ def _collect_station_parameter(self, station_id: str, parameter: Enum, dataset: Columns.DATE.value: pl.Datetime(time_zone="UTC"), Columns.VALUE.value: pl.Float64, Columns.QUALITY.value: pl.Float64, - } + }, ) else: df = df.with_columns(pl.col("date_obs").map_elements(dt.datetime.fromisoformat)) @@ -163,7 +171,7 @@ def _collect_station_parameter(self, station_id: str, parameter: Enum, dataset: "date_obs": Columns.DATE.value, "resultat_obs": Columns.VALUE.value, "code_qualification_obs": Columns.QUALITY.value, - } + }, ) return df.select( @@ -193,12 +201,12 @@ class HubeauRequest(TimeseriesRequest): def __init__( self, - parameter: List[Union[str, Enum, Parameter]], - start_date: 
Optional[Union[str, dt.datetime]] = None, - end_date: Optional[Union[str, dt.datetime]] = None, - settings: Optional[Settings] = None, + parameter: list[str | Enum | Parameter], + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None = None, + settings: Settings | None = None, ): - super(HubeauRequest, self).__init__( + super().__init__( parameter=parameter, resolution=Resolution.DYNAMIC, period=Period.HISTORICAL, @@ -233,7 +241,7 @@ def _all(self) -> pl.LazyFrame: "libelle_departement": Columns.STATE.value, "date_ouverture_station": Columns.START_DATE.value, "date_fermeture_station": Columns.END_DATE.value, - } + }, ) df = df.with_columns( diff --git a/wetterdienst/provider/eccc/__init__.py b/wetterdienst/provider/eccc/__init__.py index 36979cf71..57a368863 100644 --- a/wetterdienst/provider/eccc/__init__.py +++ b/wetterdienst/provider/eccc/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/provider/eccc/observation/__init__.py b/wetterdienst/provider/eccc/observation/__init__.py index 46f69e018..53f29e2b0 100644 --- a/wetterdienst/provider/eccc/observation/__init__.py +++ b/wetterdienst/provider/eccc/observation/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.provider.eccc.observation.api import EcccObservationRequest diff --git a/wetterdienst/provider/eccc/observation/api.py b/wetterdienst/provider/eccc/observation/api.py index d98a661d3..9a5dc4753 100644 --- a/wetterdienst/provider/eccc/observation/api.py +++ b/wetterdienst/provider/eccc/observation/api.py @@ -1,12 +1,13 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
+from __future__ import annotations + import datetime as dt import gzip import logging from enum import Enum from io import BytesIO -from typing import Generator, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Iterator import polars as pl @@ -15,7 +16,6 @@ from wetterdienst.metadata.columns import Columns from wetterdienst.metadata.datarange import DataRange from wetterdienst.metadata.kind import Kind -from wetterdienst.metadata.parameter import Parameter from wetterdienst.metadata.period import Period, PeriodType from wetterdienst.metadata.provider import Provider from wetterdienst.metadata.resolution import Resolution, ResolutionType @@ -28,10 +28,13 @@ EcccObservationResolution, ) from wetterdienst.provider.eccc.observation.metadata.unit import EcccObservationUnit -from wetterdienst.settings import Settings from wetterdienst.util.cache import CacheExpiry from wetterdienst.util.network import download_file +if TYPE_CHECKING: + from wetterdienst.metadata.parameter import Parameter + from wetterdienst.settings import Settings + log = logging.getLogger(__name__) @@ -98,7 +101,10 @@ def _tidy_up_df(df: pl.LazyFrame) -> pl.LazyFrame: return pl.LazyFrame() def _collect_station_parameter( - self, station_id: str, parameter: EcccObservationParameter, dataset: Enum + self, + station_id: str, + parameter: EcccObservationParameter, # noqa: ARG002 + dataset: Enum, # noqa: ARG002 ) -> pl.DataFrame: """ @@ -114,7 +120,7 @@ def _collect_station_parameter( [ pl.col(Columns.START_DATE.value).dt.year(), pl.col(Columns.END_DATE.value).dt.year(), - ] + ], ) .transpose() .to_series() @@ -151,7 +157,7 @@ def _collect_station_parameter( "time (lst)", "data quality", ] - df = df.drop((col for col in droppable_columns if col in df.columns)) + df = df.drop(col for col in droppable_columns if col in df.columns) data.append(df) try: @@ -160,7 +166,7 @@ def _collect_station_parameter( df = pl.LazyFrame() df = df.rename( - mapping={col: Columns.DATE.value for col in 
["date/time (lst)", "date/time"] if col in df.columns} + mapping={col: Columns.DATE.value for col in ["date/time (lst)", "date/time"] if col in df.columns}, ) df = self._tidy_up_df(df) @@ -176,7 +182,7 @@ def _collect_station_parameter( pl.lit(value=None, dtype=pl.Float64).alias(Columns.QUALITY.value), ).collect() - def _create_file_urls(self, station_id: str, start_year: int, end_year: int) -> Generator[str, None, None]: + def _create_file_urls(self, station_id: str, start_year: int, end_year: int) -> Iterator[str]: """ :param station_id: @@ -193,7 +199,10 @@ def _create_file_urls(self, station_id: str, start_year: int, end_year: int) -> # For hourly data request only necessary data to reduce amount of data being # downloaded and parsed for date in pl.datetime_range( - dt.datetime(start_year, 1, 1), dt.datetime(end_year + 1, 1, 1), interval=freq, eager=True + dt.datetime(start_year, 1, 1), + dt.datetime(end_year + 1, 1, 1), + interval=freq, + eager=True, ): url = self._base_url.format(int(station_id), self._timeframe) url += f"&Year={date.year}" @@ -254,7 +263,7 @@ def _dates_columns_mapping(self) -> dict: { start_date: Columns.START_DATE.value, end_date: Columns.END_DATE.value, - } + }, ) return dcm @@ -273,11 +282,11 @@ def _dates_columns_mapping(self) -> dict: def __init__( self, - parameter: List[Union[str, EcccObservationParameter, Parameter]], - resolution: Union[str, EcccObservationResolution, Resolution], - start_date: Optional[Union[str, dt.datetime]] = None, - end_date: Optional[Union[str, dt.datetime]] = None, - settings: Optional[Settings] = None, + parameter: list[str | EcccObservationParameter | Parameter], + resolution: str | EcccObservationResolution | Resolution, + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None = None, + settings: Settings | None = None, ): """ @@ -286,7 +295,7 @@ def __init__( :param start_date: start date for values filtering :param end_date: end date for values filtering """ - 
super(EcccObservationRequest, self).__init__( + super().__init__( parameter=parameter, resolution=resolution, period=Period.HISTORICAL, @@ -331,7 +340,7 @@ def _all(self) -> pl.LazyFrame: return df.filter(pl.col(Columns.LATITUDE.value).ne("") & pl.col(Columns.LONGITUDE.value).ne("")) - def _download_stations(self) -> Tuple[BytesIO, int]: + def _download_stations(self) -> tuple[BytesIO, int]: """ Download station list from ECCC FTP server. diff --git a/wetterdienst/provider/eccc/observation/metadata/__init__.py b/wetterdienst/provider/eccc/observation/metadata/__init__.py index 36979cf71..57a368863 100644 --- a/wetterdienst/provider/eccc/observation/metadata/__init__.py +++ b/wetterdienst/provider/eccc/observation/metadata/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/provider/eccc/observation/metadata/parameter.py b/wetterdienst/provider/eccc/observation/metadata/parameter.py index c0303ec1e..696ecae15 100644 --- a/wetterdienst/provider/eccc/observation/metadata/parameter.py +++ b/wetterdienst/provider/eccc/observation/metadata/parameter.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/provider/eccc/observation/metadata/resolution.py b/wetterdienst/provider/eccc/observation/metadata/resolution.py index 09b8beaa6..bd3933537 100644 --- a/wetterdienst/provider/eccc/observation/metadata/resolution.py +++ b/wetterdienst/provider/eccc/observation/metadata/resolution.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
from enum import Enum diff --git a/wetterdienst/provider/eccc/observation/metadata/unit.py b/wetterdienst/provider/eccc/observation/metadata/unit.py index dc7d95595..ed0a38ebd 100644 --- a/wetterdienst/provider/eccc/observation/metadata/unit.py +++ b/wetterdienst/provider/eccc/observation/metadata/unit.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.metadata.unit import OriginUnit, SIUnit, UnitEnum diff --git a/wetterdienst/provider/eumetnet/__init__.py b/wetterdienst/provider/eumetnet/__init__.py index 36979cf71..57a368863 100644 --- a/wetterdienst/provider/eumetnet/__init__.py +++ b/wetterdienst/provider/eumetnet/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/provider/eumetnet/opera/sites.py b/wetterdienst/provider/eumetnet/opera/sites.py index 5dc3a3417..2237c4796 100644 --- a/wetterdienst/provider/eumetnet/opera/sites.py +++ b/wetterdienst/provider/eumetnet/opera/sites.py @@ -1,10 +1,11 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import gzip import importlib.resources import json -from typing import Any, Dict, List, Union +from typing import Any import requests @@ -19,7 +20,7 @@ class OperaRadarSites: def __init__(self): self.sites = self.load() - def load(self) -> List[Dict]: + def load(self) -> list[dict]: """ Load and decode JSON file from filesystem. """ @@ -27,13 +28,13 @@ def load(self) -> List[Dict]: with gzip.open(rf, mode="rb") as f: return json.load(f) - def all(self) -> List[Dict]: # noqa: A003 + def all(self) -> list[dict]: # noqa: A003 """ The whole list of OPERA radar sites. 
""" return self.sites - def to_dict(self) -> Dict: + def to_dict(self) -> dict: """ Dictionary of sites, keyed by ODIM code. """ @@ -44,7 +45,7 @@ def to_dict(self) -> Dict: result[site["odimcode"]] = site return result - def by_odim_code(self, odim_code: str) -> Dict: + def by_odim_code(self, odim_code: str) -> dict: """ Return radar site by ODIM code. @@ -59,7 +60,7 @@ def by_odim_code(self, odim_code: str) -> Dict: else: raise KeyError("Radar site not found") - def by_wmo_code(self, wmo_code: int) -> Dict: + def by_wmo_code(self, wmo_code: int) -> dict: """ Return radar site by WMO code. @@ -72,7 +73,7 @@ def by_wmo_code(self, wmo_code: int) -> Dict: else: raise KeyError("Radar site not found") - def by_country_name(self, country_name: str) -> List[Dict]: + def by_country_name(self, country_name: str) -> list[dict]: """ Filter list of radar sites by country name. @@ -97,7 +98,7 @@ class OperaRadarSitesGenerator: "current-activities/opera/database/OPERA_Database/OPERA_RADARS_DB.json" ) - def get_opera_radar_sites(self) -> List[Dict]: # pragma: no cover + def get_opera_radar_sites(self) -> list[dict]: # pragma: no cover data = requests.get(self.url, timeout=10).json() # Filter empty elements and convert data types. @@ -126,7 +127,7 @@ def asbool(obj: Any) -> bool: raise ValueError("String is not true/false: %r" % obj) return bool(obj) - def convert_types(element: Dict) -> Dict[str, Union[int, float, bool, None]]: + def convert_types(element: dict) -> dict[str, int | float | bool | None]: converted = {} for key, value in element.items(): try: diff --git a/wetterdienst/provider/geosphere/__init__.py b/wetterdienst/provider/geosphere/__init__.py index 1f0020ad8..2c7e8b50c 100644 --- a/wetterdienst/provider/geosphere/__init__.py +++ b/wetterdienst/provider/geosphere/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
diff --git a/wetterdienst/provider/geosphere/observation/__init__.py b/wetterdienst/provider/geosphere/observation/__init__.py index b1223ea3c..21d3045e4 100644 --- a/wetterdienst/provider/geosphere/observation/__init__.py +++ b/wetterdienst/provider/geosphere/observation/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.provider.geosphere.observation.api import ( diff --git a/wetterdienst/provider/geosphere/observation/api.py b/wetterdienst/provider/geosphere/observation/api.py index a91e29aaf..070033d33 100644 --- a/wetterdienst/provider/geosphere/observation/api.py +++ b/wetterdienst/provider/geosphere/observation/api.py @@ -1,16 +1,16 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import datetime as dt import json import logging from datetime import datetime, timedelta from enum import Enum -from typing import List, Optional, Union +from typing import TYPE_CHECKING import polars as pl -from wetterdienst import Parameter, Settings from wetterdienst.core.timeseries.request import TimeseriesRequest from wetterdienst.core.timeseries.values import TimeseriesValues from wetterdienst.metadata.columns import Columns @@ -26,6 +26,9 @@ from wetterdienst.util.network import download_file from wetterdienst.util.parameter import DatasetTreeCore +if TYPE_CHECKING: + from wetterdienst import Parameter, Settings + log = logging.getLogger(__name__) @@ -682,14 +685,16 @@ def _collect_station_parameter(self, station_id: str, parameter: Enum, dataset: data[par] = par_dict["data"] df = pl.DataFrame(data) df = df.melt( - id_vars=[Columns.DATE.value], variable_name=Columns.PARAMETER.value, value_name=Columns.VALUE.value + id_vars=[Columns.DATE.value], + variable_name=Columns.PARAMETER.value, + 
value_name=Columns.VALUE.value, ) if self.sr.resolution == Resolution.MINUTE_10: df = df.with_columns( pl.when(pl.col(Columns.PARAMETER.value).is_in(["GSX", "HSX"])) .then(pl.col(Columns.VALUE.value) * 600) .otherwise(pl.col(Columns.VALUE.value)) - .alias(Columns.VALUE.value) + .alias(Columns.VALUE.value), ) return df.with_columns( pl.col(Columns.DATE.value).str.to_datetime("%Y-%m-%dT%H:%M+%Z").dt.replace_time_zone("UTC"), @@ -726,17 +731,17 @@ class GeosphereObservationRequest(TimeseriesRequest): def __init__( self, - parameter: List[Union[str, GeosphereObservationParameter, Parameter]], - resolution: Union[str, GeosphereObservationResolution, Resolution], - start_date: Optional[Union[str, dt.datetime]] = None, - end_date: Optional[Union[str, dt.datetime]] = None, - settings: Optional[Settings] = None, + parameter: list[str | GeosphereObservationParameter | Parameter], + resolution: str | GeosphereObservationResolution | Resolution, + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None = None, + settings: Settings | None = None, ): if not start_date or not end_date: res = parse_enumeration_from_template(resolution, self._resolution_base, Resolution) start_date = self._default_start_dates[res.name.lower()] end_date = datetime.now() - super(GeosphereObservationRequest, self).__init__( + super().__init__( parameter=parameter, resolution=resolution, period=Period.HISTORICAL, @@ -762,7 +767,7 @@ def _all(self) -> pl.LazyFrame: "Startdatum": Columns.START_DATE.value, "Enddatum": Columns.END_DATE.value, "Bundesland": Columns.STATE.value, - } + }, ) return df.with_columns( pl.col(Columns.START_DATE.value).str.to_datetime(), diff --git a/wetterdienst/provider/imgw/__init__.py b/wetterdienst/provider/imgw/__init__.py index 1f0020ad8..2c7e8b50c 100644 --- a/wetterdienst/provider/imgw/__init__.py +++ b/wetterdienst/provider/imgw/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. 
# Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/provider/imgw/hydrology/__init__.py b/wetterdienst/provider/imgw/hydrology/__init__.py index 4a2fdc257..d877d67d9 100644 --- a/wetterdienst/provider/imgw/hydrology/__init__.py +++ b/wetterdienst/provider/imgw/hydrology/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.provider.imgw.hydrology.api import ImgwHydrologyRequest diff --git a/wetterdienst/provider/imgw/hydrology/api.py b/wetterdienst/provider/imgw/hydrology/api.py index 1f1197a75..55eb96bba 100644 --- a/wetterdienst/provider/imgw/hydrology/api.py +++ b/wetterdienst/provider/imgw/hydrology/api.py @@ -1,12 +1,12 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import datetime as dt import logging import re from concurrent.futures import ThreadPoolExecutor from enum import Enum -from typing import Optional, Union import polars as pl import portion as P @@ -135,7 +135,12 @@ class ImgwHydrologyValues(TimeseriesValues): }, } - def _collect_station_parameter(self, station_id: str, parameter: Enum, dataset: Enum) -> pl.DataFrame: + def _collect_station_parameter( + self, + station_id: str, + parameter: Enum, # noqa: ARG002 + dataset: Enum, + ) -> pl.DataFrame: """ :param station_id: @@ -146,7 +151,8 @@ def _collect_station_parameter(self, station_id: str, parameter: Enum, dataset: urls = self._get_urls(dataset) with ThreadPoolExecutor() as p: files_in_bytes = p.map( - lambda file: download_file(url=file, settings=self.sr.settings, ttl=CacheExpiry.FIVE_MINUTES), urls + lambda file: download_file(url=file, settings=self.sr.settings, ttl=CacheExpiry.FIVE_MINUTES), + urls, ) data = [] file_schema = 
self._file_schema[self.sr.resolution.name.lower()][dataset.name.lower()] @@ -265,7 +271,7 @@ def _get_urls(self, dataset: Enum) -> pl.Series: interval = P.closed(self.sr.start_date, self.sr.end_date) if self.sr.resolution == Resolution.DAILY: df_files = df_files.with_columns( - pl.col("file").str.strip_chars_end(".zip").str.split("_").list.slice(1).alias("year_month") + pl.col("file").str.strip_chars_end(".zip").str.split("_").list.slice(1).alias("year_month"), ) df_files = df_files.with_columns( pl.col("year_month").list.first().cast(pl.Int64).alias("year"), @@ -284,9 +290,9 @@ def _get_urls(self, dataset: Enum) -> pl.Series: lambda x: [ dt.datetime(x["year"], x["month"], 1), dt.datetime(x["year"], x["month"], 1) + relativedelta(months=1) - relativedelta(days=1), - ] + ], ) - .alias("date_range") + .alias("date_range"), ) else: df_files = df_files.with_columns( @@ -296,9 +302,9 @@ def _get_urls(self, dataset: Enum) -> pl.Series: .list.last() .str.to_datetime("%Y", time_zone="UTC", strict=False) .map_elements( - lambda d: [d - relativedelta(months=2), d + relativedelta(months=11) - relativedelta(days=1)] + lambda d: [d - relativedelta(months=2), d + relativedelta(months=11) - relativedelta(days=1)], ) - .alias("date_range") + .alias("date_range"), ) df_files = df_files.select( pl.col("url"), @@ -308,7 +314,7 @@ def _get_urls(self, dataset: Enum) -> pl.Series: df_files = df_files.with_columns( pl.struct(["start_date", "end_date"]) .map_elements(lambda dates: P.closed(dates["start_date"], dates["end_date"])) - .alias("interval") + .alias("interval"), ) df_files = df_files.filter(pl.col("interval").map_elements(lambda i: i.overlaps(interval))) return df_files.get_column("url") @@ -335,9 +341,9 @@ def __init__( self, parameter, resolution, - start_date: Optional[Union[str, dt.datetime]] = None, - end_date: Optional[Union[str, dt.datetime]] = None, - settings: Optional[Settings] = None, + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None 
= None, + settings: Settings | None = None, ): super().__init__( parameter=parameter, @@ -356,7 +362,12 @@ def _all(self) -> pl.LazyFrame: log.info(f"Downloading file {self._endpoint}.") payload = download_file(self._endpoint, settings=self.settings, ttl=CacheExpiry.METAINDEX) df = pl.read_csv( - payload, encoding="latin-1", has_header=False, separator=";", skip_rows=1, infer_schema_length=0 + payload, + encoding="latin-1", + has_header=False, + separator=";", + skip_rows=1, + infer_schema_length=0, ) df = df[:, [1, 2, 4, 5]] df.columns = [ diff --git a/wetterdienst/provider/imgw/meteorology/__init__.py b/wetterdienst/provider/imgw/meteorology/__init__.py index 74e63037b..4f0b3de00 100644 --- a/wetterdienst/provider/imgw/meteorology/__init__.py +++ b/wetterdienst/provider/imgw/meteorology/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.provider.imgw.meteorology.api import ImgwMeteorologyRequest diff --git a/wetterdienst/provider/imgw/meteorology/api.py b/wetterdienst/provider/imgw/meteorology/api.py index 511951229..c17fa4030 100644 --- a/wetterdienst/provider/imgw/meteorology/api.py +++ b/wetterdienst/provider/imgw/meteorology/api.py @@ -1,11 +1,11 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
+from __future__ import annotations + import datetime as dt import re from concurrent.futures import ThreadPoolExecutor from enum import Enum -from typing import Optional, Union import polars as pl import portion as P @@ -354,7 +354,12 @@ class ImgwMeteorologyValues(TimeseriesValues): }, } - def _collect_station_parameter(self, station_id: str, parameter: Enum, dataset: Enum) -> pl.DataFrame: + def _collect_station_parameter( + self, + station_id: str, + parameter: Enum, # noqa: ARG002 + dataset: Enum, + ) -> pl.DataFrame: """ :param station_id: @@ -365,7 +370,8 @@ def _collect_station_parameter(self, station_id: str, parameter: Enum, dataset: urls = self._get_urls(dataset) with ThreadPoolExecutor() as p: files_in_bytes = p.map( - lambda file: download_file(url=file, settings=self.sr.settings, ttl=CacheExpiry.FIVE_MINUTES), urls + lambda file: download_file(url=file, settings=self.sr.settings, ttl=CacheExpiry.FIVE_MINUTES), + urls, ) data = [] file_schema = self._file_schema[self.sr.resolution.name.lower()][dataset.name.lower()] @@ -460,9 +466,9 @@ def _get_urls(self, dataset: Enum) -> pl.Series: dt.datetime(int(years[1]), 1, 1, tzinfo=ZoneInfo("UTC")) + relativedelta(years=1) - relativedelta(days=1), - ] + ], ) - .alias("date_range") + .alias("date_range"), ) else: df_files = df_files.with_columns( @@ -472,16 +478,16 @@ def _get_urls(self, dataset: Enum) -> pl.Series: .str.split("_") .list.first() .str.to_datetime("%Y", time_zone="UTC", strict=False) - .map_elements(lambda d: [d, d + relativedelta(years=1) - relativedelta(days=1)]) + .map_elements(lambda d: [d, d + relativedelta(years=1) - relativedelta(days=1)]), ) .otherwise( pl.col("file") .str.split("_") .map_elements(lambda s: "_".join(s[:2])) .str.to_datetime("%Y_%m", time_zone="UTC", strict=False) - .map_elements(lambda d: [d, d + relativedelta(months=1) - relativedelta(days=1)]) + .map_elements(lambda d: [d, d + relativedelta(months=1) - relativedelta(days=1)]), ) - .alias("date_range") + 
.alias("date_range"), ) df_files = df_files.select( pl.col("url"), @@ -491,7 +497,7 @@ def _get_urls(self, dataset: Enum) -> pl.Series: df_files = df_files.with_columns( pl.struct(["start_date", "end_date"]) .map_elements(lambda dates: P.closed(dates["start_date"], dates["end_date"])) - .alias("interval") + .alias("interval"), ) df_files = df_files.filter(pl.col("interval").map_elements(lambda i: i.overlaps(interval))) return df_files.get_column("url") @@ -518,9 +524,9 @@ def __init__( self, parameter, resolution, - start_date: Optional[Union[str, dt.datetime]] = None, - end_date: Optional[Union[str, dt.datetime]] = None, - settings: Optional[Settings] = None, + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None = None, + settings: Settings | None = None, ): super().__init__( parameter=parameter, diff --git a/wetterdienst/provider/noaa/__init__.py b/wetterdienst/provider/noaa/__init__.py index 36979cf71..57a368863 100644 --- a/wetterdienst/provider/noaa/__init__.py +++ b/wetterdienst/provider/noaa/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/provider/noaa/ghcn/__init__.py b/wetterdienst/provider/noaa/ghcn/__init__.py index 903f4d556..445963b10 100644 --- a/wetterdienst/provider/noaa/ghcn/__init__.py +++ b/wetterdienst/provider/noaa/ghcn/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
from wetterdienst.provider.noaa.ghcn.api import NoaaGhcnRequest, NoaaGhcnResolution diff --git a/wetterdienst/provider/noaa/ghcn/api.py b/wetterdienst/provider/noaa/ghcn/api.py index 62685e6e0..07dc677f6 100644 --- a/wetterdienst/provider/noaa/ghcn/api.py +++ b/wetterdienst/provider/noaa/ghcn/api.py @@ -1,10 +1,11 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import datetime as dt import logging from enum import Enum -from typing import List, Optional, Union +from typing import TYPE_CHECKING import polars as pl from zoneinfo import ZoneInfo @@ -14,7 +15,6 @@ from wetterdienst.metadata.columns import Columns from wetterdienst.metadata.datarange import DataRange from wetterdienst.metadata.kind import Kind -from wetterdienst.metadata.parameter import Parameter from wetterdienst.metadata.period import Period, PeriodType from wetterdienst.metadata.provider import Provider from wetterdienst.metadata.resolution import Resolution, ResolutionType @@ -24,11 +24,14 @@ NoaaGhcnParameter, ) from wetterdienst.provider.noaa.ghcn.unit import NoaaGhcnUnit -from wetterdienst.settings import Settings from wetterdienst.util.cache import CacheExpiry from wetterdienst.util.network import download_file from wetterdienst.util.polars_util import read_fwf_from_df +if TYPE_CHECKING: + from wetterdienst.metadata.parameter import Parameter + from wetterdienst.settings import Settings + log = logging.getLogger(__name__) @@ -315,14 +318,17 @@ def _collect_station_parameter_for_hourly(self, station_id: str, parameter, data .map_elements( lambda date: dt.datetime.strptime(date, "%Y-%m-%d-%H-%M") .replace(tzinfo=ZoneInfo(time_zone)) - .astimezone(ZoneInfo("UTC")) + .astimezone(ZoneInfo("UTC")), ) .alias("date"), *parameter, ) df = df.with_columns(pl.col(Columns.DATE.value).dt.replace_time_zone("UTC")) df = df.melt( - id_vars=["station_id", "date"], 
value_vars=parameter, variable_name="parameter", value_name="value" + id_vars=["station_id", "date"], + value_vars=parameter, + variable_name="parameter", + value_name="value", ) return df.with_columns( pl.col("parameter").str.to_lowercase(), @@ -330,7 +336,12 @@ def _collect_station_parameter_for_hourly(self, station_id: str, parameter, data pl.lit(value=None, dtype=pl.Float64).alias("quality"), ) - def _collect_station_parameter_for_daily(self, station_id: str, parameter, dataset) -> pl.DataFrame: + def _collect_station_parameter_for_daily( + self, + station_id: str, + parameter, # noqa: ARG002 + dataset, # noqa: ARG002 + ) -> pl.DataFrame: """ Collection method for NOAA GHCN data. Parameter and dataset can be ignored as data is provided as a whole. @@ -357,14 +368,14 @@ def _collect_station_parameter_for_daily(self, station_id: str, parameter, datas "column_2": Columns.DATE.value, "column_3": Columns.PARAMETER.value, "column_4": Columns.VALUE.value, - } + }, ) time_zone = self._get_timezone_from_station(station_id) df = df.with_columns( pl.col(Columns.DATE.value).map_elements( lambda date: dt.datetime.strptime(date, "%Y%m%d") .replace(tzinfo=ZoneInfo(time_zone)) - .astimezone(ZoneInfo("UTC")) + .astimezone(ZoneInfo("UTC")), ), pl.col(Columns.PARAMETER.value).str.to_lowercase(), pl.col(Columns.VALUE.value).cast(float), @@ -376,8 +387,8 @@ def _collect_station_parameter_for_daily(self, station_id: str, parameter, datas ( NoaaGhcnParameter.DAILY.TIME_WIND_GUST_MAX.value, NoaaGhcnParameter.DAILY.TIME_WIND_GUST_MAX_1MILE_OR_1MIN.value, - ) - ) + ), + ), ) df = self._apply_daily_factors(df) return df.select( @@ -424,11 +435,11 @@ class NoaaGhcnRequest(TimeseriesRequest): def __init__( self, - parameter: List[Union[str, NoaaGhcnParameter, Parameter]], - resolution: Union[str, NoaaGhcnResolution, Resolution], - start_date: Optional[Union[str, dt.datetime]] = None, - end_date: Optional[Union[str, dt.datetime]] = None, - settings: Optional[Settings] = None, + parameter: 
list[str | NoaaGhcnParameter | Parameter], + resolution: str | NoaaGhcnResolution | Resolution, + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None = None, + settings: Settings | None = None, ) -> None: """ @@ -530,10 +541,12 @@ def _create_metaindex_for_ghcn_daily(self) -> pl.LazyFrame: inventory_df = read_fwf_from_df(inventory_df, column_specs) inventory_df.columns = [Columns.STATION_ID.value, Columns.START_DATE.value, Columns.END_DATE.value] inventory_df = inventory_df.with_columns( - pl.col(Columns.START_DATE.value).cast(int), pl.col(Columns.END_DATE.value).cast(int) + pl.col(Columns.START_DATE.value).cast(int), + pl.col(Columns.END_DATE.value).cast(int), ) inventory_df = inventory_df.group_by([Columns.STATION_ID.value]).agg( - pl.col(Columns.START_DATE.value).min(), pl.col(Columns.END_DATE.value).max() + pl.col(Columns.START_DATE.value).min(), + pl.col(Columns.END_DATE.value).max(), ) inventory_df = inventory_df.with_columns( pl.col(Columns.START_DATE.value).cast(str).str.to_datetime("%Y"), diff --git a/wetterdienst/provider/noaa/ghcn/parameter.py b/wetterdienst/provider/noaa/ghcn/parameter.py index f764fb5aa..27b859c56 100644 --- a/wetterdienst/provider/noaa/ghcn/parameter.py +++ b/wetterdienst/provider/noaa/ghcn/parameter.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from enum import Enum diff --git a/wetterdienst/provider/noaa/ghcn/unit.py b/wetterdienst/provider/noaa/ghcn/unit.py index 29ce893e9..a1ae65a74 100644 --- a/wetterdienst/provider/noaa/ghcn/unit.py +++ b/wetterdienst/provider/noaa/ghcn/unit.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
diff --git a/wetterdienst/provider/nws/__init__.py b/wetterdienst/provider/nws/__init__.py index 36979cf71..57a368863 100644 --- a/wetterdienst/provider/nws/__init__.py +++ b/wetterdienst/provider/nws/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/provider/nws/observation/__init__.py b/wetterdienst/provider/nws/observation/__init__.py index 5432940f8..61592edf7 100644 --- a/wetterdienst/provider/nws/observation/__init__.py +++ b/wetterdienst/provider/nws/observation/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.provider.nws.observation.api import ( diff --git a/wetterdienst/provider/nws/observation/api.py b/wetterdienst/provider/nws/observation/api.py index a59123c2a..274db0a16 100644 --- a/wetterdienst/provider/nws/observation/api.py +++ b/wetterdienst/provider/nws/observation/api.py @@ -1,11 +1,12 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
+from __future__ import annotations + import datetime as dt import json import logging from enum import Enum -from typing import List, Optional, Union +from typing import TYPE_CHECKING import polars as pl @@ -14,17 +15,19 @@ from wetterdienst.metadata.columns import Columns from wetterdienst.metadata.datarange import DataRange from wetterdienst.metadata.kind import Kind -from wetterdienst.metadata.parameter import Parameter from wetterdienst.metadata.period import Period, PeriodType from wetterdienst.metadata.provider import Provider from wetterdienst.metadata.resolution import Resolution, ResolutionType from wetterdienst.metadata.timezone import Timezone from wetterdienst.metadata.unit import OriginUnit, SIUnit, UnitEnum -from wetterdienst.settings import Settings from wetterdienst.util.cache import CacheExpiry from wetterdienst.util.network import download_file from wetterdienst.util.parameter import DatasetTreeCore +if TYPE_CHECKING: + from wetterdienst.metadata.parameter import Parameter + from wetterdienst.settings import Settings + log = logging.getLogger(__name__) @@ -99,7 +102,12 @@ class NwsObservationValues(TimeseriesValues): _data_tz = Timezone.UTC _endpoint = "https://api.weather.gov/stations/{station_id}/observations" - def _collect_station_parameter(self, station_id: str, parameter: Enum, dataset: Enum) -> pl.DataFrame: + def _collect_station_parameter( + self, + station_id: str, + parameter: Enum, # noqa: ARG002 + dataset: Enum, # noqa: ARG002 + ) -> pl.DataFrame: url = self._endpoint.format(station_id=station_id) log.info(f"acquiring data from {url}") response = download_file(url, settings=self.sr.stations.settings, ttl=CacheExpiry.FIVE_MINUTES) @@ -119,69 +127,69 @@ def _collect_station_parameter(self, station_id: str, parameter: Enum, dataset: "temperature": pl.Struct( [ pl.Field("value", pl.Float64), - ] + ], ), "dewpoint": pl.Struct( [ pl.Field("value", pl.Float64), - ] + ], ), "windDirection": pl.Struct( [ pl.Field("value", pl.Int64), - ] + ], 
), "windSpeed": pl.Struct( [ pl.Field("value", pl.Float64), - ] + ], ), "windGust": pl.Struct( [ pl.Field("value", pl.Int32), - ] + ], ), "barometricPressure": pl.Struct( [ pl.Field("value", pl.Int64), - ] + ], ), "seaLevelPressure": pl.Struct( [ pl.Field("value", pl.Int64), - ] + ], ), "visibility": pl.Struct( [ pl.Field("value", pl.Int64), - ] + ], ), "maxTemperatureLast24Hours": pl.Struct([pl.Field("value", pl.Int32)]), "minTemperatureLast24Hours": pl.Struct([pl.Field("value", pl.Int32)]), "precipitationLastHour": pl.Struct( [ pl.Field("value", pl.Int64), - ] + ], ), "precipitationLast3Hours": pl.Struct( [ pl.Field("value", pl.Int64), - ] + ], ), "precipitationLast6Hours": pl.Struct( [ pl.Field("value", pl.Int64), - ] + ], ), "relativeHumidity": pl.Struct( [ pl.Field("value", pl.Float64), - ] + ], ), "windChill": pl.Struct( [ pl.Field("value", pl.Float64), - ] + ], ), }, ) @@ -220,12 +228,12 @@ class NwsObservationRequest(TimeseriesRequest): def __init__( self, - parameter: List[Union[str, NwsObservationParameter, Parameter]], - start_date: Optional[Union[str, dt.datetime]] = None, - end_date: Optional[Union[str, dt.datetime]] = None, - settings: Optional[Settings] = None, + parameter: list[str | NwsObservationParameter | Parameter], + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None = None, + settings: Settings | None = None, ): - super(NwsObservationRequest, self).__init__( + super().__init__( parameter=parameter, resolution=Resolution.HOURLY, period=Period.RECENT, @@ -239,8 +247,8 @@ def __init__( "headers": { "User-Agent": "wetterdienst/0.48.0", "Content-Type": "application/json", - } - } + }, + }, ) def _all(self) -> pl.LazyFrame: @@ -248,7 +256,11 @@ def _all(self) -> pl.LazyFrame: df = pl.read_csv(source=response, has_header=False, separator="\t", infer_schema_length=0).lazy() df = df.filter(pl.col("column_7").eq("US")) df = df.select( - pl.col("column_2"), pl.col("column_3"), pl.col("column_4"), pl.col("column_5"), 
pl.col("column_6") + pl.col("column_2"), + pl.col("column_3"), + pl.col("column_4"), + pl.col("column_5"), + pl.col("column_6"), ) df = df.rename( mapping={ @@ -257,7 +269,7 @@ def _all(self) -> pl.LazyFrame: "column_4": Columns.LONGITUDE.value, "column_5": Columns.HEIGHT.value, "column_6": Columns.NAME.value, - } + }, ) df = df.with_columns(pl.all().str.strip_chars()) df = df.with_columns( diff --git a/wetterdienst/provider/wsv/__init__.py b/wetterdienst/provider/wsv/__init__.py index 81cd921b5..c79af6a95 100644 --- a/wetterdienst/provider/wsv/__init__.py +++ b/wetterdienst/provider/wsv/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/provider/wsv/pegel/__init__.py b/wetterdienst/provider/wsv/pegel/__init__.py index 87f4430f6..2f8029a20 100644 --- a/wetterdienst/provider/wsv/pegel/__init__.py +++ b/wetterdienst/provider/wsv/pegel/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from wetterdienst.provider.wsv.pegel.api import WsvPegelParameter, WsvPegelRequest diff --git a/wetterdienst/provider/wsv/pegel/api.py b/wetterdienst/provider/wsv/pegel/api.py index 172f3d2c8..ac8d4ae81 100644 --- a/wetterdienst/provider/wsv/pegel/api.py +++ b/wetterdienst/provider/wsv/pegel/api.py @@ -1,10 +1,11 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
+from __future__ import annotations + import datetime as dt import json from enum import Enum -from typing import List, Optional, Union +from typing import TYPE_CHECKING, Optional import polars as pl @@ -13,18 +14,20 @@ from wetterdienst.metadata.columns import Columns from wetterdienst.metadata.datarange import DataRange from wetterdienst.metadata.kind import Kind -from wetterdienst.metadata.parameter import Parameter from wetterdienst.metadata.period import Period, PeriodType from wetterdienst.metadata.provider import Provider from wetterdienst.metadata.resolution import Resolution, ResolutionType from wetterdienst.metadata.timezone import Timezone from wetterdienst.metadata.unit import OriginUnit, SIUnit, UnitEnum -from wetterdienst.settings import Settings from wetterdienst.util.cache import CacheExpiry from wetterdienst.util.network import download_file from wetterdienst.util.parameter import DatasetTreeCore -FLOAT_9_TIMES = List[Optional[float]] +if TYPE_CHECKING: + from wetterdienst.metadata.parameter import Parameter + from wetterdienst.settings import Settings + +FLOAT_9_TIMES = list[Optional[float]] class WsvPegelParameter(DatasetTreeCore): @@ -126,7 +129,12 @@ class WsvPegelValues(TimeseriesValues): def _data_tz(self) -> Timezone: return Timezone.GERMANY - def _collect_station_parameter(self, station_id: str, parameter: Enum, dataset: Enum) -> pl.DataFrame: + def _collect_station_parameter( + self, + station_id: str, + parameter: Enum, + dataset: Enum, # noqa: ARG002 + ) -> pl.DataFrame: """ Method to collect data for station parameter from WSV Pegelonline following its open REST-API at https://pegelonline.wsv.de/webservices/rest-api/v2/stations/ @@ -151,7 +159,12 @@ def _collect_station_parameter(self, station_id: str, parameter: Enum, dataset: pl.lit(None, dtype=pl.Float64).alias(Columns.QUALITY.value), ) - def _fetch_frequency(self, station_id, parameter: Enum, dataset: Enum) -> str: + def _fetch_frequency( + self, + station_id, + parameter: Enum, + 
dataset: Enum, # noqa: ARG002 + ) -> str: """ Method to get the frequency string for a station and parameter from WSV Pegelonline. The frequency is given at each station dict queried from the REST-API under "equidistance" @@ -210,12 +223,12 @@ class WsvPegelRequest(TimeseriesRequest): def __init__( self, - parameter: List[Union[str, WsvPegelParameter, Parameter]], - start_date: Optional[Union[str, dt.datetime]] = None, - end_date: Optional[Union[str, dt.datetime]] = None, - settings: Optional[Settings] = None, + parameter: list[str | WsvPegelParameter | Parameter], + start_date: str | dt.datetime | None = None, + end_date: str | dt.datetime | None = None, + settings: Settings | None = None, ): - super(WsvPegelRequest, self).__init__( + super().__init__( parameter=parameter, resolution=Resolution.DYNAMIC, period=Period.RECENT, @@ -232,7 +245,7 @@ def _all(self) -> pl.LazyFrame: """ def _extract_ts( - ts_list: List[dict], + ts_list: list[dict], ) -> FLOAT_9_TIMES: """ Function to extract water level related information namely gauge zero and characteristic values diff --git a/wetterdienst/settings.py b/wetterdienst/settings.py index b31d2b493..81132e26f 100644 --- a/wetterdienst/settings.py +++ b/wetterdienst/settings.py @@ -1,12 +1,12 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
+from __future__ import annotations + import json import logging -import pathlib from copy import deepcopy from functools import partial -from typing import Dict, Literal, Optional, Union +from typing import TYPE_CHECKING, Literal import platformdirs from environs import Env @@ -14,6 +14,9 @@ from wetterdienst import Parameter +if TYPE_CHECKING: + import pathlib + log = logging.getLogger(__name__) @@ -49,18 +52,18 @@ class Settings: def __init__( self, - cache_disable: Optional[bool] = None, - cache_dir: Optional[pathlib.Path] = None, - fsspec_client_kwargs: Optional[dict] = None, - ts_humanize: Optional[bool] = None, - ts_shape: Optional[Literal["wide", "long"]] = None, - ts_si_units: Optional[bool] = None, - ts_skip_empty: Optional[bool] = None, - ts_skip_threshold: Optional[float] = None, - ts_skip_criteria: Optional[Literal["min", "mean", "max"]] = None, - ts_dropna: Optional[bool] = None, - ts_interpolation_use_nearby_station_distance: Optional[Union[float, int]] = None, - ts_interpolation_station_distance: Optional[Dict[str, float]] = None, + cache_disable: bool | None = None, + cache_dir: pathlib.Path | None = None, + fsspec_client_kwargs: dict | None = None, + ts_humanize: bool | None = None, + ts_shape: Literal["wide", "long"] | None = None, + ts_si_units: bool | None = None, + ts_skip_empty: bool | None = None, + ts_skip_threshold: float | None = None, + ts_skip_criteria: Literal["min", "mean", "max"] | None = None, + ts_dropna: bool | None = None, + ts_interpolation_use_nearby_station_distance: float | int | None = None, + ts_interpolation_station_distance: dict[str, float] | None = None, ignore_env: bool = False, ) -> None: _defaults = deepcopy(self._defaults) # make sure mutable objects are not changed @@ -83,12 +86,16 @@ def __init__( # timeseries self.ts_humanize: bool = _da(ts_humanize, env.bool("HUMANIZE", None), _defaults["ts_humanize"]) self.ts_shape: str = _da( - ts_shape, env.str("SHAPE", None, validate=OneOf(["long", "wide"])), 
_defaults["ts_shape"] + ts_shape, + env.str("SHAPE", None, validate=OneOf(["long", "wide"])), + _defaults["ts_shape"], ) self.ts_si_units: bool = _da(ts_si_units, env.bool("SI_UNITS", None), _defaults["ts_si_units"]) self.ts_skip_empty: bool = _da(ts_skip_empty, env.bool("SKIP_EMPTY", None), _defaults["ts_skip_empty"]) self.ts_skip_threshold: float = _da( - ts_skip_threshold, env.float("SKIP_THRESHOLD", None), _defaults["ts_skip_threshold"] + ts_skip_threshold, + env.float("SKIP_THRESHOLD", None), + _defaults["ts_skip_threshold"], ) self.ts_skip_criteria: str = _da( ts_skip_criteria, @@ -100,7 +107,7 @@ def __init__( with env.prefixed("INTERPOLATION_"): _ts_interpolation_station_distance = _defaults["ts_interpolation_station_distance"] _ts_interpolation_station_distance.update( - {k: float(v) for k, v in env.dict("STATION_DISTANCE", {}).items()} if not ignore_env else {} + {k: float(v) for k, v in env.dict("STATION_DISTANCE", {}).items()} if not ignore_env else {}, ) _ts_interpolation_station_distance.update(ts_interpolation_station_distance or {}) self.ts_interpolation_station_distance = _ts_interpolation_station_distance @@ -124,7 +131,7 @@ def __repr__(self) -> str: def __str__(self) -> str: return f"Settings({json.dumps(self.to_dict(),indent=4)})" - def __eq__(self, other: "Settings"): + def __eq__(self, other: Settings): return self.to_dict() == other.to_dict() def to_dict(self) -> dict: @@ -143,12 +150,12 @@ def to_dict(self) -> dict: "ts_interpolation_use_nearby_station_distance": self.ts_interpolation_use_nearby_station_distance, } - def reset(self) -> "Settings": + def reset(self) -> Settings: """Reset Wetterdienst Settings to start""" return self.__init__() @classmethod - def default(cls) -> "Settings": + def default(cls) -> Settings: """Ignore environmental variables and use all default arguments as defined above""" # Put empty env to force using the given defaults return cls(ignore_env=True) diff --git a/wetterdienst/ui/cli.py b/wetterdienst/ui/cli.py 
index fd1369cac..f7f4cf81e 100644 --- a/wetterdienst/ui/cli.py +++ b/wetterdienst/ui/cli.py @@ -1,13 +1,14 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import functools import json import logging import sys from collections import OrderedDict from pprint import pformat -from typing import List, Literal +from typing import Literal import click import cloup @@ -711,11 +712,11 @@ def fields(provider, network, dataset, resolution, period, language, **kwargs): def stations( provider: str, network: str, - parameter: List[str], + parameter: list[str], resolution: str, - period: List[str], + period: list[str], all_: bool, - station: List[str], + station: list[str], name: str, coordinates: str, rank: int, @@ -806,9 +807,9 @@ def stations( def values( provider: str, network: str, - parameter: List[str], + parameter: list[str], resolution: str, - period: List[str], + period: list[str], lead_time: Literal["short", "long"], date: str, issue: str, @@ -914,9 +915,9 @@ def values( def interpolate( provider: str, network: str, - parameter: List[str], + parameter: list[str], resolution: str, - period: List[str], + period: list[str], lead_time: Literal["short", "long"], use_nearby_station_distance: float, date: str, @@ -1001,9 +1002,9 @@ def interpolate( def summarize( provider: str, network: str, - parameter: List[str], + parameter: list[str], resolution: str, - period: List[str], + period: list[str], lead_time: Literal["short", "long"], date: str, issue: str, diff --git a/wetterdienst/ui/core.py b/wetterdienst/ui/core.py index 83189b043..0cb201afe 100644 --- a/wetterdienst/ui/core.py +++ b/wetterdienst/ui/core.py @@ -1,18 +1,12 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
+from __future__ import annotations + import logging import sys -from typing import List, Literal, Optional, Tuple, Union +from typing import TYPE_CHECKING, Literal from wetterdienst.core.process import create_date_range -from wetterdienst.core.timeseries.request import TimeseriesRequest -from wetterdienst.core.timeseries.result import ( - InterpolatedValuesResult, - StationsResult, - SummarizedValuesResult, - ValuesResult, -) from wetterdienst.metadata.datarange import DataRange from wetterdienst.metadata.period import PeriodType from wetterdienst.metadata.resolution import Resolution, ResolutionType @@ -20,10 +14,19 @@ from wetterdienst.settings import Settings from wetterdienst.util.enumeration import parse_enumeration_from_template +if TYPE_CHECKING: + from wetterdienst.core.timeseries.request import TimeseriesRequest + from wetterdienst.core.timeseries.result import ( + InterpolatedValuesResult, + StationsResult, + SummarizedValuesResult, + ValuesResult, + ) + log = logging.getLogger(__name__) -def unpack_parameters(parameter: str) -> List[str]: +def unpack_parameters(parameter: str) -> list[str]: """Parse parameters to either - list of str, each representing a parameter or - list of tuple of str representing a pair of parameter and dataset @@ -36,7 +39,7 @@ def unpack_parameters(parameter: str) -> List[str]: """ - def unpack_parameter(par: str) -> Union[str, Tuple[str, str]]: + def unpack_parameter(par: str) -> str | tuple[str, str]: try: parameter_, dataset_ = par.split("/") except ValueError: @@ -55,11 +58,11 @@ def unpack_parameter(par: str) -> Union[str, Tuple[str, str]]: def _get_stations_request( api, - parameter: List[str], + parameter: list[str], resolution: str, - period: List[str], + period: list[str], lead_time: str, - date: Optional[str], + date: str | None, issue: str, si_units: bool, shape: Literal["long", "wide"], @@ -104,7 +107,7 @@ def _get_stations_request( if api._data_range == DataRange.LOOSELY and not start_date and not end_date: # TODO: 
use another property "network" on each class raise TypeError( - f"Combination of provider {api._provider.name} and network {api._kind.name} requires start and end date" + f"Combination of provider {api._provider.name} and network {api._kind.name} requires start and end date", ) # Todo: We may have to apply other measures to allow for @@ -135,14 +138,14 @@ def _get_stations_request( def get_stations( api, - parameter: List[str], + parameter: list[str], resolution: str, - period: List[str], + period: list[str], lead_time: str, - date: Optional[str], - issue: Optional[str], + date: str | None, + issue: str | None, all_: bool, - station_id: List[str], + station_id: list[str], name: str, coordinates: str, rank: int, @@ -232,14 +235,14 @@ def get_stations( def get_values( api: TimeseriesRequest, - parameter: List[str], + parameter: list[str], resolution: str, lead_time: str, date: str, issue: str, - period: List[str], + period: list[str], all_, - station_id: List[str], + station_id: list[str], name: str, coordinates: str, rank: int, @@ -302,9 +305,9 @@ def get_values( def get_interpolate( api: TimeseriesRequest, - parameter: List[str], + parameter: list[str], resolution: str, - period: List[str], + period: list[str], lead_time: str, date: str, issue: str, @@ -357,9 +360,9 @@ def get_interpolate( def get_summarize( api: TimeseriesRequest, - parameter: List[str], + parameter: list[str], resolution: str, - period: List[str], + period: list[str], lead_time: str, date: str, issue: str, diff --git a/wetterdienst/ui/explorer/app.py b/wetterdienst/ui/explorer/app.py index a7d661b50..e1a804192 100644 --- a/wetterdienst/ui/explorer/app.py +++ b/wetterdienst/ui/explorer/app.py @@ -1,13 +1,13 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. """ Wetterdienst Explorer UI Dash application. 
""" +from __future__ import annotations + import json import logging -from typing import Optional import dash import dash_bootstrap_components as dbc @@ -116,10 +116,12 @@ def fetch_stations(provider: str, network: str, resolution: str, dataset: str, p df = df.with_columns( pl.col(Columns.START_DATE.value).map_elements( - lambda date: date and date.isoformat() or None, return_dtype=pl.Utf8 + lambda date: date and date.isoformat() or None, + return_dtype=pl.Utf8, ), pl.col(Columns.END_DATE.value).map_elements( - lambda date: date and date.isoformat() or None, return_dtype=pl.Utf8 + lambda date: date and date.isoformat() or None, + return_dtype=pl.Utf8, ), ) @@ -141,7 +143,13 @@ def fetch_stations(provider: str, network: str, resolution: str, dataset: str, p ], ) def fetch_values( - provider: str, network: str, resolution: str, dataset: str, parameter: str, period: str, station_id: int + provider: str, + network: str, + resolution: str, + dataset: str, + parameter: str, + period: str, + station_id: int, ): """ Fetch "values" data. @@ -165,7 +173,7 @@ def fetch_values( log.info( f"Requesting values for station_id={station_id}, parameter={parameter}, resolution={resolution}, " - f"period={period}" + f"period={period}", ) try: @@ -245,7 +253,13 @@ def render_navigation_stations(payload): ], ) def render_status_response_stations( - provider: str, network: str, resolution: str, dataset: str, parameter: str, period: str, payload: str + provider: str, + network: str, + resolution: str, + dataset: str, + parameter: str, + period: str, + payload: str, ): """ Report about the status of the query. @@ -263,8 +277,8 @@ def render_status_response_stations( f""" No data. Maybe the combination of "{resolution}", "{dataset}", "{parameter}" and "{period}" is invalid for provider "{provider}" and network "{network}". 
- """ - ) + """, + ), ] data = json.loads(payload) @@ -279,8 +293,8 @@ def render_status_response_stations( [ html.Div(f"Columns: {len(stations_data.columns)}"), html.Div(f"Records: {len(stations_data)}"), - ] - ) + ], + ), ] @@ -357,7 +371,9 @@ def render_map(payload): return [] stations_data = pl.from_dicts(data, infer_schema_length=0) stations_data = stations_data.with_columns( - pl.col("station_id").cast(pl.Utf8), pl.col("latitude").cast(pl.Float64), pl.col("longitude").cast(pl.Float64) + pl.col("station_id").cast(pl.Utf8), + pl.col("latitude").cast(pl.Float64), + pl.col("longitude").cast(pl.Float64), ) log.info(f"Rendering stations_result map from {frame_summary(stations_data)}") # columns used for constructing geojson object @@ -418,7 +434,7 @@ def render_graph(parameter, resolution, payload: str): r=0, # right margin b=0, # bottom margin t=0, # top margin - ) + ), ) return fig @@ -538,7 +554,14 @@ def set_parameter_options(provider, network, resolution, dataset): Input("select-period", "value"), ], ) -def reset_values(provider, network, resolution, dataset, parameter, period): +def reset_values( + provider, # noqa: ARG001 + network, + resolution, + dataset, + parameter, + period, +): """Reset settings values if any previous parameter has been changed e.g. when a new provider is selected, reset network, resolution, etc""" last_triggered = dash.callback_context.triggered[0]["prop_id"].split(".")[0] @@ -561,7 +584,7 @@ def reset_values(provider, network, resolution, dataset, parameter, period): return network, resolution, dataset, parameter, period -def start_service(listen_address: Optional[str] = None, reload: Optional[bool] = False): # pragma: no cover +def start_service(listen_address: str | None = None, reload: bool | None = False): # pragma: no cover """ This entrypoint will be used by `wetterdienst.cli`. 
""" diff --git a/wetterdienst/ui/explorer/layout/main.py b/wetterdienst/ui/explorer/layout/main.py index c1ac5fd6c..0f12248e3 100644 --- a/wetterdienst/ui/explorer/layout/main.py +++ b/wetterdienst/ui/explorer/layout/main.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. """ @@ -23,8 +22,8 @@ def get_about_modal(): **Resources**: - https://wetterdienst.readthedocs.io/en/latest/overview.html - https://github.com/earthobservations/wetterdienst - """ - ) + """, + ), ), dbc.ModalFooter(dbc.Button("Close", id="close-about", className="ml-auto")), ], diff --git a/wetterdienst/ui/explorer/layout/observations_germany.py b/wetterdienst/ui/explorer/layout/observations_germany.py index dfd4689aa..68a73d7f8 100644 --- a/wetterdienst/ui/explorer/layout/observations_germany.py +++ b/wetterdienst/ui/explorer/layout/observations_germany.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import dash_leaflet as dl diff --git a/wetterdienst/ui/explorer/library.py b/wetterdienst/ui/explorer/library.py index d51ab017e..fa3182fc6 100644 --- a/wetterdienst/ui/explorer/library.py +++ b/wetterdienst/ui/explorer/library.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
""" @@ -34,8 +33,8 @@ def default_figure(climate_data: pl.DataFrame, parameter, resolution, unit_dict) x=climate_data.get_column("date").to_list(), y=climate_data.get_column("value").to_list(), hoverinfo="x+y", - ) - ] + ), + ], ) fig.update_layout(yaxis={"title": ytitle}, xaxis={"title": "Date"}, showlegend=False) return fig diff --git a/wetterdienst/ui/explorer/util.py b/wetterdienst/ui/explorer/util.py index d2ce57f7d..3424ebb1d 100644 --- a/wetterdienst/ui/explorer/util.py +++ b/wetterdienst/ui/explorer/util.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import polars as pl diff --git a/wetterdienst/ui/restapi.py b/wetterdienst/ui/restapi.py index e40b51c2e..c59d3ef99 100644 --- a/wetterdienst/ui/restapi.py +++ b/wetterdienst/ui/restapi.py @@ -1,16 +1,16 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
+from __future__ import annotations + import json import logging -from typing import Annotated, Any, Literal, Optional, Union +from typing import TYPE_CHECKING, Annotated, Any, Literal, Optional, Union from click_params import StringListParamType from fastapi import FastAPI, HTTPException, Query from fastapi.responses import HTMLResponse, PlainTextResponse, Response from wetterdienst import Author, Info, Provider, Wetterdienst, __appname__, __version__ -from wetterdienst.core.timeseries.request import TimeseriesRequest from wetterdienst.core.timeseries.result import ( _InterpolatedValuesDict, _InterpolatedValuesOgcFeatureCollection, @@ -32,6 +32,9 @@ ) from wetterdienst.util.cli import read_list, setup_logging +if TYPE_CHECKING: + from wetterdienst.core.timeseries.request import TimeseriesRequest + app = FastAPI(debug=False) log = logging.getLogger(__name__) @@ -51,7 +54,7 @@ def _create_author_entry(author: Author): shortname = provider.name _, name, country, copyright_, url = provider.value sources.append( - f"
  • {shortname} ({name}, {country}) - {copyright_}
  • " + f"
  • {shortname} ({name}, {country}) - {copyright_}
  • ", ) sources = "\n".join(sources) info = Info() @@ -305,7 +308,7 @@ def values( parameter: Annotated[Optional[str], Query()] = None, resolution: Annotated[Optional[str], Query()] = None, period: Annotated[Optional[str], Query()] = None, - lead_time: Annotated[Optional[Literal["short", "long"]], Query()] = None, + lead_time: Annotated[Literal["short", "long"] | None, Query()] = None, date: Annotated[Optional[str], Query()] = None, issue: Annotated[Optional[str], Query()] = None, all_: Annotated[Optional[bool], Query(alias="all")] = None, @@ -406,7 +409,8 @@ def values( # - _InterpolatedValuesOgcFeatureCollection for geojson # - str for csv @app.get( - "/api/interpolate", response_model=Union[_InterpolatedValuesDict, _InterpolatedValuesOgcFeatureCollection, str] + "/api/interpolate", + response_model=Union[_InterpolatedValuesDict, _InterpolatedValuesOgcFeatureCollection, str], ) def interpolate( provider: Annotated[Optional[str], Query()] = None, @@ -414,7 +418,7 @@ def interpolate( parameter: Annotated[Optional[str], Query()] = None, resolution: Annotated[Optional[str], Query()] = None, period: Annotated[Optional[str], Query()] = None, - lead_time: Annotated[Optional[Literal["short", "long"]], Query()] = None, + lead_time: Annotated[Literal["short", "long"] | None, Query()] = None, date: Annotated[Optional[str], Query()] = None, issue: Annotated[Optional[str], Query()] = None, station: Annotated[Optional[str], Query()] = None, @@ -502,7 +506,7 @@ def summarize( parameter: Annotated[Optional[str], Query()] = None, resolution: Annotated[Optional[str], Query()] = None, period: Annotated[Optional[str], Query()] = None, - lead_time: Annotated[Optional[Literal["short", "long"]], Query()] = None, + lead_time: Annotated[Literal["short", "long"] | None, Query()] = None, date: Annotated[Optional[str], Query()] = None, issue: Annotated[Optional[str], Query()] = "latest", station: Annotated[Optional[str], Query()] = None, @@ -577,7 +581,7 @@ def summarize( return 
Response(content=content, media_type=media_type) -def start_service(listen_address: Optional[str] = None, reload: Optional[bool] = False): # pragma: no cover +def start_service(listen_address: str | None = None, reload: bool | None = False): # pragma: no cover from uvicorn.main import run setup_logging() diff --git a/wetterdienst/ui/streamlit/app.py b/wetterdienst/ui/streamlit/app.py index 5ba332ae4..c3cb7738e 100644 --- a/wetterdienst/ui/streamlit/app.py +++ b/wetterdienst/ui/streamlit/app.py @@ -1,8 +1,8 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2023, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import json -from typing import Optional import duckdb import plotly.express as px @@ -54,14 +54,14 @@ def create_plotly_fig( x: str, y: str, facet: bool, - lm: Optional[str], + lm: str | None, settings: dict, ): if "unit" in df.columns: df = df.with_columns( pl.struct(["parameter", "unit"]) .map_elements(lambda s: f"{s['parameter']} ({s['unit']})") - .alias("parameter") + .alias("parameter"), ) fig = px.scatter( x=df.get_column(x).to_list(), @@ -118,7 +118,7 @@ def main(): the [Deutscher Wetterdienst](https://www.dwd.de/). There are over 1_500 climate stations in Germany and all of the data can be accessed freely thanks to the open data initiative. The app enables you to select any of the stations (by station id or name), download its data (as CSV) and get visualizations of it. - """ + """, ) st.markdown("Here's a map of all stations:") stations = get_dwd_observation_stations(settings) @@ -143,7 +143,7 @@ def main(): Important: - use **FROM df** - use single quotes for strings e.g. 
'a_string' - """ + """, ) sql_query = st.text_area( "sql query", @@ -159,7 +159,8 @@ def main(): st.download_button( "Download JSON", df.with_columns(pl.col("date").map_elements(lambda d: d.isoformat())).write_json( - pretty=True, row_oriented=True + pretty=True, + row_oriented=True, ), "data.json", "text/json", @@ -177,7 +178,9 @@ def main(): columns = columns.copy() columns.remove(column_y) variable_column = st.selectbox( - "Column Variable", options=columns, index="parameter" in columns and columns.index("parameter") + "Column Variable", + options=columns, + index="parameter" in columns and columns.index("parameter"), ) variable_options = df.get_column(variable_column).unique().sort().to_list() variable_filter = st.multiselect("Variable Filter", options=variable_options) @@ -203,7 +206,7 @@ def main(): publishing their data as **open data**. Credits also go to [streamlit](https://streamlit.io/) for hosting this app. If you have any issues or ideas regarding this app, please let us know in the [issues](https://github.com/earthobservations/wetterdienst/issues). - """ + """, ) diff --git a/wetterdienst/util/__init__.py b/wetterdienst/util/__init__.py index 36979cf71..57a368863 100644 --- a/wetterdienst/util/__init__.py +++ b/wetterdienst/util/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. diff --git a/wetterdienst/util/cache.py b/wetterdienst/util/cache.py index 9c0a1b9ef..490699a50 100644 --- a/wetterdienst/util/cache.py +++ b/wetterdienst/util/cache.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
from enum import Enum diff --git a/wetterdienst/util/cli.py b/wetterdienst/util/cli.py index 5b8c8e4b8..e6398b2e1 100644 --- a/wetterdienst/util/cli.py +++ b/wetterdienst/util/cli.py @@ -1,12 +1,12 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. """A set of utility functions""" +from __future__ import annotations + import logging import sys import textwrap -from typing import List, Optional def setup_logging(level=logging.INFO) -> None: @@ -22,7 +22,7 @@ def setup_logging(level=logging.INFO) -> None: pint_logger.setLevel(logging.ERROR) -def read_list(data: Optional[str], separator: str = ",") -> List[str]: +def read_list(data: str | None, separator: str = ",") -> list[str]: if data is None: return [] diff --git a/wetterdienst/util/datetime.py b/wetterdienst/util/datetime.py index 533b972c5..19989c5f4 100644 --- a/wetterdienst/util/datetime.py +++ b/wetterdienst/util/datetime.py @@ -1,15 +1,15 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import datetime as dt -from typing import Tuple from dateutil.relativedelta import relativedelta from wetterdienst.metadata.resolution import Resolution -def round_minutes(timestamp: dt.datetime, step: int): +def round_minutes(timestamp: dt.datetime, step: int) -> dt.datetime: """ Align timestamp to the given minute mark before tm. - https://stackoverflow.com/a/3464000 @@ -24,7 +24,7 @@ def round_minutes(timestamp: dt.datetime, step: int): return timestamp - change -def raster_minutes(timestamp: dt.datetime, value: int): +def raster_minutes(timestamp: dt.datetime, value: int) -> dt.datetime: """ Align timestamp to the most recent minute mark. 
@@ -47,8 +47,8 @@ def raster_minutes(timestamp: dt.datetime, value: int): def mktimerange( resolution: Resolution, date_from: dt.datetime, - date_to: dt.datetime = None, -) -> Tuple[dt.datetime, dt.datetime]: + date_to: dt.datetime | None = None, +) -> tuple[dt.datetime, dt.datetime]: """ Compute appropriate time ranges for monthly and annual time resolutions. This takes into account to properly floor/ceil the date_from/date_to diff --git a/wetterdienst/util/eccodes.py b/wetterdienst/util/eccodes.py index a046a6644..4b64b8f61 100644 --- a/wetterdienst/util/eccodes.py +++ b/wetterdienst/util/eccodes.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (c) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from functools import lru_cache diff --git a/wetterdienst/util/enumeration.py b/wetterdienst/util/enumeration.py index f36bbab25..29451160b 100644 --- a/wetterdienst/util/enumeration.py +++ b/wetterdienst/util/enumeration.py @@ -1,16 +1,21 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
-from enum import Enum -from typing import Optional, Type, Union +from __future__ import annotations + +from typing import TYPE_CHECKING from wetterdienst.exceptions import InvalidEnumerationError from wetterdienst.util.python import to_list +if TYPE_CHECKING: + from enum import Enum + def parse_enumeration_from_template( - enum_: Union[str, Enum], intermediate: Type[Enum], base: Optional[Type[Enum]] = None -) -> Optional[Enum]: + enum_: str | Enum, + intermediate: type[Enum], + base: type[Enum] | None = None, +) -> Enum | None: """ Function used to parse an enumeration(string) to a enumeration based on a template diff --git a/wetterdienst/util/geo.py b/wetterdienst/util/geo.py index e0f41c908..6cc9c95d6 100644 --- a/wetterdienst/util/geo.py +++ b/wetterdienst/util/geo.py @@ -1,7 +1,6 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. -from typing import Tuple, Union +from __future__ import annotations import numpy as np import polars as pl @@ -47,7 +46,7 @@ def derive_nearest_neighbours( longitudes: np.array, coordinates: Coordinates, number_nearby: int = 1, -) -> Tuple[Union[float, np.ndarray], np.ndarray]: +) -> tuple[float | np.ndarray, np.ndarray]: """ A function that uses a k-d tree algorithm to obtain the nearest neighbours to coordinate pairs diff --git a/wetterdienst/util/io.py b/wetterdienst/util/io.py index f34b66ada..d637010d6 100644 --- a/wetterdienst/util/io.py +++ b/wetterdienst/util/io.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
def read_in_chunks(file_object, chunk_size=1024): diff --git a/wetterdienst/util/logging.py b/wetterdienst/util/logging.py index 00e0435d6..f3053d119 100644 --- a/wetterdienst/util/logging.py +++ b/wetterdienst/util/logging.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import io @@ -18,7 +17,7 @@ class TqdmToLogger(io.StringIO): buf = "" def __init__(self, logger, level=None): - super(TqdmToLogger, self).__init__() + super().__init__() self.logger = logger self.level = level or logging.INFO diff --git a/wetterdienst/util/network.py b/wetterdienst/util/network.py index fe991b380..ed6206cb5 100644 --- a/wetterdienst/util/network.py +++ b/wetterdienst/util/network.py @@ -1,20 +1,23 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. +from __future__ import annotations + import logging -import os from io import BytesIO from pathlib import Path -from typing import Dict, List, MutableMapping, Optional, Tuple, Union +from typing import TYPE_CHECKING, MutableMapping import stamina -from fsspec import AbstractFileSystem from fsspec.implementations.cached import WholeFileCacheFileSystem from fsspec.implementations.http import HTTPFileSystem as _HTTPFileSystem -from wetterdienst.settings import Settings from wetterdienst.util.cache import CacheExpiry +if TYPE_CHECKING: + from fsspec import AbstractFileSystem + + from wetterdienst.settings import Settings + log = logging.getLogger(__name__) @@ -22,8 +25,8 @@ class FileDirCache(MutableMapping): def __init__( self, use_listings_cache: bool, - listings_expiry_time: Union[int, float], - listings_cache_location: Optional[str] = None, + listings_expiry_time: int | float, + listings_cache_location: str | None = None, ): """ @@ -102,9 +105,9 @@ def __reduce__(self): class HTTPFileSystem(_HTTPFileSystem): def __init__( 
self, - use_listings_cache: Optional[bool] = None, - listings_expiry_time: Optional[Union[int, float]] = None, - listings_cache_location: Optional[str] = None, + use_listings_cache: bool | None = None, + listings_expiry_time: int | float | None = None, + listings_cache_location: str | None = None, *args, **kwargs, ): @@ -130,10 +133,10 @@ class NetworkFilesystemManager: Manage multiple FSSPEC instances keyed by cache expiration time. """ - filesystems: Dict[str, AbstractFileSystem] = {} + filesystems: dict[str, AbstractFileSystem] = {} @staticmethod - def resolve_ttl(ttl: Union[int, CacheExpiry]) -> Tuple[str, int]: + def resolve_ttl(ttl: int | CacheExpiry) -> tuple[str, int]: ttl_name = ttl ttl_value = ttl @@ -144,10 +147,10 @@ def resolve_ttl(ttl: Union[int, CacheExpiry]) -> Tuple[str, int]: return ttl_name, ttl_value @classmethod - def register(cls, settings, ttl: Union[int, CacheExpiry] = CacheExpiry.NO_CACHE): + def register(cls, settings, ttl: int | CacheExpiry = CacheExpiry.NO_CACHE): ttl_name, ttl_value = cls.resolve_ttl(ttl) key = f"ttl-{ttl_name}" - real_cache_dir = os.path.join(settings.cache_dir, "fsspec", key) + real_cache_dir = str(Path(settings.cache_dir) / "fsspec" / key) use_cache = not (settings.cache_disable or ttl is CacheExpiry.NO_CACHE) fs = HTTPFileSystem(use_listings_cache=use_cache, client_kwargs=settings.fsspec_client_kwargs) @@ -159,7 +162,7 @@ def register(cls, settings, ttl: Union[int, CacheExpiry] = CacheExpiry.NO_CACHE) cls.filesystems[key] = filesystem_effective @classmethod - def get(cls, settings, ttl: Optional[Union[int, CacheExpiry]] = CacheExpiry.NO_CACHE) -> AbstractFileSystem: + def get(cls, settings, ttl: int | CacheExpiry = CacheExpiry.NO_CACHE) -> AbstractFileSystem: ttl_name, _ = cls.resolve_ttl(ttl) key = f"ttl-{ttl_name}" if key not in cls.filesystems: @@ -168,7 +171,7 @@ def get(cls, settings, ttl: Optional[Union[int, CacheExpiry]] = CacheExpiry.NO_C @stamina.retry(on=Exception, attempts=3) -def 
list_remote_files_fsspec(url: str, settings: Settings, ttl: CacheExpiry = CacheExpiry.FILEINDEX) -> List[str]: +def list_remote_files_fsspec(url: str, settings: Settings, ttl: CacheExpiry = CacheExpiry.FILEINDEX) -> list[str]: """ A function used to create a listing of all files of a given path on the server. @@ -190,12 +193,15 @@ def list_remote_files_fsspec(url: str, settings: Settings, ttl: CacheExpiry = Ca @stamina.retry(on=Exception, attempts=3) def download_file( - url: str, settings: Settings, ttl: Optional[Union[int, CacheExpiry]] = CacheExpiry.NO_CACHE + url: str, + settings: Settings, + ttl: int | CacheExpiry = CacheExpiry.NO_CACHE, ) -> BytesIO: """ A function used to download a specified file from the server. :param url: The url to the file on the dwd server + :param settings: The settings object. :param ttl: How long the resource should be cached. :returns: Bytes of the file. diff --git a/wetterdienst/util/parameter.py b/wetterdienst/util/parameter.py index 6afa14640..57d507694 100644 --- a/wetterdienst/util/parameter.py +++ b/wetterdienst/util/parameter.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. import types diff --git a/wetterdienst/util/pdf.py b/wetterdienst/util/pdf.py index e1b9f951c..d54b4c3b8 100644 --- a/wetterdienst/util/pdf.py +++ b/wetterdienst/util/pdf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2021, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from io import StringIO diff --git a/wetterdienst/util/polars_util.py b/wetterdienst/util/polars_util.py index a504acba1..e97aa18f6 100644 --- a/wetterdienst/util/polars_util.py +++ b/wetterdienst/util/polars_util.py @@ -1,12 +1,11 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. 
-from typing import List, Tuple +from __future__ import annotations import polars as pl -def read_fwf_from_df(df: pl.DataFrame, column_specs: Tuple[Tuple[int, int], ...], header: bool = False) -> pl.DataFrame: +def read_fwf_from_df(df: pl.DataFrame, column_specs: tuple[tuple[int, int], ...], header: bool = False) -> pl.DataFrame: """Function to split a column of a polars DataFrame into multiple columns by given column specs :param df: the polars DataFrame of which a column is split :param column_specs: definition of column widths in [start, end] @@ -14,7 +13,7 @@ def read_fwf_from_df(df: pl.DataFrame, column_specs: Tuple[Tuple[int, int], ...] :return: polars DataFrame with split columns """ - def _get_columns(column: str) -> List[str]: + def _get_columns(column: str) -> list[str]: cols = [] for col_start, col_end in column_specs: col = column[col_start : (col_end + 1)] @@ -34,7 +33,7 @@ def _get_columns(column: str) -> List[str]: .str.strip_chars() .alias(f"column_{i}") for i, slice_tuple in enumerate(column_specs) - ] + ], ) df = df.select(pl.col(col).replace({"": None}, default=pl.col(col)) for col in df.columns) if header: diff --git a/wetterdienst/util/python.py b/wetterdienst/util/python.py index bbff5d0f7..e980a5011 100644 --- a/wetterdienst/util/python.py +++ b/wetterdienst/util/python.py @@ -13,7 +13,7 @@ class classproperty(property): """ def __init__(self, fget, *arg, **kw): - super(classproperty, self).__init__(fget, *arg, **kw) + super().__init__(fget, *arg, **kw) self.__doc__ = fget.__doc__ def __get__(desc, self, cls): diff --git a/wetterdienst/util/url.py b/wetterdienst/util/url.py index b5539b9de..2afddff8a 100644 --- a/wetterdienst/util/url.py +++ b/wetterdienst/util/url.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2018-2022, earthobservations developers. # Distributed under the MIT License. See LICENSE for more info. from urllib.parse import parse_qs, urlparse