Commit

Release 1.7.6, Merge pull request #328 from sentinel-hub/develop

zigaLuksic authored Jan 29, 2024
2 parents ed5978c + 2814092 commit 3bc62a5
Showing 13 changed files with 63 additions and 45 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -20,13 +20,13 @@ repos:
types_or: [json]

- repo: https://github.com/psf/black
rev: 23.12.1
rev: 24.1.1
hooks:
- id: black
language_version: python3

- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: "v0.1.11"
rev: "v0.1.14"
hooks:
- id: ruff

8 changes: 7 additions & 1 deletion CHANGELOG.md
@@ -1,4 +1,10 @@
## [Version 1.7.4] - 2024-01-10
## [Version 1.7.6] - 2024-01-29

- Pipelines that are run as part of a pipeline-chain execution will now no longer be retried by ray in the case when an exception occurs.
- Parsing time ranges now has support for more formats.


## [Version 1.7.5] - 2024-01-10

- Parameter `raise_if_failed` renamed to `raise_on_failure` and is now enabled by default.
- Numpy version restricted in anticipation of numpy 2.0 release.
2 changes: 1 addition & 1 deletion eogrow/__init__.py
@@ -1,3 +1,3 @@
"""The main module of the eo-grow package."""

__version__ = "1.7.5"
__version__ = "1.7.6"
3 changes: 1 addition & 2 deletions eogrow/types.py
@@ -1,5 +1,4 @@
""" Includes custom types used in schemas
"""
"""Includes custom types used in schemas"""

import datetime
import sys
2 changes: 1 addition & 1 deletion eogrow/utils/pipeline_chain.py
@@ -44,6 +44,6 @@ def run_pipeline_chain(pipeline_chain: list[RawConfig]) -> None:
ray.get(runner.remote(run_schema.pipeline_config))


@ray.remote
@ray.remote(max_retries=0)
def _pipeline_runner(config: RawConfig) -> None:
return load_pipeline_class(config).from_raw_config(config).run()
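A minimal sketch of what the max_retries=0 option above does, outside of eo-grow: a task decorated this way is not re-executed by ray after it fails, so the first failure propagates straight to the caller. The task name and error text are illustrative, and the exact retry semantics depend on the installed ray version.

    import ray

    ray.init(ignore_reinit_error=True)

    @ray.remote(max_retries=0)  # do not let ray re-run the task after a failure
    def flaky_task() -> int:
        raise RuntimeError("pipeline step failed")

    try:
        ray.get(flaky_task.remote())
    except ray.exceptions.RayTaskError as err:
        print(f"task failed once and was not retried: {err}")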
18 changes: 16 additions & 2 deletions eogrow/utils/testing.py
@@ -14,11 +14,12 @@
import fs
import geopandas as gpd
import numpy as np
import pandas as pd
import rasterio
from deepdiff import DeepDiff
from fs.base import FS
from fs.osfs import OSFS
from shapely import MultiPolygon, Point, Polygon
from shapely import MultiPolygon, Point, Polygon, wkb, wkt

from eolearn.core import EOPatch, FeatureType
from eolearn.core.eodata_io import get_filesystem_data_info
@@ -90,13 +91,26 @@ def calculate_statistics(folder: str, config: StatCalcConfig) -> JsonDict:
elif content_path.endswith((".geojson", ".gpkg")):
stats[content] = _calculate_vector_stats(gpd.read_file(content_path), config)
elif content_path.endswith(".parquet"):
stats[content] = _calculate_vector_stats(gpd.read_parquet(content_path), config)
try:
data = gpd.read_parquet(content_path)
except Exception:
data = _load_as_geoparquet(content_path)
stats[content] = _calculate_vector_stats(data, config)
else:
stats[content] = None

return stats


def _load_as_geoparquet(path: str) -> gpd.GeoDataFrame:
data = pd.read_parquet(path)
if isinstance(data.geometry.iloc[0], str):
data.geometry = data.geometry.apply(wkt.loads)
elif isinstance(data.geometry.iloc[0], bytes):
data.geometry = data.geometry.apply(wkb.loads)
return gpd.GeoDataFrame(data, geometry="geometry", crs=data.utm_crs.iloc[0])


def _calculate_eopatch_stats(eopatch: EOPatch, config: StatCalcConfig) -> JsonDict:
"""Calculates statistics of given EOPatch and it's content"""
stats: JsonDict = defaultdict(dict)
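A hedged usage sketch of the fallback introduced above in testing.py: when geopandas.read_parquet cannot read a file directly, _load_as_geoparquet rebuilds a GeoDataFrame from a plain parquet file whose geometry column holds WKT (or WKB) values and whose CRS sits in a utm_crs column. The file path below is hypothetical and the snippet mirrors only the WKT branch.

    import geopandas as gpd
    import pandas as pd
    from shapely import wkt

    path = "stats/vectors.parquet"  # hypothetical plain-parquet file without geo metadata
    try:
        data = gpd.read_parquet(path)  # normal geoparquet route
    except Exception:
        plain = pd.read_parquet(path)  # fallback: read as plain parquet
        plain.geometry = plain.geometry.apply(wkt.loads)  # WKT strings -> shapely geometries
        data = gpd.GeoDataFrame(plain, geometry="geometry", crs=plain.utm_crs.iloc[0])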
8 changes: 4 additions & 4 deletions eogrow/utils/validators.py
@@ -4,11 +4,11 @@

from __future__ import annotations

import datetime as dt
import inspect
from typing import TYPE_CHECKING, Any, Callable, Iterable, Tuple, Union

import numpy as np
from dateutil.parser import isoparse
from pydantic import BaseModel, Field, validator

from eolearn.core import FeatureType
@@ -144,8 +144,8 @@ def parse_time_period(value: tuple[str, str]) -> TimePeriod:
}
value = start_dates[kind], end_dates[kind]

start = dt.datetime.strptime(value[0], "%Y-%m-%d").date()
end = dt.datetime.strptime(value[1], "%Y-%m-%d").date()
start = isoparse(value[0])
end = isoparse(value[1])
assert start <= end, "Invalid start and end dates provided. End date must follow the start date"
return start, end

@@ -241,7 +241,7 @@ def parse_data_collection(value: str | dict | DataCollection) -> DataCollection:


def restrict_types(
allowed_feature_types: Iterable[FeatureType] | Callable[[FeatureType], bool]
allowed_feature_types: Iterable[FeatureType] | Callable[[FeatureType], bool],
) -> Callable[[Feature], Feature]:
"""Validates a field representing a feature, where it restricts the possible feature types."""

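A small hedged illustration of the strptime-to-isoparse switch above, which is what the changelog means by support for more time-range formats; the input strings are examples only:

    from dateutil.parser import isoparse

    # date-only input, as the old "%Y-%m-%d" parsing already allowed
    print(isoparse("2022-02-02"))           # 2022-02-02 00:00:00
    # full ISO 8601 timestamp, which strptime with "%Y-%m-%d" would reject
    print(isoparse("2022-02-22T22:22:02"))  # 2022-02-22 22:22:02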
6 changes: 4 additions & 2 deletions pyproject.toml
@@ -52,6 +52,7 @@ dependencies = [
"opencv-python-headless",
"pandas",
"pydantic>=1.8.0, <2.0",
"python-dateutil",
"python-rapidjson",
"rasterio",
"ray[default]",
@@ -72,20 +73,21 @@ docs = [
dev = [
"eo-grow[ML]",
"boto3",
"boto3-stubs",
"build",
"deepdiff",
"fs_s3fs",
"moto",
"moto[s3]>=5.0.0",
"mypy>=0.990",
"pre-commit",
"pyogrio",
"pytest-cov",
"pytest-lazy-fixture",
"pytest-order",
"pytest>=4.0.0",
"requests-mock",
"scipy",
"twine",
"types-python-dateutil",
"types-mock",
"types-requests",
"types-setuptools",
1 change: 1 addition & 0 deletions tests/core/area/test_batch.py
@@ -7,6 +7,7 @@
- Batch request definition endpoint.
- Tiling grid request endpoints.
- Mocking requests of iter_tiles would be too much effort, so the `_make_new_split` of the splitter is mocked instead.
"""

from unittest.mock import patch
29 changes: 12 additions & 17 deletions tests/core/area/test_utm.py
@@ -5,28 +5,23 @@

from eogrow.core.area import UtmZoneAreaManager

LARGE_AREA_CONFIG = {
"geometry_filename": "test_large_area.geojson",
"patch": {"size_x": 1000000, "size_y": 1000000, "buffer_x": 0, "buffer_y": 0},
}

@pytest.fixture(scope="session", name="large_area_config")
def large_area_config_fixture():
return {
"geometry_filename": "test_large_area.geojson",
"patch": {"size_x": 1000000, "size_y": 1000000, "buffer_x": 0, "buffer_y": 0},
}


@pytest.fixture(scope="session", name="area_config")
def area_config_fixture():
return {
"geometry_filename": "test_area.geojson",
"patch": {"size_x": 2400, "size_y": 1100, "buffer_x": 120, "buffer_y": 55},
}
AREA_CONFIG = {
"geometry_filename": "test_area.geojson",
"patch": {"size_x": 2400, "size_y": 1100, "buffer_x": 120, "buffer_y": 55},
}


@pytest.mark.parametrize(
("config", "expected_zone_num", "expected_bbox_num"),
[
(pytest.lazy_fixture("area_config"), 1, 2),
(pytest.lazy_fixture("large_area_config"), 71, 368),
(AREA_CONFIG, 1, 2),
(LARGE_AREA_CONFIG, 71, 368),
],
)
def test_bbox_split(storage, config, expected_zone_num, expected_bbox_num):
@@ -46,7 +41,7 @@ def test_bbox_split(storage, config, expected_zone_num, expected_bbox_num):
assert bbox_count == expected_bbox_num


def test_cache_name(storage, area_config):
area_manager = UtmZoneAreaManager.from_raw_config(area_config, storage)
def test_cache_name(storage):
area_manager = UtmZoneAreaManager.from_raw_config(AREA_CONFIG, storage)

assert area_manager.get_grid_cache_filename() == "UtmZoneAreaManager_test_area_2400_1100_120.0_55.0_0.0_0.0.gpkg"
6 changes: 3 additions & 3 deletions tests/core/test_logging.py
@@ -4,7 +4,7 @@
import pytest
from fs.tempfs import TempFS
from fs_s3fs import S3FS
from moto import mock_s3
from moto import mock_aws

from eolearn.core import EOExecutor, EONode, EOTask, EOWorkflow

@@ -17,7 +17,7 @@ def execute(self, *_, value=0):
raise ValueError(f"Value is {value}")


@mock_s3
@mock_aws
def _create_new_s3_fs():
"""Creates a new empty mocked s3 bucket. If one such bucket already exists it deletes it first."""
bucket_name = "mocked-test-bucket"
@@ -35,7 +35,7 @@ def _create_new_s3_fs():
return S3FS(bucket_name=bucket_name)


@mock_s3
@mock_aws
@pytest.mark.parametrize("fs_loader", [TempFS, _create_new_s3_fs])
@pytest.mark.parametrize(
"logs_handler_factory", [EOExecutionHandler, functools.partial(RegularBackupHandler, backup_interval=0.01)]
4 changes: 2 additions & 2 deletions tests/utils/test_filter.py
@@ -6,7 +6,7 @@
import pytest
from fs.tempfs import TempFS
from fs_s3fs import S3FS
from moto import mock_s3
from moto import mock_aws

from eolearn.core import EOPatch, FeatureType
from sentinelhub import CRS, BBox
@@ -41,7 +41,7 @@ def eopatch_fixture():

@pytest.fixture(name="mock_s3fs", scope="session")
def mock_s3fs_fixture(eopatch):
with mock_s3():
with mock_aws():
s3resource = boto3.resource("s3", region_name="eu-central-1")
s3resource.create_bucket(Bucket=BUCKET_NAME, CreateBucketConfiguration={"LocationConstraint": "eu-central-1"})
mock_s3fs = S3FS(BUCKET_NAME)
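The mock_s3 to mock_aws renames in the two test files above follow moto 5.0 (also pinned as moto[s3]>=5.0.0 in pyproject.toml), where the per-service decorators were merged into a single mock_aws. A minimal sketch of the new usage, with an illustrative bucket name and region:

    import boto3
    from moto import mock_aws

    @mock_aws  # moto >= 5.0: one decorator for all mocked AWS services
    def list_buckets() -> list[str]:
        s3 = boto3.client("s3", region_name="eu-central-1")
        s3.create_bucket(
            Bucket="mocked-test-bucket",
            CreateBucketConfiguration={"LocationConstraint": "eu-central-1"},
        )
        return [bucket["Name"] for bucket in s3.list_buckets()["Buckets"]]

    print(list_buckets())  # ["mocked-test-bucket"]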
17 changes: 9 additions & 8 deletions tests/utils/test_validators.py
@@ -170,18 +170,19 @@ class DummySchema(Pipeline.Schema):


@pytest.mark.parametrize(
("time_period", "year", "expected_start_date", "expected_end_date"),
("first_param", "second_param", "expected_start_date", "expected_end_date"),
[
("yearly", 2020, "2020-01-01", "2020-12-31"),
("Q2", 2021, "2021-04-01", "2021-06-30"),
("Q2-yearly", 2021, "2020-07-01", "2021-06-30"),
("yearly", 2020, "2020-01-01T00:00:00", "2020-12-31T00:00:00"),
("Q2", 2021, "2021-04-01T00:00:00", "2021-06-30T00:00:00"),
("Q2-yearly", 2021, "2020-07-01T00:00:00", "2021-06-30T00:00:00"),
("2022-02-02", "2022-02-22T22:22:02", "2022-02-02T00:00:00", "2022-02-22T22:22:02"),
],
)
def test_parse_time_period(time_period, year, expected_start_date, expected_end_date):
start_date, end_date = parse_time_period([time_period, year])
def test_parse_time_period(first_param, second_param, expected_start_date, expected_end_date):
start_date, end_date = parse_time_period([first_param, second_param])

assert isinstance(start_date, dt.date)
assert isinstance(end_date, dt.date)
assert isinstance(start_date, dt.datetime)
assert isinstance(end_date, dt.datetime)

assert start_date.isoformat() == expected_start_date
assert end_date.isoformat() == expected_end_date
