Feature 18 #33

Open · wants to merge 59 commits into base: main
Commits (59)
3734918
Add OGC API item and map
Jan 29, 2024
032b398
Add OGC API item and map
Jan 29, 2024
ca675b6
Update geokube version for drivers
Jan 30, 2024
59a9b69
Test api to add filters for map
vale95-eng Feb 1, 2024
5e08db9
Fix api map endpoint with filters
vale95-eng Feb 1, 2024
dbc49f7
Add filters for items endpoint in api
vale95-eng Feb 2, 2024
944270f
Add filters for items endpoint in api
vale95-eng Feb 2, 2024
7d38011
Add filters for items endpoint in api
vale95-eng Feb 2, 2024
b7a7252
Merge pull request #25 from vale95-eng/feature_18
vale95-eng Feb 2, 2024
b59703e
Fix map and items for dataset rs-indices
vale95-eng Feb 8, 2024
ad95dd9
Merge branch 'CMCC-Foundation:feature_18' into feature_18
vale95-eng Feb 8, 2024
295a5cc
Merge pull request #27 from vale95-eng/feature_18
vale95-eng Feb 8, 2024
129801c
Fix dpi for map
vale95-eng Feb 27, 2024
b47a662
Merge pull request #28 from vale95-eng/feature_18
vale95-eng Feb 27, 2024
93b6cd9
Fix dpi for map as parameter
vale95-eng Feb 27, 2024
5ce30c4
Merge pull request #29 from vale95-eng/feature_18
vale95-eng Feb 27, 2024
ce8c3dd
Fix dpi equal to 100 for map as parameter
vale95-eng Feb 27, 2024
d6f097a
Merge pull request #30 from vale95-eng/feature_18
vale95-eng Feb 27, 2024
2cfd3c1
Change geokube image version
vale95-eng Feb 28, 2024
6dd4138
Add cmap as parameter for map
vale95-eng Feb 28, 2024
8bc92d8
Merge pull request #32 from vale95-eng/feature_18
vale95-eng Feb 28, 2024
8d0a4be
add optional bbox for item and map
vale95-eng Mar 20, 2024
81fbd65
update dockerfile
gtramonte May 2, 2024
367f759
adding registry for Dockerfiles base
gtramonte May 2, 2024
bbd3cf7
adding tag echo on workflow
gtramonte May 3, 2024
ab57ede
fix python version
gtramonte May 3, 2024
d32a447
adding cache on workflow
gtramonte May 3, 2024
0fb4abb
fix to python version and catalog workdir
gtramonte May 3, 2024
010608d
fix python version
gtramonte May 3, 2024
1459dd0
adding projection to api get map
gtramonte May 3, 2024
bc54df8
moving projection inside geokube
gtramonte May 3, 2024
fc5231a
Fix bug for pasture
vale95-eng May 8, 2024
4de5d7c
fix time should be optional
gtramonte May 13, 2024
1084516
Merge branch 'main' into feature_18
gtramonte May 13, 2024
6de979b
adding vmin and vmax option to get_map and get_map_with_filters API
gtramonte Jun 3, 2024
0ba23eb
Add oai_cat
vale95-eng Jun 17, 2024
e09dffe
Fix utils import
vale95-eng Jun 17, 2024
936f3bf
Fix utils import
vale95-eng Jun 17, 2024
7d41050
Add rdflib in requirements
vale95-eng Jun 17, 2024
f75e01c
Fix metadata_provider import
vale95-eng Jun 17, 2024
aa10db5
Remoove scopes as argument
vale95-eng Jun 17, 2024
19a4527
Remove json dumps
vale95-eng Jun 17, 2024
5b346c5
change example url in oai_utils
vale95-eng Jun 17, 2024
3e924b5
adding csv format to persist datacube
gtramonte Jun 20, 2024
faf79ec
adding csv format into persist dataset
gtramonte Jun 20, 2024
0c5bbe9
fix slice for time and vertical axis
gtramonte Jun 27, 2024
7783589
fix query
gtramonte Jun 27, 2024
b12e27d
update oai dcat api for italian portal
gtramonte Jul 12, 2024
aaabd1c
fix import
gtramonte Jul 12, 2024
6565728
fix to dcat_ap_it api
gtramonte Aug 1, 2024
352c4c7
other fixes to dcat_ap_it api
gtramonte Aug 2, 2024
d7dd851
fix for dcat_ap_it italian validator
gtramonte Aug 2, 2024
051320b
fix for datatime.strptime
gtramonte Aug 2, 2024
7d33613
final adjustments to dcat_ap_it api
gtramonte Aug 6, 2024
c66a62c
Merge branch 'main' into feature_18
gtramonte Aug 6, 2024
2892c10
fix geokube version
gtramonte Aug 20, 2024
e0fe5ee
fix iot environmental accrual periodicity and double slash in paths
gtramonte Aug 29, 2024
33a704f
datatype should be dateTime
gtramonte Aug 30, 2024
39d75d2
update gitignore
gtramonte Sep 26, 2024
5 changes: 4 additions & 1 deletion .gitignore
@@ -112,4 +112,7 @@ venv.bak/
_catalogs/
_old/

.DS_Store
.DS_Store
db_init
*.zarr
*.nc
103 changes: 87 additions & 16 deletions api/app/endpoint_handlers/dataset.py
@@ -3,7 +3,9 @@
import pika
from typing import Optional

from dbmanager.dbmanager import DBManager
from fastapi.responses import FileResponse

from dbmanager.dbmanager import DBManager, RequestStatus
from geoquery.geoquery import GeoQuery
from geoquery.task import TaskList
from datastore.datastore import Datastore, DEFAULT_MAX_REQUEST_SIZE_GB
@@ -18,12 +20,18 @@
from api_utils import make_bytes_readable_dict
from validation import assert_product_exists

from . import request

log = get_dds_logger(__name__)
data_store = Datastore()

MESSAGE_SEPARATOR = os.environ["MESSAGE_SEPARATOR"]

def _is_estimate_enabled(dataset_id, product_id):
    # Size estimation is skipped for datasets that do not support it.
    if dataset_id in ("sentinel-2",):
        return False
    return True


@log_execution_time(log)
def get_datasets(user_roles_names: list[str]) -> list[dict]:
@@ -213,7 +221,7 @@ def estimate(

@log_execution_time(log)
@assert_product_exists
def query(
def async_query(
    user_id: str,
    dataset_id: str,
    product_id: str,
@@ -250,21 +258,22 @@ def query(

"""
log.debug("geoquery: %s", query)
    estimated_size = estimate(dataset_id, product_id, query, "GB").get("value")
    allowed_size = data_store.product_metadata(dataset_id, product_id).get(
        "maximum_query_size_gb", DEFAULT_MAX_REQUEST_SIZE_GB
    )
    if estimated_size > allowed_size:
        raise exc.MaximumAllowedSizeExceededError(
            dataset_id=dataset_id,
            product_id=product_id,
            estimated_size_gb=estimated_size,
            allowed_size_gb=allowed_size,
        )
    if estimated_size == 0.0:
        raise exc.EmptyDatasetError(
            dataset_id=dataset_id, product_id=product_id
        )
    if _is_estimate_enabled(dataset_id, product_id):
        estimated_size = estimate(dataset_id, product_id, query, "GB").get("value")
        allowed_size = data_store.product_metadata(dataset_id, product_id).get(
            "maximum_query_size_gb", DEFAULT_MAX_REQUEST_SIZE_GB
        )
        if estimated_size > allowed_size:
            raise exc.MaximumAllowedSizeExceededError(
                dataset_id=dataset_id,
                product_id=product_id,
                estimated_size_gb=estimated_size,
                allowed_size_gb=allowed_size,
            )
        if estimated_size == 0.0:
            raise exc.EmptyDatasetError(
                dataset_id=dataset_id, product_id=product_id
            )
    broker_conn = pika.BlockingConnection(
        pika.ConnectionParameters(
            host=os.getenv("BROKER_SERVICE_HOST", "broker")
@@ -295,6 +304,68 @@ def query(
    broker_conn.close()
    return request_id

@log_execution_time(log)
@assert_product_exists
def sync_query(
    user_id: str,
    dataset_id: str,
    product_id: str,
    query: GeoQuery,
):
    """Realize the logic for the endpoint:

    `POST /datasets/{dataset_id}/{product_id}/execute`

    Query the data and return the result of the request.

    Parameters
    ----------
    user_id : str
        ID of the user executing the query
    dataset_id : str
        ID of the dataset
    product_id : str
        ID of the product
    query : GeoQuery
        Query to perform

    Returns
    -------
    response : FileResponse
        File containing the result of the query

    Raises
    -------
    MaximumAllowedSizeExceededError
        if the allowed size is below the estimated one
    EmptyDatasetError
        if estimated size is zero
    ProductRetrievingError
        if the request finished with a status other than DONE

    """
    import time

    request_id = async_query(user_id, dataset_id, product_id, query)
    status, _ = DBManager().get_request_status_and_reason(request_id)
    log.debug("sync query: status: %s", status)
    # Poll once per second until the request leaves a non-terminal state.
    while status in (RequestStatus.RUNNING, RequestStatus.QUEUED,
                     RequestStatus.PENDING):
        time.sleep(1)
        status, _ = DBManager().get_request_status_and_reason(request_id)
        log.debug("sync query: status: %s", status)

    if status is RequestStatus.DONE:
        download_details = DBManager().get_download_details_for_request_id(
            request_id
        )
        return FileResponse(
            path=download_details.location_path,
            filename=download_details.location_path.split(os.sep)[-1],
        )
    raise exc.ProductRetrievingError(
        dataset_id=dataset_id,
        product_id=product_id,
        status=status.name)


@log_execution_time(log)
def run_workflow(
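Note on the new handlers above: `sync_query` wraps `async_query`, polls the request status once per second until the worker finishes, and then returns the produced file directly. A minimal client-side sketch of the two flows follows — the base URL, dataset/product IDs, and query body are illustrative assumptions, and the `/query` path for the async route is inferred from the handler naming rather than confirmed by this diff:

```python
import requests

BASE = "http://localhost:8080/datasets"  # assumed deployment URL

# Hypothetical GeoQuery payload; real field names depend on geoquery.GeoQuery.
query = {"time": {"start": "2024-01-01", "stop": "2024-01-31"}}

# Asynchronous flow: returns a request ID that the client polls separately
# (assuming the endpoint serializes the ID as a JSON scalar).
request_id = requests.post(f"{BASE}/era5/reanalysis/query", json=query).json()

# Synchronous flow (this PR): blocks server-side until the request is DONE,
# then streams the result file back; a failed request yields an error response.
resp = requests.post(f"{BASE}/era5/reanalysis/execute", json=query)
resp.raise_for_status()
with open("result.nc", "wb") as out:
    out.write(resp.content)
```

The one-second polling loop keeps the worker code untouched, at the cost of holding the HTTP connection open for long-running queries.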
6 changes: 5 additions & 1 deletion api/app/endpoint_handlers/request.py
@@ -86,7 +86,11 @@ def get_request_resulting_size(request_id: int):
        If the request was not found
    """
    if request := DBManager().get_request_details(request_id):
        return request.download.size_bytes
        size = request.download.size_bytes
        if not size:
            raise exc.EmptyDatasetError(dataset_id=request.dataset,
                                        product_id=request.product)
        return size
    log.info(
        "request with id '%s' could not be found",
        request_id,
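With this change the size endpoint treats a missing or zero-byte download as an empty dataset. A sketch of a unit test for that behavior — the module paths and the `DBManager` patch target are assumptions based on the file layout in this PR:

```python
from types import SimpleNamespace
from unittest.mock import patch

import pytest

# Assumed import paths, following api/app/endpoint_handlers/request.py above.
from api.app.endpoint_handlers import request as handlers
from api.app import exceptions as exc


def test_zero_byte_download_raises_empty_dataset_error():
    # Fake request record whose download produced zero bytes.
    fake = SimpleNamespace(
        dataset="era5", product="reanalysis",
        download=SimpleNamespace(size_bytes=0),
    )
    with patch.object(handlers, "DBManager") as db:
        db.return_value.get_request_details.return_value = fake
        with pytest.raises(exc.EmptyDatasetError):
            handlers.get_request_resulting_size(request_id=1)
```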
13 changes: 13 additions & 0 deletions api/app/exceptions.py
@@ -180,3 +180,16 @@ def __init__(self, dataset_id, product_id):
            product_id=product_id,
        )
        super().__init__(self.msg)

class ProductRetrievingError(BaseDDSException):
    """Retrieving of the product failed."""

    msg: str = "Retrieving of the product '{dataset_id}.{product_id}' failed with status {status}"

    def __init__(self, dataset_id, product_id, status):
        self.msg = self.msg.format(
            dataset_id=dataset_id,
            product_id=product_id,
            status=status
        )
        super().__init__(self.msg)
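For reference, a quick sketch of how the new exception renders its message (the dataset, product, and status values are made up; `sync_query` passes `status.name` here):

```python
# Hypothetical usage; assumes ProductRetrievingError is importable
# from the API's exceptions module shown above.
err = ProductRetrievingError("era5", "reanalysis", "FAILED")
print(err.msg)
# Retrieving of the product 'era5.reanalysis' failed with status FAILED
```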