Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Google Floods Part 1: Creating gauge points and tooltip graphs; COUNTRY=cambodia #1328

Open
wants to merge 33 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 28 commits
Commits
Show all changes
33 commits
Select commit Hold shift + click to select a range
c5c1215
Adding Google Floods Backend
Aug 8, 2024
b9722d3
Viewing the gauges on the frontend, adding error handling to backend
gislawill Aug 12, 2024
bc92d38
Update comment
gislawill Aug 12, 2024
08a4499
Updates to support tooltip content
gislawill Aug 12, 2024
0653f1e
copy update
gislawill Aug 12, 2024
7cd866d
Refactor title state management
gislawill Aug 12, 2024
94e1588
Fixing and adding server tests
gislawill Aug 12, 2024
6ac3aad
update set_env
ericboucher Aug 13, 2024
eb29b6e
Merge branch 'master' into feature/google-floods/1317
ericboucher Aug 13, 2024
6be26d7
update snapshots
ericboucher Aug 13, 2024
2fe481a
Merge branch 'master' into feature/google-floods/1317
gislawill Aug 15, 2024
8e84cbb
Adding support for multi country deployments and adding google flood …
gislawill Aug 21, 2024
efdf2c9
Use interpolation for popup titles
gislawill Aug 21, 2024
8fa327c
Adding pytest-recording
gislawill Aug 21, 2024
f5ffbf6
Merge branch 'master' into feature/google-floods/1317
ericboucher Aug 21, 2024
438ee04
Update test_google_floods_api.py
ericboucher Aug 21, 2024
e369704
Merge branch 'feature/google-floods/1317' of https://github.com/WFP-V…
ericboucher Aug 21, 2024
6547f3b
Fixing river template
gislawill Aug 22, 2024
ac58dca
More selective with pytest recording
gislawill Aug 22, 2024
65af7e7
Merge branch 'master' into feature/google-floods/1317
ericboucher Aug 22, 2024
0adb49d
Update API URL
ericboucher Aug 22, 2024
dc94f60
Merge branch 'master' into feature/google-floods/1317
ericboucher Sep 25, 2024
b0780fa
Merge branch 'master' into feature/google-floods/1317
gislawill Oct 7, 2024
7f401bb
Adding support for Cambodia
gislawill Oct 7, 2024
341a3a7
Merge branch 'master' into feature/google-floods/1317
gislawill Oct 7, 2024
1e51479
Merge branch 'master' into feature/google-floods/1317
gislawill Oct 8, 2024
2a890cf
Adding date support
gislawill Oct 9, 2024
6d33543
Google Floods Part 2: Adding tooltip graph; COUNTRY=cambodia (#1330)
gislawill Oct 31, 2024
c4155ef
Merge branch 'master' into feature/google-floods/1317
gislawill Nov 8, 2024
66cba35
Fixing tooltip collapsing
gislawill Nov 12, 2024
05188b3
Updating api with requested changes: test comments, abstracting utili…
gislawill Nov 12, 2024
daaf86c
Update border width
gislawill Nov 15, 2024
439a542
Updates to chart line thickness and time selection
gislawill Nov 15, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
220 changes: 220 additions & 0 deletions api/app/googleflood.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,220 @@
"""Get data from Google Floods API"""

import logging
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime, timezone
from os import getenv
from urllib.parse import urlencode

import requests
from fastapi import HTTPException

logger = logging.getLogger(__name__)

GOOGLE_FLOODS_API_KEY = getenv("GOOGLE_FLOODS_API_KEY", "")
if GOOGLE_FLOODS_API_KEY == "":
logger.warning("Missing backend parameter: GOOGLE_FLOODS_API_KEY")


def make_google_floods_request(url, method="get", data=None, retries=1, timeout=10):
    """Make a request to the Google Floods API with retries and error handling.

    Parameters
    ----------
    url : str
        Fully-qualified URL to call (API key already embedded by callers).
    method : str
        ``"post"`` sends a JSON POST; any other value falls back to GET.
    data : dict | None
        JSON payload for POST requests.
    retries : int
        Number of attempts before giving up; network failures are logged
        and the function falls through to the next attempt.
    timeout : int
        Per-request timeout in seconds.

    Returns
    -------
    dict
        Decoded JSON response, or ``{}`` when every attempt failed.

    Raises
    ------
    HTTPException
        500 when the Google API response contains an ``"error"`` key.
    """
    # Initialized up front so the function is well-defined even when
    # retries <= 0 (previously response_data was unbound in that case).
    response_data = {}
    for _ in range(retries):
        try:
            if method == "post":
                response_data = requests.post(url, json=data, timeout=timeout).json()
            else:
                response_data = requests.get(url, timeout=timeout).json()
            break
        except requests.exceptions.RequestException as e:
            logger.warning("Request failed at url %s: %s", url, e)
            response_data = {}

    if "error" in response_data:
        logger.error("Error in response: %s", response_data["error"])
        raise HTTPException(
            status_code=500, detail="Error fetching data from Google API"
        )

    return response_data


def format_gauge_to_geojson(data):
    """Convert a merged gauge record into a GeoJSON Point feature."""
    properties = {
        "gaugeId": data["gaugeId"],
        "issuedTime": data["issuedTime"],
        "siteName": data["siteName"],
        # Only keep river names longer than one character; short values
        # from the API are treated as "no river name".
        "riverName": (
            data["river"] if "river" in data and len(data["river"]) > 1 else None
        ),
        "severity": data["severity"],
        "source": data["source"],
        "qualityVerified": data["qualityVerified"],
        "thresholds": data["thresholds"],
        "gaugeValueUnit": data["gaugeValueUnit"],
    }
    # inundationMapSet is optional on the upstream payload, so it is only
    # attached when the key is present.
    if "inundationMapSet" in data:
        properties["inundationMapSet"] = data["inundationMapSet"]

    location = data["gaugeLocation"]
    return {
        "type": "Feature",
        "geometry": {
            "type": "Point",
            # GeoJSON ordering is [longitude, latitude].
            "coordinates": [location["longitude"], location["latitude"]],
        },
        "properties": properties,
    }


def fetch_flood_status(region_code):
    """Fetch the latest flood statuses for a single ISO2 region code."""
    url = f"https://floodforecasting.googleapis.com/v1/floodStatus:searchLatestFloodStatusByArea?key={GOOGLE_FLOODS_API_KEY}"
    # retries=3 because this endpoint intermittently errors out.
    return make_google_floods_request(
        url, method="post", data={"regionCode": region_code}, retries=3
    )


def _parse_issued_time(timestamp):
    """Parse an RFC3339 UTC timestamp, with or without fractional seconds.

    Returns a naive ``datetime`` or ``None`` when the string matches
    neither accepted format.
    """
    for fmt in ("%Y-%m-%dT%H:%M:%S.%fZ", "%Y-%m-%dT%H:%M:%SZ"):
        try:
            return datetime.strptime(timestamp, fmt)
        except ValueError:
            continue
    return None


def get_google_flood_dates(region_codes: list[str]):
    """
    When more complex date support is needed, this can be used to fetch dates from the Google Floods API.

    For now, we just return today's date at the region

    Returns a single-element list ``[{"date": "YYYY-MM-DD"}]`` for the most
    recent issuedTime across all regions, or ``[]`` when none is available.

    Raises HTTPException (500) when the API reports an error.
    """
    flood_statuses = []

    # Statuses for all regions are fetched concurrently; each request is
    # retried 3 times inside fetch_flood_status due to intermittent API errors.
    with ThreadPoolExecutor() as executor:
        future_to_region = {
            executor.submit(fetch_flood_status, code): code for code in region_codes
        }
        for future in as_completed(future_to_region):
            status_response = future.result()
            if "error" in status_response:
                logger.error("Error in response: %s", status_response["error"])
                raise HTTPException(
                    status_code=500,
                    detail="Error fetching flood status data from Google API",
                )
            flood_statuses.extend(status_response.get("floodStatuses", []))

    # Tolerant parsing: the API is not guaranteed to include fractional
    # seconds, and a single fixed strptime format raised ValueError on
    # timestamps like "2024-10-09T00:00:00Z". Unparseable entries are skipped.
    parsed_issued_times = [
        parsed
        for status in flood_statuses
        if "issuedTime" in status
        and (parsed := _parse_issued_time(status["issuedTime"])) is not None
    ]

    if not parsed_issued_times:
        return []

    # Only the most recent date is exposed.
    most_recent = max(parsed_issued_times)
    return [{"date": most_recent.replace(tzinfo=timezone.utc).strftime("%Y-%m-%d")}]


def get_google_floods_gauges(
    region_codes: list[str],
    as_geojson: bool = True,
):
    """Get Google Floods gauges (status + details + model) for regions.

    Parameters
    ----------
    region_codes : list[str]
        ISO2 region codes to query.
    as_geojson : bool
        When True (default) return a GeoJSON FeatureCollection; otherwise
        return the raw list of merged gauge dicts.

    Raises
    ------
    HTTPException
        500 when the flood-status API reports an error.
    """
    initial_gauges = []

    # Statuses for all regions are fetched concurrently; each request is
    # retried 3 times inside fetch_flood_status due to intermittent API errors.
    with ThreadPoolExecutor() as executor:
        future_to_region = {
            executor.submit(fetch_flood_status, code): code for code in region_codes
        }
        for future in as_completed(future_to_region):
            status_response = future.result()
            if "error" in status_response:
                logger.error("Error in response: %s", status_response["error"])
                raise HTTPException(
                    status_code=500,
                    detail="Error fetching flood status data from Google API",
                )
            initial_gauges.extend(status_response.get("floodStatuses", []))

    # Short-circuit: with no gauges there is nothing to batchGet, and the
    # follow-up requests would otherwise be issued with an empty names list.
    if not initial_gauges:
        if as_geojson:
            return {"type": "FeatureCollection", "features": []}
        return []

    gauge_details_params = urlencode(
        {"names": [f"gauges/{gauge['gaugeId']}" for gauge in initial_gauges]},
        doseq=True,
    )
    gauges_details_url = f"https://floodforecasting.googleapis.com/v1/gauges:batchGet?key={GOOGLE_FLOODS_API_KEY}&{gauge_details_params}"

    gauge_models_params = urlencode(
        {"names": [f"gaugeModels/{gauge['gaugeId']}" for gauge in initial_gauges]},
        doseq=True,
    )
    gauges_models_url = f"https://floodforecasting.googleapis.com/v1/gaugeModels:batchGet?key={GOOGLE_FLOODS_API_KEY}&{gauge_models_params}"

    # Run both requests
    details_response = make_google_floods_request(gauges_details_url)
    models_response = make_google_floods_request(gauges_models_url)

    # Create maps for quick lookup
    gauge_details_map = {
        item["gaugeId"]: item for item in details_response.get("gauges", [])
    }
    gauge_models_map = {
        item["gaugeId"]: item for item in models_response.get("gaugeModels", [])
    }

    # Merge status, detail and model records per gauge; later sources win
    # on key collisions (model overrides detail overrides status).
    gauges_details = []
    for gauge in initial_gauges:
        gauge_id = gauge["gaugeId"]
        detail = gauge_details_map.get(gauge_id, {})
        model = gauge_models_map.get(gauge_id, {})
        merged_gauge = {**gauge, **detail, **model}
        gauges_details.append(merged_gauge)

    if as_geojson:
        return {
            "type": "FeatureCollection",
            "features": [format_gauge_to_geojson(gauge) for gauge in gauges_details],
        }
    return gauges_details


def get_google_floods_gauge_forecast(gauge_ids: list[str]):
    """Get forecast data for one or more gauges.

    Returns a dict mapping each gauge id to a list of
    ``{"issuedTime": ..., "value": [forecastStartTime, value]}`` entries,
    deduplicated by forecastStartTime (most recent issuedTime wins).
    """
    gauge_params = urlencode({"gaugeIds": list(gauge_ids)}, doseq=True)
    forecast_url = f"https://floodforecasting.googleapis.com/v1/gauges:queryGaugeForecasts?key={GOOGLE_FLOODS_API_KEY}&{gauge_params}"
    forecast_response = make_google_floods_request(forecast_url)

    forecasts = forecast_response.get("forecasts", {})

    forecast_data = {}
    for gauge_id in gauge_ids:
        forecast_map = {}
        for forecast in forecasts.get(gauge_id, {}).get("forecasts", []):
            issued_time = forecast.get("issuedTime")
            for forecast_range in forecast.get("forecastRanges", []):
                start_time = forecast_range.get("forecastStartTime")
                value = forecast_range.get("value")
                # Skip malformed ranges: round(None, 2) would raise a
                # TypeError and a missing start time cannot be deduplicated.
                if start_time is None or value is None:
                    continue
                value = round(value, 2)

                # Deduplicate by forecastStartTime, keeping the most recent
                # issuedTime (ISO-8601 strings compare chronologically).
                if (
                    start_time not in forecast_map
                    or issued_time > forecast_map[start_time]["issuedTime"]
                ):
                    forecast_map[start_time] = {
                        "issuedTime": issued_time,
                        "value": [start_time, value],
                    }

        forecast_data[gauge_id] = list(forecast_map.values())

    return forecast_data
62 changes: 62 additions & 0 deletions api/app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,11 @@
from app.database.alert_model import AlchemyEncoder, AlertModel
from app.database.database import AlertsDataBase
from app.database.user_info_model import UserInfoModel
from app.googleflood import (
get_google_flood_dates,
get_google_floods_gauge_forecast,
get_google_floods_gauges,
)
from app.hdc import get_hdc_stats
from app.kobo import get_form_dates, get_form_responses, parse_datetime_params
from app.models import AcledRequest, RasterGeotiffModel
Expand Down Expand Up @@ -410,3 +415,60 @@ def post_raster_geotiff(raster_geotiff: RasterGeotiffModel):
return JSONResponse(
content={"download_url": presigned_download_url}, status_code=200
)


@app.get("/google-floods/gauges/")
def get_google_floods_gauges_api(region_codes: list[str] = Query(...)):
    """
    Get the Google Floods gauges for a list of regions.
    """
    if not region_codes:
        raise HTTPException(
            status_code=400,
            detail="At least one region code must be provided.",
        )

    # Validate all codes are ISO2 before touching the upstream API.
    invalid_codes = [code for code in region_codes if len(code) != 2]
    if invalid_codes:
        raise HTTPException(
            status_code=400,
            detail=f"Region code '{invalid_codes[0]}' must be exactly two characters (iso2).",
        )

    return get_google_floods_gauges([code.upper() for code in region_codes])


@app.get("/google-floods/dates/")
def get_google_floods_dates_api(region_codes: list[str] = Query(...)):
    """
    Get the Google Floods dates for a list of regions.
    """
    if not region_codes:
        raise HTTPException(
            status_code=400,
            detail="At least one region code must be provided.",
        )

    # Reject the first code that is not a two-character ISO2 identifier.
    bad_code = next((code for code in region_codes if len(code) != 2), None)
    if bad_code is not None:
        raise HTTPException(
            status_code=400,
            detail=f"Region code '{bad_code}' must be exactly two characters (iso2).",
        )

    return get_google_flood_dates([code.upper() for code in region_codes])


@app.get("/google-floods/gauges/forecasts")
def get_google_floods_gauge_forecast_api(
    gauge_ids: str = Query(..., description="Comma-separated list of gauge IDs")
):
    """Get forecast data for a gauge or multiple gauges"""
    # Drop empty fragments so "" or "a,,b" cannot slip through validation:
    # "".split(",") returns [""], which is truthy and previously bypassed
    # the emptiness check below. Also avoids shadowing the builtin `id`.
    gauge_id_list = [gid.strip() for gid in gauge_ids.split(",") if gid.strip()]
    if not gauge_id_list:
        raise HTTPException(
            status_code=400,
            detail="gauge_ids must be provided and contain at least one value.",
        )
    return get_google_floods_gauge_forecast(gauge_id_list)
3 changes: 3 additions & 0 deletions api/app/pytest.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
[pytest]
log_cli = true
log_cli_level = INFO
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
interactions:
- request:
body: null
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- python-requests/2.32.0
method: GET
uri: https://floodforecasting.googleapis.com/v1/gauges:queryGaugeForecasts?gaugeIds=hybas_1121465590
response:
body:
string: !!binary |
H4sIAAAAAAAC/9Vay24bRxC86ysEnaVBvx+8+wcSnRIEgZIojoHEBiw5QGDo39OkY4GkL9zWEhMB
PJDLnd3isruqq3s+X1xeXv3+4eP9r3cPjw9Xm8vPdaAO/fHPL3cPPyMSiqkmPH9zdPqP/x28fP56
d8q7h4dP97/dvvvrvs65IiC5gbghuUXeKG00hlEE2A9X1/vLvl75u7v3b+8PL398i935f9/9+Wl7
B40cyS4caRLCrtfHp3699PePdx8fvwHGtwCb3esQ0cHSN++//UX7Cw/WPV2fCD1xACKIszEF0lLo
0oUua0APYjYh9QZy7SLXFyOveBHX1ArvTrhYF7m9HLkNURcNSYNcDt270P3l0HmkkVuoisVi5NFF
Hi9HDqNiXNzAhXA59OxCzxdDdx3uFeTIELo8Xhia0A8WHkLf+/TT8/u9H3SKkuhWSYQ2IJUOmbme
kvgg8Ajt/dMT2bg4zRQsK1rhtdFx1QMVpZSY2XnqM/lYRgZVwLTSayod07AIr2gHXh4tE9nYS7wF
I0UbuOdSMSRVMc1MjUd+Dio+GXoJoBiIdNKTsQscz6ohtnMjudOQKgU5VtOQqtEEvJg4O9XlTCK2
QQ4oaIBqi5NrLg+X9klX/GbyMFZx6RgK2pKQmUy89fFVzSejCSyPl5lkXLqNLE0HOJWLeSj2jMg5
mHgB7MLMDgbL7V/eQA/60cL1RcR3IsIbqOobQsRXExGt3CqrT66cjdyaycVaHio1xckaKjK3RREu
xuzReeYzWxQ4oNsTmknDMbAAI/VsyFQe3nYKpCSkBXwmE1d5ysHaqebPQ8QLkCNzIuLyVnkBoC5y
OquExK6XpdupiJdr4PUkJAcRGoaVu2kYkZlEnIOZm3w2k4Z9WOk1g722grgMlKNls68yk4ir3BCm
svCI/rpq4ooWrYTvSMhcIg4bllRVfEb8T5h4QdszVaJkZLHvq/v3BsVHC9eXkLxF2QBtkAYZg+Rq
EgIjJcopN+chc4d29f9Gq8yZXhRrAjlkS/+mFsU5AJ072TWZimEEUKjJcvmbzMUwzJw620fmMnEZ
qCrkIQN7z/wMXHwydBruTkHeaB8Wgt6Y+Gjh6jJS+f9lfxb7SNzueFhLRpzLiQjJK+wT0xDndsN1
JhVXjFZsFh13dmdNbVAU8vL56OG4WP8mdyh4VIinl5b4Yss9mY5haBkohWg0P+fSsdkgFG7sKzsP
F5+MWwYLRaouH1oWgN6g+Gjh+iryJYtwIzyUSuVX62eZV6XT7f/NJWOLEYbUGn7NJWPLwVuvT5av
rDB2HGAogGyL+0JzqbiiBcpEARMtn6BNpmIdgk7Z2s4+mY25ivoSvmZP6wxsfDJyHKUl6oqtHO2N
io8WnqIjF/tHtp+eLp4u/gULG2d/gzIAAA==
headers:
Alt-Svc:
- h3=":443"; ma=2592000,h3-29=":443"; ma=2592000
Cache-Control:
- private
Content-Encoding:
- gzip
Content-Type:
- application/json; charset=UTF-8
Date:
- Sat, 31 Aug 2024 00:23:43 GMT
Server:
- ESF
Server-Timing:
- gfet4t7; dur=397
Transfer-Encoding:
- chunked
Vary:
- Origin
- X-Origin
- Referer
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- '0'
status:
code: 200
message: OK
version: 1
Loading
Loading