Skip to content

Commit

Permalink
Merge pull request #164 from Ouranosinc/fix-163
Browse files Browse the repository at this point in the history
Notebooks connect to PAVICS or a local server
  • Loading branch information
huard authored Nov 11, 2019
2 parents a45a0cf + 66f8b75 commit e5334d5
Show file tree
Hide file tree
Showing 35 changed files with 896 additions and 1,154 deletions.
4 changes: 2 additions & 2 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,6 @@ sudo: false

install:
- make install
- make start
# - sleep 2
# # Prepare env with Python version
# - conda create -n raven python=$TRAVIS_PYTHON_VERSION
Expand All @@ -59,9 +58,10 @@ install:
before_script:
# # Start WPS service on port 5000 on 0.0.0.0
# - raven start --daemon --bind-host 0.0.0.0 --port 5000
- sleep 2
- make start

script:
- make test
- make test_nb
- if [[ $DOCS = true ]]; then make docs; fi
- if [[ $PEP8 = true ]]; then make pep8; fi
1 change: 1 addition & 0 deletions CHANGES.rst
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ Changes
* Optimized memory usage in ReadTheDocs builds when using Sphinx autodoc by employing mock
* Cleaner GeoJSON outputs for many subsetting processes
* Employed ipyleaflet for notebook-based web-maps
* Run py.test on notebooks from local or remote server


0.7.x (2019-06-25)
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# vim:set ft=dockerfile:
FROM continuumio/miniconda3
MAINTAINER https://github.com/huard/raven
LABEL Description="Raven WPS" Vendor="Birdhouse" Version="0.8.2-beta"
LABEL Description="Raven WPS" Vendor="Birdhouse" Version="0.8.3-beta"

# Update Debian system
RUN apt-get update && apt-get install -y \
Expand Down
9 changes: 9 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,9 @@ OSTRICH_SRC = $(CURDIR)/src/OSTRICH
OSTRICH_TARGET = GCC # can be also MPI but requires mpi compiler; not tested
UNAME_S := $(shell uname -s)
DOWNLOAD_CACHE = /tmp/
RAVEN_WPS_URL = http://localhost:9099
FLYINGPIGEON_WPS_URL = http://localhost:8093


ifeq "$(UNAME_S)" "Linux"
FN := Miniconda3-latest-Linux-x86_64.sh
Expand Down Expand Up @@ -50,6 +53,7 @@ help:
@echo " clean to remove *all* files that are not controlled by 'git'. WARNING: use it *only* if you know what you do!"
@echo "\nTesting targets:"
@echo " test to run tests (but skip long running tests)."
@echo " test_nb to verify Jupyter Notebook test outputs are valid."
@echo " testall to run all tests (including long running tests)."
@echo " pep8 to run pep8 code style checks."
@echo "\nSphinx targets:"
Expand Down Expand Up @@ -167,6 +171,11 @@ test:
@echo "Running tests (skip slow and online tests) ..."
@bash -c "source $(ANACONDA_HOME)/bin/activate $(CONDA_ENV);pytest -v -m 'not slow and not online'"

.PHONY: test_nb
test_nb:
@echo "Running notebook-based tests"
@bash -c "source $(ANACONDA_HOME)/bin/activate $(CONDA_ENV);env RAVEN_WPS_URL=$(RAVEN_WPS_URL) FLYINGPIGEON_WPS_URL=$(FLYINGPIGEON_WPS_URL) pytest --nbval $(CURDIR)/docs/source/notebooks/ --sanitize-with $(CURDIR)/docs/source/output_sanitize.cfg --ignore $(CURDIR)/docs/source/notebooks/.ipynb_checkpoints"

.PHONY: test_pdb
test_pdb:
@echo "Running tests (skip slow and online tests) with --pdb ..."
Expand Down
2 changes: 1 addition & 1 deletion docs/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = '0.8.2-beta'
release = '0.8.3-beta'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
Expand Down
234 changes: 84 additions & 150 deletions docs/source/notebooks/Delineation_workflow.ipynb

Large diffs are not rendered by default.

148 changes: 63 additions & 85 deletions docs/source/notebooks/Perform_Regionalization.ipynb

Large diffs are not rendered by default.

77 changes: 17 additions & 60 deletions docs/source/notebooks/Raven_run_parallel_basins.ipynb

Large diffs are not rendered by default.

74 changes: 25 additions & 49 deletions docs/source/notebooks/Region_selection.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,8 @@
"import requests\n",
"import matplotlib\n",
"import ipyleaflet\n",
"import ipywidgets"
"import ipywidgets\n",
"import os"
]
},
{
Expand Down Expand Up @@ -51,14 +52,9 @@
"outputs": [],
"source": [
"# Create WPS instances\n",
"\n",
"# # locally running instance\n",
"# raven_url = \"http://localhost:9099/wps\"\n",
"# raven = birdy.WPSClient(raven_url, progress=True)\n",
"\n",
"# instance via pavics.ouranos.ca\n",
"# Set environment variable RAVEN_WPS_URL to \"http://localhost:9099\" to run on the default local server\n",
"pavics_url = \"https://pavics.ouranos.ca\"\n",
"raven_url = f'{pavics_url}/twitcher/ows/proxy/raven/wps'\n",
"raven_url = os.environ.get(\"RAVEN_WPS_URL\", f\"{pavics_url}/twitcher/ows/proxy/raven/wps\")\n",
"\n",
"raven = birdy.WPSClient(raven_url, progress=True)"
]
Expand Down Expand Up @@ -122,7 +118,7 @@
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "89452f06cebe438181f1d08de3a2dad6",
"model_id": "6bf0347c6e9846b48bd0f5c7b54816ce",
"version_major": 2,
"version_minor": 0
},
Expand Down Expand Up @@ -150,16 +146,16 @@
},
{
"cell_type": "code",
"execution_count": 11,
"execution_count": 7,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[-71.93847656250001, 48.48748647988415]"
"[-72.33398437500001, 48.748945343432936]"
]
},
"execution_count": 11,
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
Expand All @@ -171,13 +167,13 @@
},
{
"cell_type": "code",
"execution_count": 12,
"execution_count": 8,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "4411003f654c4a2ab105986d88974af1",
"model_id": "a484d2457236427e97fcefe4439800a6",
"version_major": 2,
"version_minor": 0
},
Expand All @@ -190,60 +186,40 @@
}
],
"source": [
"# user_location = raven.hydrobasins_select(location=lonlat, aggregate_upstream=True)\n",
"user_location = raven.hydrosheds_select(location=str(user_lonlat), aggregate_upstream=True)\n"
"# NBVAL_SKIP\n",
"# Get the shape of the watershed contributing to flow at the selected location. \n",
"resp = raven.hydrobasins_select(location=str(user_lonlat), aggregate_upstream=True)"
]
},
{
"cell_type": "code",
"execution_count": 13,
"execution_count": 9,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'COAST': 0,\n",
" 'DIST_MAIN': 167.2,\n",
" 'DIST_SINK': 167.2,\n",
" 'ENDO': 0,\n",
" 'HYBAS_ID': 7129001701,\n",
" 'LAKE': 301,\n",
" 'NEXT_DOWN': 7120317402,\n",
" 'NEXT_SINK': 7120034330,\n",
" 'ORDER': 1,\n",
" 'PFAF_ID': 724083034000,\n",
" 'SIDE': 'L',\n",
" 'SORT': 96070,\n",
" 'SUB_AREA': 72417.5,\n",
" 'UP_AREA': 1067.6,\n",
" 'id': 'USGS_HydroBASINS_lake_na_lev12.96070'}"
]
},
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
}
],
"outputs": [],
"source": [
"# NBVAL_SKIP\n",
"# Before continuing, wait for the process to finish!\n",
"\n",
"# Extract the URL of the resulting GeoJSON feature\n",
"user_shape = user_location.get(asobj=False).feature\n",
"user_shape\n",
"features, ids = resp.get(asobj=True)\n",
"\n",
"user_shape = resp.get(asobj=False).feature\n",
"\n",
"# Examine its properties\n",
"user_data = requests.get(user_shape).json()\n",
"user_data['properties']"
"#features[0]['properties']"
]
},
{
"cell_type": "code",
"execution_count": 14,
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"# Add this GeoJSON to the map!\n",
"# NBVAL_SKIP\n",
"# Add this GeoJSON to the map above!\n",
"#df = gpd.GeoDataFrame.from_features(features)\n",
"df = gpd.read_file(user_shape)\n",
"\n",
"user_geojson = ipyleaflet.GeoData(geo_dataframe=df, \n",
" style = {\n",
" 'color': 'blue', \n",
Expand Down
55 changes: 22 additions & 33 deletions docs/source/notebooks/Run_Raven_with_Parallel_parameters.ipynb

Large diffs are not rendered by default.

59 changes: 16 additions & 43 deletions docs/source/notebooks/computing_objective_functions.ipynb

Large diffs are not rendered by default.

121 changes: 77 additions & 44 deletions docs/source/notebooks/example_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,60 +9,93 @@
from pathlib import Path

TESTS_HOME = Path(__file__).parent.parent.parent.parent
TD = TESTS_HOME / 'tests' / 'testdata'
CFG_FILE = [str(TESTS_HOME / 'test.cfg'), ]
TD = TESTS_HOME / "tests" / "testdata"
CFG_FILE = [str(TESTS_HOME / "test.cfg")]

TESTDATA = dict()
TESTDATA['gr4j-cemaneige'] = \
{'pr': TD / 'gr4j_cemaneige' / 'pr.nc',
'tas': TD / 'gr4j_cemaneige' / 'tas.nc',
'evap': TD / 'gr4j_cemaneige' / 'evap.nc'}
TESTDATA["gr4j-cemaneige"] = {
"pr": TD / "gr4j_cemaneige" / "pr.nc",
"tas": TD / "gr4j_cemaneige" / "tas.nc",
"evap": TD / "gr4j_cemaneige" / "evap.nc",
}

TESTDATA['raven-gr4j-cemaneige-nc-ts'] = TD / 'raven-gr4j-cemaneige' / 'Salmon-River-Near-Prince-George_meteo_daily.nc'
TESTDATA['raven-gr4j-cemaneige-nc-rv'] = tuple((TD / 'raven-gr4j-cemaneige').glob('raven-gr4j-salmon.rv?'))
TESTDATA["raven-gr4j-cemaneige-nc-ts"] = (
TD / "raven-gr4j-cemaneige" / "Salmon-River-Near-Prince-George_meteo_daily.nc"
)
TESTDATA["raven-gr4j-cemaneige-nc-rv"] = tuple(
(TD / "raven-gr4j-cemaneige").glob("raven-gr4j-salmon.rv?")
)

TESTDATA['raven-mohyse-nc-ts'] = TESTDATA['raven-gr4j-cemaneige-nc-ts']
TESTDATA['raven-mohyse'] = TD / 'raven-mohyse'
TESTDATA['raven-mohyse-rv'] = tuple((TD / 'raven-mohyse').glob('raven-mohyse-salmon.rv?'))
TESTDATA['raven-mohyse-ts'] = tuple((TD / 'raven-mohyse').glob('Salmon-River-Near-Prince-George_*.rvt'))
TESTDATA["raven-mohyse-nc-ts"] = TESTDATA["raven-gr4j-cemaneige-nc-ts"]
TESTDATA["raven-mohyse"] = TD / "raven-mohyse"
TESTDATA["raven-mohyse-rv"] = tuple(
(TD / "raven-mohyse").glob("raven-mohyse-salmon.rv?")
)
TESTDATA["raven-mohyse-ts"] = tuple(
(TD / "raven-mohyse").glob("Salmon-River-Near-Prince-George_*.rvt")
)

TESTDATA['raven-hmets-nc-ts'] = TESTDATA['raven-gr4j-cemaneige-nc-ts']
TESTDATA['raven-hmets'] = TD / 'raven-hmets'
TESTDATA['raven-hmets-rv'] = tuple((TD / 'raven-hmets').glob('raven-hmets-salmon.rv?'))
TESTDATA['raven-hmets-ts'] = tuple((TD / 'raven-hmets').glob('Salmon-River-Near-Prince-George_*.rvt'))
TESTDATA["raven-hmets-nc-ts"] = TESTDATA["raven-gr4j-cemaneige-nc-ts"]
TESTDATA["raven-hmets"] = TD / "raven-hmets"
TESTDATA["raven-hmets-rv"] = tuple((TD / "raven-hmets").glob("raven-hmets-salmon.rv?"))
TESTDATA["raven-hmets-ts"] = tuple(
(TD / "raven-hmets").glob("Salmon-River-Near-Prince-George_*.rvt")
)

TESTDATA['raven-hbv-ec-nc-ts'] = TESTDATA['raven-gr4j-cemaneige-nc-ts']
TESTDATA['raven-hbv-ec'] = TD / 'raven-hbv-ec'
TESTDATA['raven-hbv-ec-rv'] = tuple((TD / 'raven-hbv-ec').glob('raven-hbv-ec-salmon.rv?'))
TESTDATA['raven-hbv-ec-ts'] = tuple((TD / 'raven-hbv-ec').glob('Salmon-River-Near-Prince-George_*.rvt'))
TESTDATA["raven-hbv-ec-nc-ts"] = TESTDATA["raven-gr4j-cemaneige-nc-ts"]
TESTDATA["raven-hbv-ec"] = TD / "raven-hbv-ec"
TESTDATA["raven-hbv-ec-rv"] = tuple(
(TD / "raven-hbv-ec").glob("raven-hbv-ec-salmon.rv?")
)
TESTDATA["raven-hbv-ec-ts"] = tuple(
(TD / "raven-hbv-ec").glob("Salmon-River-Near-Prince-George_*.rvt")
)

TESTDATA['ostrich-gr4j-cemaneige'] = TD / 'ostrich-gr4j-cemaneige'
TESTDATA['ostrich-gr4j-cemaneige-rv'] = \
tuple(TESTDATA['ostrich-gr4j-cemaneige'].glob("*.rv?")) + tuple(TESTDATA['ostrich-gr4j-cemaneige'].glob('*.t??'))
TESTDATA['ostrich-gr4j-cemaneige-nc-ts'] = TESTDATA['raven-gr4j-cemaneige-nc-ts']
TESTDATA["ostrich-gr4j-cemaneige"] = TD / "ostrich-gr4j-cemaneige"
TESTDATA["ostrich-gr4j-cemaneige-rv"] = tuple(
TESTDATA["ostrich-gr4j-cemaneige"].glob("*.rv?")
) + tuple(TESTDATA["ostrich-gr4j-cemaneige"].glob("*.t??"))
TESTDATA["ostrich-gr4j-cemaneige-nc-ts"] = TESTDATA["raven-gr4j-cemaneige-nc-ts"]

TESTDATA['ostrich-mohyse'] = TD / 'ostrich-mohyse'
TESTDATA['ostrich-mohyse-rv'] = \
tuple(TESTDATA['ostrich-mohyse'].glob("*.rv?")) + tuple(TESTDATA['ostrich-mohyse'].glob('*.t??'))
TESTDATA['ostrich-mohyse-nc-ts'] = TESTDATA['raven-mohyse-nc-ts']
TESTDATA["ostrich-mohyse"] = TD / "ostrich-mohyse"
TESTDATA["ostrich-mohyse-rv"] = tuple(TESTDATA["ostrich-mohyse"].glob("*.rv?")) + tuple(
TESTDATA["ostrich-mohyse"].glob("*.t??")
)
TESTDATA["ostrich-mohyse-nc-ts"] = TESTDATA["raven-mohyse-nc-ts"]

TESTDATA['ostrich-hmets'] = TD / 'ostrich-hmets'
TESTDATA['ostrich-hmets-rv'] = \
tuple(TESTDATA['ostrich-hmets'].glob("*.rv?")) + tuple(TESTDATA['ostrich-hmets'].glob('*.t??'))
TESTDATA['ostrich-hmets-nc-ts'] = TESTDATA['raven-hmets-nc-ts']
TESTDATA["ostrich-hmets"] = TD / "ostrich-hmets"
TESTDATA["ostrich-hmets-rv"] = tuple(TESTDATA["ostrich-hmets"].glob("*.rv?")) + tuple(
TESTDATA["ostrich-hmets"].glob("*.t??")
)
TESTDATA["ostrich-hmets-nc-ts"] = TESTDATA["raven-hmets-nc-ts"]

TESTDATA['ostrich-hbv-ec'] = TD / 'ostrich-hbv-ec'
TESTDATA['ostrich-hbv-ec-rv'] = \
tuple(TESTDATA['ostrich-hbv-ec'].glob("*.rv?")) + tuple(TESTDATA['ostrich-hbv-ec'].glob('*.t??'))
TESTDATA['ostrich-hbv-ec-nc-ts'] = TESTDATA['raven-hbv-ec-nc-ts']
TESTDATA["ostrich-hbv-ec"] = TD / "ostrich-hbv-ec"
TESTDATA["ostrich-hbv-ec-rv"] = tuple(TESTDATA["ostrich-hbv-ec"].glob("*.rv?")) + tuple(
TESTDATA["ostrich-hbv-ec"].glob("*.t??")
)
TESTDATA["ostrich-hbv-ec-nc-ts"] = TESTDATA["raven-hbv-ec-nc-ts"]

TESTDATA['donnees_quebec_mrc_poly'] = TD / 'donneesqc_mrc_poly' / 'donnees_quebec_mrc_polygones.gml'
TESTDATA['watershed_vector'] = TD / 'watershed_vector' / 'LSJ_LL.zip'
TESTDATA['mrc_subset'] = TD / 'donneesqc_mrc_poly' / 'mrc_subset.gml'
TESTDATA['melcc_water'] = TD / 'melcc_water_management' / 'zone_gestion_leau_saintlaurent.gpkg'
TESTDATA["donnees_quebec_mrc_poly"] = (
TD / "donneesqc_mrc_poly" / "donnees_quebec_mrc_polygones.gml"
)
TESTDATA["watershed_vector"] = TD / "watershed_vector" / "LSJ_LL.zip"
TESTDATA["mrc_subset"] = TD / "donneesqc_mrc_poly" / "mrc_subset.gml"
TESTDATA["melcc_water"] = (
TD / "melcc_water_management" / "zone_gestion_leau_saintlaurent.gpkg"
)

TESTDATA["cmip5_subset"] = (
TD / "cmip5" / "tas_Amon_CanESM2_rcp85_r1i1p1_200601-210012_subset.nc"
)

# TODO: Replace the following files with subsets and set originals as production data
TESTDATA['earthenv_dem_90m'] = TD / 'earthenv_dem_90m' / 'earthenv_dem90_southernQuebec.tiff'
TESTDATA['hydrobasins_lake_na_lev12'] = TD / 'usgs_hydrobasins' / 'hybas_lake_na_lev12_v1c.zip'
TESTDATA['simfile_single'] = TD / 'hydro_simulations' / 'raven-gr4j-cemaneige-sim_hmets-0_Hydrographs.nc'
TESTDATA['input2d']=TD / 'input2d' / 'input2d.nc'
TESTDATA["earthenv_dem_90m"] = (
TD / "earthenv_dem_90m" / "earthenv_dem90_southernQuebec.tiff"
)
TESTDATA["hydrobasins_lake_na_lev12"] = (
TD / "usgs_hydrobasins" / "hybas_lake_na_lev12_v1c.zip"
)
TESTDATA["simfile_single"] = (
TD / "hydro_simulations" / "raven-gr4j-cemaneige-sim_hmets-0_Hydrographs.nc"
)
TESTDATA["input2d"] = TD / "input2d" / "input2d.nc"
Loading

0 comments on commit e5334d5

Please sign in to comment.