diff --git a/CHANGES.md b/CHANGES.md index 5c1f85208..9bd603e98 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -112,7 +112,7 @@ The fix is to force `requests` to use the OS copy of the root cert bundle. Fix for this error: - ``` + ```console $ docker exec proxy python -c "import requests; requests.request('GET', 'https://lvupavicsmaster.ouranos.ca/geoserver')" Traceback (most recent call last): File "", line 1, in @@ -128,13 +128,13 @@ ``` Default SSL root cert bundle of `requests`: - ``` + ```console $ docker exec proxy python -c "import requests; print requests.certs.where()" /usr/local/lib/python2.7/dist-packages/requests/cacert.pem ``` Confirm the fix works: - ``` + ```console $ docker exec -it proxy bash root@37ed3a2a03ae:/opt/local/src/CanarieAPI/canarieapi# REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt python -c "import requests; requests.request('GET', 'https://lvupavicsmaster.ouranos.ca/geoserver')" root@37ed3a2a03ae:/opt/local/src/CanarieAPI/canarieapi# @@ -719,7 +719,7 @@ We have 4 Java component but only 1 is vulnerable: Thredds: **After fix**: - ``` + ```console $ docker run -it --rm unidata/thredds-docker:4.6.18 bash root@f65aadd2955c:/usr/local/tomcat# find -iname '**log4j**' ./webapps/thredds/WEB-INF/classes/log4j2.xml @@ -730,7 +730,7 @@ ``` **Before fix (unidata/thredds-docker:4.6.15)**: - ``` + ```console $ docker exec -it thredds find / -iname '**log4j**' find: ‘/proc/1/map_files’: Operation not permitted find: ‘/proc/12/map_files’: Operation not permitted @@ -745,7 +745,7 @@ **Other components (ncwms2, geoserver, solr) have log4j older than version 2.0 so supposedly not affected**: - ``` + ```console $ docker exec -it ncwms2 find / -iname '**log4j**' /opt/conda/envs/birdhouse/opt/apache-tomcat/webapps/ncWMS2/WEB-INF/classes/log4j.properties /opt/conda/envs/birdhouse/opt/apache-tomcat/webapps/ncWMS2/WEB-INF/lib/log4j-1.2.17.jar @@ -1332,7 +1332,7 @@ Deployed to https://medus.ouranos.ca/jupyter/ for acceptance testing. database updates if such users exist on your server instance. To look for possible duplicates, the following command can be used. Duplicate entries must be updated or removed such that only unique emails are present. - ```shell + ```console echo "select email,user_name from users" | \ docker exec -i postgres-magpie psql -U $POSTGRES_MAGPIE_USERNAME magpiedb | \ sort > /tmp/magpie_users.txt diff --git a/Makefile b/Makefile index c730b029b..5cb4bad78 100644 --- a/Makefile +++ b/Makefile @@ -211,6 +211,69 @@ bump-check: ## Verifies that required bumpversion files are found version: ## Display project version @-$(MSG_I) "$(APP_NAME) version: $(APP_VERSION)" +## --- Documentation targets --- ## + +define BROWSER_PYSCRIPT +import os, webbrowser, sys +try: + from urllib import pathname2url +except: + from urllib.request import pathname2url + +webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) +endef +export BROWSER_PYSCRIPT +override BROWSER_DISPLAY := python -c "$$BROWSER_PYSCRIPT" + +override DOC_ROOT := docs +override DOC_INDEX := $(DOC_ROOT)/_build/html/index.html +override DOC_XARGS ?= ## additional arguments for Sphinx build +override DOC_XARGS := $(call clean_opt,$(DOC_XARGS)) +override DOC_DEBUG ?= ## set debug logging for Sphinx build +override DOC_DEBUG := $(call clean_opt,$(DOC_DEBUG)) +ifneq ($(DOC_DEBUG),) + override DOC_XARGS := $(DOC_XARGS) -vv +endif + +$(DOC_INDEX): + @-$(MSG_I) "Building docs..." 
+ @$(SHELL) -c '$(CONDA_CMD) "$(MAKE)" -C "$(DOC_ROOT)" SPHINXOPTS="$(DOC_XARGS)" html;' + @-$(MSG_I) "Documentation available: [file://$(DOC_INDEX)]" + +.PHONY: _force_docs +_force_docs: + @-rm -f "$(DOC_INDEX)" + +# rm without quotes important below to allow regex +.PHONY: docs-clean +docs-clean: ## remove doc artifacts + @-$(MSG_I) "Cleaning doc artifacts..." + @-find "$(DOC_ROOT)/" -type f -name "$(APP_NAME)*.rst" -delete + @-rm -f "$(DOC_ROOT)/modules.rst" + @-rm -rf "$(DOC_ROOT)/_build" + +.PHONY: docs-linkcheck +docs-linkcheck: docs ## run check of external links in documentation for integrity + @-$(MSG_I) "Running link checks on docs..." + @$(SHELL) -c '$(CONDA_CMD) $(MAKE) -C "$(DOC_ROOT)" SPHINXOPTS="$(DOC_XARGS)" linkcheck' + +.PHONY: docs-install +docs-install: ## install package requirements for documentation generation + @$(SHELL) -c '$(CONDA_CMD) pip install $(PIP_XARGS) -r "$(DOC_ROOT)/requirements.txt"' + @-$(MSG_I) "Successfully installed docs requirements." + +.PHONY: docs-only +docs-only: _force_docs $(DOC_INDEX) ## generate documentation without requirements installation or cleanup + +# NOTE: we need almost all base dependencies because magpie package needs to be parsed to generate OpenAPI +.PHONY: docs +docs: docs-install docs-clean docs-only ## generate Sphinx HTML documentation + +.PHONY: docs-show +docs-show: $(DOC_INDEX) ## display HTML webpage of generated documentation (build docs if missing) + @-test -f "$(DOC_INDEX)" || $(MAKE) -C "$(APP_ROOT)" $(DOC_ROOT) + $(BROWSER_DISPLAY) "$(DOC_INDEX)" + ### Execution Targets ### SCRIPT ?= birdhouse/pavics-compose.sh ## Script to run the stack diff --git a/README.rst b/README.rst index af845457d..9eb5d5fdd 100644 --- a/README.rst +++ b/README.rst @@ -36,6 +36,13 @@ Power Analytics and Visualization for Climate Science - Powered by Birdhouse and For GitHub navigation, see the following README pages: -* `README for general deployment `_ -* `README for extra core components `_ -* `README for optional components `_ +* |readme_general|_ +* |readme_components|_ +* |readme_optional_components|_ + +.. |readme_general| replace:: README for general deployment +.. _readme_general: birdhouse/README.rst +.. |readme_components| replace:: README for extra core components +.. _readme_components: birdhouse/components/README.rst +.. |readme_optional_components| replace:: README for optional components +.. _readme_optional_components: birdhouse/optional-components/README.rst diff --git a/birdhouse/README.rst b/birdhouse/README.rst index 553cbf2c0..d322649c8 100644 --- a/birdhouse/README.rst +++ b/birdhouse/README.rst @@ -17,22 +17,29 @@ Requirements: * Install latest docker-ce and docker-compose for the chosen distro (not the version from the distro). -To run ``docker-compose`` for PAVICS, the `pavics-compose.sh `_ (:download:`download `) wrapper script must be used. -This script will source the ``env.local`` file, apply the appropriate variable substitutions on all the configuration files -".template", and run ``docker-compose`` with all the command line arguments given to `pavics-compose.sh `_ (:download:`download `). -See `env.local.example `_ (:download:`download `) for more details on what can go into the ``env.local`` file. +To run ``docker-compose`` for PAVICS, the `pavics-compose.sh `_ +(:download:`download `) wrapper script must be used. 
+This script will source the ``env.local`` file, apply the appropriate variable substitutions +on all the configuration files ".template", and run ``docker-compose`` with all the command +line arguments given to `pavics-compose.sh `_ (:download:`download `). +See `env.local.example `_ (:download:`download `) for more details on +what can go into the ``env.local`` file. -If the file `env.local` is somewhere else, symlink it here, next to `docker-compose.yml `_ (:download:`download `) because many scripts assume this location. +If the file `env.local` is somewhere else, symlink it here, next to `docker-compose.yml `_ +(:download:`download `) because many scripts assume this location. To follow infrastructure-as-code, it is encouraged to source control the above `env.local` file and any override needed to customized this PAVICS deployment -for your organization. For an example of possible override, see how the `emu service `_ (:download:`download `) -(`README `_) can be optionally added to the deployment via the `override mechanism `_. -Ouranos specific override can be found in this `birdhouse-deploy-ouranos `_ repo. +for your organization. For an example of possible override, +see how the `emu service `_ +(:download:`download `) +(`README `_) can be optionally added to the +deployment via the `override mechanism `_. Ouranos specific override +can be found in this `birdhouse-deploy-ouranos `_ repo. Suggested deployment layout: -.. code-block:: +.. code-block:: text ├── birdhouse-deploy/ # this repo │   ├── birdhouse/ @@ -47,7 +54,8 @@ Suggested deployment layout: The automatic deployment is able to handle multiple repos, so will trigger if this repo or your private-personalized-config repo changes, giving you automated continuous deployment. See the continuous deployment setup section -below and the variable ``AUTODEPLOY_EXTRA_REPOS`` in `env.local.example `_ (:download:`download `). +below and the variable ``AUTODEPLOY_EXTRA_REPOS`` in `env.local.example `_ +(:download:`download `). The automatic deployment of the PAVICS platform, of the Jupyter tutorial notebooks and of the automatic deployment mechanism itself can all be @@ -62,15 +70,17 @@ To launch all the containers, use the following command: ./pavics-compose.sh up -d -If you get a ``'No applicable error code, please check error log'`` error from the WPS processes, please make sure that the WPS databases exists in the -postgres instance. See `create-wps-pgsql-databases.sh `_ (:download:`download `). +If you get a ``'No applicable error code, please check error log'`` error from the WPS processes, please +make sure that the WPS databases exists in the postgres instance. +See `create-wps-pgsql-databases.sh `_ +(:download:`download `). Note ---- * All WPS requests should be completed within ``proxy_read_timeout`` of the - Nginx proxy, see `nginx.conf`_ (:download:`download `). + Nginx proxy, see `nginx.conf`_ (:download:`download `). Any WPS requests that will take longer should use the async mode. Default value ``PROXY_READ_TIMEOUT_VALUE`` in `default.env`_ (:download:`download `). @@ -84,12 +94,12 @@ Manual steps post deployment Create public demo user in Magpie for JupyterHub login ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Use `create-magpie-users `_ (:download:`download `) or follow manual -instructions below. +Use `create-magpie-users `_ +(:download:`download `) or follow manual instructions below. ``config.yml`` file if using ``create-magpie-users``: -.. code-block:: +.. 
code-block:: yaml users: - username: < value of JUPYTER_DEMO_USER in `env.local` > @@ -121,7 +131,8 @@ https://github.com/Ouranosinc/PAVICS-e2e-workflow-tests with pre-configured Jenkins at https://github.com/Ouranosinc/jenkins-config. For that test suite to pass, run the script -`scripts/bootstrap-instance-for-testsuite `_ (:download:`download `) +`scripts/bootstrap-instance-for-testsuite `_ +(:download:`download `) to prepare your new instance. Further documentation inside the script. Optional components @@ -148,10 +159,12 @@ environment for testing or to have multiple flavors of PAVICS with slightly different combinations of the parts all running simultaneously in their respective VM, allowing us to see the differences in behavior. -See `vagrant_variables.yml.example `_ (:download:`download `) for what's +See `vagrant_variables.yml.example `_ +(:download:`download `) for what's configurable with Vagrant. -If using Centos box, follow `disk-resize `_ (:download:`download `) after +If using Centos box, follow `disk-resize `_ +(:download:`download `) after first ``vagrant up`` failure due to disk full. Then ``vagrant reload && vagrant provision`` to continue. If using Ubuntu box, no manual steps required, everything just works. @@ -161,7 +174,7 @@ platform and the extension pack, and `Vagrant + + + + + +
+        <p>You should have been redirected.</p>
+        <p>If not, click <a href="{to_path}">here</a> to continue.</p>
+    </body>
+</html>
+"""
+TEMPLATE_CODE = """
+<html>
+    <body>
+        <pre>
+{code}
+        </pre>
+    </body>
+</html>
+ + +""" + + +def generate_redirects(app): + """ + Generates HTML page redirections between different file path locations. + + This extension allows to fake a link to the HTML page actually generated by an RST file which contains a literal + *reference* to another RST file. + + For example, suppose we have two RST (ie: ``file1.rst`` and ``file2.rst`` ). Within ``file1.rst`` we got: + + .. code-block:: rst + + see `file2`_ details + + .. _file2: ./docs/file2.rst + + Normally, the generated HTML will have an hyperlink named ``file2`` with a *literal* reference to ``file2.rst``. + This will result in HTTP Not Found (404) as it doesn't correspond to the generated ``file2.html``. Normally, this + can be fixed using the following directive: + + .. code-block:: rst + + :doc:`./docs/file2.rst` + + But then, rendering on GitHub becomes literally this string with an hyperlink reference that doesn't lead to the + desired ``file2.rst`` (for quick documentation locally within the GitHub repository). + + With this extension, if configuration is specified as follows, the HTML link is resolved by redirecting the literal + ``./file2.rst`` reference in the output HTML build directory to the desired ``file2.html`` generated (as required). + + .. code-block:: python + + doc_redirect_map = { + # mapping of: => + "file2.rst": "file2.rst" + } + + In other words, when ``file1.rst`` is viewed from GitHub, the literal relative path is used an the file is found, + while on ``Readthedocs`` (or when viewing locally in a browser), ``file1.html`` will contain a raw relative path to + where the pointed ``file2.html`` *should* be using corresponding base directories. This is demonstrated below: + + .. code-block:: text + + '/docs/file1.rst' ===> '/file1.html' (ref [file2]) ---> (raw '/docs/file2.rst') + | + | + '/docs/file2.rst' ===> '/file2.html' <------------------------------ + + .. note:: + + Literal RST file references must be relative to package root in other to be rendered correctly on GitHub. + """ + + if not isinstance(app.builder, (StandaloneHTMLBuilder, CheckExternalLinksBuilder)): + ext = os.path.split(__file__)[-1].split(".")[0] + builder = type(app.builder) + LOGGER.warning("Extension '%s' is only supported by the 'html' builder [builder: %s]. 
Skipping...", + ext, builder) + return + if not isinstance(app.config.doc_redirect_map, dict) and len(app.config.doc_redirect_map): + LOGGER.info("Could not find doc redirect map") + return + LOGGER.info("Doc Redirect Mapping:\n%s", json.dumps(app.config.doc_redirect_map, indent=2)) + in_suffix = None + if isinstance(app.config.source_suffix, list): + in_suffix = app.config.source_suffix[0] + elif isinstance(app.config.source_suffix, dict): + in_suffix = list(app.config.source_suffix.items())[0][0] + elif app.config.source_suffix: + in_suffix = app.config.source_suffix + if not in_suffix: + in_suffix = ".rst" + + for from_path, to_path in app.config.doc_redirect_map.items(): + to_html = to_path + if isinstance(to_path, dict): + to_path, to_html = to_path.get("path"), to_path.get("html") + if not to_path or not from_path: + LOGGER.debug("Skipping redirect [%s] -> [%s] (missing one parameter)", from_path, to_path) + continue + LOGGER.debug("Checking redirect [%s] -> [%s] (HTML=%s)", from_path, to_path, to_html) + + src_path = from_path + if not src_path.endswith(in_suffix): + rst_path = src_path + in_suffix + else: + rst_path = src_path + to_html_path = to_path.replace(in_suffix, ".html") + in_html_path = from_path.replace(in_suffix, ".html") + # to_path_prefix = "..%s" % os.path.sep * (len(in_html_path.split(os.path.sep)) - 1) + # to_path = to_path_prefix + to_path.replace(in_suffix, ".html") + if not to_html_path.endswith(".html"): + to_html_path = to_path + ".html" + if to_html_path.startswith("../"): + to_html_path = to_html_path.replace("../", "") + + redirected_from_file = os.path.join(app.config.doc_project_root, from_path) + redirected_src_file = os.path.join(app.builder.outdir, src_path) + redirected_rst_file = os.path.join(app.builder.outdir, rst_path) if rst_path else None + redirected_html_file = os.path.join(app.builder.outdir, to_html_path) + redirected_directory = os.path.dirname(redirected_html_file) + if not os.path.exists(redirected_directory): + os.makedirs(redirected_directory) + LOGGER.debug("Redirect FROM [%s] (%s)", from_path, redirected_from_file) + LOGGER.debug("Redirect SRC [%s] (%s)", src_path, redirected_src_file) + LOGGER.debug("Redirect RST [%s] (%s)", rst_path, redirected_rst_file) + LOGGER.debug("Redirect TO HTML [%s] (%s)", to_html_path, redirected_html_file) + LOGGER.debug("Redirect IN HTML [%s]", in_html_path) + + # create unless it already exists (eg: same directory level, config map is redundant) + if not os.path.exists(redirected_html_file): + # if using a direct call with .html extension, it will still work as if calling the .rst + with open(redirected_html_file, mode="w", encoding="utf-8") as f: + if os.path.splitext(from_path)[-1] in app.config.doc_redirect_code: + LOGGER.info("Redirect (code) [%s] -> [%s]", from_path, redirected_html_file) + from_file = os.path.join(app.builder.srcdir, from_path) + with open(from_file, mode="r", encoding="utf-8") as r: + f.write(TEMPLATE_CODE.format(code=r.read())) + else: + LOGGER.info("Redirect (template) [%s] -> [%s]", from_path, redirected_html_file) + f.write(TEMPLATE_REDIRECT.format(to_path=to_path)) + if not os.path.exists(redirected_src_file): + # point to the source that would be reach by clicking the literal reference + # by faking an .html file redirect + LOGGER.info("Redirect (symlink) [%s] -> [%s]", redirected_src_file, redirected_html_file) + os.symlink(redirected_html_file, redirected_src_file) + + +def setup(app): + app.add_config_value("doc_project_root", "", "env", str) + 
app.add_config_value("doc_redirect_code", [], "env", list) + app.add_config_value("doc_redirect_map", {}, "env", dict) + app.connect("builder-inited", generate_redirects) diff --git a/docs/requirements.txt b/docs/requirements.txt index 1ca00f1cc..e722b200d 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,2 +1,3 @@ # extra requirements specifically for documentation purposes -sphinx-mdinclude \ No newline at end of file +sphinx-mdinclude +docutils<0.19 # see https://github.com/amyreese/sphinx-mdinclude/issues/8 diff --git a/docs/source/conf.py b/docs/source/conf.py index 0ba816fbf..d7eb67996 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -19,8 +19,14 @@ # import os import sys -sys.path.insert(0, os.path.abspath('.')) -sys.path.insert(0, os.path.abspath('../../.')) + +DOC_SRC_ROOT = os.path.abspath('.') +DOC_DIR_ROOT = os.path.abspath('..') +PROJECT_ROOT = os.path.abspath('../../.') +sys.path.insert(0, DOC_SRC_ROOT) +sys.path.insert(0, DOC_DIR_ROOT) +sys.path.insert(0, PROJECT_ROOT) +sys.path.append(os.path.abspath(os.path.join(DOC_DIR_ROOT, "_ext"))) # -- General configuration ------------------------------------------------ @@ -42,8 +48,47 @@ 'sphinx.ext.viewcode', 'sphinx.ext.githubpages', 'sphinx_mdinclude', + 'doc_redirect', +] + +md_parse_relative_links = True + +# references to RST files in 'docs' dir redirect to corresponding HTML +# references to RST files in repo root (README/CHANGES) redirect to their equivalent HTML in 'docs' dir +DOC_REDIRECT_DIRS = [ + os.path.join(DOC_SRC_ROOT, "birdhouse"), +] + +# files that will be copied over from root repo or linked birdhouse directory +# under build directory (not copied over with same nested dir structure, directly under '_build/html') +html_extra_path = [ + '../../.bumpversion.cfg', + 'birdhouse/env.local.example', + 'birdhouse/pavics-compose.sh', + 'birdhouse/docker-compose.yml', + # 'birdhouse/config/proxy/nginx.conf.template', # FIXME: doesn't work, .template.html auto-generated with it ] +doc_project_root = PROJECT_ROOT + +doc_redirect_code = [".sh", ".yml"] # those extensions will generate an embedded code instead of redirect + +# mapping of missing file link (in generated HTML) -> some new file or link +# converted MD files (mdinclude) after HTML conversion with references to RST (or other) keep the original links +# redirect the expected (missing) file locations to a generated HTML or symlink simulating the original link +doc_redirect_map = { + "birdhouse/README.rst": "birdhouse/README.html", + "birdhouse/components/README.rst": "birdhouse/components/README.html", + "birdhouse/optional-components/README.rst": "birdhouse/optional-components/README.html", + "CHANGES.md": "changes.html", +} +# edge case for '.template' files, 'html_extra_path' does an auto-copy with '.html' +# generate the links that would be expected without it +doc_redirect_map.update({ + file: file.replace("birdhouse/", "") if os.path.splitext(file)[-1] not in doc_redirect_code else file + for file in html_extra_path if "birdhouse/" in file +}) + # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -78,7 +123,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -165,19 +210,11 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ['../_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -# -# html_extra_path = [] -html_extra_path = [ - 'birdhouse/README.rst', - 'birdhouse/env.local.example', - 'birdhouse/pavics-compose.sh', - 'birdhouse/docker-compose.yml', -] # If not None, a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. diff --git a/docs/source/index.rst b/docs/source/index.rst index 7dd58c9c2..5e4680afc 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -34,4 +34,3 @@ Indices and tables * :ref:`modindex` * :ref:`search` -