From aa6c3a28826b24e47e3db659068a6f921af4dd59 Mon Sep 17 00:00:00 2001
From: Nicola Jordan
Date: Mon, 6 May 2019 09:04:58 +0200
Subject: [PATCH 01/14] updated wambachers boundaries url
---
osmaxx/core/templates/pages/about_us.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/osmaxx/core/templates/pages/about_us.html b/osmaxx/core/templates/pages/about_us.html
index 6f3ab37dc..4ed15be7d 100644
--- a/osmaxx/core/templates/pages/about_us.html
+++ b/osmaxx/core/templates/pages/about_us.html
@@ -41,7 +41,7 @@ Additional data sources
Coastlines, Water-polygons and Landmasses: These are from OSM derived data
provided by openstreetmapdata.com.
Country boundaries: We are using the country boundaries that are based on
- OSM-Data, cleaned and maintained by Wambachers-OSM.website.
+ OSM-Data, cleaned and maintained by Wambachers-OSM.website.
Found a bug?
From 1e9212bad576dab26a414d46541a48b7e3f10bd6 Mon Sep 17 00:00:00 2001
From: Nicola Jordan
Date: Mon, 6 May 2019 09:06:42 +0200
Subject: [PATCH 02/14] specified older version of QGIS support only
---
osmaxx/core/templates/pages/downloads.html | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/osmaxx/core/templates/pages/downloads.html b/osmaxx/core/templates/pages/downloads.html
index 0882afbc3..0977468f5 100644
--- a/osmaxx/core/templates/pages/downloads.html
+++ b/osmaxx/core/templates/pages/downloads.html
@@ -64,9 +64,9 @@ Non-GIS formats
Styles
- QGIS
+ QGIS 2
- A QGIS project file is being delivered with every export, matching the exported data containing the OMSaxx
+ A QGIS 2 project file is being delivered with every export, matching the exported data containing the OSMaxx
default style. The file should be opened in the folder where it can be found: <unzipped_folder>/symbology/QGIS/
.
ESRI ArcMap
From 1a32ade17b4a65b1f07f2bf1d1a9f33964847175 Mon Sep 17 00:00:00 2001
From: Nicola Jordan
Date: Mon, 6 May 2019 09:07:17 +0200
Subject: [PATCH 03/14] updated to 2019
---
LICENSE | 2 +-
osmaxx/core/templates/osmaxx/base.html | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/LICENSE b/LICENSE
index 05c2ed105..efea6a7e8 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
The MIT License (MIT)
-Copyright (c) 2015-2017 HSR Hochschule für Technik Rapperswil
+Copyright (c) 2015-2019 HSR Hochschule für Technik Rapperswil
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/osmaxx/core/templates/osmaxx/base.html b/osmaxx/core/templates/osmaxx/base.html
index a0251ade7..2bf863792 100644
--- a/osmaxx/core/templates/osmaxx/base.html
+++ b/osmaxx/core/templates/osmaxx/base.html
@@ -60,7 +60,7 @@ Main block
From d7744c109de0077ca3dd74737c3a67055223baf5 Mon Sep 17 00:00:00 2001
From: Nicola Jordan
Date: Mon, 27 May 2019 16:28:59 +0200
Subject: [PATCH 04/14] updating all possible things; integrated gdal 3.0;
updated small portions of the website
---
.dockerignore | 2 +
.travis.yml | 3 +-
Dockerfile.frontend | 5 +-
Dockerfile.mediator | 10 +-
Dockerfile.nginx | 1 +
Dockerfile.worker | 139 +++++--------
Makefile | 6 +-
README.md | 2 +-
activate_local_development | 2 +-
conversion_service/config/settings/common.py | 4 +-
conversion_service/config/settings/local.py | 6 +-
.../config/settings/production.py | 70 ++-----
conversion_service/config/settings/worker.py | 69 +------
conversion_service/config/wsgi.py | 3 -
docker-compose-dev.yml | 6 +-
.../project-development-environment.md | 4 +-
osm_pbf_updater/Dockerfile | 4 +-
osm_pbf_updater/pbf_updater.py | 16 +-
osmaxx/contrib/auth/__init__.py | 2 +
.../constants/coordinate_reference_system.py | 141 +------------
.../converters/converter_garmin/garmin.py | 6 +-
.../sql/functions/0030_transliterate.sql | 4 +-
.../converter_gis/helper/postgres_wrapper.py | 2 +-
osmaxx/conversion/converters/utils.py | 8 +-
.../management/commands/result_harvester.py | 38 ++--
.../0019_one_export_per_file_format.py | 4 +-
osmaxx/excerptexport/rest_api/urls.py | 2 -
osmaxx/excerptexport/rest_api/views.py | 4 +-
.../scripts/maps/draw_controls.js | 1 -
.../scripts/maps/excerpt_viewer.js | 4 -
.../templates/excerptexport/base.html | 1 -
osmaxx/excerptexport/views.py | 12 +-
osmaxx/job_progress/middleware.py | 6 +-
osmaxx/profile/admin.py | 2 +-
pytest.ini | 2 +-
requirements-all.txt | 192 ++++++++----------
requirements.in | 20 +-
requirements.txt | 190 ++++++++---------
runtests.py | 2 -
setup.py | 4 +-
tests/clipping_area/to_polyfile_test.py | 8 +-
tests/conftest.py | 53 +++--
tests/conversion/conftest.py | 2 +-
tests/conversion/converters/bootstrap_test.py | 25 ++-
.../converters/inside_worker_test/conftest.py | 9 +-
.../commands/result_harvester_test.py | 27 +--
tests/excerptexport/conftest.py | 13 +-
tests/profile/test_profile_view.py | 1 +
tests/selenium_tests/conftest.py | 2 +-
tox.ini | 4 +-
web_frontend/config/settings/common.py | 4 +-
web_frontend/config/settings/local.py | 6 +-
web_frontend/config/settings/production.py | 67 ++----
web_frontend/config/wsgi.py | 4 -
54 files changed, 446 insertions(+), 778 deletions(-)
diff --git a/.dockerignore b/.dockerignore
index 088c39ece..200c1b29b 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -5,3 +5,5 @@ rq-dashboard/
.*
!.git
+Dockerfile.*
+docker-compose*.yml
diff --git a/.travis.yml b/.travis.yml
index f818e11cf..00bd8ec60 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,8 +1,7 @@
language: python
python:
- - "3.4"
- - "3.5"
- "3.6"
+ - "3.7"
cache: pip
diff --git a/Dockerfile.frontend b/Dockerfile.frontend
index 058f50c12..9ecbdd7f2 100755
--- a/Dockerfile.frontend
+++ b/Dockerfile.frontend
@@ -1,5 +1,6 @@
-# this gdal image comes with support for FileGDB and has python3.4 already installed
-FROM geometalab/gdal-debian:latest
+# this gdal image comes with support for FileGDB and has python3.6 already installed
+# is derived from ubuntu
+FROM geometalab/gdal-docker:v3.0.0
USER root
diff --git a/Dockerfile.mediator b/Dockerfile.mediator
index baa15ca1b..95f68aaf2 100644
--- a/Dockerfile.mediator
+++ b/Dockerfile.mediator
@@ -1,5 +1,6 @@
-# this gdal image comes with support for FileGDB and has python3.4 already installed
-FROM geometalab/gdal-debian:latest
+# this gdal image comes with support for FileGDB and has python3.6 already installed
+# is derived from ubuntu
+FROM geometalab/gdal-docker:v3.0.0
USER root
@@ -8,9 +9,10 @@ ENV DJANGO_OSMAXX_CONVERSION_SERVICE_USERNAME=default_user DJANGO_OSMAXX_CONVERS
ENV NUM_WORKERS=5 DATABASE_HOST=mediatordatabase DATABASE_PORT=5432 APP_PORT=8901 APP_HOST=0.0.0.0
MAINTAINER HSR Geometalab
+ENV DEBIAN_FRONTEND=noninteractive
-RUN apt-get clean && DEBIAN_FRONTEND=noninteractive apt-get update && \
- DEBIAN_FRONTEND=noninteractive apt-get install -y\
+RUN apt-get update && \
+ apt-get install -y \
\
libgeos-dev \
libgeos++-dev \
diff --git a/Dockerfile.nginx b/Dockerfile.nginx
index fc23fd1cd..a8561a29f 100644
--- a/Dockerfile.nginx
+++ b/Dockerfile.nginx
@@ -1,4 +1,5 @@
FROM nginx:alpine
+
COPY ./docker_entrypoint/nginx/default.conf.template /etc/nginx/conf.d/default.conf.template
CMD DOMAIN_NAMES=$(echo $VIRTUAL_HOST | sed 's/,/ /g') envsubst '$DOMAIN_NAMES' < /etc/nginx/conf.d/default.conf.template > /etc/nginx/conf.d/default.conf \
&& cat /etc/nginx/conf.d/default.conf \
diff --git a/Dockerfile.worker b/Dockerfile.worker
index 69fb5b346..c8304d1bb 100644
--- a/Dockerfile.worker
+++ b/Dockerfile.worker
@@ -1,45 +1,33 @@
-# this gdal image comes with support for FileGDB and has python3.4 already installed
-FROM geometalab/gdal-debian:latest
+# this gdal image comes with support for FileGDB and has python3.6 already installed
+# is derived from ubuntu
+FROM geometalab/gdal-docker:v3.0.0
USER root
-ENV PYTHONUNBUFFERED=non-empty-string PYTHONIOENCODING=utf-8 LC_ALL=C.UTF-8 LANG=C.UTF-8
+ENV PYTHONUNBUFFERED=non-empty-string PYTHONIOENCODING=utf-8 LC_ALL=C.UTF-8 LANG=C.UTF-8 DEBIAN_FRONTEND=noninteractive
# make the "en_US.UTF-8" locale so postgres will be utf-8 enabled by default
-RUN apt-get update && apt-get install -y apt-utils locales && rm -rf /var/lib/apt/lists/* \
+RUN apt-get update && apt-get install -y apt-utils locales gpg && rm -rf /var/lib/apt/lists/* \
&& localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 \
- && apt-get clean \
&& rm -rf /var/lib/apt/lists/*
################## SETUP POSTGIS DATABASE with UTF8 support #############
# explicitly set user/group IDs
RUN groupadd -r postgres --gid=999 && useradd -r -g postgres --uid=999 postgres
-# grab gosu for easy step-down from root
-RUN gpg --keyserver ha.pool.sks-keyservers.net --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4
-RUN apt-get clean && apt-get update && apt-get install -y --no-install-recommends ca-certificates wget && rm -rf /var/lib/apt/lists/* \
- && wget -O /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/1.2/gosu-$(dpkg --print-architecture)" \
- && wget -O /usr/local/bin/gosu.asc "https://github.com/tianon/gosu/releases/download/1.2/gosu-$(dpkg --print-architecture).asc" \
- && gpg --verify /usr/local/bin/gosu.asc \
- && rm /usr/local/bin/gosu.asc \
- && chmod +x /usr/local/bin/gosu \
- && apt-get purge -y --auto-remove ca-certificates wget \
- && apt-get clean \
- && rm -rf /var/lib/apt/lists/*
-
RUN mkdir /docker-entrypoint-initdb.d
-RUN apt-key adv --keyserver ha.pool.sks-keyservers.net --recv-keys B97B0AFCAA1A47F044F244A07FCC7D46ACCC4CF8
+RUN APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn \
+ apt-key adv --keyserver ha.pool.sks-keyservers.net --recv-keys B97B0AFCAA1A47F044F244A07FCC7D46ACCC4CF8
-ENV PG_MAJOR 10
-ENV POSTGIS_MAJOR 2.4
+ENV PG_MAJOR 11
+ENV POSTGIS_MAJOR 2.5
-RUN echo 'deb http://apt.postgresql.org/pub/repos/apt/ jessie-pgdg main' $PG_MAJOR > /etc/apt/sources.list.d/pgdg.list
-
-RUN apt-get update \
- && DEBIAN_FRONTEND=noninteractive apt-get install -y postgresql-common \
+RUN echo 'deb http://apt.postgresql.org/pub/repos/apt/ bionic-pgdg main' $PG_MAJOR > /etc/apt/sources.list.d/pgdg.list \
+ && apt-get update \
+ && apt-get install -y postgresql-common \
&& sed -ri 's/#(create_main_cluster) .*$/\1 = false/' /etc/postgresql-common/createcluster.conf \
- && DEBIAN_FRONTEND=noninteractive apt-get install -y\
+ && apt-get install -y \
postgresql-${PG_MAJOR} \
postgresql-contrib-${PG_MAJOR} \
postgresql-${PG_MAJOR}-postgis-${POSTGIS_MAJOR} \
@@ -47,97 +35,76 @@ RUN apt-get update \
postgresql-server-dev-${PG_MAJOR} \
postgresql-contrib-${PG_MAJOR} \
&& apt-get clean \
- && rm -rf /var/lib/apt/lists/*
-
-RUN mkdir -p /var/run/postgresql && chown -R 999:999 /var/run/postgresql
+ && rm -rf /var/lib/apt/lists/* \
+ && mkdir -p /var/run/postgresql && chown -R 999:999 /var/run/postgresql
ENV PATH /usr/lib/postgresql/$PG_MAJOR/bin:$PATH
ENV PGDATA /var/lib/postgresql/data
-RUN mkdir -p $PGDATA && chown -R 999:999 /var/lib/postgresql
-
-RUN pg_createcluster --locale=en_US.UTF-8 -d $PGDATA ${PG_MAJOR} main
+RUN mkdir -p $PGDATA && chown -R 999:999 /var/lib/postgresql \
+ && pg_createcluster --locale=en_US.UTF-8 -d $PGDATA ${PG_MAJOR} main
################## END SETUP POSTGIS DATABASE with UTF8 support #############
-RUN apt-get clean && apt-get update && \
- DEBIAN_FRONTEND=noninteractive apt-get install -y\
-\
- make \
- cmake \
- g++ \
- git-core\
- subversion\
- build-essential\
- libxml2-dev\
- libgeos-dev \
- libgeos++-dev\
- libpq-dev\
- libboost-dev\
- libboost-system-dev\
- libboost-filesystem-dev\
- libboost-thread-dev\
- libexpat1-dev \
- zlib1g-dev \
- libbz2-dev\
- libproj-dev\
- libtool\
- automake \
- libprotobuf-c0-dev\
- protobuf-c-compiler\
- lua5.2 \
- liblua5.2-0 \
- liblua5.2-dev \
- liblua5.1-0 \
- zip \
- osmctools \
- wget \
- binutils \
- libgeoip1 \
+RUN apt-get update && \
+ apt-get install -y\
\
- libicu-dev \
- debhelper \
-\
- default-jre \
- libkakasi2-dev \
- pandoc \
- curl \
+ make cmake g++ libboost-dev libboost-system-dev \
+ libboost-filesystem-dev libexpat1-dev zlib1g-dev \
+ libbz2-dev libpq-dev lua5.2 liblua5.2-dev \
+ libproj-dev \
+ curl git wget \
+ libstdc++6 osmctools \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
+ENV LD_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu:/usr/lib:${LD_LIBRARY_PATH}
+RUN ldconfig
+
WORKDIR /root/osm2pgsql
# OSM2PGSQL
-ENV OSM2PGSQL_VERSION=0.92.0
+ENV OSM2PGSQL_VERSION=0.96.0 CXXFLAGS=-DACCEPT_USE_OF_DEPRECATED_PROJ_API_H=1
RUN mkdir src &&\
cd src &&\
GIT_SSL_NO_VERIFY=true git clone https://github.com/openstreetmap/osm2pgsql.git &&\
cd osm2pgsql &&\
git checkout ${OSM2PGSQL_VERSION} &&\
- mkdir build &&\
+ mkdir -p build &&\
cd build &&\
- cmake ..&&\
+ cmake .. &&\
make &&\
make install
+# a more correct/more portable alternative would be:
+# cmake .. &&\
+# echo 'cmake worked' &&\
+# cmake --build . &&\
+# echo 'also make worked' &&\
+# cmake --build . --target install
+
WORKDIR /var/data/garmin/additional_data/
# Fetch required additional data for Garmin as documented http://www.mkgmap.org.uk/download/mkgmap.html
-RUN wget -O /var/data/garmin/additional_data/bounds.zip http://osm2.pleiades.uni-wuppertal.de/bounds/latest/bounds.zip
-RUN wget -O /var/data/garmin/additional_data/sea.zip http://osm2.pleiades.uni-wuppertal.de/sea/latest/sea.zip
+RUN wget -O /var/data/garmin/additional_data/bounds.zip http://osm.thkukuk.de/data/bounds-latest.zip \
+ && wget -O /var/data/garmin/additional_data/sea.zip http://osm.thkukuk.de/data/sea-latest.zip
ENV CODE /code
WORKDIR $CODE
# Install dependencies
-ENV LIBUTF8PROCVERSION 2.0.2-1
-RUN wget -O libutf8proc-dev.deb http://ftp.ch.debian.org/debian/pool/main/u/utf8proc/libutf8proc-dev_${LIBUTF8PROCVERSION}_amd64.deb
-RUN wget -O libutf8proc1.deb http://ftp.ch.debian.org/debian/pool/main/u/utf8proc/libutf8proc2_${LIBUTF8PROCVERSION}_amd64.deb
-RUN dpkg --install libutf8proc1.deb libutf8proc-dev.deb
-RUN rm libutf8proc1.deb libutf8proc-dev.deb
-
-RUN git clone https://github.com/giggls/mapnik-german-l10n.git mapnik-german-l10n \
- && cd mapnik-german-l10n && git checkout v2.2.6 \
- && make && make install && make clean
+ENV LIBUTF8PROCVERSION 2.3.0-1
+RUN wget -O libutf8proc-dev.deb http://ftp.ch.debian.org/debian/pool/main/u/utf8proc/libutf8proc-dev_${LIBUTF8PROCVERSION}_amd64.deb \
+ && wget -O libutf8proc1.deb http://ftp.ch.debian.org/debian/pool/main/u/utf8proc/libutf8proc2_${LIBUTF8PROCVERSION}_amd64.deb \
+ && dpkg --install libutf8proc1.deb libutf8proc-dev.deb \
+ && rm libutf8proc1.deb libutf8proc-dev.deb
+
+RUN apt-get update && apt-get install -y pandoc libkakasi2-dev libicu-dev \
+ && git clone https://github.com/giggls/mapnik-german-l10n.git mapnik-german-l10n \
+ && cd mapnik-german-l10n && git checkout v2.5.1 \
+ && make && make install && make clean \
+ && apt-get purge -y pandoc \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
ENV HOME /home/py
diff --git a/Makefile b/Makefile
index 484da84e5..58800d384 100644
--- a/Makefile
+++ b/Makefile
@@ -54,11 +54,13 @@ pip-sync-all: requirements-all.txt
.PHONY: tests-quick
tests-quick: up-redis up-pg
- ./runtests.py $(PYTEST_ARGS)
+ docker build -t worker:test -f Dockerfile.worker .
+ docker run --link pg_tests:postgres --link redis-local:redis -e DJANGO_SETTINGS_MODULE= -v "$$(pwd):/code" --rm worker:test bash -c 'cp -r /code /tmp/code && cd /tmp/code/ && ./runtests.py $(PYTEST_ARGS)'
.PHONY: tests-all
tests-all: up-redis up-pg up-pg_translit
- ./runtests.py $(PYTEST_ARGS) --runslow
+ docker build -t worker:test -f Dockerfile.worker .
+ docker run --link pg_translit:translit --link pg_tests:postgres --link redis-local:redis -e PG_TRANSLIT_PORT=5432 -e PG_TRANSLIT_HOST=translit -e DJANGO_SETTINGS_MODULE= -v "$$(pwd):/code" --rm worker:test bash -c 'cp -r /code /tmp/code && cd /tmp/code/ && ./runtests.py $(PYTEST_ARGS) --runslow'
.PHONY: tox
tox: up-redis up-pg up-pg_translit
diff --git a/README.md b/README.md
index 6279cc290..34410029a 100644
--- a/README.md
+++ b/README.md
@@ -103,7 +103,7 @@ docker-compose up
Unsure which version is running?
-Go to `:8888/version/`.
+Go to `:8889/version/`.
where `` is your public IP.
diff --git a/activate_local_development b/activate_local_development
index 7993fb454..e2a35fd89 100644
--- a/activate_local_development
+++ b/activate_local_development
@@ -1,2 +1,2 @@
-alias docker-compose="PUBLIC_LOCALHOST_IP=$(ip route get 1 | awk '{print $NF;exit}') DEPLOY_VERSION=$(git describe --dirty) docker-compose -f docker-compose.yml -f docker-compose-dev.yml"
+alias docker-compose="PUBLIC_LOCALHOST_IP=$(ip route get 1 | awk '{print $(NF-2);exit}') DEPLOY_VERSION=$(git describe --dirty) docker-compose -f docker-compose.yml -f docker-compose-dev.yml"
alias deactivate_local_development="unalias docker-compose;unalias deactivate_local_development"
diff --git a/conversion_service/config/settings/common.py b/conversion_service/config/settings/common.py
index b716075f4..0fa66b3f9 100644
--- a/conversion_service/config/settings/common.py
+++ b/conversion_service/config/settings/common.py
@@ -51,14 +51,14 @@
# MIDDLEWARE CONFIGURATION
# ------------------------------------------------------------------------------
-MIDDLEWARE_CLASSES = (
+MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
-)
+]
# MIGRATIONS CONFIGURATION
# ------------------------------------------------------------------------------
diff --git a/conversion_service/config/settings/local.py b/conversion_service/config/settings/local.py
index fa29d311c..693afa01c 100644
--- a/conversion_service/config/settings/local.py
+++ b/conversion_service/config/settings/local.py
@@ -12,8 +12,8 @@
# django-debug-toolbar
# ------------------------------------------------------------------------------
-MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
-INSTALLED_APPS += ('debug_toolbar', )
+MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware', ]
+INSTALLED_APPS += ['debug_toolbar', ]
INTERNAL_IPS = env.tuple('DJANGO_INTERNAL_IPS', default=('127.0.0.1',))
@@ -26,7 +26,7 @@
# django-extensions
# ------------------------------------------------------------------------------
-INSTALLED_APPS += ('django_extensions', )
+INSTALLED_APPS += ['django_extensions', ]
# TESTING
# ------------------------------------------------------------------------------
diff --git a/conversion_service/config/settings/production.py b/conversion_service/config/settings/production.py
index 700a301a9..abf3c2500 100755
--- a/conversion_service/config/settings/production.py
+++ b/conversion_service/config/settings/production.py
@@ -4,21 +4,21 @@
'''
from .common import * # noqa
-MIDDLEWARE_CLASSES = (
+MIDDLEWARE = [
# Make sure djangosecure.middleware.SecurityMiddleware is listed first
'django.middleware.security.SecurityMiddleware',
-) + MIDDLEWARE_CLASSES
+ 'whitenoise.middleware.WhiteNoiseMiddleware',
+] + MIDDLEWARE
# No fallback values for the following settings, as we WANT an exception
# during start if any of the corresponding environment variables aren't set.
SECRET_KEY = env.str("DJANGO_SECRET_KEY")
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS")
-INSTALLED_APPS += (
+INSTALLED_APPS += [
'gunicorn',
# sentry
- 'raven.contrib.django.raven_compat',
-)
+]
# STORAGE CONFIGURATION
# ------------------------------------------------------------------------------
@@ -26,63 +26,23 @@
# ------------------------
# See: http://django-storages.readthedocs.org/en/latest/index.html
# we are prepared for this but aren't using it right now
-INSTALLED_APPS += (
+INSTALLED_APPS += [
# 'storages',
-)
+]
# Static Assets
# ------------------------
-STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
+STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# SENTRY
SENTRY_DSN = env.str('SENTRY_DSN', default=None)
-if SENTRY_DSN:
- LOGGING = {
- 'version': 1,
- 'disable_existing_loggers': True,
- 'root': {
- 'level': 'WARNING',
- 'handlers': ['sentry'],
- },
- 'formatters': {
- 'verbose': {
- 'format': '%(levelname)s %(asctime)s %(module)s '
- '%(process)d %(thread)d %(message)s'
- },
- },
- 'handlers': {
- 'sentry': {
- 'level': 'WARNING',
- 'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler'
- },
- 'console': {
- 'level': 'DEBUG',
- 'class': 'logging.StreamHandler',
- 'formatter': 'verbose'
- }
- },
- 'loggers': {
- 'django.db.backends': {
- 'level': 'ERROR',
- 'handlers': ['console', 'sentry'],
- 'propagate': False,
- },
- 'raven': {
- 'level': 'DEBUG',
- 'handlers': ['console'],
- 'propagate': False,
- },
- 'sentry.errors': {
- 'level': 'DEBUG',
- 'handlers': ['console'],
- 'propagate': False,
- },
- },
- }
+if SENTRY_DSN is not None:
+ import sentry_sdk
+ from sentry_sdk.integrations.django import DjangoIntegration
- RAVEN_CONFIG = {
- 'dsn': SENTRY_DSN,
- 'release': env.str('SENTRY_RELEASE', default=''),
- }
+ sentry_sdk.init(
+ dsn=SENTRY_DSN,
+ integrations=[DjangoIntegration()]
+ )
diff --git a/conversion_service/config/settings/worker.py b/conversion_service/config/settings/worker.py
index 09d4702ca..3cb0fa35e 100644
--- a/conversion_service/config/settings/worker.py
+++ b/conversion_service/config/settings/worker.py
@@ -11,68 +11,15 @@
# disable databases for the worker
DATABASES = {}
-INSTALLED_APPS += (
- # sentry
- 'raven.contrib.django.raven_compat',
-)
-
# SENTRY
SENTRY_DSN = env.str('SENTRY_DSN', default=None)
-if SENTRY_DSN:
- LOGGING = {
- 'version': 1,
- 'disable_existing_loggers': True,
- 'root': {
- 'level': 'WARNING',
- 'handlers': ['sentry'],
- },
- 'formatters': {
- 'verbose': {
- 'format': '%(levelname)s %(asctime)s %(module)s '
- '%(process)d %(thread)d %(message)s'
- },
- },
- 'handlers': {
- 'sentry': {
- 'level': 'ERROR',
- 'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler'
- },
- 'console': {
- 'level': 'DEBUG',
- 'class': 'logging.StreamHandler',
- 'formatter': 'verbose'
- },
- "rq_console": {
- "level": "DEBUG",
- "class": "rq.utils.ColorizingStreamHandler",
- "formatter": "verbose",
- },
- },
- 'loggers': {
- 'django.db.backends': {
- 'level': 'ERROR',
- 'handlers': ['console', 'sentry'],
- 'propagate': False,
- },
- "rq.worker": {
- "level": "WARNING",
- "handlers": ['rq_console', "sentry"],
- },
- 'raven': {
- 'level': 'DEBUG',
- 'handlers': ['console'],
- 'propagate': False,
- },
- 'sentry.errors': {
- 'level': 'DEBUG',
- 'handlers': ['console'],
- 'propagate': False,
- },
- },
- }
+if SENTRY_DSN is not None:
+ import sentry_sdk
+ from sentry_sdk.integrations.django import DjangoIntegration
+
+ sentry_sdk.init(
+ dsn=SENTRY_DSN,
+ integrations=[DjangoIntegration()]
+ )
- RAVEN_CONFIG = {
- 'dsn': "sync+" + SENTRY_DSN,
- 'release': env.str('SENTRY_RELEASE', default=''),
- }
diff --git a/conversion_service/config/wsgi.py b/conversion_service/config/wsgi.py
index 5ac4decff..9627e03d6 100644
--- a/conversion_service/config/wsgi.py
+++ b/conversion_service/config/wsgi.py
@@ -8,11 +8,8 @@
"""
import os
-from raven.contrib.django.middleware.wsgi import Sentry
-from whitenoise.django import DjangoWhiteNoise
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "conversion_service.config.settings.production")
application = get_wsgi_application()
-application = Sentry(DjangoWhiteNoise(application))
diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml
index 5286ad07c..8c8548f94 100644
--- a/docker-compose-dev.yml
+++ b/docker-compose-dev.yml
@@ -5,7 +5,7 @@ services:
context: .
dockerfile: Dockerfile.nginx
ports:
- - "8888:80"
+ - "8889:80"
environment:
- VIRTUAL_HOST=localhost,127.0.0.1,${PUBLIC_LOCALHOST_IP:-osmaxx}
frontend:
@@ -24,8 +24,8 @@ services:
- DJANGO_SETTINGS_MODULE=web_frontend.config.settings.local
- DJANGO_DEBUG=true
# set these if you want to be able to log in using OSM
- - SOCIAL_AUTH_OPENSTREETMAP_KEY=
- - SOCIAL_AUTH_OPENSTREETMAP_SECRET=
+ - SOCIAL_AUTH_OPENSTREETMAP_KEY=
+ - SOCIAL_AUTH_OPENSTREETMAP_SECRET=
# Allow access from any private-use IP, since docker assigns "random" IPs.
# 172.*.*.* is actually allowing too much, but this docker-compose file should
# only ever be used on local development machine, anyway!
diff --git a/docs/development/project-development-environment.md b/docs/development/project-development-environment.md
index 2fd31e4bd..ec7e8feb4 100644
--- a/docs/development/project-development-environment.md
+++ b/docs/development/project-development-environment.md
@@ -124,7 +124,7 @@ To run the application tests only, see [Commonly used commands while developing
## Access the application
-`http://:8888`
+`http://:8889`
where `` is your (public) IP as reported by
```bash
@@ -133,7 +133,7 @@ ip route get 1 | awk '{print $NF;exit}'
You can generate the complete URL in `sh` with:
```bash
-echo "http://$(ip route get 1 | awk '{print $NF;exit}'):8888"
+echo "http://$(ip route get 1 | awk '{print $NF;exit}'):8889"
```
## Enable development with debug toolbar enabled
diff --git a/osm_pbf_updater/Dockerfile b/osm_pbf_updater/Dockerfile
index 979fc43b7..6767c006f 100644
--- a/osm_pbf_updater/Dockerfile
+++ b/osm_pbf_updater/Dockerfile
@@ -1,4 +1,4 @@
-FROM debian:jessie
+FROM ubuntu:18.04
MAINTAINER geometalab
@@ -9,7 +9,7 @@ RUN apt-get update && apt-get install -y \
python3-pip \
&& rm -rf /var/lib/apt/lists/*
-RUN pip3 install raven
+RUN pip3 install sentry-sdk
COPY ./pbf_updater.py /opt/pbf_updater.py
diff --git a/osm_pbf_updater/pbf_updater.py b/osm_pbf_updater/pbf_updater.py
index 1db374ee3..aab25e843 100755
--- a/osm_pbf_updater/pbf_updater.py
+++ b/osm_pbf_updater/pbf_updater.py
@@ -5,7 +5,9 @@
import time
import datetime
-from raven import Client
+
+import sentry_sdk
+from sentry_sdk import capture_exception
BASE_DIR = '/var/data/osm-planet'
@@ -76,14 +78,11 @@ def run(*, sleep_seconds=10, osmupdate_extra_params):
time.sleep(sleep_seconds)
-def run_with_sentry(callable, *args, sentry_dsn, **kwargs):
- client = Client(sentry_dsn)
- release = os.environ.get('SENTRY_RELEASE', 'unknown')
+def run_with_sentry(callable, *args, **kwargs):
try:
callable(*args, **kwargs)
- except:
- client.user_context({'release': release})
- client.captureException()
+ except Exception as e:
+ capture_exception(e)
if __name__ == '__main__':
@@ -98,8 +97,9 @@ def run_with_sentry(callable, *args, sentry_dsn, **kwargs):
sentry_dsn = os.environ.get('SENTRY_DSN', None)
if sentry_dsn is not None:
+ sentry_sdk.init(sentry_dsn)
run_with_sentry(
- run, sentry_dsn=sentry_dsn, sleep_seconds=sleep_seconds, osmupdate_extra_params=update_extra_params
+ run, sleep_seconds=sleep_seconds, osmupdate_extra_params=update_extra_params
)
else:
run(sleep_seconds=sleep_seconds, osmupdate_extra_params=update_extra_params)
diff --git a/osmaxx/contrib/auth/__init__.py b/osmaxx/contrib/auth/__init__.py
index d818f73e5..6f183e944 100755
--- a/osmaxx/contrib/auth/__init__.py
+++ b/osmaxx/contrib/auth/__init__.py
@@ -5,4 +5,6 @@
def is_exclusive_member(self: User):
return Group.objects.get(name=settings.OSMAXX['EXCLUSIVE_USER_GROUP']) in self.groups.all()
+
+
User.is_exclusive_member = is_exclusive_member
diff --git a/osmaxx/conversion/constants/coordinate_reference_system.py b/osmaxx/conversion/constants/coordinate_reference_system.py
index 4d5312feb..08ac69293 100644
--- a/osmaxx/conversion/constants/coordinate_reference_system.py
+++ b/osmaxx/conversion/constants/coordinate_reference_system.py
@@ -8,149 +8,10 @@
NAD_83 = 4269
OSGB_36 = 4277
-GLOBAL_CHOICES = (
+CHOICES = (
(WGS_84, _('WGS 84')),
(PSEUDO_MERCATOR, _('Pseudo-Mercator')),
(WGS_72, _('WGS 72')),
(NAD_83, _('NAD 83')),
(OSGB_36, _('OSGB 36')),
)
-
-UTM_ZONE_CHOICES = (
- (32601, _('UTM Zone 1, northern hemisphere')),
- (32602, _('UTM Zone 2, northern hemisphere')),
- (32603, _('UTM Zone 3, northern hemisphere')),
- (32604, _('UTM Zone 4, northern hemisphere')),
- (32605, _('UTM Zone 5, northern hemisphere')),
- (32606, _('UTM Zone 6, northern hemisphere')),
- (32607, _('UTM Zone 7, northern hemisphere')),
- (32608, _('UTM Zone 8, northern hemisphere')),
- (32609, _('UTM Zone 9, northern hemisphere')),
- (32610, _('UTM Zone 10, northern hemisphere')),
-
- (32611, _('UTM Zone 11, northern hemisphere')),
- (32612, _('UTM Zone 12, northern hemisphere')),
- (32613, _('UTM Zone 13, northern hemisphere')),
- (32614, _('UTM Zone 14, northern hemisphere')),
- (32615, _('UTM Zone 15, northern hemisphere')),
- (32616, _('UTM Zone 16, northern hemisphere')),
- (32617, _('UTM Zone 17, northern hemisphere')),
- (32618, _('UTM Zone 18, northern hemisphere')),
- (32619, _('UTM Zone 19, northern hemisphere')),
- (32620, _('UTM Zone 20, northern hemisphere')),
-
- (32621, _('UTM Zone 21, northern hemisphere')),
- (32622, _('UTM Zone 22, northern hemisphere')),
- (32623, _('UTM Zone 23, northern hemisphere')),
- (32624, _('UTM Zone 24, northern hemisphere')),
- (32625, _('UTM Zone 25, northern hemisphere')),
- (32626, _('UTM Zone 26, northern hemisphere')),
- (32627, _('UTM Zone 27, northern hemisphere')),
- (32628, _('UTM Zone 28, northern hemisphere')),
- (32629, _('UTM Zone 29, northern hemisphere')),
- (32630, _('UTM Zone 30, northern hemisphere')),
-
- (32631, _('UTM Zone 31, northern hemisphere')),
- (32632, _('UTM Zone 32, northern hemisphere')),
- (32633, _('UTM Zone 33, northern hemisphere')),
- (32634, _('UTM Zone 34, northern hemisphere')),
- (32635, _('UTM Zone 35, northern hemisphere')),
- (32636, _('UTM Zone 36, northern hemisphere')),
- (32637, _('UTM Zone 37, northern hemisphere')),
- (32638, _('UTM Zone 38, northern hemisphere')),
- (32639, _('UTM Zone 39, northern hemisphere')),
- (32640, _('UTM Zone 40, northern hemisphere')),
-
- (32641, _('UTM Zone 41, northern hemisphere')),
- (32642, _('UTM Zone 42, northern hemisphere')),
- (32643, _('UTM Zone 43, northern hemisphere')),
- (32644, _('UTM Zone 44, northern hemisphere')),
- (32645, _('UTM Zone 45, northern hemisphere')),
- (32646, _('UTM Zone 46, northern hemisphere')),
- (32647, _('UTM Zone 47, northern hemisphere')),
- (32648, _('UTM Zone 48, northern hemisphere')),
- (32649, _('UTM Zone 49, northern hemisphere')),
- (32650, _('UTM Zone 50, northern hemisphere')),
-
- (32651, _('UTM Zone 51, northern hemisphere')),
- (32652, _('UTM Zone 52, northern hemisphere')),
- (32653, _('UTM Zone 53, northern hemisphere')),
- (32654, _('UTM Zone 54, northern hemisphere')),
- (32655, _('UTM Zone 55, northern hemisphere')),
- (32656, _('UTM Zone 56, northern hemisphere')),
- (32657, _('UTM Zone 57, northern hemisphere')),
- (32658, _('UTM Zone 58, northern hemisphere')),
- (32659, _('UTM Zone 59, northern hemisphere')),
- (32660, _('UTM Zone 60, northern hemisphere')),
-
- (32701, _('UTM Zone 1, southern hemisphere')),
- (32702, _('UTM Zone 2, southern hemisphere')),
- (32703, _('UTM Zone 3, southern hemisphere')),
- (32704, _('UTM Zone 4, southern hemisphere')),
- (32705, _('UTM Zone 5, southern hemisphere')),
- (32706, _('UTM Zone 6, southern hemisphere')),
- (32707, _('UTM Zone 7, southern hemisphere')),
- (32708, _('UTM Zone 8, southern hemisphere')),
- (32709, _('UTM Zone 9, southern hemisphere')),
- (32710, _('UTM Zone 10, southern hemisphere')),
-
- (32711, _('UTM Zone 11, southern hemisphere')),
- (32712, _('UTM Zone 12, southern hemisphere')),
- (32713, _('UTM Zone 13, southern hemisphere')),
- (32714, _('UTM Zone 14, southern hemisphere')),
- (32715, _('UTM Zone 15, southern hemisphere')),
- (32716, _('UTM Zone 16, southern hemisphere')),
- (32717, _('UTM Zone 17, southern hemisphere')),
- (32718, _('UTM Zone 18, southern hemisphere')),
- (32719, _('UTM Zone 19, southern hemisphere')),
- (32720, _('UTM Zone 20, southern hemisphere')),
-
- (32721, _('UTM Zone 21, southern hemisphere')),
- (32722, _('UTM Zone 22, southern hemisphere')),
- (32723, _('UTM Zone 23, southern hemisphere')),
- (32724, _('UTM Zone 24, southern hemisphere')),
- (32725, _('UTM Zone 25, southern hemisphere')),
- (32726, _('UTM Zone 26, southern hemisphere')),
- (32727, _('UTM Zone 27, southern hemisphere')),
- (32728, _('UTM Zone 28, southern hemisphere')),
- (32729, _('UTM Zone 29, southern hemisphere')),
- (32730, _('UTM Zone 30, southern hemisphere')),
-
- (32731, _('UTM Zone 31, southern hemisphere')),
- (32732, _('UTM Zone 32, southern hemisphere')),
- (32733, _('UTM Zone 33, southern hemisphere')),
- (32734, _('UTM Zone 34, southern hemisphere')),
- (32735, _('UTM Zone 35, southern hemisphere')),
- (32736, _('UTM Zone 36, southern hemisphere')),
- (32737, _('UTM Zone 37, southern hemisphere')),
- (32738, _('UTM Zone 38, southern hemisphere')),
- (32739, _('UTM Zone 39, southern hemisphere')),
- (32740, _('UTM Zone 40, southern hemisphere')),
-
- (32741, _('UTM Zone 41, southern hemisphere')),
- (32742, _('UTM Zone 42, southern hemisphere')),
- (32743, _('UTM Zone 43, southern hemisphere')),
- (32744, _('UTM Zone 44, southern hemisphere')),
- (32745, _('UTM Zone 45, southern hemisphere')),
- (32746, _('UTM Zone 46, southern hemisphere')),
- (32747, _('UTM Zone 47, southern hemisphere')),
- (32748, _('UTM Zone 48, southern hemisphere')),
- (32749, _('UTM Zone 49, southern hemisphere')),
- (32750, _('UTM Zone 50, southern hemisphere')),
-
- (32751, _('UTM Zone 51, southern hemisphere')),
- (32752, _('UTM Zone 52, southern hemisphere')),
- (32753, _('UTM Zone 53, southern hemisphere')),
- (32754, _('UTM Zone 54, southern hemisphere')),
- (32755, _('UTM Zone 55, southern hemisphere')),
- (32756, _('UTM Zone 56, southern hemisphere')),
- (32757, _('UTM Zone 57, southern hemisphere')),
- (32758, _('UTM Zone 58, southern hemisphere')),
- (32759, _('UTM Zone 59, southern hemisphere')),
- (32760, _('UTM Zone 60, southern hemisphere')),
-)
-
-CHOICES = (
- (_('Global coordinate reference systems'), GLOBAL_CHOICES),
- (_('UTM zones'), UTM_ZONE_CHOICES),
-)
diff --git a/osmaxx/conversion/converters/converter_garmin/garmin.py b/osmaxx/conversion/converters/converter_garmin/garmin.py
index 9d1690c17..262e34464 100644
--- a/osmaxx/conversion/converters/converter_garmin/garmin.py
+++ b/osmaxx/conversion/converters/converter_garmin/garmin.py
@@ -92,11 +92,7 @@ def _produce_garmin(self, config_file_path, out_dir):
'--route',
]
- logged_check_call(
- mkg_map_command +
- output_dir +
- config
- )
+ logged_check_call(mkg_map_command + output_dir + config)
self._unzipped_result_size = recursive_getsize(out_dir)
def _create_zip(self, data_dir):
diff --git a/osmaxx/conversion/converters/converter_gis/bootstrap/sql/functions/0030_transliterate.sql b/osmaxx/conversion/converters/converter_gis/bootstrap/sql/functions/0030_transliterate.sql
index 52d146242..2c5faba4f 100644
--- a/osmaxx/conversion/converters/converter_gis/bootstrap/sql/functions/0030_transliterate.sql
+++ b/osmaxx/conversion/converters/converter_gis/bootstrap/sql/functions/0030_transliterate.sql
@@ -4,5 +4,5 @@
-- EG. select osml10n_translit('Москва́');
-- Moskvá
---------------------------------------------------------------------------------------
-CREATE EXTENSION IF NOT EXISTS postgis;
-CREATE EXTENSION IF NOT EXISTS osml10n;
+CREATE EXTENSION IF NOT EXISTS postgis CASCADE;
+CREATE EXTENSION IF NOT EXISTS osml10n CASCADE;
diff --git a/osmaxx/conversion/converters/converter_gis/helper/postgres_wrapper.py b/osmaxx/conversion/converters/converter_gis/helper/postgres_wrapper.py
index 14e7e549d..fd09437f1 100644
--- a/osmaxx/conversion/converters/converter_gis/helper/postgres_wrapper.py
+++ b/osmaxx/conversion/converters/converter_gis/helper/postgres_wrapper.py
@@ -25,7 +25,7 @@ def execute_sql_file(self, file_path):
try:
with open(file_path, 'r') as psql_command_file:
return self.execute_sql_command(psql_command_file.read())
- except:
+ except: # noqa: E722 do not use bare 'except'
logger.error("exception caught while processing %s", file_path)
raise
diff --git a/osmaxx/conversion/converters/utils.py b/osmaxx/conversion/converters/utils.py
index 64e1d24dc..90fa5d0fe 100644
--- a/osmaxx/conversion/converters/utils.py
+++ b/osmaxx/conversion/converters/utils.py
@@ -3,13 +3,7 @@
import subprocess
import uuid
import zipfile
-
-# Use the built-in version of scandir if possible, otherwise
-# use the scandir module version
-try:
- from os import scandir
-except ImportError:
- from scandir import scandir
+from os import scandir
logger = logging.getLogger(__name__)
diff --git a/osmaxx/conversion/management/commands/result_harvester.py b/osmaxx/conversion/management/commands/result_harvester.py
index bb769dd6c..f60f54cc4 100644
--- a/osmaxx/conversion/management/commands/result_harvester.py
+++ b/osmaxx/conversion/management/commands/result_harvester.py
@@ -30,15 +30,18 @@ def handle(self, *args, **options):
time.sleep(CONVERSION_SETTINGS['result_harvest_interval_seconds'])
def _handle_failed_jobs(self):
- failed_queue = django_rq.get_failed_queue()
- for rq_job_id in failed_queue.job_ids:
- try:
- conversion_job = conversion_models.Job.objects.get(rq_job_id=rq_job_id)
- except ObjectDoesNotExist as e:
- logger.exception(e)
- continue
- self._set_failed_unless_final(conversion_job, rq_job_id=rq_job_id)
- self._notify(conversion_job)
+ from django.conf import settings
+ for queue_name in settings.RQ_QUEUES:
+ queue = django_rq.get_queue(queue_name)
+
+ for rq_job_id in queue.failed_job_registry.get_job_ids():
+ try:
+ conversion_job = conversion_models.Job.objects.get(rq_job_id=rq_job_id)
+ except ObjectDoesNotExist as e:
+ logger.exception(e)
+ continue
+ self._set_failed_unless_final(conversion_job, rq_job_id=rq_job_id)
+ self._notify(conversion_job)
def _handle_running_jobs(self):
active_jobs = conversion_models.Job.objects.exclude(status__in=status.FINAL_STATUSES)\
@@ -65,8 +68,9 @@ def _update_job(self, rq_job_id):
return
logger.info('updating job %d', rq_job_id)
- conversion_job.status = job.status
- if job.status == status.FINISHED:
+ conversion_job.status = job.get_status()
+
+ if job.get_status() == status.FINISHED:
add_file_to_job(conversion_job=conversion_job, result_zip_file=job.kwargs['output_zip_file_path'])
add_meta_data_to_job(conversion_job=conversion_job, rq_job=job)
conversion_job.save()
@@ -85,7 +89,7 @@ def _notify(self, conversion_job):
data = {'status': conversion_job.status, 'job': conversion_job.get_absolute_url()}
try:
requests.get(conversion_job.callback_url, params=data)
- except:
+ except: # noqa: E722 do not use bare 'except'
logger.error('failed to send notification for job {} using {} as URL.'.format(
conversion_job.id, conversion_job.callback_url)
)
@@ -133,8 +137,14 @@ def fetch_job(rq_job_id, from_queues):
def cleanup_old_jobs():
queues = [django_rq.get_queue(name=queue_name) for queue_name in settings.RQ_QUEUE_NAMES]
- queues.append(django_rq.get_failed_queue())
+
for queue in queues:
for job in queue.get_jobs():
- if job.status in status.FINAL_STATUSES:
+ if job.get_status() in status.FINAL_STATUSES:
+ job.delete()
+
+ failed_job_registry = queue.failed_job_registry
+ for rq_job_id in failed_job_registry.get_job_ids():
+ job = queue.fetch_job(rq_job_id)
+ if job.get_status() in status.FINAL_STATUSES:
job.delete()
diff --git a/osmaxx/excerptexport/migrations/0019_one_export_per_file_format.py b/osmaxx/excerptexport/migrations/0019_one_export_per_file_format.py
index c41590810..dbd8ad8cb 100644
--- a/osmaxx/excerptexport/migrations/0019_one_export_per_file_format.py
+++ b/osmaxx/excerptexport/migrations/0019_one_export_per_file_format.py
@@ -12,7 +12,7 @@ def move_file_formats_from_extraction_order_extraction_configuration_to_export(a
for extraction_order in ExtractionOrder.objects.all():
try:
extraction_configuration = json.loads(extraction_order._extraction_configuration)
- except:
+ except: # noqa: E722 do not use bare 'except'
return
file_formats = frozenset(extraction_configuration.pop('gis_formats', []))
extraction_order._extraction_configuration = json.dumps(extraction_configuration)
@@ -26,7 +26,7 @@ def move_file_formats_from_export_to_extraction_order_extraction_configuration(a
for extraction_order in ExtractionOrder.objects.all():
try:
partial_extraction_configuration = json.loads(extraction_order._extraction_configuration)
- except:
+ except: # noqa: E722 do not use bare 'except'
partial_extraction_configuration = {}
extraction_configuration = dict(
gis_formats=list(extraction_order.exports.values_list('file_format', flat=True)),
diff --git a/osmaxx/excerptexport/rest_api/urls.py b/osmaxx/excerptexport/rest_api/urls.py
index ef387188e..f93869da4 100644
--- a/osmaxx/excerptexport/rest_api/urls.py
+++ b/osmaxx/excerptexport/rest_api/urls.py
@@ -1,5 +1,4 @@
from django.conf.urls import url
-from utm_zone_info.views import utm_zone_info
from . import views
@@ -8,5 +7,4 @@
url(r'^exports/(?P[0-9]+)/$', views.export_detail, name='export-detail'),
url(r'^estimated_file_size/$', views.estimated_file_size),
url(r'^format_size_estimation/$', views.format_size_estimation),
- url(r'^utm-zone-info/$', utm_zone_info, name='utm-zone-info'),
]
diff --git a/osmaxx/excerptexport/rest_api/views.py b/osmaxx/excerptexport/rest_api/views.py
index 6a2bf2d9a..ae489b04c 100644
--- a/osmaxx/excerptexport/rest_api/views.py
+++ b/osmaxx/excerptexport/rest_api/views.py
@@ -18,7 +18,7 @@ class ExcerptViewSet(ETAGMixin, viewsets.mixins.RetrieveModelMixin, viewsets.Gen
)
queryset = Excerpt.objects.all()
serializer_class = ExcerptGeometrySerializer
-excerpt_detail = ExcerptViewSet.as_view({'get': 'retrieve'})
+excerpt_detail = ExcerptViewSet.as_view({'get': 'retrieve'}) # noqa: E305 expected 2 blank lines after class or function definition, found 0
class ExportViewSet(viewsets.mixins.DestroyModelMixin, viewsets.GenericViewSet):
@@ -28,7 +28,7 @@ class ExportViewSet(viewsets.mixins.DestroyModelMixin, viewsets.GenericViewSet):
)
queryset = Export.objects.all()
serializer_class = ExportSerializer
-export_detail = ExportViewSet.as_view({'delete': 'destroy'})
+export_detail = ExportViewSet.as_view({'delete': 'destroy'}) # noqa: E305 expected 2 blank lines after class or function definition, found 0
def estimated_file_size(request):
diff --git a/osmaxx/excerptexport/static/excerptexport/scripts/maps/draw_controls.js b/osmaxx/excerptexport/static/excerptexport/scripts/maps/draw_controls.js
index 0a060aed8..5f53cb455 100644
--- a/osmaxx/excerptexport/static/excerptexport/scripts/maps/draw_controls.js
+++ b/osmaxx/excerptexport/static/excerptexport/scripts/maps/draw_controls.js
@@ -130,7 +130,6 @@ var draw_controls = function (map) {
}
addSizeEstimationToCheckboxes(layer);
- filterUTMZones(layer);
estimateSize(layer).done(function (data) {
var estimatedFileSize = Number(data['estimated_file_size_in_bytes']);
diff --git a/osmaxx/excerptexport/static/excerptexport/scripts/maps/excerpt_viewer.js b/osmaxx/excerptexport/static/excerptexport/scripts/maps/excerpt_viewer.js
index d44d48122..bfbbdc277 100644
--- a/osmaxx/excerptexport/static/excerptexport/scripts/maps/excerpt_viewer.js
+++ b/osmaxx/excerptexport/static/excerptexport/scripts/maps/excerpt_viewer.js
@@ -20,10 +20,6 @@ window.ExcerptViewer = function(mapElementID, excerptApiUrl) {
window.addSizeEstimationToCheckboxes(layer);
- // WARNING: Simplification in action here!
- // If there are multiple features on one layer, i.e. more than one polygon, this will return only the valid
- // UTM-Regions for the first feature/polygon and ignore the other features!
- window.filterUTMZones(layer.getLayers()[0]);
}.bind(this);
this.showExcerptOnMap = function(ID) {
diff --git a/osmaxx/excerptexport/templates/excerptexport/base.html b/osmaxx/excerptexport/templates/excerptexport/base.html
index b0da28eed..1c73f3dfb 100644
--- a/osmaxx/excerptexport/templates/excerptexport/base.html
+++ b/osmaxx/excerptexport/templates/excerptexport/base.html
@@ -34,5 +34,4 @@ Content
-
{% endblock %}
diff --git a/osmaxx/excerptexport/views.py b/osmaxx/excerptexport/views.py
index a5b484391..521f57128 100644
--- a/osmaxx/excerptexport/views.py
+++ b/osmaxx/excerptexport/views.py
@@ -47,7 +47,7 @@ def form_valid(self, form):
class OrderNewExcerptView(LoginRequiredMixin, EmailRequiredMixin, OrderFormViewMixin, FormView):
template_name = 'excerptexport/templates/order_new_excerpt.html'
form_class = ExcerptForm
-order_new_excerpt = OrderNewExcerptView.as_view()
+order_new_excerpt = OrderNewExcerptView.as_view() # noqa: E305 expected 2 blank lines after class or function definition, found 0
class OrderExistingExcerptView(LoginRequiredMixin, EmailRequiredMixin, OrderFormViewMixin, FormView):
@@ -56,7 +56,7 @@ class OrderExistingExcerptView(LoginRequiredMixin, EmailRequiredMixin, OrderForm
def get_form_class(self):
return super().get_form_class().get_dynamic_form_class(self.request.user)
-order_existing_excerpt = OrderExistingExcerptView.as_view()
+order_existing_excerpt = OrderExistingExcerptView.as_view() # noqa: E305 expected 2 blank lines after class or function definition, found 0
class OwnershipRequiredMixin(SingleObjectMixin):
@@ -131,7 +131,7 @@ def _get_exports_for_excerpt(self, excerpt):
filter(extraction_order__excerpt=excerpt).\
select_related('extraction_order', 'extraction_order__excerpt', 'output_file')\
.defer('extraction_order__excerpt__bounding_geometry')
-export_list = ExportsListView.as_view()
+export_list = ExportsListView.as_view() # noqa: E305 expected 2 blank lines after class or function definition, found 0
class ExportsDetailView(LoginRequiredMixin, ExportsListMixin, ListView):
@@ -154,7 +154,7 @@ def get_queryset(self):
.select_related('extraction_order', 'extraction_order__excerpt', 'output_file')\
.filter(extraction_order__excerpt__pk=pk)
return queryset
-export_detail = ExportsDetailView.as_view()
+export_detail = ExportsDetailView.as_view() # noqa: E305 expected 2 blank lines after class or function definition, found 0
def _social_identification_description(user):
@@ -175,7 +175,7 @@ class ExcerptManageListView(ListView):
def get_queryset(self):
user = self.request.user
return super().get_queryset().filter(owner=user, is_public=False, extraction_orders__orderer=user).distinct()
-manage_own_excerpts = ExcerptManageListView.as_view()
+manage_own_excerpts = ExcerptManageListView.as_view() # noqa: E305 expected 2 blank lines after class or function definition, found 0
class DeleteExcerptView(DeleteView):
@@ -208,4 +208,4 @@ def delete(self, request, *args, **kwargs):
return HttpResponseRedirect(self.get_success_url())
return super().delete(request, *args, **kwargs)
-delete_excerpt = DeleteExcerptView.as_view()
+delete_excerpt = DeleteExcerptView.as_view() # noqa: E305 expected 2 blank lines after class or function definition, found 0
diff --git a/osmaxx/job_progress/middleware.py b/osmaxx/job_progress/middleware.py
index caa8915cd..3641a665f 100644
--- a/osmaxx/job_progress/middleware.py
+++ b/osmaxx/job_progress/middleware.py
@@ -1,6 +1,8 @@
import logging
from datetime import timedelta
+from django.utils.deprecation import MiddlewareMixin
+
import requests
from requests import HTTPError
@@ -12,11 +14,11 @@
logger = logging.getLogger(__name__)
-class ExportUpdaterMiddleware(object):
+class ExportUpdaterMiddleware(MiddlewareMixin):
def process_request(self, request):
assert hasattr(request, 'user'), (
"The osmaxx export updater middleware requires Django authentication middleware "
- "to be installed. Edit your MIDDLEWARE_CLASSES setting to insert "
+ "to be installed. Edit your MIDDLEWARE setting to insert "
"'django.contrib.auth.middleware.AuthenticationMiddleware' before "
"'osmaxx.job_progress.middleware.ExportUpdaterMiddleware'."
)
diff --git a/osmaxx/profile/admin.py b/osmaxx/profile/admin.py
index df462c1c2..e981de383 100644
--- a/osmaxx/profile/admin.py
+++ b/osmaxx/profile/admin.py
@@ -13,4 +13,4 @@ class Meta:
class ProfileAdmin(admin.ModelAdmin):
list_display = ['associated_user', 'unverified_email']
form = ProfileAdminForm
-admin.site.register(Profile, ProfileAdmin)
+admin.site.register(Profile, ProfileAdmin) # noqa: E305 expected 2 blank lines after class or function definition, found 0
diff --git a/pytest.ini b/pytest.ini
index 4aab5143e..39c782a1a 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -1,5 +1,5 @@
[pytest]
norecursedirs = .* requirements {arch} *.egg *.egg-info
addopts = --create-db
-base_url = http://localhost:8888
+base_url = http://localhost:8889
sensitive_url = osmaxx.hsr.ch
diff --git a/requirements-all.txt b/requirements-all.txt
index 34a5d7701..4241a4304 100644
--- a/requirements-all.txt
+++ b/requirements-all.txt
@@ -2,128 +2,106 @@
# This file is autogenerated by pip-compile
# To update, run:
#
-# pip-compile --output-file requirements-all.txt requirements-all.in
+# pip-compile --output-file=requirements-all.txt requirements-all.in
#
-astroid==1.6.4 # via pylint, pylint-celery, pylint-flask, pylint-plugin-utils, requirements-detector
-attrs==18.1.0 # via pytest
+atomicwrites==1.3.0 # via pytest
+attrs==19.1.0 # via pytest
backcall==0.1.0 # via ipython
-backports-abc==0.5 # via tornado
-certifi==2018.4.16 # via requests
+certifi==2019.3.9 # via requests, sentry-sdk
chardet==3.0.4 # via requests
-click==6.7 # via mkdocs, rq
-coverage==4.5.1
-decorator==4.3.0 # via ipython, traitlets
-defusedxml==0.5.0 # via python3-openid, social-auth-core
+click==7.0 # via mkdocs, rq
+coverage==4.5.3
+decorator==4.4.0 # via ipython, traitlets
+defusedxml==0.6.0 # via python3-openid, social-auth-core
django-crispy-forms==1.7.2
-django-debug-toolbar==1.9.1
+django-debug-toolbar==1.11
django-downloadview==1.9
-django-environ==0.4.4
-django-extensions==2.0.7
-django-filter==1.1.0
+django-environ==0.4.5
+django-extensions==2.1.7
+django-filter==2.1.0
django-model-utils==3.1.2
-django-rq==1.1.0
+django-rq==2.0
django-secure==1.0.1
django-stored-messages==1.4.0
-django==1.10.8
-djangorestframework-gis==0.10.1
+django==1.11.20
+djangorestframework-gis==0.14
djangorestframework-jwt==1.11.0
-djangorestframework==3.4.7
-docutils==0.14 # via pyroma
-dodgy==0.1.9 # via prospector
-drf-extensions==0.3.1
-flake8-polyfill==1.0.2 # via pep8-naming
-flake8==3.5.0
-furl==1.0.1
-geoalchemy2==0.4.2
-geometalab.drf-utm-zone-info==0.2.0
-geometalab.osm-pbf-file-size-estimation-service==1.1.0
-gevent==1.3.0
-greenlet==0.4.13 # via gevent
-gunicorn==19.8.1
-idna==2.6 # via requests, yarl
+djangorestframework==3.9.4
+drf-extensions==0.5.0
+entrypoints==0.3 # via flake8
+flake8==3.7.7
+furl==2.0.0
+geoalchemy2==0.6.2
+geometalab.osm-pbf-file-size-estimation-service==2.0.0
+gevent==1.4.0
+greenlet==0.4.15 # via gevent
+gunicorn==19.9.0
+idna==2.8 # via requests, yarl
ipython-genutils==0.2.0 # via traitlets
-ipython==6.4.0
-isort==4.3.4 # via pylint
-jedi==0.12.0 # via ipython
-jinja2==2.10
-lazy-object-proxy==1.3.1 # via astroid
-livereload==2.5.2 # via mkdocs
-markdown==2.6.11
-markupsafe==1.0 # via jinja2
-mccabe==0.6.1 # via flake8, prospector, pylint
+ipython==7.5.0
+jedi==0.13.3 # via ipython
+jinja2==2.10.1
+livereload==2.6.1 # via mkdocs
+markdown==3.1.1
+markupsafe==1.1.1 # via jinja2
+mccabe==0.6.1 # via flake8
memoize==1.0.0
-mkdocs==0.17.3
-more-itertools==4.1.0 # via pytest
-multidict==4.3.1 # via yarl
-numpy==1.14.3
-oauthlib==2.0.7 # via requests-oauthlib, social-auth-core
-orderedmultidict==0.7.11 # via furl
-parso==0.2.0 # via jedi
-pep8-naming==0.7.0 # via prospector
-pexpect==4.5.0 # via ipython
-pickleshare==0.7.4 # via ipython
-pluggy==0.6.0 # via pytest
-prompt-toolkit==1.0.15 # via ipython
-prospector==0.12.7
-psycopg2-binary==2.7.4
-ptyprocess==0.5.2 # via pexpect
-py==1.5.3 # via pytest
-pycodestyle==2.0.0 # via flake8, prospector
-pydocstyle==2.1.1 # via prospector
-pyflakes==1.6.0 # via flake8, prospector
-pygments==2.2.0 # via ipython
+mkdocs==1.0.4
+more-itertools==7.0.0 # via pytest
+multidict==4.5.2 # via yarl
+numpy==1.16.3
+oauthlib==3.0.1 # via requests-oauthlib, social-auth-core
+orderedmultidict==1.0 # via furl
+parso==0.4.0 # via jedi
+pexpect==4.7.0 # via ipython
+pickleshare==0.7.5 # via ipython
+pluggy==0.11.0 # via pytest
+prompt-toolkit==2.0.9 # via ipython
+psycopg2-binary==2.8.2
+ptyprocess==0.6.0 # via pexpect
+py==1.8.0 # via pytest
+pycodestyle==2.5.0 # via flake8
+pyflakes==2.1.1 # via flake8
+pygments==2.4.1 # via ipython
pyhamcrest==1.9.0
-pyjwt==1.6.1 # via djangorestframework-jwt, social-auth-core
-pylint-celery==0.3 # via prospector
-pylint-common==0.2.5 # via prospector
-pylint-django==0.11 # via prospector
-pylint-flask==0.5 # via prospector
-pylint-plugin-utils==0.2.6 # via prospector, pylint-celery, pylint-common, pylint-django, pylint-flask
-pylint==1.9.1 # via prospector, pylint-celery, pylint-common, pylint-django, pylint-flask, pylint-plugin-utils
+pyjwt==1.7.1 # via djangorestframework-jwt, social-auth-core
pypandoc==1.4
-pyroma==2.3
pytest-base-url==1.4.1 # via pytest-selenium
-pytest-cov==2.5.1
-pytest-django==3.2.1
-pytest-html==1.17.0 # via pytest-selenium
-pytest-metadata==1.7.0 # via pytest-html
-pytest-mock==1.10.0
-pytest-selenium==1.12.0
+pytest-cov==2.7.1
+pytest-django==3.4.8
+pytest-html==1.20.0 # via pytest-selenium
+pytest-metadata==1.8.0 # via pytest-html
+pytest-mock==1.10.4
+pytest-selenium==1.16.0
pytest-variables==1.7.1 # via pytest-selenium
-pytest==3.5.1 # via pytest-base-url, pytest-cov, pytest-django, pytest-html, pytest-metadata, pytest-mock, pytest-selenium, pytest-variables
+pytest==4.5.0 # via pytest-base-url, pytest-cov, pytest-django, pytest-html, pytest-metadata, pytest-mock, pytest-selenium, pytest-variables
python-social-auth[django]==0.3.6
python3-openid==3.1.0 # via social-auth-core
-pyyaml==3.12
-raven==6.8.0
-redis==2.10.6 # via rq
-requests-mock==1.5.0
-requests-oauthlib==0.8.0 # via social-auth-core
-requests==2.18.4
-requirements-detector==0.5.2 # via prospector
-rq==0.10.0
-ruamel.yaml==0.15.37
-scandir==1.7
-scipy==1.1.0
-selenium==3.12.0 # via pytest-selenium
-setoptconf==0.2.0 # via prospector
-simplegeneric==0.8.1 # via ipython
-six==1.11.0 # via astroid, django-downloadview, django-environ, django-extensions, furl, livereload, more-itertools, orderedmultidict, prompt-toolkit, pydocstyle, pyhamcrest, pylint, pytest, requests-mock, social-auth-app-django, social-auth-core, sqlalchemy-utils, traitlets, vcrpy
-snowballstemmer==1.2.1 # via pydocstyle
-social-auth-app-django==2.1.0 # via python-social-auth
-social-auth-core==1.7.0 # via python-social-auth, social-auth-app-django
-sqlalchemy-utils==0.33.3
-sqlalchemy-views==0.2.1
-sqlalchemy==1.2.7
-sqlparse==0.2.4 # via django-debug-toolbar
-tornado==4.5.3 # via livereload, mkdocs
+pytz==2019.1 # via django
+pyyaml==5.1
+redis==3.2.1 # via django-rq, rq
+requests-mock==1.6.0
+requests-oauthlib==1.2.0 # via social-auth-core
+requests==2.22.0
+rq==1.0
+ruamel.yaml==0.15.96
+scipy==1.3.0
+selenium==3.141.0 # via pytest-selenium
+sentry-sdk==0.7.10
+six==1.12.0 # via django-downloadview, django-extensions, furl, livereload, orderedmultidict, prompt-toolkit, pyhamcrest, pytest, requests-mock, social-auth-app-django, social-auth-core, sqlalchemy-utils, traitlets, vcrpy
+social-auth-app-django==3.1.0 # via python-social-auth
+social-auth-core==3.1.0 # via python-social-auth, social-auth-app-django
+sqlalchemy-utils==0.33.11
+sqlalchemy-views==0.2.3
+sqlalchemy==1.3.3
+sqlparse==0.3.0 # via django-debug-toolbar
+tornado==6.0.2 # via livereload, mkdocs
traitlets==4.3.2 # via ipython
-typing==3.6.4 # via django-extensions, ipython
-urllib3==1.22 # via requests
-vcrpy==1.11.1
-vulture==0.26
-wcwidth==0.1.7 # via prompt-toolkit
-werkzeug==0.14.1
-wheel==0.31.1
-whitenoise==3.3.1
-wrapt==1.10.11 # via astroid, vcrpy
-yarl==1.2.0 # via vcrpy
+urllib3==1.25.3 # via requests, selenium, sentry-sdk
+vcrpy==2.0.1
+wcwidth==0.1.7 # via prompt-toolkit, pytest
+werkzeug==0.15.4
+wheel==0.33.4
+whitenoise==4.1.2
+wrapt==1.11.1 # via vcrpy
+yarl==1.3.0 # via vcrpy
diff --git a/requirements.in b/requirements.in
index 5f0da578e..101d80183 100644
--- a/requirements.in
+++ b/requirements.in
@@ -1,12 +1,11 @@
# Minimum Django and REST framework version
-Django>=1.10,<1.11
-djangorestframework>=3.4,<3.5
-djangorestframework-gis>=0.10,<0.11
-drf-extensions>=0.3.1,<0.4
+Django>=1.11,<2
+djangorestframework~=3.9.0
+djangorestframework-gis
+drf-extensions
# pbf estimation service
-geometalab.osm-pbf-file-size-estimation-service>=1.0.0,<2.0.0
-geometalab.drf-utm-zone-info>=0.1.0,<1.0.0
+geometalab.osm-pbf-file-size-estimation-service~=2.0.0
djangorestframework-jwt
requests
@@ -31,8 +30,6 @@ SQLAlchemy-Utils
sqlalchemy-views
GeoAlchemy2
furl
-# fallback for python < 3.5
-scandir
# Test requirements
pytest-django
@@ -47,11 +44,6 @@ wheel
# MkDocs for documentation previews/deploys
mkdocs
-# Extended linting support (Prospector)
-prospector
-pyroma
-vulture
-
# Testing
coverage
pyhamcrest
@@ -70,7 +62,7 @@ PyYAML
gunicorn
# sentry error logging on production
-raven
+sentry-sdk==0.7.10
ipython
diff --git a/requirements.txt b/requirements.txt
index 010d75b10..a9dc60afe 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,126 +2,104 @@
# This file is autogenerated by pip-compile
# To update, run:
#
-# pip-compile --output-file requirements.txt requirements.in
+# pip-compile --output-file=requirements.txt requirements.in
#
-astroid==1.6.4 # via pylint, pylint-celery, pylint-flask, pylint-plugin-utils, requirements-detector
-attrs==18.1.0 # via pytest
+atomicwrites==1.3.0 # via pytest
+attrs==19.1.0 # via pytest
backcall==0.1.0 # via ipython
-backports-abc==0.5 # via tornado
-certifi==2018.4.16 # via requests
+certifi==2019.3.9 # via requests, sentry-sdk
chardet==3.0.4 # via requests
-click==6.7 # via mkdocs, rq
-coverage==4.5.1
-decorator==4.3.0 # via ipython, traitlets
-defusedxml==0.5.0 # via python3-openid, social-auth-core
+click==7.0 # via mkdocs, rq
+coverage==4.5.3
+decorator==4.4.0 # via ipython, traitlets
+defusedxml==0.6.0 # via python3-openid, social-auth-core
django-crispy-forms==1.7.2
-django-debug-toolbar==1.9.1
+django-debug-toolbar==1.11
django-downloadview==1.9
-django-environ==0.4.4
-django-extensions==2.0.7
-django-filter==1.1.0
+django-environ==0.4.5
+django-extensions==2.1.7
+django-filter==2.1.0
django-model-utils==3.1.2
-django-rq==1.1.0
+django-rq==2.0
django-secure==1.0.1
django-stored-messages==1.4.0
-django==1.10.8
-djangorestframework-gis==0.10.1
+django==1.11.20
+djangorestframework-gis==0.14
djangorestframework-jwt==1.11.0
-djangorestframework==3.4.7
-docutils==0.14 # via pyroma
-dodgy==0.1.9 # via prospector
-drf-extensions==0.3.1
-flake8-polyfill==1.0.2 # via pep8-naming
-flake8==3.5.0
-furl==1.0.1
-geoalchemy2==0.4.2
-geometalab.drf-utm-zone-info==0.2.0
-geometalab.osm-pbf-file-size-estimation-service==1.1.0
-gevent==1.3.0
-greenlet==0.4.13 # via gevent
-gunicorn==19.8.1
-idna==2.6 # via requests, yarl
+djangorestframework==3.9.4
+drf-extensions==0.5.0
+entrypoints==0.3 # via flake8
+flake8==3.7.7
+furl==2.0.0
+geoalchemy2==0.6.2
+geometalab.osm-pbf-file-size-estimation-service==2.0.0
+gevent==1.4.0
+greenlet==0.4.15 # via gevent
+gunicorn==19.9.0
+idna==2.8 # via requests, yarl
ipython-genutils==0.2.0 # via traitlets
-ipython==6.4.0
-isort==4.3.4 # via pylint
-jedi==0.12.0 # via ipython
-jinja2==2.10 # via mkdocs
-lazy-object-proxy==1.3.1 # via astroid
-livereload==2.5.2 # via mkdocs
-markdown==2.6.11
-markupsafe==1.0 # via jinja2
-mccabe==0.6.1 # via flake8, prospector, pylint
+ipython==7.5.0
+jedi==0.13.3 # via ipython
+jinja2==2.10.1 # via mkdocs
+livereload==2.6.1 # via mkdocs
+markdown==3.1.1
+markupsafe==1.1.1 # via jinja2
+mccabe==0.6.1 # via flake8
memoize==1.0.0
-mkdocs==0.17.3
-more-itertools==4.1.0 # via pytest
-multidict==4.3.1 # via yarl
-numpy==1.14.3
-oauthlib==2.0.7 # via requests-oauthlib, social-auth-core
-orderedmultidict==0.7.11 # via furl
-parso==0.2.0 # via jedi
-pep8-naming==0.7.0 # via prospector
-pexpect==4.5.0 # via ipython
-pickleshare==0.7.4 # via ipython
-pluggy==0.6.0 # via pytest
-prompt-toolkit==1.0.15 # via ipython
-prospector==0.12.7
-psycopg2-binary==2.7.4
-ptyprocess==0.5.2 # via pexpect
-py==1.5.3 # via pytest
-pycodestyle==2.0.0 # via flake8, prospector
-pydocstyle==2.1.1 # via prospector
-pyflakes==1.6.0 # via flake8, prospector
-pygments==2.2.0 # via ipython
+mkdocs==1.0.4
+more-itertools==7.0.0 # via pytest
+multidict==4.5.2 # via yarl
+numpy==1.16.3
+oauthlib==3.0.1 # via requests-oauthlib, social-auth-core
+orderedmultidict==1.0 # via furl
+parso==0.4.0 # via jedi
+pexpect==4.7.0 # via ipython
+pickleshare==0.7.5 # via ipython
+pluggy==0.11.0 # via pytest
+prompt-toolkit==2.0.9 # via ipython
+psycopg2-binary==2.8.2
+ptyprocess==0.6.0 # via pexpect
+py==1.8.0 # via pytest
+pycodestyle==2.5.0 # via flake8
+pyflakes==2.1.1 # via flake8
+pygments==2.4.1 # via ipython
pyhamcrest==1.9.0
-pyjwt==1.6.1 # via djangorestframework-jwt, social-auth-core
-pylint-celery==0.3 # via prospector
-pylint-common==0.2.5 # via prospector
-pylint-django==0.11 # via prospector
-pylint-flask==0.5 # via prospector
-pylint-plugin-utils==0.2.6 # via prospector, pylint-celery, pylint-common, pylint-django, pylint-flask
-pylint==1.9.1 # via prospector, pylint-celery, pylint-common, pylint-django, pylint-flask, pylint-plugin-utils
-pyroma==2.3
+pyjwt==1.7.1 # via djangorestframework-jwt, social-auth-core
pytest-base-url==1.4.1 # via pytest-selenium
-pytest-cov==2.5.1
-pytest-django==3.2.1
-pytest-html==1.17.0 # via pytest-selenium
-pytest-metadata==1.7.0 # via pytest-html
-pytest-mock==1.10.0
-pytest-selenium==1.12.0
+pytest-cov==2.7.1
+pytest-django==3.4.8
+pytest-html==1.20.0 # via pytest-selenium
+pytest-metadata==1.8.0 # via pytest-html
+pytest-mock==1.10.4
+pytest-selenium==1.16.0
pytest-variables==1.7.1 # via pytest-selenium
-pytest==3.5.1 # via pytest-base-url, pytest-cov, pytest-django, pytest-html, pytest-metadata, pytest-mock, pytest-selenium, pytest-variables
+pytest==4.5.0 # via pytest-base-url, pytest-cov, pytest-django, pytest-html, pytest-metadata, pytest-mock, pytest-selenium, pytest-variables
python-social-auth[django]==0.3.6
python3-openid==3.1.0 # via social-auth-core
-pyyaml==3.12
-raven==6.8.0
-redis==2.10.6 # via rq
-requests-mock==1.5.0
-requests-oauthlib==0.8.0 # via social-auth-core
-requests==2.18.4
-requirements-detector==0.5.2 # via prospector
-rq==0.10.0
-scandir==1.7
-scipy==1.1.0
-selenium==3.12.0 # via pytest-selenium
-setoptconf==0.2.0 # via prospector
-simplegeneric==0.8.1 # via ipython
-six==1.11.0 # via astroid, django-downloadview, django-environ, django-extensions, furl, livereload, more-itertools, orderedmultidict, prompt-toolkit, pydocstyle, pyhamcrest, pylint, pytest, requests-mock, social-auth-app-django, social-auth-core, sqlalchemy-utils, traitlets, vcrpy
-snowballstemmer==1.2.1 # via pydocstyle
-social-auth-app-django==2.1.0 # via python-social-auth
-social-auth-core==1.7.0 # via python-social-auth, social-auth-app-django
-sqlalchemy-utils==0.33.3
-sqlalchemy-views==0.2.1
-sqlalchemy==1.2.7
-sqlparse==0.2.4 # via django-debug-toolbar
-tornado==4.5.3 # via livereload, mkdocs
+pytz==2019.1 # via django
+pyyaml==5.1
+redis==3.2.1 # via django-rq, rq
+requests-mock==1.6.0
+requests-oauthlib==1.2.0 # via social-auth-core
+requests==2.22.0
+rq==1.0
+scipy==1.3.0
+selenium==3.141.0 # via pytest-selenium
+sentry-sdk==0.7.10
+six==1.12.0 # via django-downloadview, django-extensions, furl, livereload, orderedmultidict, prompt-toolkit, pyhamcrest, pytest, requests-mock, social-auth-app-django, social-auth-core, sqlalchemy-utils, traitlets, vcrpy
+social-auth-app-django==3.1.0 # via python-social-auth
+social-auth-core==3.1.0 # via python-social-auth, social-auth-app-django
+sqlalchemy-utils==0.33.11
+sqlalchemy-views==0.2.3
+sqlalchemy==1.3.3
+sqlparse==0.3.0 # via django-debug-toolbar
+tornado==6.0.2 # via livereload, mkdocs
traitlets==4.3.2 # via ipython
-typing==3.6.4 # via django-extensions, ipython
-urllib3==1.22 # via requests
-vcrpy==1.11.1
-vulture==0.26
-wcwidth==0.1.7 # via prompt-toolkit
-werkzeug==0.14.1
-wheel==0.31.1
-whitenoise==3.3.1
-wrapt==1.10.11 # via astroid, vcrpy
-yarl==1.2.0 # via vcrpy
+urllib3==1.25.3 # via requests, selenium, sentry-sdk
+vcrpy==2.0.1
+wcwidth==0.1.7 # via prompt-toolkit, pytest
+werkzeug==0.15.4
+wheel==0.33.4
+whitenoise==4.1.2
+wrapt==1.11.1 # via vcrpy
+yarl==1.3.0 # via vcrpy
diff --git a/runtests.py b/runtests.py
index 68a5cad20..3edc888e9 100755
--- a/runtests.py
+++ b/runtests.py
@@ -1,6 +1,4 @@
#!/usr/bin/env python3
-from __future__ import print_function
-
import pytest
import sys
import os
diff --git a/setup.py b/setup.py
index d99ab8df7..d467d6e6d 100644
--- a/setup.py
+++ b/setup.py
@@ -96,8 +96,8 @@ def get_package_data(package):
'Operating System :: OS Independent',
'Natural Language :: English',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.3',
- 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP',
]
)
diff --git a/tests/clipping_area/to_polyfile_test.py b/tests/clipping_area/to_polyfile_test.py
index 459686c2a..52cb94c3a 100644
--- a/tests/clipping_area/to_polyfile_test.py
+++ b/tests/clipping_area/to_polyfile_test.py
@@ -90,10 +90,10 @@ def test_create_poly_file_string_equals_the_multipolygon_it_was_constructed_from
create_poly_file_string(self.multi_polygon_1)
self.assertEqual(
- self.multi_polygon_1,
- parse_poly_string(create_poly_file_string(self.multi_polygon_1))
+ create_poly_file_string(self.multi_polygon_1),
+ create_poly_file_string(parse_poly_string(create_poly_file_string(self.multi_polygon_1)))
)
self.assertEqual(
- complex_multi_polygon,
- parse_poly_string(create_poly_file_string(complex_multi_polygon))
+ create_poly_file_string(complex_multi_polygon),
+ create_poly_file_string(parse_poly_string(create_poly_file_string(complex_multi_polygon)))
)
diff --git a/tests/conftest.py b/tests/conftest.py
index 65b199bbf..d6d832181 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -10,7 +10,8 @@
test_data_dir = os.path.join(os.path.dirname(__file__), 'test_data')
-postgres_container_userland_port = 65432 # required for travis, so using it everywhere
+postgres_container_userland_port = int(os.environ.get('PG_TRANSLIT_PORT', 65432)) # required for travis, so using it everywhere
+postgres_container_translit_host = os.environ.get('PG_TRANSLIT_HOST', '127.0.0.1')
def pytest_configure():
@@ -20,14 +21,15 @@ def pytest_configure():
settings.configure(
ROOT_DIR=environ.Path(__file__) - 1,
DEBUG_PROPAGATE_EXCEPTIONS=True,
+ ALLOWED_HOSTS=['the-host.example.com', 'thehost.example.com'],
DATABASES={
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'postgres',
'USER': 'postgres',
'PASSWORD': 'postgres',
- 'PORT': '54321',
- 'HOST': '127.0.0.1',
+ 'PORT': '5432',
+ 'HOST': 'postgres',
}
},
SITE_ID=1,
@@ -63,15 +65,15 @@ def pytest_configure():
},
},
],
- MIDDLEWARE_CLASSES=(
+ MIDDLEWARE=[
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'osmaxx.job_progress.middleware.ExportUpdaterMiddleware',
- ),
- INSTALLED_APPS=(
+ ],
+ INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
@@ -100,7 +102,7 @@ def pytest_configure():
'osmaxx.excerptexport',
'osmaxx.job_progress',
'osmaxx.profile',
- ),
+ ],
PASSWORD_HASHERS=(
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
@@ -112,7 +114,7 @@ def pytest_configure():
RQ_QUEUE_NAMES=['default'],
RQ_QUEUES={
'default': {
- 'HOST': 'localhost',
+ 'HOST': 'redis',
'PORT': 6379,
'DB': 0,
'PASSWORD': '',
@@ -205,6 +207,24 @@ def user(db, django_user_model, django_username_field):
return user
+def create_authenticated_client(client, user):
+ """
+ Client using an authenticated user.
+
+ Since this creates a database object, you must
+ mark your test with @pytest.mark.django_db()
+
+ Args:
+ client: Default client fixture from pytest-django
+
+ Returns:
+ Authenticated Client
+ """
+ client.login(username='user', password='password')
+ client.user = user
+ return client
+
+
@pytest.fixture
def authenticated_client(client, user):
"""
@@ -220,9 +240,7 @@ def authenticated_client(client, user):
Returns:
Authenticated Client
"""
- client.login(username='user', password='password')
- client.user = user
- return client
+ return create_authenticated_client(client, user)
@pytest.fixture
@@ -246,7 +264,7 @@ def authenticated_api_client(api_client, user):
Returns:
Authenticated Client
"""
- return authenticated_client(api_client, user)
+ return create_authenticated_client(api_client, user)
@pytest.fixture
@@ -254,7 +272,7 @@ def frontend_accessible_authenticated_api_client(api_client, user):
from osmaxx.profile.models import Profile
Profile.objects.create(associated_user=user, unverified_email=user.email)
- return authenticated_client(api_client, user)
+ return create_authenticated_client(api_client, user)
@pytest.fixture
@@ -293,9 +311,7 @@ def geos_geometry_can_be_created_from_geojson_string():
GEOSGeometry(geojson_point_string)
-@pytest.fixture
-def area_polyfile_string():
- return '''
+AREA_POLYFILE_STRING = '''
none
polygon-1
7.495679855346679 43.75782881091782
@@ -307,6 +323,11 @@ def area_polyfile_string():
'''.lstrip()
+@pytest.fixture
+def area_polyfile_string():
+ return AREA_POLYFILE_STRING
+
+
class TagCombination(Mapping):
def __init__(self, *args, **kwargs):
tags = dict(osm_id=id(self))
diff --git a/tests/conversion/conftest.py b/tests/conversion/conftest.py
index adde4bc37..828e16183 100644
--- a/tests/conversion/conftest.py
+++ b/tests/conversion/conftest.py
@@ -36,7 +36,7 @@ def detail_level(request):
return request.param[0]
-@pytest.fixture(params=crs.GLOBAL_CHOICES[:2])
+@pytest.fixture(params=crs.CHOICES[:2])
def out_srs(request):
return int(request.param[0])
diff --git a/tests/conversion/converters/bootstrap_test.py b/tests/conversion/converters/bootstrap_test.py
index 094606a95..461253197 100644
--- a/tests/conversion/converters/bootstrap_test.py
+++ b/tests/conversion/converters/bootstrap_test.py
@@ -2,11 +2,10 @@
from osmaxx.conversion.converters.converter_gis.bootstrap import bootstrap
from osmaxx.conversion.converters.converter_gis.detail_levels import DETAIL_LEVEL_REDUCED
-from tests.conftest import area_polyfile_string
-def test_filter_scripts_are_executed_in_correct_order(sql_scripts_filter):
- bootstrapper = bootstrap.BootStrapper(area_polyfile_string=area_polyfile_string())
+def test_filter_scripts_are_executed_in_correct_order(sql_scripts_filter, area_polyfile_string):
+ bootstrapper = bootstrap.BootStrapper(area_polyfile_string=area_polyfile_string)
with mock.patch.object(bootstrapper, '_postgres') as postgres_mock:
bootstrapper._filter_data()
@@ -16,8 +15,10 @@ def test_filter_scripts_are_executed_in_correct_order(sql_scripts_filter):
assert expected_calls == postgres_mock.execute_sql_file.mock_calls
-def test_filter_scripts_with_lesser_detail_are_executed_in_correct_order(sql_scripts_filter_level_60):
- bootstrapper = bootstrap.BootStrapper(area_polyfile_string=area_polyfile_string(), detail_level=DETAIL_LEVEL_REDUCED)
+def test_filter_scripts_with_lesser_detail_are_executed_in_correct_order(
+ sql_scripts_filter_level_60, area_polyfile_string
+):
+ bootstrapper = bootstrap.BootStrapper(area_polyfile_string=area_polyfile_string, detail_level=DETAIL_LEVEL_REDUCED)
with mock.patch.object(bootstrapper, '_postgres') as postgres_mock:
bootstrapper._filter_data()
@@ -27,8 +28,8 @@ def test_filter_scripts_with_lesser_detail_are_executed_in_correct_order(sql_scr
assert expected_calls == postgres_mock.execute_sql_file.mock_calls
-def test_create_views_scripts_are_executed_in_correct_order(sql_scripts_create_view):
- bootstrapper = bootstrap.BootStrapper(area_polyfile_string=area_polyfile_string())
+def test_create_views_scripts_are_executed_in_correct_order(sql_scripts_create_view, area_polyfile_string):
+ bootstrapper = bootstrap.BootStrapper(area_polyfile_string=area_polyfile_string)
with mock.patch.object(bootstrapper, '_postgres') as postgres_mock:
bootstrapper._create_views()
@@ -38,8 +39,10 @@ def test_create_views_scripts_are_executed_in_correct_order(sql_scripts_create_v
assert expected_calls == postgres_mock.execute_sql_file.mock_calls
-def test_create_views_with_lesser_detail_are_limited_to_specified_tables(sql_scripts_create_view_level_60):
- bootstrapper = bootstrap.BootStrapper(area_polyfile_string=area_polyfile_string(), detail_level=DETAIL_LEVEL_REDUCED)
+def test_create_views_with_lesser_detail_are_limited_to_specified_tables(
+ sql_scripts_create_view_level_60, area_polyfile_string
+):
+ bootstrapper = bootstrap.BootStrapper(area_polyfile_string=area_polyfile_string, detail_level=DETAIL_LEVEL_REDUCED)
with mock.patch.object(bootstrapper, '_postgres') as postgres_mock:
bootstrapper._create_views()
@@ -49,8 +52,8 @@ def test_create_views_with_lesser_detail_are_limited_to_specified_tables(sql_scr
assert expected_calls == postgres_mock.execute_sql_file.mock_calls
-def test_function_scripts_are_executed_in_correct_order(sql_scripts_create_functions):
- bootstrapper = bootstrap.BootStrapper(area_polyfile_string=area_polyfile_string())
+def test_function_scripts_are_executed_in_correct_order(sql_scripts_create_functions, area_polyfile_string):
+ bootstrapper = bootstrap.BootStrapper(area_polyfile_string=area_polyfile_string)
with mock.patch.object(bootstrapper, '_postgres') as postgres_mock:
bootstrapper._setup_db_functions()
diff --git a/tests/conversion/converters/inside_worker_test/conftest.py b/tests/conversion/converters/inside_worker_test/conftest.py
index ae9f48d42..b7390880a 100644
--- a/tests/conversion/converters/inside_worker_test/conftest.py
+++ b/tests/conversion/converters/inside_worker_test/conftest.py
@@ -6,7 +6,7 @@
from sqlalchemy.engine.url import URL as DBURL
from sqlalchemy_utils import functions as sql_alchemy_utils
-from tests.conftest import postgres_container_userland_port, area_polyfile_string
+from tests.conftest import postgres_container_userland_port, postgres_container_translit_host
from tests.conversion.converters.inside_worker_test.declarative_schema import osm_models
slow = pytest.mark.skipif(
@@ -16,7 +16,7 @@
db_name = 'osmaxx_db'
-gis_db_connection_kwargs = dict(username='postgres', password='postgres', database=db_name, host='127.0.0.1', port=postgres_container_userland_port)
+gis_db_connection_kwargs = dict(username='postgres', password='postgres', database=db_name, host=postgres_container_translit_host, port=postgres_container_userland_port)
@pytest.fixture(scope='session')
@@ -115,6 +115,7 @@ def _cleanup():
request.addfinalizer(_cleanup)
return engine
+
_osmaxx_schemas = [
'view_osmaxx',
'osmaxx',
@@ -142,7 +143,7 @@ def sql_from_bootstrap_relative_location(file_name):
@pytest.fixture()
-def data_import(osmaxx_schemas, clean_osm_tables, monkeypatch, mocker):
+def data_import(osmaxx_schemas, clean_osm_tables, monkeypatch, mocker, area_polyfile_string):
from tests.conversion.converters.inside_worker_test.conftest import cleanup_osmaxx_schemas
from osmaxx.conversion.converters.converter_gis.bootstrap.bootstrap import BootStrapper
@@ -153,7 +154,7 @@ def data_import(osmaxx_schemas, clean_osm_tables, monkeypatch, mocker):
class _BootStrapperWithoutPbfFile(BootStrapper):
def __init__(self, data):
- super().__init__(area_polyfile_string=area_polyfile_string())
+ super().__init__(area_polyfile_string=area_polyfile_string)
self.data = data
def _reset_database(self):
diff --git a/tests/conversion/management/commands/result_harvester_test.py b/tests/conversion/management/commands/result_harvester_test.py
index 6f0d26667..e9bc7b953 100644
--- a/tests/conversion/management/commands/result_harvester_test.py
+++ b/tests/conversion/management/commands/result_harvester_test.py
@@ -1,6 +1,4 @@
-from collections import namedtuple
from unittest.mock import Mock, MagicMock, patch
-
import pytest
from osmaxx.conversion import status
@@ -8,34 +6,37 @@
@pytest.fixture
def queue(fake_rq_id):
- Queue = namedtuple('Queue', ['job_ids', 'fetch_job'])
- Job = namedtuple('Job', ['status'])
+ job = Mock(**{'get_status.return_value': status.STARTED, 'id': fake_rq_id})
+ queue = Mock(**{'fetch_job.return_value': job})
+ return queue
+
- def fetch_job(job_id):
- job = Job(status=status.STARTED)
- return job
- queue = Queue(job_ids=[str(fake_rq_id)], fetch_job=fetch_job)
+@pytest.fixture
+def failed_queue(fake_rq_id):
+ queue = Mock(**{'failed_job_registry.get_job_ids.return_value': [fake_rq_id]})
return queue
-def test_handle_failed_jobs_calls_set_failed_unless_final(mocker, fake_rq_id, queue):
+def test_handle_failed_jobs_calls_set_failed_unless_final(mocker, fake_rq_id, failed_queue):
+ mocker.patch('django_rq.get_queue', return_value=failed_queue)
+
from osmaxx.conversion.management.commands import result_harvester
from osmaxx.conversion.models import Job
- mocker.patch('django_rq.get_failed_queue', return_value=queue)
+
conversion_job_mock = Mock()
mocker.patch.object(Job.objects, 'get', return_value=conversion_job_mock)
cmd = result_harvester.Command()
_set_failed_unless_final = mocker.patch.object(cmd, '_set_failed_unless_final')
_update_job_mock = mocker.patch.object(cmd, '_notify')
cmd._handle_failed_jobs()
- _set_failed_unless_final.assert_called_once_with(conversion_job_mock, rq_job_id=str(fake_rq_id))
+ _set_failed_unless_final.assert_called_once_with(conversion_job_mock, rq_job_id=fake_rq_id)
_update_job_mock.assert_called_once_with(conversion_job_mock)
@pytest.mark.django_db()
def test_handle_successfull_jobs_calls_update_job(mocker, queue, started_conversion_job):
- from osmaxx.conversion.management.commands import result_harvester
mocker.patch('django_rq.get_queue', return_value=queue)
+ from osmaxx.conversion.management.commands import result_harvester
cmd = result_harvester.Command()
_update_job_mock = mocker.patch.object(cmd, '_update_job')
cmd._handle_running_jobs()
@@ -45,8 +46,8 @@ def test_handle_successfull_jobs_calls_update_job(mocker, queue, started_convers
@pytest.mark.django_db()
def test_handle_update_job_informs(mocker, queue, fake_rq_id, started_conversion_job):
- from osmaxx.conversion.management.commands import result_harvester
mocker.patch('django_rq.get_queue', return_value=queue)
+ from osmaxx.conversion.management.commands import result_harvester
cmd = result_harvester.Command()
_update_job_mock = mocker.patch.object(cmd, '_notify')
cmd._update_job(rq_job_id=fake_rq_id)
diff --git a/tests/excerptexport/conftest.py b/tests/excerptexport/conftest.py
index 188cca89e..d82e6d98f 100644
--- a/tests/excerptexport/conftest.py
+++ b/tests/excerptexport/conftest.py
@@ -1,6 +1,5 @@
import os
import tempfile
-from collections import namedtuple
import pytest
@@ -29,9 +28,16 @@ def extraction_order(excerpt, user, db):
return extraction_order
+def create_export(extraction_order, file_format):
+ return Export.objects.create(
+ extraction_order=extraction_order,
+ file_format=file_format,
+ )
+
+
@pytest.fixture(params=output_format.ALL)
def export(request, extraction_order):
- return Export.objects.create(
+ return create_export(
extraction_order=extraction_order,
file_format=request.param,
)
@@ -39,8 +45,7 @@ def export(request, extraction_order):
@pytest.fixture
def exports(extraction_order):
- ParamFake = namedtuple('ParamFake', 'param')
- return [export(ParamFake(format), extraction_order) for format in output_format.ALL]
+ return [create_export(extraction_order, format) for format in output_format.ALL]
@pytest.fixture
diff --git a/tests/profile/test_profile_view.py b/tests/profile/test_profile_view.py
index 6d5b4c236..380925a07 100644
--- a/tests/profile/test_profile_view.py
+++ b/tests/profile/test_profile_view.py
@@ -87,5 +87,6 @@ def test_mail_sent_only_once_within_rate_limit(authenticated_client):
authenticated_client.get(reverse('profile:edit_view'))
assert send_mail.call_count == 1
+
INVALID_EMAIL_MESSAGE = 'You have not set an email address. You must set a valid email address to use OSMaxx.'
ACTIVATE_EMAIL_MESSAGE = 'To activate your email, click the link in the confirmation email.'
diff --git a/tests/selenium_tests/conftest.py b/tests/selenium_tests/conftest.py
index 4affc94c3..b6a38ce7c 100644
--- a/tests/selenium_tests/conftest.py
+++ b/tests/selenium_tests/conftest.py
@@ -31,7 +31,7 @@ def _reload_until_condition(condition, *args, timeout=360, refresh_interval=5, *
try:
result = condition(*args, **kwargs)
return result
- except:
+ except: # noqa: E722 do not use bare 'except'
if timeout < 0:
raise
time.sleep(refresh_interval)
diff --git a/tox.ini b/tox.ini
index 1d673b8c8..5536e0840 100644
--- a/tox.ini
+++ b/tox.ini
@@ -14,8 +14,8 @@ deps =
[testenv:flake8]
commands = ./runtests.py --lintonly
deps =
- pytest==2.7.0
- flake8==2.4.0
+ pytest
+ flake8
[testenv:docs]
commands = mkdocs build
diff --git a/web_frontend/config/settings/common.py b/web_frontend/config/settings/common.py
index aec1ff70c..c039c0643 100644
--- a/web_frontend/config/settings/common.py
+++ b/web_frontend/config/settings/common.py
@@ -67,7 +67,7 @@
# MIDDLEWARE CONFIGURATION
# ------------------------------------------------------------------------------
-MIDDLEWARE_CLASSES = (
+MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
@@ -76,7 +76,7 @@
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'osmaxx.job_progress.middleware.ExportUpdaterMiddleware',
-)
+]
# MIGRATIONS CONFIGURATION
# ------------------------------------------------------------------------------
diff --git a/web_frontend/config/settings/local.py b/web_frontend/config/settings/local.py
index 7b7f3ff80..5d050536c 100644
--- a/web_frontend/config/settings/local.py
+++ b/web_frontend/config/settings/local.py
@@ -16,8 +16,8 @@ def __contains__(self, key):
# django-debug-toolbar
# ------------------------------------------------------------------------------
-MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
-INSTALLED_APPS += ('debug_toolbar', )
+MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware',]
+INSTALLED_APPS += ['debug_toolbar', ]
DEBUG_TOOLBAR_PATCH_SETTINGS = False
INTERNAL_IPS = glob_list(env.tuple('DJANGO_INTERNAL_IPS', default=('127.0.0.1',)))
@@ -32,7 +32,7 @@ def __contains__(self, key):
# django-extensions
# ------------------------------------------------------------------------------
-INSTALLED_APPS += ('django_extensions', )
+INSTALLED_APPS += ['django_extensions', ]
# TESTING
# ------------------------------------------------------------------------------
diff --git a/web_frontend/config/settings/production.py b/web_frontend/config/settings/production.py
index beb67c6ba..cf30c4344 100755
--- a/web_frontend/config/settings/production.py
+++ b/web_frontend/config/settings/production.py
@@ -4,78 +4,37 @@
'''
from .common import * # noqa
-MIDDLEWARE_CLASSES = (
+MIDDLEWARE = [
# Make sure djangosecure.middleware.SecurityMiddleware is listed first
'django.middleware.security.SecurityMiddleware',
+ 'whitenoise.middleware.WhiteNoiseMiddleware',
# before all other middlewares, with the exception of SecurityMiddleware
'whitenoise.middleware.WhiteNoiseMiddleware',
-) + MIDDLEWARE_CLASSES
+] + MIDDLEWARE
# get an exception when starting, if they are not defined
SECRET_KEY = env.str("DJANGO_SECRET_KEY")
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS")
-INSTALLED_APPS += (
+INSTALLED_APPS += [
'gunicorn',
- # sentry
- 'raven.contrib.django.raven_compat',
-)
+]
# Static Assets
# ------------------------
-STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
+STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# SENTRY
SENTRY_DSN = env.str('SENTRY_DSN', default=None)
-if SENTRY_DSN:
- LOGGING = {
- 'version': 1,
- 'disable_existing_loggers': True,
- 'root': {
- 'level': 'WARNING',
- 'handlers': ['sentry'],
- },
- 'formatters': {
- 'verbose': {
- 'format': '%(levelname)s %(asctime)s %(module)s '
- '%(process)d %(thread)d %(message)s'
- },
- },
- 'handlers': {
- 'sentry': {
- 'level': 'WARNING',
- 'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler',
- },
- 'console': {
- 'level': 'DEBUG',
- 'class': 'logging.StreamHandler',
- 'formatter': 'verbose'
- }
- },
- 'loggers': {
- 'django.db.backends': {
- 'level': 'ERROR',
- 'handlers': ['console', 'sentry'],
- 'propagate': False,
- },
- 'raven': {
- 'level': 'DEBUG',
- 'handlers': ['console'],
- 'propagate': False,
- },
- 'sentry.errors': {
- 'level': 'DEBUG',
- 'handlers': ['console'],
- 'propagate': False,
- },
- },
- }
+if SENTRY_DSN is not None:
+ import sentry_sdk
+ from sentry_sdk.integrations.django import DjangoIntegration
- RAVEN_CONFIG = {
- 'dsn': SENTRY_DSN,
- 'release': env.str('SENTRY_RELEASE', default=''),
- }
+ sentry_sdk.init(
+ dsn=SENTRY_DSN,
+ integrations=[DjangoIntegration()]
+ )
OSMAXX['CONVERSION_SERVICE_USERNAME'] = env.str('DJANGO_OSMAXX_CONVERSION_SERVICE_USERNAME')
OSMAXX['CONVERSION_SERVICE_PASSWORD'] = env.str('DJANGO_OSMAXX_CONVERSION_SERVICE_PASSWORD')
diff --git a/web_frontend/config/wsgi.py b/web_frontend/config/wsgi.py
index 77b3d8c4b..7b8753e0b 100644
--- a/web_frontend/config/wsgi.py
+++ b/web_frontend/config/wsgi.py
@@ -9,11 +9,7 @@
import os
-from raven.contrib.django.middleware.wsgi import Sentry
-from whitenoise.django import DjangoWhiteNoise
-
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "web_frontend.config.settings.production")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
-application = Sentry(DjangoWhiteNoise(application))
From 4d98898022ad1c5d35d8e5f351d982c12c86a6c3 Mon Sep 17 00:00:00 2001
From: Nicola Jordan
Date: Mon, 27 May 2019 16:34:49 +0200
Subject: [PATCH 05/14] use 3.6 on travis only
---
.travis.yml | 1 -
tox.ini | 2 +-
2 files changed, 1 insertion(+), 2 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 00bd8ec60..510120787 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,7 +1,6 @@
language: python
python:
- "3.6"
- - "3.7"
cache: pip
diff --git a/tox.ini b/tox.ini
index 5536e0840..6c2b215de 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,7 +1,7 @@
[tox]
envlist =
; disable creating docs until https://github.com/mkdocs/mkdocs/issues/807 is resolved
- ; py34-{flake8,docs}
+ ; py36-{flake8,docs}
quick-tests,flake8,slow-tests,docs
[testenv:quick-tests]
From fd3ba93e0d4baac303ba4a9d472bb6e4bbc98451 Mon Sep 17 00:00:00 2001
From: Nicola Jordan
Date: Mon, 27 May 2019 17:17:19 +0200
Subject: [PATCH 06/14] accommodate test-settings for travis
---
Makefile | 4 ++--
conversion_service/config/settings/common.py | 12 ++++++------
docker-compose-dev.yml | 16 ++++------------
osm_pbf_updater/Dockerfile | 1 +
osm_pbf_updater/delvelopment_download_only.sh | 19 +++++++++++++++++++
tests/conftest.py | 4 ++--
web_frontend/config/settings/common.py | 12 ++++++------
7 files changed, 40 insertions(+), 28 deletions(-)
create mode 100755 osm_pbf_updater/delvelopment_download_only.sh
diff --git a/Makefile b/Makefile
index 58800d384..f03b47f9c 100644
--- a/Makefile
+++ b/Makefile
@@ -55,12 +55,12 @@ pip-sync-all: requirements-all.txt
.PHONY: tests-quick
tests-quick: up-redis up-pg
docker build -t worker:test -f Dockerfile.worker .
- docker run --link pg_tests:postgres --link redis-local:redis -e DJANGO_SETTINGS_MODULE= -v "$$(pwd):/code" --rm worker:test bash -c 'cp -r /code /tmp/code && cd /tmp/code/ && ./runtests.py $(PYTEST_ARGS)'
+ docker run --link pg_tests:postgres --link redis-local:redis -e DJANGO_DB_PORT=5432 -e DJANGO_DB_HOST=postgres -e DJANGO_SETTINGS_MODULE= -v "$$(pwd):/code" --rm worker:test bash -c 'cp -r /code /tmp/code && cd /tmp/code/ && ./runtests.py $(PYTEST_ARGS)'
.PHONY: tests-all
tests-all: up-redis up-pg up-pg_translit
docker build -t worker:test -f Dockerfile.worker .
- docker run --link pg_translit:translit --link pg_tests:postgres --link redis-local:redis -e PG_TRANSLIT_PORT=5432 -e PG_TRANSLIT_HOST=translit -e DJANGO_SETTINGS_MODULE= -v "$$(pwd):/code" --rm worker:test bash -c 'cp -r /code /tmp/code && cd /tmp/code/ && ./runtests.py $(PYTEST_ARGS) --runslow'
+ docker run --link pg_translit:translit --link pg_tests:postgres --link redis-local:redis -e DJANGO_DB_PORT=5432 -e DJANGO_DB_HOST=postgres -e PG_TRANSLIT_PORT=5432 -e PG_TRANSLIT_HOST=translit -e DJANGO_SETTINGS_MODULE= -v "$$(pwd):/code" --rm worker:test bash -c 'cp -r /code /tmp/code && cd /tmp/code/ && ./runtests.py $(PYTEST_ARGS) --runslow'
.PHONY: tox
tox: up-redis up-pg up-pg_translit
diff --git a/conversion_service/config/settings/common.py b/conversion_service/config/settings/common.py
index 0fa66b3f9..f57dfd8dd 100644
--- a/conversion_service/config/settings/common.py
+++ b/conversion_service/config/settings/common.py
@@ -22,7 +22,7 @@
# APP CONFIGURATION
# ------------------------------------------------------------------------------
-DJANGO_APPS = (
+DJANGO_APPS = [
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
@@ -30,21 +30,21 @@
'django.contrib.sites',
'django.contrib.staticfiles',
'django.contrib.gis',
-)
-THIRD_PARTY_APPS = (
+]
+THIRD_PARTY_APPS = [
# async execution worker
'django_rq',
# rest API Framework
'rest_framework',
'rest_framework_gis',
'pbf_file_size_estimation',
-)
+]
# Apps specific for this project go here.
-LOCAL_APPS = (
+LOCAL_APPS = [
'osmaxx.version',
'osmaxx.clipping_area',
'osmaxx.conversion',
-)
+]
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml
index 8c8548f94..f2a73fd27 100644
--- a/docker-compose-dev.yml
+++ b/docker-compose-dev.yml
@@ -24,8 +24,8 @@ services:
- DJANGO_SETTINGS_MODULE=web_frontend.config.settings.local
- DJANGO_DEBUG=true
# set these if you want to be able to log in using OSM
- - SOCIAL_AUTH_OPENSTREETMAP_KEY=avBljc8UoB9WuibATQSTaBv5VxxRAw0uFvNumuj7
- - SOCIAL_AUTH_OPENSTREETMAP_SECRET=GBtBboD1ZvngDVgkqVm6n7iNmR6HqMmnxAPQB0kS
+ - SOCIAL_AUTH_OPENSTREETMAP_KEY=
+ - SOCIAL_AUTH_OPENSTREETMAP_SECRET=
# Allow access from any private-use IP, since docker assigns "random" IPs.
# 172.*.*.* is actually allowing too much, but this docker-compose file should
# only ever be used on local development machine, anyway!
@@ -71,16 +71,8 @@ services:
dockerfile: Dockerfile
volumes:
- osm_data:/var/data/osm-planet
- entrypoint: /bin/sh -c
- command: >
- "
- mkdir -p /var/data/osm-planet/pbf &&
- wget -O /tmp/monaco-latest.osm.pbf http://download.geofabrik.de/europe/monaco-latest.osm.pbf &&
- wget -O /tmp/switzerland-latest.osm.pbf http://download.geofabrik.de/europe/switzerland-latest.osm.pbf &&
- osmconvert /tmp/monaco-latest.osm.pbf -o=/tmp/monaco-latest.osm &&
- osmconvert /tmp/switzerland-latest.osm.pbf -o=/tmp/switzerland-latest.osm &&
- osmconvert /tmp/monaco-latest.osm /tmp/switzerland-latest.osm -o=/var/data/osm-planet/pbf/planet-latest.osm.pbf
- "
+ entrypoint: /bin/bash
+ command: /opt/delvelopment_download_only.sh
# these are just examples
# environment:
# - osmupdate_extra_params=--base-url=download.geofabrik.de/europe/switzerland-updates/
diff --git a/osm_pbf_updater/Dockerfile b/osm_pbf_updater/Dockerfile
index 6767c006f..795fa0a02 100644
--- a/osm_pbf_updater/Dockerfile
+++ b/osm_pbf_updater/Dockerfile
@@ -12,6 +12,7 @@ RUN apt-get update && apt-get install -y \
RUN pip3 install sentry-sdk
COPY ./pbf_updater.py /opt/pbf_updater.py
+COPY ./delvelopment_download_only.sh /opt/delvelopment_download_only.sh
ENTRYPOINT /opt/pbf_updater.py
diff --git a/osm_pbf_updater/delvelopment_download_only.sh b/osm_pbf_updater/delvelopment_download_only.sh
new file mode 100755
index 000000000..1c90b58a8
--- /dev/null
+++ b/osm_pbf_updater/delvelopment_download_only.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+set -ex
+
+if [[ -f "/var/data/osm-planet/pbf/planet-latest.osm.pbf" ]]; then
+ echo "PBF has already been assembled"
+ exit 0
+fi
+
+mkdir -p /var/data/osm-planet/pbf
+
+wget -O /tmp/monaco-latest.osm.pbf http://download.geofabrik.de/europe/monaco-latest.osm.pbf
+
+wget -O /tmp/switzerland-latest.osm.pbf http://download.geofabrik.de/europe/switzerland-latest.osm.pbf
+
+osmconvert /tmp/monaco-latest.osm.pbf -o=/tmp/monaco-latest.osm
+osmconvert /tmp/switzerland-latest.osm.pbf -o=/tmp/switzerland-latest.osm
+osmconvert /tmp/monaco-latest.osm /tmp/switzerland-latest.osm -o=/var/data/osm-planet/pbf/planet-latest.osm.pbf
+
+rm -f /tmp/monaco-latest.osm /tmp/switzerland-latest.osm /tmp/monaco-latest.osm.pbf /tmp/switzerland-latest.osm.pbf
diff --git a/tests/conftest.py b/tests/conftest.py
index d6d832181..5fff3e078 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -28,8 +28,8 @@ def pytest_configure():
'NAME': 'postgres',
'USER': 'postgres',
'PASSWORD': 'postgres',
- 'PORT': '5432',
- 'HOST': 'postgres',
+ 'PORT': os.environ.get('DJANGO_DB_PORT', '54321'),
+ 'HOST': os.environ.get('DJANGO_DB_HOST', '127.0.0.1'),
}
},
SITE_ID=1,
diff --git a/web_frontend/config/settings/common.py b/web_frontend/config/settings/common.py
index c039c0643..531c2970a 100644
--- a/web_frontend/config/settings/common.py
+++ b/web_frontend/config/settings/common.py
@@ -24,7 +24,7 @@
# APP CONFIGURATION
# ------------------------------------------------------------------------------
-DJANGO_APPS = (
+DJANGO_APPS = [
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
@@ -39,8 +39,8 @@
# Admin
'django.contrib.admin',
'django.contrib.gis',
-)
-THIRD_PARTY_APPS = (
+]
+THIRD_PARTY_APPS = [
'social_django',
# messages for users
'stored_messages',
@@ -51,16 +51,16 @@
'rest_framework_gis',
# async execution worker
'django_rq',
-)
+]
# Apps specific for this project go here.
-LOCAL_APPS = (
+LOCAL_APPS = [
'osmaxx.version',
'osmaxx.excerptexport',
'osmaxx.job_progress',
'osmaxx.profile',
'osmaxx.core',
-)
+]
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
From 0409f08296c65116c48c8ed5b580119999ff93a8 Mon Sep 17 00:00:00 2001
From: Nicola Jordan
Date: Tue, 28 May 2019 07:48:31 +0200
Subject: [PATCH 07/14] awk now reading the ip address also documented
---
docs/development/project-development-environment.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/development/project-development-environment.md b/docs/development/project-development-environment.md
index ec7e8feb4..2775b3147 100644
--- a/docs/development/project-development-environment.md
+++ b/docs/development/project-development-environment.md
@@ -133,7 +133,7 @@ ip route get 1 | awk '{print $NF;exit}'
You can generate the complete URL in `sh` with:
```bash
-echo "http://$(ip route get 1 | awk '{print $NF;exit}'):8889"
+echo "http://$(ip route get 1 | awk '{print $(NF-2);exit}'):8889"
```
## Enable development with debug toolbar enabled
From 33a12c27fa8338b195c0caf7457698150bb2aa7c Mon Sep 17 00:00:00 2001
From: Nicola Jordan
Date: Tue, 28 May 2019 07:49:38 +0200
Subject: [PATCH 08/14] corrected comment; better practice used (do not
overwrite DEBIAN_FRONTEND globally)
---
Dockerfile.frontend | 9 ++++++---
Dockerfile.mediator | 10 +++++-----
Dockerfile.worker | 32 ++++++++++++++++++--------------
3 files changed, 29 insertions(+), 22 deletions(-)
diff --git a/Dockerfile.frontend b/Dockerfile.frontend
index 9ecbdd7f2..d8359fc35 100755
--- a/Dockerfile.frontend
+++ b/Dockerfile.frontend
@@ -1,5 +1,6 @@
-# this gdal image comes with support for FileGDB and has python3.6 already installed
-# is dervied from ubuntu
+# This gdal image comes with support for FileGDB and has python3.6 already installed.
+# Based on Ubuntu.
+
FROM geometalab/gdal-docker:v3.0.0
USER root
@@ -8,7 +9,9 @@ ENV PYTHONUNBUFFERED=non-empty-string PYTHONIOENCODING=utf-8 LC_ALL=C.UTF-8 LANG
ENV DJANGO_OSMAXX_CONVERSION_SERVICE_USERNAME=default_user DJANGO_OSMAXX_CONVERSION_SERVICE_PASSWORD=default_password
ENV NUM_WORKERS=5 DATABASE_HOST=frontenddatabase DATABASE_PORT=5432 APP_PORT=8000 APP_HOST=0.0.0.0
-RUN apt-get clean && DEBIAN_FRONTEND=noninteractive apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \
+RUN DEBIAN_FRONTEND=noninteractive apt-get clean \
+ && DEBIAN_FRONTEND=noninteractive apt-get update \
+ && DEBIAN_FRONTEND=noninteractive apt-get install -y \
git \
libpq-dev
diff --git a/Dockerfile.mediator b/Dockerfile.mediator
index 95f68aaf2..c0ff39f44 100644
--- a/Dockerfile.mediator
+++ b/Dockerfile.mediator
@@ -1,5 +1,6 @@
-# this gdal image comes with support for FileGDB and has python3.6 already installed
-# is dervied from ubuntu
+# This gdal image comes with support for FileGDB and has python3.6 already installed.
+# Based on Ubuntu.
+
FROM geometalab/gdal-docker:v3.0.0
USER root
@@ -9,10 +10,9 @@ ENV DJANGO_OSMAXX_CONVERSION_SERVICE_USERNAME=default_user DJANGO_OSMAXX_CONVERS
ENV NUM_WORKERS=5 DATABASE_HOST=mediatordatabase DATABASE_PORT=5432 APP_PORT=8901 APP_HOST=0.0.0.0
MAINTAINER HSR Geometalab
-ENV DEBIAN_FRONTEND=noninteractive
-RUN apt-get update && \
- apt-get install -y \
+RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
+ DEBIAN_FRONTEND=noninteractive apt-get install -y \
\
libgeos-dev \
libgeos++-dev \
diff --git a/Dockerfile.worker b/Dockerfile.worker
index c8304d1bb..17e04013c 100644
--- a/Dockerfile.worker
+++ b/Dockerfile.worker
@@ -1,13 +1,16 @@
-# this gdal image comes with support for FileGDB and has python3.6 already installed
-# is derived from ubuntu
+# This gdal image comes with support for FileGDB and has python3.6 already installed.
+# Based on Ubuntu.
+
FROM geometalab/gdal-docker:v3.0.0
USER root
-ENV PYTHONUNBUFFERED=non-empty-string PYTHONIOENCODING=utf-8 LC_ALL=C.UTF-8 LANG=C.UTF-8 DEBIAN_FRONTEND=noninteractive
+ENV PYTHONUNBUFFERED=non-empty-string PYTHONIOENCODING=utf-8 LC_ALL=C.UTF-8 LANG=C.UTF-8
# make the "en_US.UTF-8" locale so postgres will be utf-8 enabled by default
-RUN apt-get update && apt-get install -y apt-utils locales gpg && rm -rf /var/lib/apt/lists/* \
+RUN DEBIAN_FRONTEND=noninteractive apt-get update \
+ && DEBIAN_FRONTEND=noninteractive apt-get install -y apt-utils locales gpg \
+ && rm -rf /var/lib/apt/lists/* \
&& localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 \
&& rm -rf /var/lib/apt/lists/*
@@ -24,17 +27,17 @@ ENV PG_MAJOR 11
ENV POSTGIS_MAJOR 2.5
RUN echo 'deb http://apt.postgresql.org/pub/repos/apt/ bionic-pgdg main' $PG_MAJOR > /etc/apt/sources.list.d/pgdg.list \
- && apt-get update \
- && apt-get install -y postgresql-common \
+ && DEBIAN_FRONTEND=noninteractive apt-get update \
+ && DEBIAN_FRONTEND=noninteractive apt-get install -y postgresql-common \
&& sed -ri 's/#(create_main_cluster) .*$/\1 = false/' /etc/postgresql-common/createcluster.conf \
- && apt-get install -y \
+ && DEBIAN_FRONTEND=noninteractive apt-get install -y \
postgresql-${PG_MAJOR} \
postgresql-contrib-${PG_MAJOR} \
postgresql-${PG_MAJOR}-postgis-${POSTGIS_MAJOR} \
postgresql-${PG_MAJOR}-postgis-scripts \
postgresql-server-dev-${PG_MAJOR} \
postgresql-contrib-${PG_MAJOR} \
- && apt-get clean \
+ && DEBIAN_FRONTEND=noninteractive apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& mkdir -p /var/run/postgresql && chown -R 999:999 /var/run/postgresql
@@ -46,8 +49,8 @@ RUN mkdir -p $PGDATA && chown -R 999:999 /var/lib/postgresql \
################## END SETUP POSTGIS DATABASE with UTF8 support #############
-RUN apt-get update && \
- apt-get install -y\
+RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
+ DEBIAN_FRONTEND=noninteractive apt-get install -y\
\
make cmake g++ libboost-dev libboost-system-dev \
libboost-filesystem-dev libexpat1-dev zlib1g-dev \
@@ -55,7 +58,7 @@ RUN apt-get update && \
libproj-dev \
curl git wget \
libstdc++6 osmctools \
- && apt-get clean \
+ && DEBIAN_FRONTEND=noninteractive apt-get clean \
&& rm -rf /var/lib/apt/lists/*
ENV LD_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu:/usr/lib:${LD_LIBRARY_PATH}
@@ -98,12 +101,13 @@ RUN wget -O libutf8proc-dev.deb http://ftp.ch.debian.org/debian/pool/main/u/utf8
&& dpkg --install libutf8proc1.deb libutf8proc-dev.deb \
&& rm libutf8proc1.deb libutf8proc-dev.deb
-RUN apt-get update && apt-get install -y pandoc libkakasi2-dev libicu-dev \
+RUN DEBIAN_FRONTEND=noninteractive apt-get update \
+ && DEBIAN_FRONTEND=noninteractive apt-get install -y pandoc libkakasi2-dev libicu-dev \
&& git clone https://github.com/giggls/mapnik-german-l10n.git mapnik-german-l10n \
&& cd mapnik-german-l10n && git checkout v2.5.1 \
&& make && make install && make clean \
- && apt-get purge -y pandoc \
- && apt-get clean \
+ && DEBIAN_FRONTEND=noninteractive apt-get purge -y pandoc \
+ && DEBIAN_FRONTEND=noninteractive apt-get clean \
&& rm -rf /var/lib/apt/lists/*
ENV HOME /home/py
From 52ac3c58d4e269b9f85b6f65cfb83e6eba640cba Mon Sep 17 00:00:00 2001
From: Nicola Jordan
Date: Thu, 30 May 2019 07:54:28 +0200
Subject: [PATCH 09/14] Update Dockerfile.frontend
Co-Authored-By: Raphael Das Gupta
---
Dockerfile.frontend | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Dockerfile.frontend b/Dockerfile.frontend
index d8359fc35..efc722d1e 100755
--- a/Dockerfile.frontend
+++ b/Dockerfile.frontend
@@ -1,4 +1,4 @@
-# This gdal image comes with support for FileGDB and has python3.6 already installed.
+# This GDAL image comes with support for FileGDB and has Python 3.6 already installed.
# Based ubuntu.
FROM geometalab/gdal-docker:v3.0.0
From 1ab226da9e91dc5187908831eaf5105e9788557f Mon Sep 17 00:00:00 2001
From: Nicola Jordan
Date: Thu, 30 May 2019 07:54:38 +0200
Subject: [PATCH 10/14] Update Dockerfile.frontend
Co-Authored-By: Raphael Das Gupta
---
Dockerfile.frontend | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Dockerfile.frontend b/Dockerfile.frontend
index efc722d1e..52e64cd03 100755
--- a/Dockerfile.frontend
+++ b/Dockerfile.frontend
@@ -1,5 +1,5 @@
# This GDAL image comes with support for FileGDB and has Python 3.6 already installed.
-# Based ubuntu.
+# Based on image _/ubuntu.
FROM geometalab/gdal-docker:v3.0.0
From e5e3a22dae3fd61a4f53a6fc2dcff824bb3cf359 Mon Sep 17 00:00:00 2001
From: Nicola Jordan
Date: Thu, 30 May 2019 07:54:46 +0200
Subject: [PATCH 11/14] Update Dockerfile.mediator
Co-Authored-By: Raphael Das Gupta
---
Dockerfile.mediator | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Dockerfile.mediator b/Dockerfile.mediator
index c0ff39f44..78274b078 100644
--- a/Dockerfile.mediator
+++ b/Dockerfile.mediator
@@ -1,4 +1,4 @@
-# This gdal image comes with support for FileGDB and has python3.6 already installed.
+# This GDAL image comes with support for FileGDB and has Python 3.6 already installed.
# Based ubuntu.
FROM geometalab/gdal-docker:v3.0.0
From ff70539c72928e9287900514618017dada0835d6 Mon Sep 17 00:00:00 2001
From: Nicola Jordan
Date: Thu, 30 May 2019 07:55:09 +0200
Subject: [PATCH 12/14] Update Dockerfile.mediator
Co-Authored-By: Raphael Das Gupta
---
Dockerfile.mediator | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Dockerfile.mediator b/Dockerfile.mediator
index 78274b078..226cc2166 100644
--- a/Dockerfile.mediator
+++ b/Dockerfile.mediator
@@ -1,5 +1,5 @@
# This GDAL image comes with support for FileGDB and has Python 3.6 already installed.
-# Based ubuntu.
+# Based on official Ubuntu docker image.
FROM geometalab/gdal-docker:v3.0.0
From 5ac964704661e6871c2daf0d87da102d0010640d Mon Sep 17 00:00:00 2001
From: Nicola Jordan
Date: Thu, 30 May 2019 07:55:20 +0200
Subject: [PATCH 13/14] Update Dockerfile.worker
Co-Authored-By: Raphael Das Gupta
---
Dockerfile.worker | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Dockerfile.worker b/Dockerfile.worker
index 17e04013c..6b3770aef 100644
--- a/Dockerfile.worker
+++ b/Dockerfile.worker
@@ -1,4 +1,4 @@
-# This gdal image comes with support for FileGDB and has python3.6 already installed.
+# This GDAL image comes with support for FileGDB and has Python 3.6 already installed.
# Based ubuntu.
FROM geometalab/gdal-docker:v3.0.0
From ef1bdec3967721e9613fc2ba8e2f69bfc40233a9 Mon Sep 17 00:00:00 2001
From: Nicola Jordan
Date: Thu, 30 May 2019 07:55:27 +0200
Subject: [PATCH 14/14] Update Dockerfile.worker
Co-Authored-By: Raphael Das Gupta
---
Dockerfile.worker | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Dockerfile.worker b/Dockerfile.worker
index 6b3770aef..9fc91004d 100644
--- a/Dockerfile.worker
+++ b/Dockerfile.worker
@@ -1,5 +1,5 @@
# This GDAL image comes with support for FileGDB and has Python 3.6 already installed.
-# Based ubuntu.
+# Based on official Ubuntu docker image.
FROM geometalab/gdal-docker:v3.0.0