diff --git a/.github/workflows/branch-deploy.yml b/.github/workflows/branch-deploy.yml
index d344018..dcf0149 100644
--- a/.github/workflows/branch-deploy.yml
+++ b/.github/workflows/branch-deploy.yml
@@ -28,7 +28,11 @@ jobs:
restore-keys: |
${{ runner.os }}-pip-
- run: pip install dokkusd
- - run: python -m dokkusd.cli deploy --appname ${{ secrets.DOKKUSD_BRANCH_APP_NAME_PREFIX }}-${GITHUB_REF##*/}
+ - uses: oNaiPs/secrets-to-env-action@v1
+ with:
+ secrets: ${{ toJSON(secrets) }}
+ prefix: ALLSECRETS_
+ - run: python -m dokkusd.cli deploy --appname ${{ secrets.DOKKUSD_BRANCH_APP_NAME_PREFIX }}-${GITHUB_REF##*/} --environmentvariablesprefixedby ALLSECRETS_DOKKUSD_BRANCH_ENVIRONMENT_VARIABLE_
env:
DOKKUSD_REMOTE_HOST: ${{ secrets.DOKKUSD_BRANCH_REMOTE_HOST }}
DOKKUSD_REMOTE_PORT: ${{ secrets.DOKKUSD_BRANCH_REMOTE_PORT }}
diff --git a/.github/workflows/live-deploy.yml b/.github/workflows/live-deploy.yml
index a7e647d..96e76ec 100644
--- a/.github/workflows/live-deploy.yml
+++ b/.github/workflows/live-deploy.yml
@@ -28,7 +28,11 @@ jobs:
restore-keys: |
${{ runner.os }}-pip-
- run: pip install dokkusd
- - run: python -m dokkusd.cli deploy --appname ${{ secrets.DOKKUSD_LIVE_APP_NAME }}
+ - uses: oNaiPs/secrets-to-env-action@v1
+ with:
+ secrets: ${{ toJSON(secrets) }}
+ prefix: ALLSECRETS_
+ - run: python -m dokkusd.cli deploy --appname ${{ secrets.DOKKUSD_LIVE_APP_NAME }} --environmentvariablesprefixedby ALLSECRETS_DOKKUSD_LIVE_ENVIRONMENT_VARIABLE_
env:
DOKKUSD_REMOTE_HOST: ${{ secrets.DOKKUSD_LIVE_REMOTE_HOST }}
DOKKUSD_REMOTE_PORT: ${{ secrets.DOKKUSD_LIVE_REMOTE_PORT }}
diff --git a/cove_ofds/forms.py b/cove_ofds/forms.py
index 7eafc18..049f1e4 100644
--- a/cove_ofds/forms.py
+++ b/cove_ofds/forms.py
@@ -4,6 +4,7 @@
class NewGeoJSONUploadForm(forms.Form):
nodes_file_upload = forms.FileField(
+ label="Select GeoJSON Nodes file",
widget=forms.FileInput(
attrs={
"accept": ",".join(
@@ -11,9 +12,10 @@ class NewGeoJSONUploadForm(forms.Form):
+ settings.ALLOWED_GEOJSON_EXTENSIONS
)
}
- )
+ ),
)
spans_file_upload = forms.FileField(
+ label="Select GeoJSON Spans file",
widget=forms.FileInput(
attrs={
"accept": ",".join(
@@ -21,5 +23,5 @@ class NewGeoJSONUploadForm(forms.Form):
+ settings.ALLOWED_GEOJSON_EXTENSIONS
)
}
- )
+ ),
)
diff --git a/cove_ofds/process.py b/cove_ofds/process.py
index 509fff2..353f076 100644
--- a/cove_ofds/process.py
+++ b/cove_ofds/process.py
@@ -1,5 +1,6 @@
import json
import os.path
+import zipfile
import flattentool
from libcoveofds.additionalfields import AdditionalFields
@@ -97,6 +98,55 @@ def get_context(self):
return context
+class ConvertCSVsIntoJSON(ProcessDataTask):
+ """If User uploaded CSVs, convert to our primary format, JSON."""
+
+ def process(self, process_data: dict) -> dict:
+ if self.supplied_data.format != "csvs":
+ return process_data
+
+ # check already done
+ # TODO
+
+ output_dir = os.path.join(self.supplied_data.data_dir(), "unflatten")
+
+ os.makedirs(output_dir, exist_ok=True)
+
+ unflatten_kwargs = {
+ "output_name": os.path.join(output_dir, "unflattened.json"),
+ "root_list_path": "networks",
+ "input_format": "csv",
+ }
+
+ flattentool.unflatten(self.supplied_data.upload_dir(), **unflatten_kwargs)
+
+ process_data["json_data_filename"] = os.path.join(
+ self.supplied_data.data_dir(), "unflatten", "unflattened.json"
+ )
+
+ return process_data
+
+ def get_context(self):
+ context = {}
+ # original format
+ if self.supplied_data.format == "csvs":
+ context["original_format"] = "csvs"
+ # Download data
+ filename = os.path.join(
+ self.supplied_data.data_dir(), "unflatten", "unflattened.json"
+ )
+ if os.path.exists(filename):
+ context["can_download_json"] = True
+ context["download_json_url"] = os.path.join(
+ self.supplied_data.data_url(), "unflatten", "unflattened.json"
+ )
+ context["download_json_size"] = os.stat(filename).st_size
+ else:
+ context["can_download_json"] = False
+ # Return
+ return context
+
+
class ConvertGeoJSONIntoJSON(ProcessDataTask):
"""If User uploaded GeoJSON, convert to our primary format, JSON."""
@@ -230,6 +280,12 @@ def get_context(self):
class ConvertJSONIntoSpreadsheets(ProcessDataTask):
"""Convert primary format (JSON) to spreadsheets"""
+ def __init__(self, supplied_data):
+ super().__init__(supplied_data)
+ self.csvs_zip_filename = os.path.join(
+ self.supplied_data.data_dir(), "flatten", "flattened.csvs.zip"
+ )
+
def process(self, process_data: dict) -> dict:
# TODO don't run if already done
@@ -243,6 +299,12 @@ def process(self, process_data: dict) -> dict:
flattentool.flatten(process_data["json_data_filename"], **flatten_kwargs)
+ # Make Zip file of all CSV files
+ with zipfile.ZipFile(self.csvs_zip_filename, "w") as out_zip:
+ for f in os.listdir(output_dir):
+ if os.path.isfile(os.path.join(output_dir, f)) and f.endswith(".csv"):
+ out_zip.write(os.path.join(output_dir, f), arcname=f)
+
return process_data
def get_context(self):
@@ -271,6 +333,15 @@ def get_context(self):
context["download_ods_size"] = os.stat(ods_filename).st_size
else:
context["can_download_ods"] = False
+ # CSVs
+ if os.path.exists(self.csvs_zip_filename):
+ context["can_download_csvs_zip"] = True
+ context["download_csvs_zip_url"] = os.path.join(
+ self.supplied_data.data_url(), "flatten", "flattened.csvs.zip"
+ )
+            context["download_csvs_zip_size"] = os.stat(self.csvs_zip_filename).st_size
+ else:
+ context["can_download_csvs_zip"] = False
# done!
return context
@@ -291,13 +362,78 @@ def process(self, process_data: dict) -> dict:
schema = OFDSSchema()
worker = PythonValidate(schema)
-
context = {"additional_checks": worker.validate(data)}
+
+ # has_links_with_external_node_data and has_links_with_external_span_data are shown in a different bit of UI.
+ # Set variables and move out of additional_checks
+ context["has_links_with_external_node_data"] = (
+ True
+ if [
+ r
+ for r in context["additional_checks"]
+ if r["type"] == "has_links_with_external_node_data"
+ ]
+ else False
+ )
+ context["has_links_with_external_span_data"] = (
+ True
+ if [
+ r
+ for r in context["additional_checks"]
+ if r["type"] == "has_links_with_external_span_data"
+ ]
+ else False
+ )
+ context["additional_checks"] = [
+ r
+ for r in context["additional_checks"]
+ if (
+ r["type"] != "has_links_with_external_node_data"
+ and r["type"] != "has_links_with_external_span_data"
+ )
+ ]
+
+ # Count and group what's left
context["additional_checks_count"] = len(context["additional_checks"])
context["additional_checks"] = group_data_list_by(
context["additional_checks"], lambda i: i["type"]
)
+ # The library returns *_name_does_not_match and *_reference_name_set_but_not_in_original as different types,
+ # but in this UI we don't care - we just want to show them as one section.
+ # So join the 2 types of errors into 1 list.
+ for f1, f2 in [
+ (
+ "node_phase_reference_name_does_not_match",
+ "node_phase_reference_name_set_but_not_in_original",
+ ),
+ (
+ "span_phase_reference_name_does_not_match",
+ "span_phase_reference_name_set_but_not_in_original",
+ ),
+ (
+ "contract_related_phase_reference_name_does_not_match",
+ "contract_related_phase_reference_name_set_but_not_in_original",
+ ),
+ (
+ "node_organisation_reference_name_does_not_match",
+ "node_organisation_reference_name_set_but_not_in_original",
+ ),
+ (
+ "span_organisation_reference_name_does_not_match",
+ "span_organisation_reference_name_set_but_not_in_original",
+ ),
+ (
+ "phase_organisation_reference_name_does_not_match",
+ "phase_organisation_reference_name_set_but_not_in_original",
+ ),
+ ]:
+ new_list = context["additional_checks"].get(f1, []) + context[
+ "additional_checks"
+ ].get(f2, [])
+ if new_list:
+ context["additional_checks"][f1] = new_list
+
with open(self.data_filename, "w") as fp:
json.dump(context, fp, indent=4)
diff --git a/cove_ofds/templates/cove_ofds/additional_checks_table.html b/cove_ofds/templates/cove_ofds/additional_checks_table.html
index ed406cc..94a502a 100644
--- a/cove_ofds/templates/cove_ofds/additional_checks_table.html
+++ b/cove_ofds/templates/cove_ofds/additional_checks_table.html
@@ -4,6 +4,12 @@
{% if 'span_start_node_not_found' in additional_checks or 'span_end_node_not_found' in additional_checks %}
{% trans 'Node references' %}
{% trans 'Your data contains spans with node references that cannot be resolved. `Span.start` and `Span.end` must match the `.id` of exactly one node in the `/nodes` array.' %}
+ {% if 'span_start_node_not_found' in additional_checks %}
+
{% if 'node_location_coordinates_incorrect' in additional_checks %}
{% trans 'Node location coordinates' %}
{% trans 'Your data contains nodes with incorrectly formatted location coordinates. `/nodes/location/coordinates` must be a single position, i.e. an array of numbers.' %}
{% if 'span_route_coordinates_incorrect' in additional_checks %}
{% trans 'Span route coordinates' %}
{% trans 'Your data contains spans with incorrectly formatted route coordinates. `/spans/route/coordinates` must be an array of positions, i.e. an array of arrays of numbers.' %}
{% if 'node_phase_reference_id_not_found' in additional_checks or 'span_phase_reference_id_not_found' in additional_checks or 'contract_related_phase_reference_id_not_found' in additional_checks %}
{% trans 'Phase references' %}
{% trans 'Your data contains phase references that cannot be resolved. The `.id` of each phase reference must match the `.id` of exactly one phase in the `/phases` array.' %}
{% if 'node_phase_reference_id_not_found' in additional_checks %}
@@ -225,6 +236,7 @@
{% trans 'Phase references' %}
{% if 'node_phase_reference_name_does_not_match' in additional_checks or 'span_phase_reference_name_does_not_match' in additional_checks or 'contract_related_phase_reference_name_does_not_match' in additional_checks %}
{% trans 'Phase names' %}
{% trans 'Your data contains phase references with inconsistent names. The `.name` of each phase reference must match the `.name` of the phase it references.' %}
{% if 'node_phase_reference_name_does_not_match' in additional_checks %}
@@ -293,89 +305,11 @@
{% trans 'Phase names' %}
{% endif %}
{% endif %}
-{% if 'node_phase_reference_name_set_but_not_in_original' in additional_checks %}
-
{% trans 'TODO node_phase_reference_name_set_but_not_in_original' %}
-
{% trans 'TODO' %}
-
-
-
-
{% trans 'Node Id' %}
-
{% trans 'Network ID' %}
-
-
-
- {% for additional_check in additional_checks.node_phase_reference_name_set_but_not_in_original %}
-
-
- {{ additional_check.node_id }}
-
-
- {{ additional_check.network_id }}
-
-
- {% endfor %}
-
-
-{% endif %}
-
-
-
-
-{% if 'span_phase_reference_name_set_but_not_in_original' in additional_checks %}
-
{% trans 'TODO span_phase_reference_name_set_but_not_in_original' %}
-
{% trans 'TODO' %}
-
-
-
-
{% trans 'Span Id' %}
-
{% trans 'Network ID' %}
-
-
-
- {% for additional_check in additional_checks.span_phase_reference_name_set_but_not_in_original %}
-
-
- {{ additional_check.span_id }}
-
-
- {{ additional_check.network_id }}
-
-
- {% endfor %}
-
-
-{% endif %}
-
-
-{% if 'contract_related_phase_reference_name_set_but_not_in_original' in additional_checks %}
-
{% trans 'TODO span_phase_reference_name_set_but_not_in_original' %}
-
{% trans 'TODO' %}
-
-
-
-
{% trans 'Contract Id' %}
-
{% trans 'Network ID' %}
-
-
-
- {% for additional_check in additional_checks.contract_related_phase_reference_name_set_but_not_in_original %}
-
-
- {{ additional_check.contract_id }}
-
-
- {{ additional_check.network_id }}
-
-
- {% endfor %}
-
-
-{% endif %}
-
{% if 'node_organisation_reference_id_not_found' in additional_checks or 'node_organisation_reference_id_not_found' in additional_checks or 'phase_organisation_reference_id_not_found' in additional_checks %}
{% trans 'Organisation references' %}
{% trans 'Your data contains organisation references that cannot be resolved. The `.id` of each organisation reference must match the `.id` of exactly one organisation in the `/organisations` array.' %}
{% if 'node_organisation_reference_id_not_found' in additional_checks %}
@@ -457,6 +391,7 @@
{% trans 'Organisation references' %}
{% if 'node_organisation_reference_name_does_not_match' in additional_checks %}
{% trans 'Organisation names' %}
{% trans 'Your data contains organisation references with inconsistent names. The `.name` of each organisation reference must match the `.name` of the organisation it references.' %}
{% if 'node_organisation_reference_name_does_not_match' in additional_checks %}
@@ -534,97 +469,10 @@
{% trans 'Organisation names' %}
{% endif %}
-{% if 'node_organisation_reference_name_set_but_not_in_original' in additional_checks %}
-
{% trans 'TODO node_organisation_reference_name_set_but_not_in_original' %}
-
{% trans 'TODO ' %}
-
-
-
-
{% trans 'Field' %}
-
{% trans 'Node Id' %}
-
{% trans 'Network ID' %}
-
-
-
- {% for additional_check in additional_checks.node_organisation_reference_name_set_but_not_in_original %}
-
-
- {{ additional_check.field }}
-
-
- {{ additional_check.node_id }}
-
-
- {{ additional_check.network_id }}
-
-
- {% endfor %}
-
-
-{% endif %}
-
-
-{% if 'span_organisation_reference_name_set_but_not_in_original' in additional_checks %}
-
{% trans 'TODO span_organisation_reference_name_set_but_not_in_original' %}
-
{% trans 'TODO ' %}
-
-
-
-
{% trans 'Field' %}
-
{% trans 'Span Id' %}
-
{% trans 'Network ID' %}
-
-
-
- {% for additional_check in additional_checks.span_organisation_reference_name_set_but_not_in_original %}
-
-
- {{ additional_check.field }}
-
-
- {{ additional_check.span_id }}
-
-
- {{ additional_check.network_id }}
-
-
- {% endfor %}
-
-
-{% endif %}
-
-
-
-
-{% if 'phase_organisation_reference_name_set_but_not_in_original' in additional_checks %}
-
{% trans 'TODO phase_organisation_reference_name_set_but_not_in_original' %}
-
{% trans 'TODO ' %}
-
-
-
-
{% trans 'Phase Id' %}
-
{% trans 'Network ID' %}
-
-
-
- {% for additional_check in additional_checks.phase_organisation_reference_name_set_but_not_in_original %}
-
-
- {{ additional_check.phase_id }}
-
-
- {{ additional_check.network_id }}
-
-
- {% endfor %}
-
-
-{% endif %}
-
-
{% if 'node_international_connections_country_not_set' in additional_checks %}
{% trans 'International connection countries' %}
{% trans 'Your data contains nodes with international connections that do not specify a country. `.country` must be set for each international connection in `/nodes/internationalConnections`.' %}
{% endif %}
-{% if 'has_links_with_external_node_data' in additional_checks %}
-
{% trans 'Links to nodes data' %}
-
{% trans 'Your data contains links to API endpoints or bulk files for nodes. The additional data available from the links has not been checked. You can check the data by embedding it in network package and submitting it to CoVE.' %}
-{% endif %}
-
-
-{% if 'has_links_with_external_span_data' in additional_checks %}
-
{% trans 'Links to spans data' %}
-
{% trans 'Your data contains links to API endpoints or bulk files for nodes. The additional data available from the links has not been checked. You can check the data by embedding it in network package and submitting it to CoVE.' %}
-{% endif %}
-
-
{% if 'node_not_used_in_any_spans' in additional_checks %}
-
{% trans 'Orphaned nodes' %}
+
{% trans 'Unreferenced nodes' %}
{% trans 'Your data contains nodes that are not referenced by any spans.' %}
{% trans 'Schema Version Used' %}: {{ schema_version_used }}
+
{% trans 'Your data was checked against schema version' %}: {{ schema_version_used }}
-
-
-
- {% trans 'Download Data' %}
+ {% if has_links_with_external_node_data or has_links_with_external_span_data %}
+
+
+
+
+ {% trans 'Unchecked data' %}
-
-
-
- {% if original_format == 'json' %}
- {% trans 'You uploaded data in JSON format.' %}
- {% elif original_format == 'spreadsheet' %}
- {% trans 'You uploaded data in a spreadsheet format.' %}
- {% elif original_format == 'geojson' %}
- {% trans 'You uploaded data in a GeoJSON format.' %}
- {% endif %}
-
-
-
- {% trans 'You can download your original data:' %}
-
-
-
- {% for supplied_data_file in supplied_data_files %}
-
{% trans 'Your data contains links to API endpoints or bulk files. The additional data available from the links has not been checked. You can check the data by embedding it in network package and submitting it to CoVE. For more information, see ' %}/links.
- {% trans 'Validation Errors' %}
+ {% trans 'Structure and Format' %}
+
{% trans 'The structure and format of your data does not conform to the OFDS schema. You should check your mapping and data pipeline for errors. For more information, see the ' %}{% trans 'reference documentation' %}.
{% include "cove_ofds/validation_table.html" %}
@@ -109,11 +185,11 @@
- {% trans 'Validation Errors' %}
+ {% trans 'Structure and Format' %}
-
{% trans 'There were no validation errors!' %}
+
{% trans 'The structure and format of your data conforms to the OFDS schema.' %}
{% endif %}
@@ -127,25 +203,32 @@
{% if additional_fields_count %}
-
-
+
+
{% trans 'Additional Fields' %}
+
{% trans 'Your data contains additional fields that are not part of the OFDS schema. You should:' %}
+
+
{% trans 'Check that additional fields are not the result of typos in field names or other errors in your mapping or data pipeline.' %}
+
{% trans 'Check whether the data in these fields could be provided by using a field in the OFDS schema.' %}
+ Use the form below to submit your data. You can submit data in either JSON, GeoJSON or CSV format.
+ For more information, see the publication format reference.
+
+
+
{% endblock %}
diff --git a/libcoveweb2/urls.py b/libcoveweb2/urls.py
index d8a933a..e877bc9 100644
--- a/libcoveweb2/urls.py
+++ b/libcoveweb2/urls.py
@@ -6,6 +6,7 @@
urlpatterns = [
re_path(r"^new_json$", libcoveweb2.views.new_json, name="new_json"),
+ re_path(r"^new_csvs$", libcoveweb2.views.new_csvs, name="new_csvs"),
re_path(
r"^new_spreadsheet$", libcoveweb2.views.new_spreadsheet, name="new_spreadsheet"
),
diff --git a/libcoveweb2/views.py b/libcoveweb2/views.py
index deff4d6..d150a55 100644
--- a/libcoveweb2/views.py
+++ b/libcoveweb2/views.py
@@ -1,48 +1,122 @@
+from django.conf import settings
from django.core.exceptions import ValidationError
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.utils.translation import gettext_lazy as _
-from libcoveweb2.forms import NewJSONUploadForm, NewSpreadsheetUploadForm
-from libcoveweb2.models import SuppliedData, SuppliedDataFile
-from libcoveweb2.settings import (
- ALLOWED_JSON_CONTENT_TYPES,
- ALLOWED_JSON_EXTENSIONS,
- ALLOWED_SPREADSHEET_CONTENT_TYPES,
- ALLOWED_SPREADSHEET_EXTENSIONS,
+from libcoveweb2.forms import (
+ NewCSVsUploadForm,
+ NewJSONTextForm,
+ NewJSONUploadForm,
+ NewSpreadsheetUploadForm,
)
+from libcoveweb2.models import SuppliedData, SuppliedDataFile
+
+JSON_FORM_CLASSES = {
+ "upload_form": NewJSONUploadForm,
+ "text_form": NewJSONTextForm,
+}
def new_json(request):
forms = {
- "upload_form": NewJSONUploadForm(request.POST, request.FILES)
+ form_name: form_class() for form_name, form_class in JSON_FORM_CLASSES.items()
+ }
+ request_data = None
+ if request.POST:
+ request_data = request.POST
+ if request_data:
+ if "paste" in request_data:
+ form_name = "text_form"
+ else:
+ form_name = "upload_form"
+ forms[form_name] = JSON_FORM_CLASSES[form_name](request_data, request.FILES)
+ form = forms[form_name]
+ if form.is_valid():
+ # Extra Validation
+ if form_name == "upload_form":
+ if (
+ not request.FILES["file_upload"].content_type
+ in settings.ALLOWED_JSON_CONTENT_TYPES
+ ):
+ form.add_error(
+ "file_upload", "This does not appear to be a JSON file"
+ )
+ if not [
+ e
+ for e in settings.ALLOWED_JSON_EXTENSIONS
+ if str(request.FILES["file_upload"].name).lower().endswith(e)
+ ]:
+ form.add_error(
+ "file_upload", "This does not appear to be a JSON file"
+ )
+ elif form_name == "text_form":
+ pass # TODO
+
+ # Process
+ if form.is_valid():
+ supplied_data = SuppliedData()
+ supplied_data.format = "json"
+ supplied_data.save()
+
+ if form_name == "upload_form":
+ supplied_data.save_file(request.FILES["file_upload"])
+ elif form_name == "text_form":
+ supplied_data.save_file_contents(
+ "input.json",
+ form.cleaned_data["paste"],
+ "application/json",
+ None,
+ )
+
+ return HttpResponseRedirect(supplied_data.get_absolute_url())
+
+ return render(request, "libcoveweb2/new_json.html", {"forms": forms})
+
+
+CSVS_FORM_CLASSES = {
+ "upload_form": NewCSVsUploadForm,
+}
+
+
+def new_csvs(request):
+
+ forms = {
+ "upload_form": NewCSVsUploadForm(request.POST, request.FILES)
if request.POST
- else NewJSONUploadForm()
+ else NewCSVsUploadForm()
}
form = forms["upload_form"]
if form.is_valid():
# Extra Validation
- if not request.FILES["file_upload"].content_type in ALLOWED_JSON_CONTENT_TYPES:
- form.add_error("file_upload", "This does not appear to be a JSON file")
- if not [
- e
- for e in ALLOWED_JSON_EXTENSIONS
- if str(request.FILES["file_upload"].name).lower().endswith(e)
- ]:
- form.add_error("file_upload", "This does not appear to be a JSON file")
+ for field in form.file_field_names:
+ if request.FILES.get(field):
+ if (
+ not request.FILES[field].content_type
+ in settings.ALLOWED_CSV_CONTENT_TYPES
+ ):
+ form.add_error(field, "This does not appear to be a CSV file")
+ if not [
+ e
+ for e in settings.ALLOWED_CSV_EXTENSIONS
+ if str(request.FILES[field].name).lower().endswith(e)
+ ]:
+ form.add_error(field, "This does not appear to be a CSV file")
# Process
if form.is_valid():
supplied_data = SuppliedData()
- supplied_data.format = "json"
+ supplied_data.format = "csvs"
supplied_data.save()
- supplied_data.save_file(request.FILES["file_upload"])
+ for field in form.file_field_names:
+ if request.FILES.get(field):
+ supplied_data.save_file(request.FILES[field])
return HttpResponseRedirect(supplied_data.get_absolute_url())
- return render(request, "libcoveweb2/new_json.html", {"forms": forms})
+ return render(request, "libcoveweb2/new_csvs.html", {"forms": forms})
def new_spreadsheet(request):
@@ -57,12 +131,12 @@ def new_spreadsheet(request):
# Extra Validation
if (
not request.FILES["file_upload"].content_type
- in ALLOWED_SPREADSHEET_CONTENT_TYPES
+ in settings.ALLOWED_SPREADSHEET_CONTENT_TYPES
):
form.add_error("file_upload", "This does not appear to be a spreadsheet")
if not [
e
- for e in ALLOWED_SPREADSHEET_EXTENSIONS
+ for e in settings.ALLOWED_SPREADSHEET_EXTENSIONS
if str(request.FILES["file_upload"].name).lower().endswith(e)
]:
form.add_error("file_upload", "This does not appear to be a spreadsheet")