From ef0e84a32055c12db8252223bc743e0b366d38ab Mon Sep 17 00:00:00 2001 From: Luccas Mateus Date: Mon, 15 Jan 2024 15:31:03 -0300 Subject: [PATCH] Merge Dev to Staging (#201) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(map view): prevent client error when legend config is empty * Feature/pending datasets (#198) * Add pending_datasets table init and CRUD * Add approval_status and draft to metadata * Fix draft permissions * [main.yml] Init. pendingdb tables * Run WRI unit tests first * Fix paths in unit tests script * Run WRI unit tests first in src * Fix OR in unit tests script --------- Co-authored-by: Muhammad Ismail Shahzad <57398621+MuhammadIsmailShahzad@users.noreply.github.com> * Rm test.yml (#202) --------- Co-authored-by: Demenech Co-authored-by: Joรฃo Demenech Co-authored-by: Michael Polidori Co-authored-by: Muhammad Ismail Shahzad <57398621+MuhammadIsmailShahzad@users.noreply.github.com> --- .github/workflows/main.yml | 2 + .../ckan/docker-entrypoint.d/init_tables.sh | 8 +- .../ckan/scripts/init-extensions.sh | 1 + .../ckan/scripts/run_unit_tests.sh | 50 +-- ckan-backend-dev/ckan/setup/schema.xml | 2 + ckan-backend-dev/src/ckanext-wri/README.md | 126 +++++++ .../ckanext/wri/logic/action/create.py | 59 +++- .../ckanext/wri/logic/action/delete.py | 32 ++ .../ckanext/wri/logic/action/get.py | 309 ++++++++++++------ .../ckanext/wri/logic/action/update.py | 60 +++- .../ckanext/wri/logic/auth/auth.py | 29 +- .../ckanext/wri/logic/auth/schema.py | 68 +++- .../ckanext-wri/ckanext/wri/model/__init__.py | 14 +- .../ckanext/wri/model/pending_datasets.py | 116 +++++++ .../src/ckanext-wri/ckanext/wri/plugin.py | 36 +- .../ckanext/wri/schema/ckan_dataset.yaml | 19 +- .../wri/tests/test_dataset_permissions.py | 3 +- .../ckanext/wri/tests/test_location_search.py | 2 +- .../wri/tests/test_pending_datasets.py | 123 +++++++ .../ckanext/wri/tests/test_schema.py | 4 +- .../ckanext/wri/tests/test_search.py | 6 +- 
.../src/ckanext-wri/requirements.txt | 1 - .../legend-item-types/LegendItemTypesList.tsx | 2 + 23 files changed, 897 insertions(+), 175 deletions(-) create mode 100644 ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/delete.py create mode 100644 ckan-backend-dev/src/ckanext-wri/ckanext/wri/model/pending_datasets.py create mode 100644 ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_pending_datasets.py diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index cedacc7cb..01984613f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -126,6 +126,8 @@ jobs: CKAN_IMAGE: '${{ steps.login-ecr.outputs.registry }}/${{ secrets.ECR_CKAN_REPO }}:${{ github.sha }}' run: docker compose -f docker-compose.test.yml --env-file .env.example exec -T ckan-dev /bin/bash -c "/srv/app/fix_s3filestore_test_ini.sh" working-directory: ./ckan-backend-dev + - name: Initialize the pending datasets table + run: docker exec ckan-wri sh -c "ckan -c production.ini pendingdatasetsdb" - name: Run Unit Tests ๐Ÿงช env: CKAN_IMAGE: '${{ steps.login-ecr.outputs.registry }}/${{ secrets.ECR_CKAN_REPO }}:${{ github.sha }}' diff --git a/ckan-backend-dev/ckan/docker-entrypoint.d/init_tables.sh b/ckan-backend-dev/ckan/docker-entrypoint.d/init_tables.sh index bd0dbd6b7..7ef43b59c 100644 --- a/ckan-backend-dev/ckan/docker-entrypoint.d/init_tables.sh +++ b/ckan-backend-dev/ckan/docker-entrypoint.d/init_tables.sh @@ -7,7 +7,13 @@ ckan -c production.ini notificationdb EXIT_CODE=$? 
if [ $EXIT_CODE -ne 0 ]; then - echo "Failed to initialize custom tables" + echo "Failed to initialize the notification table" +fi + +ckan -c production.ini pendingdatasetsdb +EXIT_CODE=$? +if [ $EXIT_CODE -ne 0 ]; then + echo "Failed to initialize the pending datasets table" + exit $EXIT_CODE fi diff --git a/ckan-backend-dev/ckan/scripts/init-extensions.sh b/ckan-backend-dev/ckan/scripts/init-extensions.sh index b9306d55f..e2fad55b9 100644 --- a/ckan-backend-dev/ckan/scripts/init-extensions.sh +++ b/ckan-backend-dev/ckan/scripts/init-extensions.sh @@ -2,3 +2,4 @@ # Initialize Issues and Notification DB docker exec ckan-wri sh -c "ckan -c production.ini issuesdb" docker exec ckan-wri sh -c "ckan -c production.ini notificationdb" +docker exec ckan-wri sh -c "ckan -c production.ini pendingdatasetsdb" diff --git a/ckan-backend-dev/ckan/scripts/run_unit_tests.sh b/ckan-backend-dev/ckan/scripts/run_unit_tests.sh index f917f485c..29a5a7e45 100755 --- a/ckan-backend-dev/ckan/scripts/run_unit_tests.sh +++ b/ckan-backend-dev/ckan/scripts/run_unit_tests.sh @@ -13,12 +13,40 @@ echo "Test Summary" > "$ROOT_DIR/test_summary.txt" # echo "CKAN Core: Failed" >> test_summary.txt # fi -cd src +if [ -d "src_extensions/ckanext-wri" ]; then + cd src_extensions/ckanext-wri + + pytest --ckan-ini=test.ini ckanext/wri/tests 2>&1 | tee -a "$ROOT_DIR/test_results.txt" + PYTEST_EXIT_CODE=${PIPESTATUS[0]} + + if [ $PYTEST_EXIT_CODE -eq 0 ]; then + echo "ckanext-wri: Passed" >> "$ROOT_DIR/test_summary.txt" + else + echo "ckanext-wri: Failed" >> "$ROOT_DIR/test_summary.txt" + fi + +fi + +if [ -d "$ROOT_DIR/src/ckanext-wri" ]; then + cd $ROOT_DIR/src/ckanext-wri + + pytest --ckan-ini=test.ini ckanext/wri/tests 2>&1 | tee -a "$ROOT_DIR/test_results.txt" + PYTEST_EXIT_CODE=${PIPESTATUS[0]} + + if [ $PYTEST_EXIT_CODE -eq 0 ]; then + echo "ckanext-wri: Passed" >> "$ROOT_DIR/test_summary.txt" + else + echo "ckanext-wri: Failed" >> "$ROOT_DIR/test_summary.txt" + fi + +fi + +cd $ROOT_DIR/src for dir in ckanext-*; do 
if [ -d "$dir" ]; then # Skip ckanext-envvars - if [ "$dir" == "ckanext-envvars" ]; then + if [ "$dir" == "ckanext-envvars" ] || [ "$dir" == "ckanext-wri" ]; then continue fi @@ -38,24 +66,6 @@ for dir in ckanext-*; do fi done -cd .. - -if [ -d "src_extensions/ckanext-wri" ]; then - cd src_extensions/ckanext-wri - - pytest --ckan-ini=test.ini ckanext/wri/tests 2>&1 | tee -a "$ROOT_DIR/test_results.txt" - PYTEST_EXIT_CODE=${PIPESTATUS[0]} - - if [ $PYTEST_EXIT_CODE -eq 0 ]; then - echo "ckanext-wri: Passed" >> "$ROOT_DIR/test_summary.txt" - else - echo "ckanext-wri: Failed" >> "$ROOT_DIR/test_summary.txt" - fi - - cd .. - -fi - cat "$ROOT_DIR/test_summary.txt" # GitHub Actions failure exit code diff --git a/ckan-backend-dev/ckan/setup/schema.xml b/ckan-backend-dev/ckan/setup/schema.xml index 3121b0645..277ee3327 100644 --- a/ckan-backend-dev/ckan/setup/schema.xml +++ b/ckan-backend-dev/ckan/setup/schema.xml @@ -203,6 +203,8 @@ attribute with the form `ckan-X.Y` --> + + index_id diff --git a/ckan-backend-dev/src/ckanext-wri/README.md b/ckan-backend-dev/src/ckanext-wri/README.md index a713f2e51..8f1674536 100644 --- a/ckan-backend-dev/src/ckanext-wri/README.md +++ b/ckan-backend-dev/src/ckanext-wri/README.md @@ -3,6 +3,21 @@ **Table of Contents** - [ckanext-wri](#ckanext-wri) + - [Notifications Feature](#notifications-feature) + - [Database Setup](#database-setup) + - [API Endpoints](#api-endpoints) + - [POST /api/action/notification_create](#post-apiactionnotification_create) + - [POST /api/action/notification_update](#post-apiactionnotification_update) + - [GET /api/action/notification_get_all](#get-apiactionnotification_get_all) + - [Pending Datasets (Approval Workflow)](#pending-datasets-approval-workflow) + - [Pending Dataset Table](#pending-dataset-table) + - [Initializing the Pending Dataset Table](#initializing-the-pending-dataset-table) + - [API Endpoints](#api-endpoints-1) + - [POST /api/action/pending_dataset_create](#post-apiactionpending_dataset_create) + 
- [POST /api/action/pending_dataset_update](#post-apiactionpending_dataset_update) + - [POST /api/action/pending_dataset_delete](#post-apiactionpending_dataset_delete) + - [GET /api/action/pending_dataset_show](#get-apiactionpending_dataset_show) + - [GET /api/action/pending_diff_show](#get-apiactionpending_diff_show) - [Development](#development) - [Testing](#testing) @@ -62,6 +77,117 @@ Returns a list of notifications for a sender or recipient. - **recipient_id** (string) – The user ID of the recipient of the notification (optional, but either `recipient_id` or `sender_id` is required). - **sender_id** (string) – The user ID of the sender of the notification (optional, but either `recipient_id` or `sender_id` is required). +## Pending Datasets (Approval Workflow) + +A pending dataset is dataset metadata that's been submitted for approval. While pending, the dataset metadata lives in a separate table from the main `package` table, `pending_datasets`. Once approved, the existing dataset is updated with the new metadata. + +### Pending Dataset Table + +The `pending_datasets` table has the following columns: + +| `package_id` | `package_data` | `last_modified` | +| ------------ | -------------- | --------------- | +| `text` (PK) | `jsonb` | `timestamp` | + +The `package_id` column is the UUID of the dataset (and it's the primary key). The `package_data` column contains the dataset metadata as a JSONB object. The `last_modified` column is a timestamp that is automatically generated whenever `package_data` is updated. + +#### Initializing the Pending Dataset Table + +You can initialize the pending dataset table by running the following command: + +```console +ckan -c production.ini pendingdatasetsdb +``` + +### API Endpoints + +#### POST /api/action/pending_dataset_create + +**Parameters:** +- **package_id** (string) – The UUID of the dataset (required). +- **package_data** (JSON object) – The dataset metadata (required). 
+ +Creates a new pending dataset and returns the newly created pending dataset. + +#### POST /api/action/pending_dataset_update + +**Parameters:** +- **package_id** (string) โ€“ The UUID of the dataset (required). +- **package_data** (JSON object) โ€“ The dataset metadata (required). + +Updates an existing pending dataset and returns the updated pending dataset. + +#### POST /api/action/pending_dataset_delete + +**Parameters:** +- **package_id** (string) โ€“ The UUID of the dataset (required). + +Deletes an existing pending dataset. + +#### GET /api/action/pending_dataset_show + +**Parameters:** +- **package_id** (string) โ€“ The UUID of the dataset (required). + +Returns the pending dataset for the given `package_id`. + +#### GET /api/action/pending_diff_show + +**Parameters:** +- **package_id** (string) โ€“ The UUID of the dataset (required). + +Returns the diff between the pending dataset and the existing dataset for the given `package_id`. + +Here's an example: + +```json +{ + "help": "http://ckan-dev:5000/api/3/action/help_show?name=pending_diff_show", + "success": true, + "result": { + "title": { + "old_value": "My dataset title", + "new_value": "My better dataset title" + }, + "application": { + "old_value": "", + "new_value": "wri" + }, + "resources[0].description": { + "old_value": "My resource description", + "new_value": "My better resource description" + }, + "resources[0].format": { + "old_value": "CSV", + "new_value": "HTML" + }, + "resources[1].title": { + "old_value": "My resource title", + "new_value": "My better resource title" }, + "wri_data": { + "old_value": false, + "new_value": true + }, + "cautions": { + "old_value": "", + "new_value": "This is a caution" + }, + "languages": { + "old_value": [ + "fr" + ], + "new_value": [ + "en" + ] + }, + "function": { + "old_value": "The function of this dataset is to x...", + "new_value": "The function of this dataset is to y..." 
+ }, + }, +} +``` + ## Development See the [CKAN Backend Development README](ckan-backend-dev/README.md) for instructions on how to set up a local Docker CKAN backend development environment. diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/create.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/create.py index ec650c725..306e27d85 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/create.py +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/create.py @@ -1,18 +1,25 @@ -from ckan.types import ActionResult, Context, DataDict from typing_extensions import TypeAlias import logging + from ckanext.wri.model.notification import Notification, notification_dictize -import ckan.plugins.toolkit as tk +from ckanext.wri.model.pending_datasets import PendingDatasets from ckanext.wri.logic.auth import schema +from ckan.common import _ +import ckan.plugins.toolkit as tk +from ckan.types import Context, DataDict + NotificationGetUserViewedActivity: TypeAlias = None log = logging.getLogger(__name__) -def notification_create(context: Context, data_dict: DataDict) -> NotificationGetUserViewedActivity: + +def notification_create( + context: Context, data_dict: DataDict +) -> NotificationGetUserViewedActivity: """Create a Notification by providing Sender and Recipient""" - + model = context["model"] - session = context['session'] + session = context["session"] user_obj = model.User.get(context["user"]) tk.check_access("notification_create", context, data_dict) @@ -21,24 +28,52 @@ def notification_create(context: Context, data_dict: DataDict) -> NotificationGe if errors: raise tk.ValidationError(errors) - recipient_id = data_dict.get('recipient_id') - sender_id = data_dict.get('sender_id') - activity_type = data_dict.get('activity_type') - object_type = data_dict.get('object_type') - object_id = data_dict.get('object_id') + recipient_id = data_dict.get("recipient_id") + sender_id = data_dict.get("sender_id") + 
activity_type = data_dict.get("activity_type") + object_type = data_dict.get("object_type") + object_id = data_dict.get("object_id") user_notifications = Notification( recipient_id=recipient_id, sender_id=sender_id, activity_type=activity_type, object_type=object_type, - object_id=object_id + object_id=object_id, ) session.add(user_notifications) - if not context.get('defer_commit'): + if not context.get("defer_commit"): model.repo.commit() notification_dicts = notification_dictize(user_notifications, context) return notification_dicts + + +def pending_dataset_create(context: Context, data_dict: DataDict): + """Create a Pending Dataset""" + package_id = data_dict.get("package_id") + package_data = data_dict.get("package_data") + log.error(package_data) + + if not package_id: + raise tk.ValidationError(_("package_id is required")) + + if not package_data: + raise tk.ValidationError(_("package_data is required")) + + tk.check_access("pending_dataset_create", context, package_data) + + pending_dataset = None + + try: + pending_dataset = PendingDatasets.create(package_id, package_data) + except Exception as e: + log.error(e) + raise tk.ValidationError(e) + + if not pending_dataset: + raise tk.ValidationError(_(f"Pending Dataset not found: {package_id}")) + + return pending_dataset diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/delete.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/delete.py new file mode 100644 index 000000000..855776eee --- /dev/null +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/delete.py @@ -0,0 +1,32 @@ +import logging + +from ckan.types import Context, DataDict +import ckan.plugins.toolkit as tk +from ckan.common import _ + +from ckanext.wri.model.pending_datasets import PendingDatasets + +log = logging.getLogger(__name__) + + +def pending_dataset_delete(context: Context, data_dict: DataDict): + """Delete a Pending Dataset""" + package_id = data_dict.get("package_id") + + if not package_id: + 
raise tk.ValidationError(_("package_id is required")) + + tk.check_access("pending_dataset_delete", context, data_dict) + + pending_dataset = None + + try: + pending_dataset = PendingDatasets.delete(package_id) + except Exception as e: + log.error(e) + raise tk.ValidationError(e) + + if not pending_dataset: + raise tk.ValidationError(_(f"Pending Dataset not found: {package_id}")) + + return pending_dataset diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/get.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/get.py index 66730da0c..87da767db 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/get.py +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/get.py @@ -1,12 +1,13 @@ # encoding: utf-8 -'''API functions for searching for and getting data from CKAN.''' +"""API functions for searching for and getting data from CKAN.""" from __future__ import annotations import logging import json -from typing import (Any, cast) +from typing import Any, cast import re +from itertools import zip_longest from ckan.common import config, asbool @@ -22,16 +23,22 @@ import ckan.lib.plugins as lib_plugins import ckan.authz as authz +from ckan.lib.dictization import table_dictize from ckan.common import _ from ckan.types import ActionResult, Context, DataDict from typing_extensions import TypeAlias -from ckanext.wri.model.notification import Notification,notification_dictize, notification_list_dictize +from ckanext.wri.model.notification import ( + Notification, + notification_dictize, + notification_list_dictize, +) +from ckanext.wri.model.pending_datasets import PendingDatasets import datetime import ckan.plugins.toolkit as tk from ckanext.wri.logic.auth import schema -log = logging.getLogger('ckan.logic') +log = logging.getLogger("ckan.logic") # Define some shortcuts # Ensure they are module-private so that they don't get loaded as available @@ -41,6 +48,7 @@ NotFound = logic.NotFound NotAuthorized = 
logic.NotAuthorized ValidationError = logic.ValidationError +get_action = ckan.logic.get_action NotificationGetUserViewedActivity: TypeAlias = None @@ -48,12 +56,12 @@ SOLR_BOOLEAN_FIELDS = [ - 'featured_dataset', + "featured_dataset", ] def _fix_solr_boolean_query(query: str, field: str) -> str: - ''' + """ Solr does not support boolean queries with capital letters (the Python boolean style). This function replaces the uppercase boolean values with lowercase ones for a given field. @@ -63,18 +71,18 @@ def _fix_solr_boolean_query(query: str, field: str) -> str: :type field: string :returns: the fixed query :rtype: string - ''' + """ safe_field = re.escape(field) - query = re.sub(r'\b{}:True\b'.format(safe_field), '{}:true'.format(field), query) - query = re.sub(r'\b{}:False\b'.format(safe_field), '{}:false'.format(field), query) + query = re.sub(r"\b{}:True\b".format(safe_field), "{}:true".format(field), query) + query = re.sub(r"\b{}:False\b".format(safe_field), "{}:false".format(field), query) return query @logic.side_effect_free def package_search(context: Context, data_dict: DataDict) -> ActionResult.PackageSearch: - ''' + """ Searches for packages satisfying a given search criteria. This action accepts solr search query parameters (details below), and @@ -204,40 +212,39 @@ def package_search(context: Context, data_dict: DataDict) -> ActionResult.Packag fl can be None or a list of result fields, such as ['id', 'extras_custom_field']. if fl = None, datasets are returned as a list of full dictionary. 
- ''' + """ # Fix boolean Solr query for featured datasets - q = data_dict.get('q') + q = data_dict.get("q") for field in SOLR_BOOLEAN_FIELDS: if q and field in q: - data_dict['q'] = _fix_solr_boolean_query(q, field) + data_dict["q"] = _fix_solr_boolean_query(q, field) # sometimes context['schema'] is None - schema = (context.get('schema') or - ckan.logic.schema.default_package_search_schema()) + schema = context.get("schema") or ckan.logic.schema.default_package_search_schema() data_dict, errors = _validate(data_dict, schema, context) # put the extras back into the data_dict so that the search can # report needless parameters - data_dict.update(data_dict.get('__extras', {})) - data_dict.pop('__extras', None) + data_dict.update(data_dict.get("__extras", {})) + data_dict.pop("__extras", None) if errors: raise ValidationError(errors) - model = context['model'] - session = context['session'] - user = context.get('user') + model = context["model"] + session = context["session"] + user = context.get("user") - _check_access('package_search', context, data_dict) + _check_access("package_search", context, data_dict) # Move ext_ params to extras and remove them from the root of the search # params, so they don't cause and error - data_dict['extras'] = data_dict.get('extras', {}) - for key in [key for key in data_dict.keys() if key.startswith('ext_')]: - data_dict['extras'][key] = data_dict.pop(key) + data_dict["extras"] = data_dict.get("extras", {}) + for key in [key for key in data_dict.keys() if key.startswith("ext_")]: + data_dict["extras"][key] = data_dict.pop(key) # set default search field - data_dict['df'] = 'text' + data_dict["df"] = "text" # check if some extension needs to modify the search params for item in plugins.PluginImplementations(plugins.IPackageController): @@ -245,67 +252,68 @@ def package_search(context: Context, data_dict: DataDict) -> ActionResult.Packag # the extension may have decided that it is not necessary to perform # the query - abort = 
data_dict.get('abort_search', False) + abort = data_dict.get("abort_search", False) - if data_dict.get('sort') in (None, 'rank'): - data_dict['sort'] = config.get('ckan.search.default_package_sort') + if data_dict.get("sort") in (None, "rank"): + data_dict["sort"] = config.get("ckan.search.default_package_sort") results: list[dict[str, Any]] = [] facets: dict[str, Any] = {} count = 0 if not abort: - if asbool(data_dict.get('use_default_schema')): - data_source = 'data_dict' + if asbool(data_dict.get("use_default_schema")): + data_source = "data_dict" else: - data_source = 'validated_data_dict' - data_dict.pop('use_default_schema', None) + data_source = "validated_data_dict" + data_dict.pop("use_default_schema", None) - result_fl = data_dict.get('fl') + result_fl = data_dict.get("fl") if not result_fl: - data_dict['fl'] = 'id {0}'.format(data_source) + data_dict["fl"] = "id {0}".format(data_source) else: - data_dict['fl'] = ' '.join(result_fl) + data_dict["fl"] = " ".join(result_fl) - data_dict.setdefault('fq', '') + data_dict.setdefault("fq", "") # Remove before these hit solr FIXME: whitelist instead - include_private = asbool(data_dict.pop('include_private', False)) - include_drafts = asbool(data_dict.pop('include_drafts', False)) - include_deleted = asbool(data_dict.pop('include_deleted', False)) + include_private = asbool(data_dict.pop("include_private", False)) + include_drafts = asbool(data_dict.pop("include_drafts", False)) + include_deleted = asbool(data_dict.pop("include_deleted", False)) if not include_private: - data_dict['fq'] = '+capacity:public ' + data_dict['fq'] + data_dict["fq"] = "+capacity:public " + data_dict["fq"] - if '+state' not in data_dict['fq']: - states = ['active'] + if "+state" not in data_dict["fq"]: + states = ["active"] if include_drafts: - states.append('draft') + states.append("draft") if include_deleted: - states.append('deleted') - data_dict['fq'] += ' +state:({})'.format(' OR '.join(states)) + states.append("deleted") + 
data_dict["fq"] += " +state:({})".format(" OR ".join(states)) # Pop these ones as Solr does not need them - extras = data_dict.pop('extras', None) + extras = data_dict.pop("extras", None) # enforce permission filter based on user - if context.get('ignore_auth') or (user and authz.is_sysadmin(user)): + if context.get("ignore_auth") or (user and authz.is_sysadmin(user)): labels = None else: - labels = lib_plugins.get_permission_labels( - ).get_user_dataset_labels(context['auth_user_obj']) + labels = lib_plugins.get_permission_labels().get_user_dataset_labels( + context["auth_user_obj"] + ) query = search.query_for(model.Package) query.run(data_dict, permission_labels=labels) # Add them back so extensions can use them on after_search - data_dict['extras'] = extras + data_dict["extras"] = extras if result_fl: for package in query.results: if isinstance(package, str): package = {result_fl[0]: package} - extras = cast("dict[str, Any]", package.pop('extras', {})) + extras = cast("dict[str, Any]", package.pop("extras", {})) package.update(extras) results.append(package) else: @@ -316,65 +324,67 @@ def package_search(context: Context, data_dict: DataDict) -> ActionResult.Packag if package_dict: # the package_dict still needs translating when being viewed package_dict = json.loads(package_dict) - if context.get('for_view'): + if context.get("for_view"): for item in plugins.PluginImplementations( - plugins.IPackageController): - package_dict = item.before_dataset_view( - package_dict) + plugins.IPackageController + ): + package_dict = item.before_dataset_view(package_dict) results.append(package_dict) else: - log.error('No package_dict is coming from solr for package ' - 'id %s', package['id']) + log.error( + "No package_dict is coming from solr for package " "id %s", + package["id"], + ) count = query.count facets = query.facets search_results: dict[str, Any] = { - 'count': count, - 'facets': facets, - 'results': results, - 'sort': data_dict['sort'] + "count": count, + 
"facets": facets, + "results": results, + "sort": data_dict["sort"], } # create a lookup table of group name to title for all the groups and # organizations in the current search's facets. group_names = [] - for field_name in ('groups', 'organization'): + for field_name in ("groups", "organization"): group_names.extend(facets.get(field_name, {}).keys()) - groups = (session.query(model.Group.name, model.Group.title) - # type_ignore_reason: incomplete SQLAlchemy types - .filter(model.Group.name.in_(group_names)) # type: ignore - .all() - if group_names else []) + groups = ( + session.query(model.Group.name, model.Group.title) + # type_ignore_reason: incomplete SQLAlchemy types + .filter(model.Group.name.in_(group_names)).all() # type: ignore + if group_names + else [] + ) group_titles_by_name = dict(groups) # Transform facets into a more useful data structure. restructured_facets: dict[str, Any] = {} for key, value in facets.items(): - restructured_facets[key] = { - 'title': key, - 'items': [] - } + restructured_facets[key] = {"title": key, "items": []} for key_, value_ in value.items(): new_facet_dict = {} - new_facet_dict['name'] = key_ - if key in ('groups', 'organization'): + new_facet_dict["name"] = key_ + if key in ("groups", "organization"): display_name = group_titles_by_name.get(key_, key_) - display_name = display_name \ - if display_name and display_name.strip() else key_ - new_facet_dict['display_name'] = display_name - elif key == 'license_id': + display_name = ( + display_name if display_name and display_name.strip() else key_ + ) + new_facet_dict["display_name"] = display_name + elif key == "license_id": license = model.Package.get_license_register().get(key_) if license: - new_facet_dict['display_name'] = license.title + new_facet_dict["display_name"] = license.title else: - new_facet_dict['display_name'] = key_ + new_facet_dict["display_name"] = key_ else: - new_facet_dict['display_name'] = key_ - new_facet_dict['count'] = value_ - 
restructured_facets[key]['items'].append(new_facet_dict) - search_results['search_facets'] = restructured_facets + new_facet_dict["display_name"] = key_ + new_facet_dict["count"] = value_ + restructured_facets[key]["items"].append(new_facet_dict) + search_results["search_facets"] = restructured_facets # check if some extension needs to modify the search results for item in plugins.PluginImplementations(plugins.IPackageController): @@ -382,25 +392,27 @@ def package_search(context: Context, data_dict: DataDict) -> ActionResult.Packag # After extensions have had a chance to modify the facets, sort them by # display name. - for facet in search_results['search_facets']: - search_results['search_facets'][facet]['items'] = sorted( - search_results['search_facets'][facet]['items'], - key=lambda facet: facet['display_name'], reverse=True) + for facet in search_results["search_facets"]: + search_results["search_facets"][facet]["items"] = sorted( + search_results["search_facets"][facet]["items"], + key=lambda facet: facet["display_name"], + reverse=True, + ) return search_results + @logic.side_effect_free def notification_get_all( context: Context, data_dict: DataDict - ) -> NotificationGetUserViewedActivity: - """Get the notifications for a user by sender_id or receiver_id - """ +) -> NotificationGetUserViewedActivity: + """Get the notifications for a user by sender_id or receiver_id""" model = context["model"] - session = context['session'] + session = context["session"] tk.check_access("notification_get_all", context, data_dict) - sender_id = data_dict.get('sender_id') - recipient_id = data_dict.get('recipient_id') + sender_id = data_dict.get("sender_id") + recipient_id = data_dict.get("recipient_id") user_obj = model.User.get(context["user"]) user_id = user_obj.id @@ -410,19 +422,114 @@ def notification_get_all( if errors: raise tk.ValidationError(errors) - notification_objects = Notification.get(recipient_id=recipient_id,sender_id=sender_id) + notification_objects = 
Notification.get( + recipient_id=recipient_id, sender_id=sender_id + ) try: iter(notification_objects) - notification_objecst_result = notification_list_dictize( - notification_objects, context - ) + notification_objecst_result = notification_list_dictize( + notification_objects, context + ) except TypeError: notification_objecst_result = notification_dictize( - notification_objects, context - ) + notification_objects, context + ) if not notification_objecst_result: - raise logic.NotFound(_('Notification not found')) + raise logic.NotFound(_("Notification not found")) + + return notification_objecst_result + + +@logic.side_effect_free +def pending_dataset_show(context: Context, data_dict: DataDict): + """Get a pending dataset by package_id""" + package_id = data_dict.get("package_id") - return notification_objecst_result \ No newline at end of file + if not package_id: + raise logic.ValidationError(_("package_id is required")) + + tk.check_access("pending_dataset_show", context, {"id": package_id}) + + pending_dataset = None + + try: + pending_dataset = PendingDatasets.get(package_id=package_id) + except Exception as e: + log.error(e) + raise tk.ValidationError(e) + + if not pending_dataset: + raise logic.NotFound(_(f"Pending Dataset not found: {package_id}")) + + return pending_dataset + + +@logic.side_effect_free +def pending_diff_show(context: Context, data_dict: DataDict): + """Get a pending dataset by package_id""" + package_id = data_dict.get("package_id") + + if not package_id: + raise logic.ValidationError(_("package_id is required")) + + tk.check_access("package_show", context, {"id": package_id}) + + dataset_diff = None + + try: + pending_dataset = PendingDatasets.get(package_id=package_id).get("package_data") + existing_dataset = get_action("package_show")(context, {"id": package_id}) + dataset_diff = _diff(existing_dataset, pending_dataset) + except Exception as e: + log.error(e) + raise tk.ValidationError(e) + + if not dataset_diff: + raise 
logic.NotFound( + _("Diff not found for Pending Dataset: {}".format(package_id)) + ) + + return dataset_diff + + +def _diff(existing, pending, path=""): + diff = {} + keys = set(existing.keys()) | set(pending.keys()) + + for key in keys: + full_path = f"{path}.{key}" if path else key + existing_value = existing.get(key, None) + pending_value = pending.get(key, None) + + if isinstance(existing_value, dict) and isinstance(pending_value, dict): + nested_diff = _diff(existing_value, pending_value, full_path) + diff.update(nested_diff) + elif isinstance(existing_value, list) and isinstance(pending_value, list): + list_diff = _process_lists(existing_value, pending_value, full_path) + diff.update(list_diff) + elif existing_value != pending_value: + diff[full_path] = {"old_value": existing_value, "new_value": pending_value} + + return diff + + +def _process_lists(existing_list, pending_list, path): + list_diff = {} + + for index, (item_existing, item_pending) in enumerate( + zip_longest(existing_list, pending_list) + ): + item_path = f"{path}[{index}]" + + if isinstance(item_existing, dict) and isinstance(item_pending, dict): + item_diff = _diff(item_existing, item_pending, item_path) + list_diff.update(item_diff) + elif item_existing != item_pending: + list_diff[item_path] = { + "old_value": item_existing, + "new_value": item_pending, + } + + return list_diff diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/update.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/update.py index 9d585841a..4d8b3e253 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/update.py +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/update.py @@ -2,6 +2,7 @@ from typing_extensions import TypeAlias import logging from ckanext.wri.model.notification import Notification, notification_list_dictize +from ckanext.wri.model.pending_datasets import PendingDatasets from ckanext.wri.logic.auth import schema import ckan.plugins.toolkit as 
tk import ckan.logic as logic @@ -10,9 +11,12 @@ NotificationGetUserViewedActivity: TypeAlias = None log = logging.getLogger(__name__) -def notification_update(context: Context, data_dict: DataDict) -> NotificationGetUserViewedActivity: + +def notification_update( + context: Context, data_dict: DataDict +) -> NotificationGetUserViewedActivity: """Update notification status for a user""" - + tk.check_access("notification_create", context, data_dict) sch = context.get("schema") or schema.default_update_notification_schema() data, errors = tk.navl_validate(data_dict, sch, context) @@ -20,21 +24,21 @@ def notification_update(context: Context, data_dict: DataDict) -> NotificationGe raise tk.ValidationError(errors) model = context["model"] - session = context['session'] + session = context["session"] user_obj = model.User.get(context["user"]) if not data_dict.get("id"): return notification_id = data_dict.get("id") - recipient_id = data_dict.get('recipient_id') - sender_id = data_dict.get('sender_id') - activity_type = data_dict.get('activity_type') - object_type = data_dict.get('object_type') - object_id = data_dict.get('object_id') - time_sent = data_dict.get('time_sent') - is_unread = data_dict.get('is_unread') - state = data_dict.get('state') + recipient_id = data_dict.get("recipient_id") + sender_id = data_dict.get("sender_id") + activity_type = data_dict.get("activity_type") + object_type = data_dict.get("object_type") + object_id = data_dict.get("object_id") + time_sent = data_dict.get("time_sent") + is_unread = data_dict.get("is_unread") + state = data_dict.get("state") user_notifications = Notification.update( notification_id=notification_id, @@ -45,10 +49,40 @@ def notification_update(context: Context, data_dict: DataDict) -> NotificationGe object_id=object_id, time_sent=time_sent, is_unread=is_unread, - state=state + state=state, ) notification_dicts = notification_list_dictize(user_notifications, context) if not notification_dicts: - raise 
logic.NotFound(_('Notification not found')) + raise logic.NotFound(_("Notification not found")) return notification_dicts + + +def pending_dataset_update(context: Context, data_dict: DataDict): + """Update a Pending Dataset""" + package_id = data_dict.get("package_id") + package_data = data_dict.get("package_data") + + if not package_id: + raise tk.ValidationError(_("package_id is required")) + + if not package_data: + raise tk.ValidationError(_("package_data is required")) + + tk.check_access("pending_dataset_update", context, package_data) + + pending_dataset = None + + try: + pending_dataset = PendingDatasets.update( + package_id=package_id, + package_data=package_data, + ) + except Exception as e: + log.error(e) + raise tk.ValidationError(e) + + if not pending_dataset: + raise logic.NotFound(_("Pending Dataset not found: {}").format(package_id)) + + return pending_dataset diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/auth/auth.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/auth/auth.py index 2b624da4c..926cce425 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/auth/auth.py +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/auth/auth.py @@ -4,9 +4,7 @@ @tk.auth_allow_anonymous_access -def notification_get_all( - context: Context, data_dict: DataDict -) -> AuthResult: +def notification_get_all(context: Context, data_dict: DataDict) -> AuthResult: if context.get("user"): return {"success": True} else: @@ -17,7 +15,26 @@ def notification_get_all( @tk.auth_allow_anonymous_access -def notification_create( - context: Context, data_dict: DataDict -) -> AuthResult: +def notification_create(context: Context, data_dict: DataDict) -> AuthResult: return tk.check_access("notification_get_all", context, data_dict) + + +def pending_dataset_create(context: Context, data_dict: DataDict) -> AuthResult: + return tk.check_access("package_update", context, data_dict) + + +def pending_dataset_show(context: Context, data_dict: DataDict) -> 
AuthResult: + if tk.check_access("package_show", context, data_dict): + return {"success": True} + else: + return { + "success": False, + "msg": tk._("Unauthorized to access pending dataset."), + } + +def pending_dataset_update(context: Context, data_dict: DataDict) -> AuthResult: + return tk.check_access("package_update", context, data_dict) + + +def pending_dataset_delete(context: Context, data_dict: DataDict) -> AuthResult: + return tk.check_access("package_delete", context, data_dict) \ No newline at end of file diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/auth/schema.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/auth/schema.py index 563ccf796..64dcc183a 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/auth/schema.py +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/auth/schema.py @@ -131,4 +131,70 @@ def default_get_notification_schema( "time_sent": [ignore_empty, ignore_missing], "is_unread":[ignore_empty, ignore_missing], "state": [ignore_empty, ignore_missing] - } \ No newline at end of file + } + +@validator_args +def default_create_pending_dataset_schema( + not_missing: Validator, + not_empty: Validator, + unicode_safe: Validator, +) -> Schema: + return { + "package_id": [ + not_missing, + not_empty, + unicode_safe, + ], + "package_data": [ + not_missing, + not_empty, + unicode_safe, + ], + } + +@validator_args +def default_update_pending_dataset_schema( + not_missing: Validator, + not_empty: Validator, + unicode_safe: Validator, +) -> Schema: + return { + "package_id": [ + not_missing, + not_empty, + unicode_safe, + ], + "package_data": [ + not_missing, + not_empty, + unicode_safe, + ], + } + +@validator_args +def default_get_pending_dataset_schema( + not_missing: Validator, + not_empty: Validator, + unicode_safe: Validator, +) -> Schema: + return { + "package_id": [ + not_missing, + not_empty, + unicode_safe, + ], + } + +@validator_args +def default_delete_pending_dataset_schema( + not_missing: 
Validator, + not_empty: Validator, + unicode_safe: Validator, +) -> Schema: + return { + "package_id": [ + not_missing, + not_empty, + unicode_safe, + ], + } diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/model/__init__.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/model/__init__.py index 87040c997..c9aa2369e 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/model/__init__.py +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/model/__init__.py @@ -1,5 +1,5 @@ from .notification import Notification, notification -from ckan import model +from .pending_datasets import pending_datasets import logging log = logging.getLogger(__name__) @@ -14,4 +14,14 @@ def setup(): notification.create(checkfirst=True) log.info('Tables created for notifications') else: - log.info('Notificaitons Table already exists') \ No newline at end of file + log.info('Notifications Table already exists') + +def setup_pending_datasets(): + """ + Create Pending Datasets Table in the database. + """ + if not pending_datasets.exists(): + pending_datasets.create(checkfirst=True) + log.info('Tables created for pending datasets') + else: + log.info('Pending Datasets Table already exists') diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/model/pending_datasets.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/model/pending_datasets.py new file mode 100644 index 000000000..045cd1c7c --- /dev/null +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/model/pending_datasets.py @@ -0,0 +1,116 @@ +import logging +from typing import Optional +import sqlalchemy +from sqlalchemy.sql import func +from sqlalchemy.dialects.postgresql import JSONB + +import ckan.model.meta as meta +from ckan.common import _ + + +log = logging.getLogger(__name__) + + +pending_datasets = sqlalchemy.Table( + "pending_datasets", + meta.metadata, + sqlalchemy.Column("package_id", sqlalchemy.types.UnicodeText, primary_key=True), + sqlalchemy.Column("package_data", JSONB, nullable=False), + sqlalchemy.Column( + 
"last_modified", + sqlalchemy.types.DateTime, + default=func.now(), + onupdate=func.now(), + ), +) + + +class PendingDatasets(object): + """Manage pending datasets""" + + def __init__(self, package_id: str, package_data: dict) -> None: + self.package_id = package_id + self.package_data = package_data + self.last_modified = None + + @classmethod + def get(cls, package_id: str) -> Optional[dict]: + try: + pending_dataset = ( + meta.Session.query(PendingDatasets) + .filter(PendingDatasets.package_id == package_id) + .one() + ) + return { + "package_id": pending_dataset.package_id, + "package_data": pending_dataset.package_data, + "last_modified": pending_dataset.last_modified, + } + except Exception as e: + log.error(e) + + @classmethod + def create( + cls, + package_id: str, + package_data: dict, + ) -> Optional[dict]: + try: + pending_dataset = PendingDatasets(package_id, package_data) + meta.Session.add(pending_dataset) + meta.Session.commit() + return { + "package_id": pending_dataset.package_id, + "package_data": pending_dataset.package_data, + "last_modified": pending_dataset.last_modified, + } + except Exception as e: + log.error(e) + meta.Session.rollback() + raise + + @classmethod + def update( + cls, + package_id: str, + package_data: dict, + ) -> Optional[dict]: + try: + pending_dataset = ( + meta.Session.query(PendingDatasets) + .filter(PendingDatasets.package_id == package_id) + .one() + ) + + if pending_dataset: + pending_dataset.package_data = package_data + meta.Session.commit() + return { + "package_id": pending_dataset.package_id, + "package_data": pending_dataset.package_data, + "last_modified": pending_dataset.last_modified, + } + else: + log.error(_(f"Pending Dataset not found: {package_id}")) + return + + except Exception as e: + log.error(e) + meta.Session.rollback() + + @classmethod + def delete(cls, package_id: str) -> None: + try: + pending_dataset = meta.Session.query(PendingDatasets).filter( + PendingDatasets.package_id == package_id + 
).one() + log.error(pending_dataset) + meta.Session.delete(pending_dataset) + meta.Session.commit() + return pending_dataset + except Exception as e: + log.error(e) + meta.Session.rollback() + + +meta.mapper(PendingDatasets, pending_datasets) diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/plugin.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/plugin.py index a3f3cea4d..c1c3e9898 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/plugin.py +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/plugin.py @@ -7,9 +7,10 @@ from ckan.types import Action, AuthFunction, Context from ckan.lib.search import SearchError from ckanext.wri.logic.auth import auth as auth -from ckanext.wri.logic.action.create import notification_create -from ckanext.wri.logic.action.update import notification_update -from ckanext.wri.logic.action.get import package_search, notification_get_all +from ckanext.wri.logic.action.create import notification_create, pending_dataset_create +from ckanext.wri.logic.action.update import notification_update, pending_dataset_update +from ckanext.wri.logic.action.get import package_search, notification_get_all, pending_dataset_show, pending_diff_show +from ckanext.wri.logic.action.delete import pending_dataset_delete from ckanext.wri.search import SolrSpatialFieldSearchBackend import logging @@ -34,7 +35,7 @@ def update_config(self, config_): toolkit.add_resource("assets", "wri") def get_commands(self): - """CLI commands - Creates notifications data tables""" + """CLI commands - Creates custom data tables""" import click @click.command() @@ -43,14 +44,24 @@ def notificationdb(): from ckanext.wri.model import setup setup() - return [notificationdb] + @click.command() + def pendingdatasetsdb(): + """Creates pending datasets table""" + from ckanext.wri.model import setup_pending_datasets + setup_pending_datasets() + + return [notificationdb, pendingdatasetsdb] # IAuth def get_auth_functions(self) -> dict[str, AuthFunction]: return { 
'notification_get_all': auth.notification_get_all, - 'notification_create': auth.notification_create + 'notification_create': auth.notification_create, + 'pending_dataset_create': auth.pending_dataset_create, + 'pending_dataset_show': auth.pending_dataset_show, + 'pending_dataset_update': auth.pending_dataset_update, + 'pending_dataset_delete': auth.pending_dataset_delete, } # IValidators @@ -90,15 +101,20 @@ def get_actions(self): 'password_reset': action.password_reset, 'notification_get_all': notification_get_all, 'notification_create': notification_create, - 'notification_update': notification_update - + 'notification_update': notification_update, + 'pending_dataset_create': pending_dataset_create, + 'pending_dataset_show': pending_dataset_show, + 'pending_dataset_update': pending_dataset_update, + 'pending_dataset_delete': pending_dataset_delete, + 'pending_diff_show': pending_diff_show, } # IPermissionLabels def get_dataset_labels(self, dataset_obj: model.Package) -> list[str]: visibility_type = dataset_obj.extras.get('visibility_type', '') - if dataset_obj.state == u'active' and visibility_type == "public": + is_draft = dataset_obj.extras.get('draft', False) + if dataset_obj.state == u'active' and visibility_type == "public" and is_draft != 'true': return [u'public'] if authz.check_config_permission('allow_dataset_collaborators'): @@ -109,7 +125,7 @@ def get_dataset_labels(self, dataset_obj: model.Package) -> list[str]: if dataset_obj.owner_org and visibility_type in ["private"]: labels.append(u'member-%s' % dataset_obj.owner_org) - elif visibility_type == "internal": + elif visibility_type == "internal" and is_draft != 'true': labels.append(u'authenticated') else: # Draft labels.append(u'creator-%s' % dataset_obj.creator_user_id) diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/schema/ckan_dataset.yaml b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/schema/ckan_dataset.yaml index 7bb9c6b3a..4e2c2530b 100644 --- 
a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/schema/ckan_dataset.yaml +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/schema/ckan_dataset.yaml @@ -6,6 +6,23 @@ about_url: http://github.com/ckan/ckanext-scheming dataset_fields: +- field_name: approval_status + label: Approval Status + preset: select + choices: + - value: pending + label: Pending + - value: approved + label: Approved + - value: rejected + label: Rejected + +- field_name: draft + label: Draft + validators: boolean_validator + output_validators: boolean_validator + preset: checkbox + - field_name: spatial_address label: Spatial address validators: ignore_missing @@ -111,8 +128,6 @@ dataset_fields: label: Visibility Type preset: select choices: - - value: draft - label: Draft - value: private label: Private - value: internal diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_dataset_permissions.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_dataset_permissions.py index 8a9837261..c6c79533d 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_dataset_permissions.py +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_dataset_permissions.py @@ -109,7 +109,8 @@ def test_package_create(): } dataset_draft = dict(dataset_public) - dataset_draft["visibility_type"] = "draft" + dataset_draft["draft"] = True + dataset_draft["visibility_type"] = "internal" dataset_draft["name"] = "draft-dataset" dataset_private = dict(dataset_public) diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_location_search.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_location_search.py index 83bd6f8d3..da0093724 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_location_search.py +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_location_search.py @@ -37,7 +37,7 @@ def test_location_search(): # 'temporal_coverage_end': '2011', 'update_frequency': 'annually', 'citation': 'Citation information', - 
'visibility_type': 'draft', + 'visibility_type': 'public', 'license_id': 'cc-by-4.0', 'featured_dataset': True, 'short_description': 'A short description of the dataset', diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_pending_datasets.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_pending_datasets.py new file mode 100644 index 000000000..2f12ff725 --- /dev/null +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_pending_datasets.py @@ -0,0 +1,123 @@ +import pytest + +import ckan.tests.factories as factories +from ckan.logic import get_action + + +@pytest.mark.usefixtures("with_plugins", "test_request_context") +def _setup(): + user = factories.Sysadmin() + dataset = factories.Dataset( + notes="My dataset description", + private=False, + rw_dataset=True, + title="My dataset", + wri_data=False, + ) + + context = { + "user": user["name"], + "user_obj": user, + } + + data_dict = { + "package_id": dataset["id"], + "package_data": dataset, + } + + return dataset, context, data_dict + + +@pytest.mark.usefixtures("with_plugins", "test_request_context") +def test_pending_dataset_create(): + dataset, context, data_dict = _setup() + + result = get_action("pending_dataset_create")(context, data_dict) + + assert result["package_id"] == dataset["id"] + assert result["package_data"] == dataset + + +@pytest.mark.usefixtures("with_plugins", "test_request_context") +def test_pending_dataset_show(): + dataset, context, data_dict = _setup() + + get_action("pending_dataset_create")(context, data_dict) + + result = get_action("pending_dataset_show")(context, {"package_id": dataset["id"]}) + + assert result["package_id"] == dataset["id"] + assert result["package_data"] == dataset + + +@pytest.mark.usefixtures("with_plugins", "test_request_context") +def test_pending_dataset_update(): + dataset, context, data_dict = _setup() + + get_action("pending_dataset_create")(context, data_dict) + + result = get_action("pending_dataset_show")(context, 
{"package_id": dataset["id"]}) + + assert result["package_id"] == dataset["id"] + assert result["package_data"] == dataset + + dataset["title"] = "New Title" + + data_dict = { + "package_id": dataset["id"], + "package_data": dataset, + } + + result = get_action("pending_dataset_update")(context, data_dict) + + assert result["package_id"] == dataset["id"] + assert result["package_data"] == dataset + assert result["package_data"]["title"] == "New Title" + + +@pytest.mark.usefixtures("with_plugins", "test_request_context") +def test_pending_dataset_delete(): + dataset, context, data_dict = _setup() + + get_action("pending_dataset_create")(context, data_dict) + + result = get_action("pending_dataset_show")(context, {"package_id": dataset["id"]}) + + assert result["package_id"] == dataset["id"] + assert result["package_data"] == dataset + + get_action("pending_dataset_delete")(context, {"package_id": dataset["id"]}) + + try: + get_action("pending_dataset_show")(context, {"package_id": dataset["id"]}) + except Exception as e: + assert e.message == f"Pending Dataset not found: {dataset['id']}" + + +@pytest.mark.usefixtures("with_plugins", "test_request_context") +def test_pending_diff_show(): + dataset, context, data_dict = _setup() + + get_action("pending_dataset_create")(context, data_dict) + + updated_dataset = dataset.copy() + updated_dataset["title"] = "New Title" + updated_dataset["notes"] = "New description" + updated_dataset["private"] = True + updated_dataset["rw_dataset"] = False + updated_dataset["wri_data"] = True + + data_dict = { + "package_id": dataset["id"], + "package_data": updated_dataset, + } + + get_action("pending_dataset_update")(context, data_dict) + + result = get_action("pending_diff_show")(context, {"package_id": dataset["id"]}) + + assert result["title"]["new_value"] == "New Title" + assert result["notes"]["new_value"] == "New description" + assert result["private"]["new_value"] is True + assert result["rw_dataset"]["new_value"] is False + 
assert result["wri_data"]["new_value"] is True \ No newline at end of file diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_schema.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_schema.py index c9ea10923..47b9a39a4 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_schema.py +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_schema.py @@ -35,8 +35,9 @@ def test_package_create(): "temporal_coverage_end": "2011", "update_frequency": "annually", "citation": "Citation information", - "visibility_type": "draft", + "visibility_type": "public", "license_id": "cc-by-4.0", + "draft": False, "featured_dataset": True, "short_description": "A short description of the dataset", "notes": "Some useful notes about the data", @@ -85,6 +86,7 @@ def test_package_create(): assert result["citation"] == dataset["citation"] assert result["visibility_type"] == dataset["visibility_type"] assert result["license_id"] == dataset["license_id"] + assert result["draft"] is False assert result["featured_dataset"] is True assert result["short_description"] == dataset["short_description"] assert result["notes"] == dataset["notes"] diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_search.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_search.py index 6cab4dc2c..c736db0a4 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_search.py +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/tests/test_search.py @@ -35,7 +35,7 @@ def test_search_queries(): # 'temporal_coverage_end': '2011', 'update_frequency': 'annually', 'citation': 'Citation information', - 'visibility_type': 'draft', + 'visibility_type': 'public', 'license_id': 'cc-by-4.0', 'featured_dataset': True, 'short_description': 'A short description of the dataset', @@ -79,7 +79,7 @@ def test_search_queries(): # Test that correct queries return the dataset for field in fields_to_test: - if field == 'featured_dataset' or field == 
'wri_data': + if field in ['featured_dataset', 'wri_data', 'draft']: result = get_action('package_search')( context=context, data_dict={'q': f'{field}:{dataset[field]}'} @@ -97,7 +97,7 @@ def test_search_queries(): # Test that incorrect queries do not return the dataset for field in fields_to_test: - if field == 'featured_dataset' or field == 'wri_data': + if field in ['featured_dataset', 'wri_data', 'draft']: result = get_action('package_search')( context=context, data_dict={'q': f'{field}:false'} diff --git a/ckan-backend-dev/src/ckanext-wri/requirements.txt b/ckan-backend-dev/src/ckanext-wri/requirements.txt index ff82ea114..225d5342e 100644 --- a/ckan-backend-dev/src/ckanext-wri/requirements.txt +++ b/ckan-backend-dev/src/ckanext-wri/requirements.txt @@ -1,3 +1,2 @@ pycountry==22.3.5 - Shapely==2.0.1 diff --git a/deployment/frontend/src/components/vizzuality/components/legend/components/legend-item-types/LegendItemTypesList.tsx b/deployment/frontend/src/components/vizzuality/components/legend/components/legend-item-types/LegendItemTypesList.tsx index 9fc44edaf..3af854708 100644 --- a/deployment/frontend/src/components/vizzuality/components/legend/components/legend-item-types/LegendItemTypesList.tsx +++ b/deployment/frontend/src/components/vizzuality/components/legend/components/legend-item-types/LegendItemTypesList.tsx @@ -12,6 +12,8 @@ export default function LegendItemTypesList(props: any) { } } + legendConfigs = legendConfigs.filter((lc: any) => lc.type) + return legendConfigs.map((lc: any, i: number) => (