Applications as objects (#582)
* Applications as objects

* Progress

* Progress

* Progress

* Progress

* Forgot to commit

* Applications as objs frontend

* Fix build

* Fix tests

* Tests

* Fix build

* Fix build

* Fix tests

* Fix test

* Trigger CI

* Rm test

* Fix unit tests
luccasmmg authored Nov 14, 2024
1 parent ec4ca8a commit 5c382f6
Showing 47 changed files with 2,355 additions and 125 deletions.
@@ -1,5 +1,8 @@
import json
import logging
from ckan.authz import users_role_for_group_or_org
import ckan.plugins.toolkit as tk
from ckan.common import _, config, current_user

from ckan.types import DataDict
from ckan.logic.validators import email_validator
@@ -61,7 +64,10 @@ def _check_type(actors: str, data_dict: DataDict, actor_type: str) -> DataDict:
actors = actors.strip()

if (
actors[0] == '"' and actors[-1] == '"' or actors[0] == "'" and actors[-1] == "'"
actors[0] == '"'
and actors[-1] == '"'
or actors[0] == "'"
and actors[-1] == "'"
) and len(actors) > 1:
actors = actors[1:-1]

@@ -90,3 +96,71 @@ def stringify_actor_objects(data_dict: DataDict) -> DataDict:
data_dict = _check_type(actors, data_dict, key)

return data_dict


def _fix_application_field(data_dict):
"""
When "applications" field is provided, add dataset to the
application
"""
applications = data_dict.get("applications", None)

if applications is not None and len(applications) > 0:
application_names = [
group.get("name") for group in data_dict.get("applications", [])
]
priviliged_context = {"ignore_auth": True}

group_list_action = tk.get_action("group_list")
group_list_data_dict = {
"type": "application",
"groups": application_names,
"include_extras": True,
"all_fields": True,
}
group_list = group_list_action(priviliged_context, group_list_data_dict)

application_groups = [
{"name": x.get("name"), "type": "application"} for x in group_list
]
groups = [
{"name": x.get("name"), "type": "group"}
for x in data_dict.get("groups", [])
]
groups += application_groups
data_dict["groups"] = groups
data_dict["applications"] = [x.get("name") for x in group_list]
return data_dict

def _fix_user_group_permission(data_dict):
"""
By default, any user should be able to create datasets
with any application or topic.
To do that, add user as member of groups.
"""
groups = data_dict.get("groups", [])
if not hasattr(current_user, "id"):
return
user_id = current_user.name

if len(groups) > 0 and user_id:
priviliged_context = {"ignore_auth": True}
group_member_create_action = tk.get_action("group_member_create")

for group in groups:
group_id = group.get("name")
capacity = users_role_for_group_or_org(group_id, user_id)
if capacity not in ["member", "editor", "admin"]:
group_member_create_data_dict = {
"id": group.get("name"),
"username": user_id,
"role": "member",
}
group_member_create_action(
priviliged_context, group_member_create_data_dict
)
return data_dict


def _before_dataset_create_or_update(context, data_dict, is_update=False):
_fix_user_group_permission(data_dict)
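
To make the intent of _fix_application_field concrete, here is a minimal, self-contained sketch of the reshaping it performs. The payload and the group_list result below are invented for illustration; in the plugin the list comes from CKAN's group_list action with type="application".

# Illustration only: mimics the reshaping done by _fix_application_field.
# "payload" and "group_list" are made-up stand-ins for real CKAN data.
payload = {
    "applications": [{"name": "aqueduct"}],
    "groups": [{"name": "climate"}],
}
group_list = [{"name": "aqueduct", "type": "application"}]

application_groups = [{"name": g.get("name"), "type": "application"} for g in group_list]
groups = [{"name": g.get("name"), "type": "group"} for g in payload.get("groups", [])]
payload["groups"] = groups + application_groups
payload["applications"] = [g.get("name") for g in group_list]

print(payload["groups"])
# [{'name': 'climate', 'type': 'group'}, {'name': 'aqueduct', 'type': 'application'}]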
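
Similarly, a rough sketch of the membership check behind _fix_user_group_permission, with the two CKAN calls replaced by stubs. The stubbed role lookup and membership store are assumptions for illustration only, not CKAN APIs.

# Illustration only: stubs stand in for users_role_for_group_or_org and
# tk.get_action("group_member_create").
existing_roles = {("climate", "alice"): "editor"}   # (group, user) -> role
created_memberships = []                            # records stub "API" calls

def role_for(group_id, user_id):
    return existing_roles.get((group_id, user_id))

def group_member_create(group_id, user_id, role):
    created_memberships.append({"id": group_id, "username": user_id, "role": role})

user_id = "alice"
groups = [{"name": "climate"}, {"name": "aqueduct"}]
for group in groups:
    if role_for(group["name"], user_id) not in ("member", "editor", "admin"):
        group_member_create(group["name"], user_id, "member")

print(created_memberships)   # only "aqueduct" needs a new membership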
52 changes: 40 additions & 12 deletions ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/create.py
@@ -1,10 +1,12 @@
from pprint import pprint
from typing_extensions import TypeAlias, Any
import logging
import requests
from urllib.parse import urljoin
import json
from typing import Any, Union, cast
import six
from ckan.common import _, config, current_user

from ckanext.wri.model.notification import Notification, notification_dictize
from ckanext.wri.model.pending_datasets import PendingDatasets
@@ -26,8 +28,10 @@
import ckan.lib.uploader as uploader
import ckan.lib.plugins as lib_plugins
import ckan.lib.dictization.model_save as model_save

from ckanext.wri.logic.action.action_helpers import stringify_actor_objects
from ckanext.wri.logic.action.action_helpers import (
stringify_actor_objects,
_before_dataset_create_or_update,
)
import uuid

NotificationGetUserViewedActivity: TypeAlias = None
@@ -204,7 +208,7 @@ def notification_create(

user_notifications = Notification(
recipient_id=recipient_id,
sender_id=sender_id if sender_id else '',
sender_id=sender_id if sender_id else "",
activity_type=activity_type,
object_type=object_type,
object_id=object_id,
@@ -285,7 +289,9 @@ def migrate_dataset(context: Context, data_dict: DataDict):

if not dataset_id:
if not gfw_dataset:
raise tk.ValidationError(_("Dataset 'rw_dataset_id' or 'gfw_dataset' is required"))
raise tk.ValidationError(
_("Dataset 'rw_dataset_id' or 'gfw_dataset' is required")
)
else:
data_dict["gfw_only"] = True

@@ -414,7 +420,27 @@ def package_create(context: Context, data_dict: DataDict):

data_dict = stringify_actor_objects(data_dict)

_before_dataset_create_or_update(context, data_dict)
dataset = l.action.create.package_create(context, data_dict)
if dataset.get("groups"):
# This is necessary because the pending dataset doesnt have any of the logic that package_show has
groups = [
tk.get_action("group_show")(context, {"id": group.get("name")})
for group in dataset.get("groups")
]
groups = [
{
"id": group.get("id"),
"name": group.get("name"),
"display_name": group.get("display_name"),
"title": group.get("title"),
"description": group.get("description"),
"image_display_url": group.get("image_display_url"),
"type": group.get("type"),
}
for group in groups
]
dataset["groups"] = groups
if data_dict.get("owner_org"):
org = tk.get_action("organization_show")(
context, {"id": data_dict.get("owner_org")}
@@ -458,16 +484,18 @@ def package_create(context: Context, data_dict: DataDict):
context, {"dataset_id": dataset.get("id")}
)

if (dataset.get("visibility_type") == "internal"):
if dataset.get("visibility_type") == "internal":
print("INTERNAL PENDING DATASET")

__import__('pprint').pprint(pending_dataset)
__import__("pprint").pprint(pending_dataset)
return dataset


# IMPORTANT: This function includes an override/change for authors/maintainers (the call to stringify_actor_objects).
# This is not a 1:1 match with the original function, though all other logic is the same.
def old_package_create(context: Context, data_dict: DataDict) -> ActionResult.PackageCreate:
def old_package_create(
context: Context, data_dict: DataDict
) -> ActionResult.PackageCreate:
"""Create a new dataset (package).
You must be authorized to create new datasets. If you specify any groups
@@ -721,8 +749,8 @@ def resource_create(
if not data_dict.get("url"):
data_dict["url"] = ""

if not data_dict.get('id'):
data_dict['id'] = str(uuid.uuid4())
if not data_dict.get("id"):
data_dict["id"] = str(uuid.uuid4())

package_show_context: Union[Context, Any] = dict(context, for_update=True)
pkg_dict = _get_action("package_show")(package_show_context, {"id": package_id})
@@ -764,15 +792,15 @@ def resource_create(
# package_show until after commit
package = context["package"]
assert package
upload.upload(data_dict['id'], uploader.get_max_resource_size())
upload.upload(data_dict["id"], uploader.get_max_resource_size())

model.repo.commit()

# Run package show again to get out actual last_resource
updated_pkg_dict = _get_action("package_show")(context, {"id": package_id})
resource = updated_pkg_dict["resources"][-1]
if not resource.get('id'):
resource['id'] = data_dict['id']
if not resource.get("id"):
resource["id"] = data_dict["id"]

# Add the default views to the new resource
logic.get_action("resource_create_default_resource_views")(
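
For the package_create change above, each group attached to the new dataset is re-read and reduced to a fixed set of fields before being returned. A small sketch of that trimming step with a stubbed group_show; the stub's return values are invented, while the real data comes from tk.get_action("group_show").

# Illustration only: group_show_stub stands in for tk.get_action("group_show").
def group_show_stub(group_name):
    return {
        "id": "0001", "name": group_name, "display_name": group_name.title(),
        "title": group_name.title(), "description": "", "image_display_url": "",
        "type": "application", "users": [],   # extra keys are dropped below
    }

dataset = {"groups": [{"name": "aqueduct"}]}
keep = ("id", "name", "display_name", "title",
        "description", "image_display_url", "type")
full_groups = [group_show_stub(g["name"]) for g in dataset["groups"]]
dataset["groups"] = [{k: g.get(k) for k in keep} for g in full_groups]

print(dataset["groups"][0]["type"])   # "application"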
57 changes: 49 additions & 8 deletions ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/get.py
@@ -414,13 +414,21 @@ def package_search(context: Context, data_dict: DataDict) -> ActionResult.PackageSearch:
group_names.extend(facets.get(field_name, {}).keys())

groups = (
session.query(model.Group.name, model.Group.title)
session.query(model.Group.name, model.Group.title)
# type_ignore_reason: incomplete SQLAlchemy types
.filter(model.Group.name.in_(group_names)).all() # type: ignore
if group_names
else []
)
_groups = (
session.query(model.Group.name, model.Group.type)
# type_ignore_reason: incomplete SQLAlchemy types
.filter(model.Group.name.in_(group_names)).all() # type: ignore
if group_names
else []
)
group_titles_by_name = dict(groups)
group_types_by_name = dict(_groups)

# Transform facets into a more useful data structure.
restructured_facets: dict[str, Any] = {}
@@ -431,9 +439,14 @@ def package_search(context: Context, data_dict: DataDict) -> ActionResult.PackageSearch:
new_facet_dict["name"] = key_
if key in ("groups", "organization"):
display_name = group_titles_by_name.get(key_, key_)
group_type = group_types_by_name.get(key_, key_)
display_name = (
display_name if display_name and display_name.strip() else key_
)
group_type = (
group_type if group_type and group_type.strip() else key_
)
new_facet_dict["type"] = group_type
new_facet_dict["display_name"] = display_name
elif key == "license_id":
license = model.Package.get_license_register().get(key_)
Expand Down Expand Up @@ -554,6 +567,36 @@ def pending_dataset_show(context: Context, data_dict: DataDict):

try:
pending_dataset = PendingDatasets.get(package_id=package_id)
if pending_dataset and pending_dataset.get('package_data'):
package_data = pending_dataset['package_data']
if package_data.get("groups", None) is not None:
_groups = [
tk.get_action("group_show")(context, {"id": group.get('name')})
for group in package_data.get("groups")
]
groups = [
{
"description": group.get("description"),
"display_name": group.get("display_name"),
"id": group.get("id"),
"image_display_url": group.get("image_display_url"),
"name": group.get("name"),
"title": group.get("title"),
"type": group.get("type"),
"homepage_url": group.get("homepage_url", None) if 'homepage_url' in group else None,
"contact_url": group.get("contact_url", None) if 'contact_url' in group else None,
"help_url": group.get("help_url", None) if 'help_url' in group else None,
}
for group in _groups
]
for group in groups:
if group.get('help_url') is None:
del group['help_url']
if group.get('contact_url') is None:
del group['contact_url']
if group.get('homepage_url') is None:
del group['homepage_url']
pending_dataset['package_data']["groups"] = groups
except Exception as e:
log.error(e)
raise tk.ValidationError(e)
@@ -581,8 +624,9 @@ def pending_diff_show(context: Context, data_dict: DataDict):
try:
pending_dataset = PendingDatasets.get(package_id=package_id)
if pending_dataset is not None:
context["for_approval"] = True
pending_dataset = pending_dataset.get("package_data")
# context["for_approval"] = True
pending_dataset = get_action('pending_dataset_show')(context, { "package_id": package_id})
pending_dataset = pending_dataset['package_data']
existing_dataset = get_action("package_show")(context, {"id": package_id})
dataset_diff = _diff(existing_dataset, pending_dataset)
except Exception as e:
@@ -1356,9 +1400,7 @@ def _add_group_types(context: Context, data_dict: DataDict):
}

if group_type == "application":
group_dict_updates = {
"type": group_type
}
group_dict_updates = {"type": group_type}
group.update(group_dict_updates)

for key, value in new_group_dict.items():
Expand All @@ -1372,8 +1414,7 @@ def _add_group_types(context: Context, data_dict: DataDict):
group.update({"type": group_type})
updated_package_groups.append(group)

data_dict["groups"] = updated_package_groups
data_dict["applications"] = package_applications if package_applications else []
data_dict["groups"] = updated_package_groups + package_applications
except Exception as e:
log.error(f"Error adding group types: {e}")

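
The package_search change above attaches a group type to each groups/organization facet entry. A minimal sketch of the lookup-with-fallback pattern; the mapping is hard-coded here, whereas the plugin builds it from a model.Group query.

# Illustration only: group_types_by_name is hard-coded; the plugin builds it
# from session.query(model.Group.name, model.Group.type).
group_types_by_name = {"aqueduct": "application", "climate": "group"}

def facet_entry(key_, count):
    group_type = group_types_by_name.get(key_, key_)
    group_type = group_type if group_type and group_type.strip() else key_
    return {"name": key_, "count": count, "type": group_type}

print(facet_entry("aqueduct", 3))   # type resolved to "application"
print(facet_entry("unknown", 1))    # falls back to the facet key itself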
@@ -19,7 +19,7 @@
GroupNotificationParams,
send_group_notification,
)
from ckanext.wri.logic.action.action_helpers import stringify_actor_objects
from ckanext.wri.logic.action.action_helpers import stringify_actor_objects, _before_dataset_create_or_update
import ckan.plugins.toolkit as tk
import ckan.logic as logic
from ckan.common import _
@@ -167,8 +167,6 @@ def pending_dataset_update(context: Context, data_dict: DataDict):
if not package_data:
raise tk.ValidationError(_("package_data is required"))

tk.check_access("package_create", context, package_data)

pending_dataset = None

try:
@@ -468,6 +466,7 @@ def old_package_patch(context: Context, data_dict: DataDict) -> ActionResult.PackagePatch:
You must be authorized to edit the dataset and the groups that it belongs
to.
"""
_before_dataset_create_or_update(context, data_dict)
_check_access("package_patch", context, data_dict)

show_context: Context = {
@@ -117,7 +117,6 @@ def package_update(up_func, context, data_dict):
# (ie if user is a collaborator)
if user_obj:
if user_obj.id == package.creator_user_id:
print("CREATOR ID")
return {"success": True}
if (
authz.user_is_collaborator_on_dataset(
@@ -173,11 +172,9 @@ def pending_dataset_show(context: Context, data_dict: DataDict) -> AuthResult:


def pending_dataset_update(context: Context, data_dict: DataDict) -> AuthResult:
#print("PENDING DATASET UPDATE", flush=True)
#print(data_dict, flush=True)
print("CHECKING PENDING DATASET UPDATE AUTH", flush=True)
return tk.check_access("package_update", context, data_dict)


def pending_dataset_delete(context: Context, data_dict: DataDict) -> AuthResult:
return tk.check_access("package_delete", context, data_dict)

@@ -17,6 +17,7 @@ def test_package_create_public(mail_user):
title="Test Group",
description="A description of the group",
)
group_dict = factories.Group()

userobj_sysadmin = factories.Sysadmin()
userobj_org_admin = factories.User()
@@ -132,11 +132,10 @@ def test_package_create(mail_user):
assert result["learn_more"] == dataset["learn_more"]
assert result["cautions"] == dataset["cautions"]
assert result["methodology"] == dataset["methodology"]
assert application_group_dict["id"] in [group["id"] for group in result["applications"]]
assert application_group_dict["name"] in [group["name"] for group in result["applications"]]
assert application_group_dict["title"] in [group["title"] for group in result["applications"]]
assert application_group_dict["type"] in [group["type"] for group in result["applications"]]
assert application_group_dict["homepage_url"] in [group["homepage_url"] for group in result["applications"]]
assert application_group_dict["id"] in [group["id"] for group in result["groups"]]
assert application_group_dict["name"] in [group["name"] for group in result["groups"]]
assert application_group_dict["title"] in [group["title"] for group in result["groups"]]
assert application_group_dict["type"] in [group["type"] for group in result["groups"]]

invalid_urls = ["invalid_url_1", "invalid_url_2", "invalid_url_3"]
