Skip to content

Commit

Permalink
stop supporting setting of dataset type via DDI #10517
Browse files Browse the repository at this point in the history
Maybe someday but we're not confident about which field to use and
we're not even sure if there is any interest in this because DDI
usually represents data, not software or workflows.
  • Loading branch information
pdurbin committed Jul 24, 2024
1 parent cfac9dc commit 3aab5c0
Show file tree
Hide file tree
Showing 5 changed files with 1 addition and 284 deletions.
4 changes: 0 additions & 4 deletions doc/release-notes/10517-datasetType.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,3 @@
### Initial Support for Dataset Types (Dataset, Software, Workflow)

Datasets now have types. By default the dataset type will be "dataset" but if you turn on support for additional types, datasets can have a type of "software" or "workflow" as well. For more details see <https://dataverse-guide--10694.org.readthedocs.build/en/10694/user/dataset-types.html> and #10517. Please note that this feature is highly experimental.

next:

- create with DDI
196 changes: 0 additions & 196 deletions doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml

This file was deleted.

7 changes: 0 additions & 7 deletions doc/sphinx-guides/source/user/dataset-types.rst
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,3 @@ Import with Native JSON
-----------------------

The same native JSON file as above can be used when importing a dataset: :download:`dataset-create-software.json <../_static/api/dataset-create-software.json>`

Import with DDI
---------------

An example DDI file is available at :download:`dataset-create-software-ddi.xml <../_static/api/dataset-create-software-ddi.xml>`

Note that for DDI import to work ``dataKind`` must be set to one of the valid types. The first valid type wins.
31 changes: 1 addition & 30 deletions src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
Original file line number Diff line number Diff line change
Expand Up @@ -430,13 +430,8 @@ public Response importDatasetDdi(@Context ContainerRequestContext crc, String xm
Dataverse owner = findDataverseOrDie(parentIdtf);
Dataset ds = null;
try {
JsonObject jsonObject = importService.ddiToJson(xml);
ds = jsonParser().parseDataset(jsonObject);
ds = jsonParser().parseDataset(importService.ddiToJson(xml));
DataverseUtil.checkMetadataLangauge(ds, owner, settingsService.getBaseMetadataLanguageMap(null, true));
DatasetType datasetType = getDatasetTypeFromJson(jsonObject);
if (datasetType != null) {
ds.setDatasetType(datasetType);
}
} catch (JsonParseException jpe) {
return badRequest("Error parsing data as Json: "+jpe.getMessage());
} catch (ImportException e) {
Expand Down Expand Up @@ -498,30 +493,6 @@ public Response importDatasetDdi(@Context ContainerRequestContext crc, String xm
}
}

/**
 * Scans the citation metadata block of a native dataset JSON object for a
 * "kindOfData" field and returns the first value that corresponds to a known
 * DatasetType (e.g. "software" or "workflow").
 *
 * @param jsonObject native dataset JSON; expected shape is
 *                   datasetVersion.metadataBlocks.citation.fields
 * @return a DatasetType built from the first recognized kindOfData value,
 *         or null if no kindOfData value maps to a known type (or the
 *         expected JSON structure is absent)
 */
public DatasetType getDatasetTypeFromJson(JsonObject jsonObject) {
// Walk down to the citation fields, returning null if any level is
// missing rather than throwing a NullPointerException.
JsonObject datasetVersion = jsonObject.getJsonObject("datasetVersion");
if (datasetVersion == null) {
return null;
}
JsonObject metadataBlocks = datasetVersion.getJsonObject("metadataBlocks");
if (metadataBlocks == null) {
return null;
}
JsonObject citation = metadataBlocks.getJsonObject("citation");
if (citation == null) {
return null;
}
JsonArray citationFields = citation.getJsonArray("fields");
if (citationFields == null) {
return null;
}
for (JsonValue citationField : citationFields) {
JsonObject field = (JsonObject) citationField;
// getString with a default avoids an exception when "typeName" is absent.
if (!DatasetFieldConstant.kindOfData.equals(field.getString("typeName", null))) {
continue;
}
JsonArray values = field.getJsonArray("value");
if (values == null) {
continue;
}
for (JsonString value : values.getValuesAs(JsonString.class)) {
try {
// Return the first value that maps to a known DatasetType.
DatasetType.Type type = DatasetType.Type.fromString(value.getString());
return new DatasetType(type);
} catch (IllegalArgumentException ex) {
// Value is some other kind of data; keep scanning remaining values.
}
}
}
return null;
}

@POST
@AuthRequired
@Path("{identifier}/datasets/:startmigration")
Expand Down
47 changes: 0 additions & 47 deletions src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java
Original file line number Diff line number Diff line change
@@ -1,13 +1,11 @@
package edu.harvard.iq.dataverse.api;

import edu.harvard.iq.dataverse.search.SearchFields;
import io.restassured.RestAssured;
import io.restassured.path.json.JsonPath;
import io.restassured.response.Response;
import static jakarta.ws.rs.core.Response.Status.CREATED;
import static jakarta.ws.rs.core.Response.Status.OK;
import org.hamcrest.CoreMatchers;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
Expand Down Expand Up @@ -137,49 +135,4 @@ public void testImportJson() {

}

@Test
// Integration test: imports a dataset via the DDI import API with dataKind
// set to "workflow" and verifies (a) the resulting dataset has datasetType
// "workflow" and (b) the type appears as a search facet.
// NOTE(review): this test was deleted in this commit along with DDI-based
// dataset-type support; requires a running Dataverse instance (UtilIT).
public void testImportDdiWorkflow() {
// Create a random user and capture credentials for subsequent API calls.
Response createUser = UtilIT.createRandomUser();
createUser.then().assertThat().statusCode(OK.getStatusCode());
String username = UtilIT.getUsernameFromResponse(createUser);
String apiToken = UtilIT.getApiTokenFromResponse(createUser);

// The import endpoint requires superuser privileges.
UtilIT.setSuperuserStatus(username, true).then().assertThat().statusCode(OK.getStatusCode());

// Create and publish a dataverse to import the dataset into.
Response createDataverse = UtilIT.createRandomDataverse(apiToken);
createDataverse.then().assertThat().statusCode(CREATED.getStatusCode());
String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse);
Integer dataverseId = UtilIT.getDataverseIdFromResponse(createDataverse);

UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode());

// Despite the variable name, this loads the DDI XML fixture as a string.
String jsonIn = UtilIT.getDatasetJson("doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml");

// Random suffix keeps the DOI unique across test runs.
String randomString = UtilIT.getRandomString(6);

// "yes" releases (publishes) the dataset as part of the import.
Response importJson = UtilIT.importDatasetDDIViaNativeApi(apiToken, dataverseAlias, jsonIn, "doi:10.5072/FK2/" + randomString, "yes");
importJson.prettyPrint();
importJson.then().assertThat().statusCode(CREATED.getStatusCode());

Integer datasetId = JsonPath.from(importJson.getBody().asString()).getInt("data.id");
String datasetPid = JsonPath.from(importJson.getBody().asString()).getString("data.persistentId");

// Fetch the dataset back and confirm the DDI dataKind mapped to the
// "workflow" dataset type.
Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken);
getDatasetJson.prettyPrint();
getDatasetJson.then().assertThat().statusCode(OK.getStatusCode());
String datasetType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType");
System.out.println("datasetType: " + datasetType);
assertEquals("workflow", datasetType);

// Confirm the dataset type is indexed and exposed as a search facet.
Response search = UtilIT.searchAndShowFacets("id:dataset_" + datasetId, apiToken);
search.prettyPrint();
search.then().assertThat()
.body("data.total_count", CoreMatchers.is(1))
.body("data.count_in_response", CoreMatchers.is(1))
.body("data.facets[0].datasetType_s.friendly", CoreMatchers.is("Dataset Type"))
.body("data.facets[0].datasetType_s.labels[0].workflow", CoreMatchers.is(1))
.statusCode(OK.getStatusCode());

}

}

0 comments on commit 3aab5c0

Please sign in to comment.