diff --git a/doc/release-notes/10517-datasetType.md b/doc/release-notes/10517-datasetType.md
index 9e72498cf70..3f17035385a 100644
--- a/doc/release-notes/10517-datasetType.md
+++ b/doc/release-notes/10517-datasetType.md
@@ -1,7 +1,3 @@
 ### Initial Support for Dataset Types (Dataset, Software, Workflow)

 Datasets now have types. By default the dataset type will be "dataset" but if you turn on support for additional types, datasets can have a type of "software" or "workflow" as well. For more details see and #10517. Please note that this feature is highly experimental.
-
-next:
-
-- create with DDI
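For context on the release note above: with the feature enabled, the type is supplied alongside the dataset metadata when a dataset is created through the native API. The sketch below is an assumption about that shape, modeled loosely on the dataset-create-software.json example referenced later in this diff; the top-level "datasetType" key and the single citation field are illustrative and are not taken from this change.

{
  "datasetType": "software",
  "datasetVersion": {
    "metadataBlocks": {
      "citation": {
        "fields": [
          {
            "typeName": "title",
            "multiple": false,
            "typeClass": "primitive",
            "value": "A Workflow for Whale Identification"
          }
        ]
      }
    }
  }
}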
diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml b/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml
deleted file mode 100644
index bbe14a265d8..00000000000
--- a/doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml
+++ /dev/null
@@ -1,196 +0,0 @@
-[196 lines of DDI Codebook XML: an example study, "A Workflow for Whale Identification", exercising the standard citation, subject, geographic and temporal coverage, methodology, terms-of-access/use, and related-publication fields, with the kind of data set to "workflow" so that DDI import produces a dataset of type workflow; full markup omitted]
diff --git a/doc/sphinx-guides/source/user/dataset-types.rst b/doc/sphinx-guides/source/user/dataset-types.rst
index 45bbb558508..4c96745f434 100755
--- a/doc/sphinx-guides/source/user/dataset-types.rst
+++ b/doc/sphinx-guides/source/user/dataset-types.rst
@@ -35,10 +35,3 @@ Import with Native JSON
 -----------------------

 The same native JSON file as above can be used when importing a dataset: :download:`dataset-create-software.json <../_static/api/dataset-create-software.json>`
-
-Import with DDI
----------------
-
-An example DDI file is available at :download:`dataset-create-software-ddi.xml <../_static/api/dataset-create-software-ddi.xml>`
-
-Note that for DDI import to work ``dataKind`` must be set to one of the valid types. The first valid type wins.
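The guide text removed above says that DDI import derives the type from dataKind, with the first valid value winning, and the deleted dataset-create-software-ddi.xml carried a dataKind of "workflow". A minimal sketch of that element in a DDI Codebook file follows; the nesting under stdyDscr/stdyInfo/sumDscr is assumed from DDI Codebook conventions rather than copied from the deleted file.

<codeBook>
  <stdyDscr>
    <stdyInfo>
      <sumDscr>
        <!-- the importer mapped this to the citation "kindOfData" field;
             a valid value ("dataset", "software", or "workflow") set the dataset type -->
        <dataKind>workflow</dataKind>
      </sumDscr>
    </stdyInfo>
  </stdyDscr>
</codeBook>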
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
index 670c221b36e..47f05a75e93 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
@@ -430,13 +430,8 @@ public Response importDatasetDdi(@Context ContainerRequestContext crc, String xm
         Dataverse owner = findDataverseOrDie(parentIdtf);
         Dataset ds = null;
         try {
-            JsonObject jsonObject = importService.ddiToJson(xml);
-            ds = jsonParser().parseDataset(jsonObject);
+            ds = jsonParser().parseDataset(importService.ddiToJson(xml));
             DataverseUtil.checkMetadataLangauge(ds, owner, settingsService.getBaseMetadataLanguageMap(null, true));
-            DatasetType datasetType = getDatasetTypeFromJson(jsonObject);
-            if (datasetType != null) {
-                ds.setDatasetType(datasetType);
-            }
         } catch (JsonParseException jpe) {
             return badRequest("Error parsing data as Json: "+jpe.getMessage());
         } catch (ImportException e) {
@@ -498,30 +493,6 @@ public Response importDatasetDdi(@Context ContainerRequestContext crc, String xm
         }
     }

-    public DatasetType getDatasetTypeFromJson(JsonObject jsonObject) {
-        JsonArray citationFields = jsonObject.getJsonObject("datasetVersion")
-                .getJsonObject("metadataBlocks")
-                .getJsonObject("citation")
-                .getJsonArray("fields");
-        for (JsonValue citationField : citationFields) {
-            JsonObject field = (JsonObject) citationField;
-            String name = field.getString("typeName");
-            if (name.equals(DatasetFieldConstant.kindOfData)) {
-                JsonArray values = field.getJsonArray("value");
-                for (JsonString value : values.getValuesAs(JsonString.class)) {
-                    try {
-                        // return the first DatasetType you find
-                        DatasetType.Type type = DatasetType.Type.fromString(value.getString());
-                        return new DatasetType(type);
-                    } catch (IllegalArgumentException ex) {
-                        // No worries, it's just some other kind of data.
-                    }
-                }
-            }
-        }
-        return null;
-    }
-
     @POST
     @AuthRequired
     @Path("{identifier}/datasets/:startmigration")
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java
index 46eb04b8d25..22b106c2906 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java
@@ -1,13 +1,11 @@
 package edu.harvard.iq.dataverse.api;

-import edu.harvard.iq.dataverse.search.SearchFields;
 import io.restassured.RestAssured;
 import io.restassured.path.json.JsonPath;
 import io.restassured.response.Response;
 import static jakarta.ws.rs.core.Response.Status.CREATED;
 import static jakarta.ws.rs.core.Response.Status.OK;
 import org.hamcrest.CoreMatchers;
-import static org.hamcrest.CoreMatchers.equalTo;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
@@ -137,49 +135,4 @@ public void testImportJson() {

     }

-    @Test
-    public void testImportDdiWorkflow() {
-        Response createUser = UtilIT.createRandomUser();
-        createUser.then().assertThat().statusCode(OK.getStatusCode());
-        String username = UtilIT.getUsernameFromResponse(createUser);
-        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
-
-        UtilIT.setSuperuserStatus(username, true).then().assertThat().statusCode(OK.getStatusCode());
-
-        Response createDataverse = UtilIT.createRandomDataverse(apiToken);
-        createDataverse.then().assertThat().statusCode(CREATED.getStatusCode());
-        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse);
-        Integer dataverseId = UtilIT.getDataverseIdFromResponse(createDataverse);
-
-        UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode());
-
-        String jsonIn = UtilIT.getDatasetJson("doc/sphinx-guides/source/_static/api/dataset-create-software-ddi.xml");
-
-        String randomString = UtilIT.getRandomString(6);
-
-        Response importJson = UtilIT.importDatasetDDIViaNativeApi(apiToken, dataverseAlias, jsonIn, "doi:10.5072/FK2/" + randomString, "yes");
-        importJson.prettyPrint();
-        importJson.then().assertThat().statusCode(CREATED.getStatusCode());
-
-        Integer datasetId = JsonPath.from(importJson.getBody().asString()).getInt("data.id");
-        String datasetPid = JsonPath.from(importJson.getBody().asString()).getString("data.persistentId");
-
-        Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken);
-        getDatasetJson.prettyPrint();
-        getDatasetJson.then().assertThat().statusCode(OK.getStatusCode());
-        String datasetType = JsonPath.from(getDatasetJson.getBody().asString()).getString("data.datasetType");
-        System.out.println("datasetType: " + datasetType);
-        assertEquals("workflow", datasetType);
-
-        Response search = UtilIT.searchAndShowFacets("id:dataset_" + datasetId, apiToken);
-        search.prettyPrint();
-        search.then().assertThat()
-                .body("data.total_count", CoreMatchers.is(1))
-                .body("data.count_in_response", CoreMatchers.is(1))
-                .body("data.facets[0].datasetType_s.friendly", CoreMatchers.is("Dataset Type"))
-                .body("data.facets[0].datasetType_s.labels[0].workflow", CoreMatchers.is(1))
-                .statusCode(OK.getStatusCode());
-
-    }
-
 }