From 2f5da7be64a7d1f6b41ceb2dd0dfb3992b6f242d Mon Sep 17 00:00:00 2001 From: Paul Boon Date: Wed, 22 Mar 2023 15:10:36 +0100 Subject: [PATCH 001/402] Add Shib attribute characterset conversion to getValueFromAssertion --- src/main/java/edu/harvard/iq/dataverse/Shib.java | 8 ++++++++ 1 file changed, 8 insertions(+)

diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java index 0f0e20aba94..ade97146acb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Shib.java +++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java @@ -19,6 +19,7 @@
 import org.apache.commons.lang3.StringUtils;
 
 import java.io.IOException;
+import java.io.UnsupportedEncodingException;
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -416,6 +417,13 @@ private String getValueFromAssertion(String key) {
         Object attribute = request.getAttribute(key);
         if (attribute != null) {
             String attributeValue = attribute.toString();
+            if(systemConfig.isShibAttributeCharacterSetConversionEnabled()) {
+                try {
+                    attributeValue = new String(attributeValue.getBytes("ISO-8859-1"), "UTF-8");
+                } catch (UnsupportedEncodingException e) {
+                    logger.warning("Character conversion failed for Shib attribute (key, value) = (" + key + ", " + attributeValue + ") ; ignoring it");
+                }
+            }
             String trimmedValue = attributeValue.trim();
             if (!trimmedValue.isEmpty()) {
                 logger.fine("The SAML assertion for \"" + key + "\" (optional) was \"" + attributeValue + "\" and was trimmed to \"" + trimmedValue + "\".");

From 09b5c4c5c1903a78842135217c3c2001a214cba0 Mon Sep 17 00:00:00 2001 From: Ludovic DANIEL Date: Wed, 16 Aug 2023 16:25:41 +0200 Subject: [PATCH 002/402] 8941 - fileCount is now added in SOLR while indexing --- conf/solr/8.11.1/schema.xml | 2 ++ .../iq/dataverse/search/IndexServiceBean.java | 2 ++ .../iq/dataverse/search/SearchFields.java | 2 ++ .../iq/dataverse/search/SearchServiceBean.java | 2 ++ .../iq/dataverse/search/SolrSearchResult.java | 16 +++++++++++++++- 5 files changed, 23 insertions(+), 1 deletion(-)

diff --git a/conf/solr/8.11.1/schema.xml b/conf/solr/8.11.1/schema.xml index ceff082f418..0203d1c690a 100644 --- a/conf/solr/8.11.1/schema.xml +++ b/conf/solr/8.11.1/schema.xml @@ -236,6 +236,8 @@
[Lost in extraction: the two field definitions added to schema.xml here, the remaining hunks of PATCH 002, and PATCHes 003-073. Only stray pom.xml fragments survive from the omitted patches: new apache-mime4j-core and apache-mime4j-dom 0.8.7 dependencies, and a version property change from ${revision} to ${parsedVersion.majorVersion}.${parsedVersion.nextMinorVersion}.]

From c59746d2176f1d8a7e1b567f12143175281e094a Mon Sep 17 00:00:00 2001 From: Vera Clemens <16904069+vera@users.noreply.github.com> Date: Mon, 30 Sep 2024 19:19:33 +0200 Subject: [PATCH 074/402] Importing unmanaged pids (#10805)

* fix: send proper error response when trying to import dataset with unmanaged PID * test: add tests for importing datasets as JSON * docs: state that importing datasets with unmanaged PIDs is not supported --- doc/sphinx-guides/source/api/native-api.rst | 4 +- .../harvard/iq/dataverse/api/Dataverses.java | 6 + .../iq/dataverse/api/DataversesIT.java | 173 ++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 29 +++ 4 files changed, 210 insertions(+), 2 deletions(-)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 117aceb141d..acb6131c9d2 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -887,7 +887,7 @@ Before calling the API, make sure the data files referenced by the ``POST``\ ed * This API does not cover staging files (with 
correct contents, checksums, sizes, etc.) in the corresponding places in the Dataverse installation's filestore. * This API endpoint does not support importing *files'* persistent identifiers. - * A Dataverse installation can import datasets with a valid PID that uses a different protocol or authority than said server is configured for. However, the server will not update the PID metadata on subsequent update and publish actions. + * A Dataverse installation can only import datasets with a valid PID that is managed by one of the PID providers that said installation is configured for. .. _import-dataset-with-type: @@ -935,7 +935,7 @@ Note that DDI XML does not have a field that corresponds to the "Subject" field .. warning:: * This API does not handle files related to the DDI file. - * A Dataverse installation can import datasets with a valid PID that uses a different protocol or authority than said server is configured for. However, the server will not update the PID metadata on subsequent update and publish actions. + * A Dataverse installation can only import datasets with a valid PID that is managed by one of the PID providers that said installation is configured for. .. _publish-dataverse-api: diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 17e3086f184..0ee146ed99b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -407,6 +407,12 @@ public Response importDataset(@Context ContainerRequestContext crc, String jsonB if (ds.getIdentifier() == null) { return badRequest("Please provide a persistent identifier, either by including it in the JSON, or by using the pid query parameter."); } + + PidProvider pidProvider = PidUtil.getPidProvider(ds.getGlobalId().getProviderId()); + if (pidProvider == null || !pidProvider.canManagePID()) { + return badRequest("Cannot import a dataset that has a PID that doesn't match the server's settings"); + } + boolean shouldRelease = StringUtil.isTrue(releaseParam); DataverseRequest request = createDataverseRequest(u); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 6fbe91c8405..8c6a8244af1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -646,6 +646,179 @@ public void testImportDDI() throws IOException, InterruptedException { Response deleteUserResponse = UtilIT.deleteUser(username); assertEquals(200, deleteUserResponse.getStatusCode()); } + + @Test + public void testImport() throws IOException, InterruptedException { + + Response createUser = UtilIT.createRandomUser(); + String username = UtilIT.getUsernameFromResponse(createUser); + Response makeSuperUser = UtilIT.makeSuperUser(username); + assertEquals(200, makeSuperUser.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + assertEquals(200, publishDataverse.getStatusCode()); + + JsonObjectBuilder datasetJson = Json.createObjectBuilder() + .add("datasetVersion", Json.createObjectBuilder() + .add("license", Json.createObjectBuilder() + .add("name", "CC0 1.0") + ) + 
.add("metadataBlocks", Json.createObjectBuilder() + .add("citation", Json.createObjectBuilder() + .add("fields", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("typeName", "title") + .add("value", "Test Dataset") + .add("typeClass", "primitive") + .add("multiple", false) + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("authorName", + Json.createObjectBuilder() + .add("value", "Simpson, Homer") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "authorName")) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "author") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("datasetContactEmail", + Json.createObjectBuilder() + .add("value", "hsimpson@mailinator.com") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "datasetContactEmail")) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "datasetContact") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("dsDescriptionValue", + Json.createObjectBuilder() + .add("value", "This a test dataset.") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "dsDescriptionValue")) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "dsDescription") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add("Other") + ) + .add("typeClass", "controlledVocabulary") + .add("multiple", true) + .add("typeName", "subject") + ) + ) + ) + )); + + String json = datasetJson.build().toString(); + + Response importJSONNoPid = UtilIT.importDatasetViaNativeApi(apiToken, dataverseAlias, json, null, "no"); + logger.info(importJSONNoPid.prettyPrint()); + assertEquals(400, importJSONNoPid.getStatusCode()); + + String body = importJSONNoPid.getBody().asString(); + String status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + String message = JsonPath.from(body).getString("message"); + assertEquals( + "Please provide a persistent identifier, either by including it in the JSON, or by using the pid query parameter.", + message + ); + + Response importJSONNoPidRelease = UtilIT.importDatasetViaNativeApi(apiToken, dataverseAlias, json, null, "yes"); + logger.info( importJSONNoPidRelease.prettyPrint()); + assertEquals(400, importJSONNoPidRelease.getStatusCode()); + + body = importJSONNoPidRelease.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + message = JsonPath.from(body).getString("message"); + assertEquals( + "Please provide a persistent identifier, either by including it in the JSON, or by using the pid query parameter.", + message + ); + + Response importJSONUnmanagedPid = UtilIT.importDatasetViaNativeApi(apiToken, dataverseAlias, json, "doi:10.5073/FK2/ABCD11", "no"); + logger.info(importJSONUnmanagedPid.prettyPrint()); + assertEquals(400, importJSONUnmanagedPid.getStatusCode()); + + body = importJSONUnmanagedPid.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + message = JsonPath.from(body).getString("message"); + assertEquals( + "Cannot import a dataset that has a PID that doesn't match the server's settings", + message + ); + + // Under normal conditions, you shouldn't need to destroy 
these datasets. + // Uncomment if they're still around from a previous failed run. +// Response destroy1 = UtilIT.destroyDataset("doi:10.5072/FK2/ABCD11", apiToken); +// destroy1.prettyPrint(); +// Response destroy2 = UtilIT.destroyDataset("doi:10.5072/FK2/ABCD22", apiToken); +// destroy2.prettyPrint(); + + Response importJSONPid = UtilIT.importDatasetViaNativeApi(apiToken, dataverseAlias, json, "doi:10.5072/FK2/ABCD11", "no"); + logger.info(importJSONPid.prettyPrint()); + assertEquals(201, importJSONPid.getStatusCode()); + + Response importJSONPidRel = UtilIT.importDatasetViaNativeApi(apiToken, dataverseAlias, json, "doi:10.5072/FK2/ABCD22", "yes"); + logger.info(importJSONPidRel.prettyPrint()); + assertEquals(201, importJSONPidRel.getStatusCode()); + + Integer datasetIdInt = JsonPath.from(importJSONPid.body().asString()).getInt("data.id"); + + Response search1 = UtilIT.search("id:dataset_" + datasetIdInt + "_draft", apiToken); // santity check, can find it + search1.prettyPrint(); + search1.then().assertThat() + .body("data.total_count", CoreMatchers.is(1)) + .body("data.count_in_response", CoreMatchers.is(1)) + .body("data.items[0].name", CoreMatchers.is("Test Dataset")) + .statusCode(OK.getStatusCode()); + + //cleanup + + Response destroyDatasetResponse = UtilIT.destroyDataset(datasetIdInt, apiToken); + assertEquals(200, destroyDatasetResponse.getStatusCode()); + + Integer datasetIdIntPidRel = JsonPath.from(importJSONPidRel.body().asString()).getInt("data.id"); + Response destroyDatasetResponsePidRel = UtilIT.destroyDataset(datasetIdIntPidRel, apiToken); + assertEquals(200, destroyDatasetResponsePidRel.getStatusCode()); + + UtilIT.sleepForDeadlock(UtilIT.MAXIMUM_IMPORT_DURATION); + + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken); + assertEquals(200, deleteDataverseResponse.getStatusCode()); + + Response deleteUserResponse = UtilIT.deleteUser(username); + assertEquals(200, deleteUserResponse.getStatusCode()); + } @Test public void testAttributesApi() throws Exception { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 4e20e8e4c33..4fbe84bcfcf 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3672,6 +3672,35 @@ static Response importDatasetDDIViaNativeApi(String apiToken, String dataverseAl return importDDI.post(postString); } + + static Response importDatasetViaNativeApi(String apiToken, String dataverseAlias, String json, String pid, String release) { + String postString = "/api/dataverses/" + dataverseAlias + "/datasets/:import"; + if (pid != null || release != null ) { + //postString = postString + "?"; + if (pid != null) { + postString = postString + "?pid=" + pid; + if (release != null && release.compareTo("yes") == 0) { + postString = postString + "&release=" + release; + } + } else { + if (release != null && release.compareTo("yes") == 0) { + postString = postString + "?release=" + release; + } + } + } + logger.info("Here importDatasetViaNativeApi"); + logger.info(postString); + + RequestSpecification importJSON = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .urlEncodingEnabled(false) + .body(json) + .contentType("application/json"); + + return importJSON.post(postString); + } + + static Response retrieveMyDataAsJsonString(String apiToken, String userIdentifier, ArrayList roleIds) { Response response = given() .header(API_TOKEN_HTTP_HEADER, apiToken) From 
700c39fe8cfb93e69c20ce3ed44f617b7d2ba8b3 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 30 Sep 2024 13:34:08 -0400 Subject: [PATCH 075/402] add release note --- .../10886-update-to-conditions-to-display-image_url.md | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 doc/release-notes/10886-update-to-conditions-to-display-image_url.md diff --git a/doc/release-notes/10886-update-to-conditions-to-display-image_url.md b/doc/release-notes/10886-update-to-conditions-to-display-image_url.md new file mode 100644 index 00000000000..03bd8299d45 --- /dev/null +++ b/doc/release-notes/10886-update-to-conditions-to-display-image_url.md @@ -0,0 +1,6 @@ +Search API (/api/search) responses for Datafiles include image_url for the thumbnail if each of the following are true: +1. The DataFile is not Harvested +2. A Thumbnail is available for the Datafile +3. If the Datafile is Restricted then the caller must have Download File Permission for the Datafile +4. The Datafile is NOT actively embargoed +5. The Datafile's retention is NOT expired From 48dead84d3ce2d7b093eb1b1c6e39e475cd9aa9a Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 30 Sep 2024 13:35:11 -0400 Subject: [PATCH 076/402] add release note --- .../10886-update-to-conditions-to-display-image_url.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10886-update-to-conditions-to-display-image_url.md b/doc/release-notes/10886-update-to-conditions-to-display-image_url.md index 03bd8299d45..a7adda11840 100644 --- a/doc/release-notes/10886-update-to-conditions-to-display-image_url.md +++ b/doc/release-notes/10886-update-to-conditions-to-display-image_url.md @@ -3,4 +3,4 @@ Search API (/api/search) responses for Datafiles include image_url for the thumb 2. A Thumbnail is available for the Datafile 3. If the Datafile is Restricted then the caller must have Download File Permission for the Datafile 4. The Datafile is NOT actively embargoed -5. The Datafile's retention is NOT expired +5. The Datafile's retention has NOT expired From d9139effc5f37330b939e8a72eec96bde90004cf Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 30 Sep 2024 14:35:15 -0400 Subject: [PATCH 077/402] #10879 remove access request from render logic --- src/main/webapp/dataset-license-terms.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index 255e63fbfc2..03173faf989 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -12,7 +12,7 @@ or !empty termsOfUseAndAccess.originalArchive or !empty termsOfUseAndAccess.availabilityStatus or !empty termsOfUseAndAccess.contactForAccess or !empty termsOfUseAndAccess.sizeOfCollection or !empty termsOfUseAndAccess.studyCompletion - or termsOfUseAndAccess.fileAccessRequest}"/> + }"/>
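The five conditions listed in the 10886 release note above reduce to one boolean decision about whether image_url is included for a Datafile in a Search API response. Below is a minimal sketch of that decision; the helper name and boolean parameters are illustrative only and do not appear in the patch:

    /** Hypothetical check mirroring the five image_url conditions from the release note. */
    static boolean shouldIncludeImageUrl(boolean harvested, boolean thumbnailAvailable,
                                         boolean restricted, boolean hasDownloadFilePermission,
                                         boolean activelyEmbargoed, boolean retentionExpired) {
        // 1. not harvested; 2. a thumbnail exists; 3. restricted files require Download File permission;
        // 4. not actively embargoed; 5. retention period has not expired
        return !harvested && thumbnailAvailable
                && (!restricted || hasDownloadFilePermission)
                && !activelyEmbargoed && !retentionExpired;
    }

If any one of the five checks fails, image_url is simply omitted from the /api/search response for that Datafile.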
Date: Tue, 1 Oct 2024 12:00:51 +0200 Subject: [PATCH 079/402] replace ZipInputStream with ZipFile --- .../impl/CreateNewDataFilesCommand.java | 331 ++++++++---------- .../ingest/IngestServiceShapefileHelper.java | 67 ++-- .../harvard/iq/dataverse/util/FileUtil.java | 13 +- .../iq/dataverse/util/ShapefileHandler.java | 149 +++----- .../command/impl/CreateNewDataFilesTest.java | 187 ++++++++++ .../util/shapefile/ShapefileHandlerTest.java | 38 +- .../own-cloud-downloads/greetings.zip | Bin 0 -> 679 bytes .../resources/own-cloud-downloads/shapes.zip | Bin 0 -> 4336 bytes 8 files changed, 429 insertions(+), 356 deletions(-) create mode 100644 src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java create mode 100644 src/test/resources/own-cloud-downloads/greetings.zip create mode 100644 src/test/resources/own-cloud-downloads/shapes.zip diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 3a21345448b..e543606e039 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -2,34 +2,29 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker; -import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -//import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper; -import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit; -import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; -import static edu.harvard.iq.dataverse.util.FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT; -import static edu.harvard.iq.dataverse.util.FileUtil.createIngestFailureReport; -import static edu.harvard.iq.dataverse.util.FileUtil.determineFileType; -import static edu.harvard.iq.dataverse.util.FileUtil.determineFileTypeByNameAndExtension; -import static edu.harvard.iq.dataverse.util.FileUtil.getFilesTempDirectory; -import static edu.harvard.iq.dataverse.util.FileUtil.saveInputStreamInTempFile; -import static edu.harvard.iq.dataverse.util.FileUtil.useRecognizedType; import edu.harvard.iq.dataverse.util.ShapefileHandler; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.file.BagItFileHandler; import edu.harvard.iq.dataverse.util.file.BagItFileHandlerFactory; import edu.harvard.iq.dataverse.util.file.CreateDataFileResult; +import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; +import jakarta.enterprise.inject.spi.CDI; +import org.apache.commons.io.FileUtils; +import 
org.apache.commons.lang3.StringUtils; + import java.io.File; import java.io.FileInputStream; import java.io.IOException; @@ -42,7 +37,7 @@ import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; -import java.util.Enumeration; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -51,12 +46,17 @@ import java.util.Set; import java.util.logging.Logger; import java.util.zip.GZIPInputStream; -import java.util.zip.ZipFile; import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; -import jakarta.enterprise.inject.spi.CDI; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.StringUtils; +import java.util.zip.ZipFile; + +import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable; +import static edu.harvard.iq.dataverse.util.FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT; +import static edu.harvard.iq.dataverse.util.FileUtil.createIngestFailureReport; +import static edu.harvard.iq.dataverse.util.FileUtil.determineFileType; +import static edu.harvard.iq.dataverse.util.FileUtil.determineFileTypeByNameAndExtension; +import static edu.harvard.iq.dataverse.util.FileUtil.getFilesTempDirectory; +import static edu.harvard.iq.dataverse.util.FileUtil.saveInputStreamInTempFile; +import static edu.harvard.iq.dataverse.util.FileUtil.useRecognizedType; /** * @@ -140,9 +140,10 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException if (newStorageIdentifier == null) { - if (getFilesTempDirectory() != null) { + var filesTempDirectory = getFilesTempDirectory(); + if (filesTempDirectory != null) { try { - tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload"); + tempFile = Files.createTempFile(Paths.get(filesTempDirectory), "tmp", "upload"); // "temporary" location is the key here; this is why we are not using // the DataStore framework for this - the assumption is that // temp files will always be stored on the local filesystem. @@ -260,10 +261,6 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // DataFile objects from its contents: } else if (finalType.equals("application/zip")) { - ZipFile zipFile = null; - ZipInputStream unZippedIn = null; - ZipEntry zipEntry = null; - int fileNumberLimit = ctxt.systemConfig().getZipUploadFilesLimit(); Long combinedUnzippedFileSize = 0L; @@ -271,14 +268,14 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException Charset charset = null; /* TODO: (?) - We may want to investigate somehow letting the user specify + We may want to investigate somehow letting the user specify the charset for the filenames in the zip file... - - otherwise, ZipInputStream bails out if it encounteres a file - name that's not valid in the current charest (i.e., UTF-8, in - our case). It would be a bit trickier than what we're doing for - SPSS tabular ingests - with the lang. encoding pulldown menu - + - otherwise, ZipInputStream bails out if it encounteres a file + name that's not valid in the current charest (i.e., UTF-8, in + our case). It would be a bit trickier than what we're doing for + SPSS tabular ingests - with the lang. encoding pulldown menu - because this encoding needs to be specified *before* we upload and - attempt to unzip the file. + attempt to unzip the file. -- L.A. 
4.0 beta12 logger.info("default charset is "+Charset.defaultCharset().name()); if (Charset.isSupported("US-ASCII")) { @@ -287,25 +284,21 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException if (charset != null) { logger.info("was able to obtain charset for US-ASCII"); } - + } */ - /** - * Perform a quick check for how many individual files are - * inside this zip archive. If it's above the limit, we can - * give up right away, without doing any unpacking. + /** + * Perform a quick check for how many individual files are + * inside this zip archive. If it's above the limit, we can + * give up right away, without doing any unpacking. * This should be a fairly inexpensive operation, we just need - * to read the directory at the end of the file. + * to read the directory at the end of the file. */ - - if (charset != null) { - zipFile = new ZipFile(tempFile.toFile(), charset); - } else { - zipFile = new ZipFile(tempFile.toFile()); - } + + /** - * The ZipFile constructors above will throw ZipException - + * The ZipFile constructors in openZipFile will throw ZipException - * a type of IOException - if there's something wrong * with this file as a zip. There's no need to intercept it * here, it will be caught further below, with other IOExceptions, @@ -313,8 +306,8 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException * then attempt to save it as is. */ - int numberOfUnpackableFiles = 0; - + int numberOfUnpackableFiles = 0; + /** * Note that we can't just use zipFile.size(), * unfortunately, since that's the total number of entries, @@ -323,83 +316,48 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException * that are files. */ - for (Enumeration entries = zipFile.entries(); entries.hasMoreElements();) { - ZipEntry entry = entries.nextElement(); - logger.fine("inside first zip pass; this entry: "+entry.getName()); - if (!entry.isDirectory()) { - String shortName = entry.getName().replaceFirst("^.*[\\/]", ""); - // ... 
and, finally, check if it's a "fake" file - a zip archive entry - // created for a MacOS X filesystem element: (these - // start with "._") - if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) { - numberOfUnpackableFiles++; - if (numberOfUnpackableFiles > fileNumberLimit) { - logger.warning("Zip upload - too many files in the zip to process individually."); - warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit - + "); please upload a zip archive with fewer files, if you want them to be ingested " - + "as individual DataFiles."; - throw new IOException(); - } - // In addition to counting the files, we can - // also check the file size while we're here, - // provided the size limit is defined; if a single - // file is above the individual size limit, unzipped, - // we give up on unpacking this zip archive as well: - if (fileSizeLimit != null && entry.getSize() > fileSizeLimit) { - throw new FileExceedsMaxSizeException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(entry.getSize()), bytesToHumanReadable(fileSizeLimit))); - } - // Similarly, we want to check if saving all these unpacked - // files is going to push the disk usage over the - // quota: - if (storageQuotaLimit != null) { - combinedUnzippedFileSize = combinedUnzippedFileSize + entry.getSize(); - if (combinedUnzippedFileSize > storageQuotaLimit) { - //throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(combinedUnzippedFileSize), bytesToHumanReadable(storageQuotaLimit))); - // change of plans: if the unzipped content inside exceeds the remaining quota, - // we reject the upload outright, rather than accepting the zip - // file as is. 
- throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.unzipped.quota_exceeded"), bytesToHumanReadable(storageQuotaLimit)), this); - } + try (var zipFile = openZipFile(tempFile, charset)) { + for (var entry : filteredZipEntries(zipFile)) { + logger.fine("inside first zip pass; this entry: " + entry.getName()); + numberOfUnpackableFiles++; + if (numberOfUnpackableFiles > fileNumberLimit) { + logger.warning("Zip upload - too many files in the zip to process individually."); + warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit + + "); please upload a zip archive with fewer files, if you want them to be ingested " + + "as individual DataFiles."; + throw new IOException(); + } + // In addition to counting the files, we can + // also check the file size while we're here, + // provided the size limit is defined; if a single + // file is above the individual size limit, unzipped, + // we give up on unpacking this zip archive as well: + if (fileSizeLimit != null && entry.getSize() > fileSizeLimit) { + throw new FileExceedsMaxSizeException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(entry.getSize()), bytesToHumanReadable(fileSizeLimit))); + } + // Similarly, we want to check if saving all these unpacked + // files is going to push the disk usage over the + // quota: + if (storageQuotaLimit != null) { + combinedUnzippedFileSize = combinedUnzippedFileSize + entry.getSize(); + if (combinedUnzippedFileSize > storageQuotaLimit) { + //throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(combinedUnzippedFileSize), bytesToHumanReadable(storageQuotaLimit))); + // change of plans: if the unzipped content inside exceeds the remaining quota, + // we reject the upload outright, rather than accepting the zip + // file as is. + throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.unzipped.quota_exceeded"), bytesToHumanReadable(storageQuotaLimit)), this); } } } } - + // OK we're still here - that means we can proceed unzipping. - // Close the ZipFile, re-open as ZipInputStream: - zipFile.close(); // reset: combinedUnzippedFileSize = 0L; - if (charset != null) { - unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset); - } else { - unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile())); - } - - while (true) { - try { - zipEntry = unZippedIn.getNextEntry(); - } catch (IllegalArgumentException iaex) { - // Note: - // ZipInputStream documentation doesn't even mention that - // getNextEntry() throws an IllegalArgumentException! - // but that's what happens if the file name of the next - // entry is not valid in the current CharSet. - // -- L.A. - warningMessage = "Failed to unpack Zip file. (Unknown Character Set used in a file name?) 
Saving the file as is."; - logger.warning(warningMessage); - throw new IOException(); - } - - if (zipEntry == null) { - break; - } - // Note that some zip entries may be directories - we - // simply skip them: - - if (!zipEntry.isDirectory()) { + try (var zipFile = openZipFile(tempFile, charset)) { + for (var entry : filteredZipEntries(zipFile)) { if (datafiles.size() > fileNumberLimit) { logger.warning("Zip upload - too many files."); warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit @@ -407,72 +365,55 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException + "as individual DataFiles."; throw new IOException(); } - - String fileEntryName = zipEntry.getName(); + var fileEntryName = entry.getName(); + var shortName = getShortName(fileEntryName); logger.fine("ZipEntry, file: " + fileEntryName); + String storageIdentifier = FileUtil.generateStorageIdentifier(); + File unzippedFile = new File(getFilesTempDirectory() + "/" + storageIdentifier); + Files.copy(zipFile.getInputStream(entry), unzippedFile.toPath(), StandardCopyOption.REPLACE_EXISTING); + // No need to check the size of this unpacked file against the size limit, + // since we've already checked for that in the first pass. + DataFile datafile = FileUtil.createSingleDataFile(version, null, storageIdentifier, shortName, + MIME_TYPE_UNDETERMINED_DEFAULT, + ctxt.systemConfig().getFileFixityChecksumAlgorithm(), null, false); + + if (!fileEntryName.equals(shortName)) { + // If the filename looks like a hierarchical folder name (i.e., contains slashes and backslashes), + // we'll extract the directory name; then subject it to some "aggressive sanitizing" - strip all + // the leading, trailing and duplicate slashes; then replace all the characters that + // don't pass our validation rules. + String directoryName = fileEntryName.replaceFirst("[\\\\/][\\\\/]*[^\\\\/]*$", ""); + directoryName = StringUtil.sanitizeFileDirectory(directoryName, true); + // if (!"".equals(directoryName)) { + if (!StringUtil.isEmpty(directoryName)) { + logger.fine("setting the directory label to " + directoryName); + datafile.getFileMetadata().setDirectoryLabel(directoryName); + } + } - if (fileEntryName != null && !fileEntryName.equals("")) { - - String shortName = fileEntryName.replaceFirst("^.*[\\/]", ""); - - // Check if it's a "fake" file - a zip archive entry - // created for a MacOS X filesystem element: (these - // start with "._") - if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) { - // OK, this seems like an OK file entry - we'll try - // to read it and create a DataFile with it: - - String storageIdentifier = FileUtil.generateStorageIdentifier(); - File unzippedFile = new File(getFilesTempDirectory() + "/" + storageIdentifier); - Files.copy(unZippedIn, unzippedFile.toPath(), StandardCopyOption.REPLACE_EXISTING); - // No need to check the size of this unpacked file against the size limit, - // since we've already checked for that in the first pass. 
- - DataFile datafile = FileUtil.createSingleDataFile(version, null, storageIdentifier, shortName, - MIME_TYPE_UNDETERMINED_DEFAULT, - ctxt.systemConfig().getFileFixityChecksumAlgorithm(), null, false); - - if (!fileEntryName.equals(shortName)) { - // If the filename looks like a hierarchical folder name (i.e., contains slashes and backslashes), - // we'll extract the directory name; then subject it to some "aggressive sanitizing" - strip all - // the leading, trailing and duplicate slashes; then replace all the characters that - // don't pass our validation rules. - String directoryName = fileEntryName.replaceFirst("[\\\\/][\\\\/]*[^\\\\/]*$", ""); - directoryName = StringUtil.sanitizeFileDirectory(directoryName, true); - // if (!"".equals(directoryName)) { - if (!StringUtil.isEmpty(directoryName)) { - logger.fine("setting the directory label to " + directoryName); - datafile.getFileMetadata().setDirectoryLabel(directoryName); - } - } + if (datafile != null) { + // We have created this datafile with the mime type "unknown"; + // Now that we have it saved in a temporary location, + // let's try and determine its real type: - if (datafile != null) { - // We have created this datafile with the mime type "unknown"; - // Now that we have it saved in a temporary location, - // let's try and determine its real type: - - String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier(); - - try { - recognizedType = determineFileType(unzippedFile, shortName); - // null the File explicitly, to release any open FDs: - unzippedFile = null; - logger.fine("File utility recognized unzipped file as " + recognizedType); - if (recognizedType != null && !recognizedType.equals("")) { - datafile.setContentType(recognizedType); - } - } catch (Exception ex) { - logger.warning("Failed to run the file utility mime type check on file " + fileName); - } - - datafiles.add(datafile); - combinedUnzippedFileSize += datafile.getFilesize(); + String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier(); + + try { + recognizedType = determineFileType(unzippedFile, shortName); + // null the File explicitly, to release any open FDs: + unzippedFile = null; + logger.fine("File utility recognized unzipped file as " + recognizedType); + if (recognizedType != null && !recognizedType.equals("")) { + datafile.setContentType(recognizedType); } + } catch (Exception ex) { + logger.warning("Failed to run the file utility mime type check on file " + fileName); } + + datafiles.add(datafile); + combinedUnzippedFileSize += datafile.getFilesize(); } } - unZippedIn.closeEntry(); - } } catch (IOException ioex) { @@ -494,18 +435,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException //warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.quota", Arrays.asList(FileSizeChecker.bytesToHumanReadable(storageQuotaLimit))); //datafiles.clear(); throw new CommandExecutionException(fesqx.getMessage(), fesqx, this); - }*/ finally { - if (zipFile != null) { - try { - zipFile.close(); - } catch (Exception zEx) {} - } - if (unZippedIn != null) { - try { - unZippedIn.close(); - } catch (Exception zEx) {} - } - } + }*/ if (!datafiles.isEmpty()) { // remove the uploaded zip file: try { @@ -591,7 +521,8 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // The try-catch is due to error encountered in using NFS for stocking file, // cf. 
https://github.com/IQSS/dataverse/issues/5909 try { - FileUtils.deleteDirectory(rezipFolder); + if (rezipFolder!=null) + FileUtils.deleteDirectory(rezipFolder); } catch (IOException ioex) { // do nothing - it's a temp folder. logger.warning("Could not remove temp folder, error message : " + ioex.getMessage()); @@ -730,7 +661,37 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException return CreateDataFileResult.error(fileName, finalType); } // end createDataFiles - + + private static List filteredZipEntries(ZipFile zipFile) { + var entries = Collections.list(zipFile.entries()).stream().filter(e -> { + var entryName = e.getName(); + logger.fine("ZipEntry, file: " + entryName); + return !e.isDirectory() && !entryName.isEmpty() && !isFileToSkip(entryName); + }).toList(); + return entries; + } + + private static ZipFile openZipFile(Path tempFile, Charset charset) throws IOException { + if (charset != null) { + return new ZipFile(tempFile.toFile(), charset); + } + else { + return new ZipFile(tempFile.toFile()); + } + } + + private static boolean isFileToSkip(String fileName) { + // check if it's a "fake" file - a zip archive entry + // created for a MacOS X filesystem element: (these + // start with "._") + var shortName = getShortName(fileName); + return shortName.startsWith("._") || shortName.startsWith(".DS_Store") || "".equals(shortName); + } + + private static String getShortName(String fileName) { + return fileName.replaceFirst("^.*[\\/]", ""); + } + @Override public Map> getRequiredPermissions() { Map> ret = new HashMap<>(); diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java index 8c5dad237b1..27a2ab99376 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java @@ -100,71 +100,48 @@ public IngestServiceShapefileHelper(File zippedShapefile, File rezipFolder){ //this.processFile(zippedShapefile, rezipFolder); } - - private FileInputStream getFileInputStream(File fileObject){ - if (fileObject==null){ - return null; - } - try { + + private FileInputStream getFileInputStream(File fileObject){ + if (fileObject==null){ + return null; + } + try { return new FileInputStream(fileObject); } catch (FileNotFoundException ex) { logger.severe("Failed to create FileInputStream from File: " + fileObject.getAbsolutePath()); return null; } - } - - private void closeFileInputStream(FileInputStream fis){ - if (fis==null){ - return; - } + } + + private void closeFileInputStream(FileInputStream fis){ + if (fis==null){ + return; + } try { - fis.close(); + fis.close(); } catch (IOException ex) { logger.info("Failed to close FileInputStream"); } - } - + } + public boolean processFile() { if ((!isValidFile(this.zippedShapefile))||(!isValidFolder(this.rezipFolder))){ return false; } - - // (1) Use the ShapefileHandler to the .zip for a shapefile - // - FileInputStream shpfileInputStream = this.getFileInputStream(zippedShapefile); - if (shpfileInputStream==null){ - return false; - } - - this.shpHandler = new ShapefileHandler(shpfileInputStream); - if (!shpHandler.containsShapefile()){ - logger.severe("Shapefile was incorrectly detected upon Ingest (FileUtil) and passed here"); - return false; - } - - this.closeFileInputStream(shpfileInputStream); - - // (2) Rezip the shapefile pieces - logger.info("rezipFolder: " + rezipFolder.getAbsolutePath()); - 
shpfileInputStream = this.getFileInputStream(zippedShapefile); - if (shpfileInputStream==null){ - return false; - } - - boolean rezipSuccess; try { - rezipSuccess = shpHandler.rezipShapefileSets(shpfileInputStream, rezipFolder); + this.shpHandler = new ShapefileHandler(zippedShapefile); + if (!shpHandler.containsShapefile()){ + logger.severe("Shapefile was incorrectly detected upon Ingest (FileUtil) and passed here"); + return false; + } + logger.info("rezipFolder: " + rezipFolder.getAbsolutePath()); + return shpHandler.rezipShapefileSets(rezipFolder); } catch (IOException ex) { logger.severe("Shapefile was not correctly unpacked/repacked"); logger.severe("shpHandler message: " + shpHandler.errorMessage); return false; } - - this.closeFileInputStream(shpfileInputStream); - - return rezipSuccess; - // return createDataFiles(rezipFolder); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index a0c32d5c8ce..991682ec8e8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -525,15 +525,18 @@ public static String determineFileType(File f, String fileName) throws IOExcepti // Check for shapefile extensions as described here: http://en.wikipedia.org/wiki/Shapefile //logger.info("Checking for shapefile"); - ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(f)); + ShapefileHandler shp_handler = new ShapefileHandler(f); if (shp_handler.containsShapefile()){ // logger.info("------- shapefile FOUND ----------"); fileType = ShapefileHandler.SHAPEFILE_FILE_TYPE; //"application/zipped-shapefile"; } - - Optional bagItFileHandler = CDI.current().select(BagItFileHandlerFactory.class).get().getBagItFileHandler(); - if(bagItFileHandler.isPresent() && bagItFileHandler.get().isBagItPackage(fileName, f)) { - fileType = BagItFileHandler.FILE_TYPE; + try { + Optional bagItFileHandler = CDI.current().select(BagItFileHandlerFactory.class).get().getBagItFileHandler(); + if (bagItFileHandler.isPresent() && bagItFileHandler.get().isBagItPackage(fileName, f)) { + fileType = BagItFileHandler.FILE_TYPE; + } + } catch (Exception e) { + logger.warning("Error checking for BagIt package: " + e.getMessage()); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java b/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java index f1440cc3c02..20dc6d9c26a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java @@ -1,23 +1,21 @@ package edu.harvard.iq.dataverse.util; import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; import java.io.FileNotFoundException; import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; import java.util.Date; import java.util.ArrayList; import java.util.List; -import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; -import java.util.zip.ZipException; +import java.util.zip.ZipFile; import java.util.HashMap; import java.util.*; import java.nio.file.Files; import java.nio.file.Paths; import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; -import java.util.logging.Level; + import java.util.logging.Logger; import org.apache.commons.io.FileUtils; @@ -43,11 +41,10 @@ * "shape1.pdf", "README.md", "shape_notes.txt" * * Code Example: - * FileInputStream shp_file_input_stream = new FileInputStream(new 
File("zipped_shapefile.zip")) - * ShapefileHandler shp_handler = new ShapefileHandler(shp_file_input_stream); + * ShapefileHandler shp_handler = new ShapefileHandler(new File("zipped_shapefile.zip")); * if (shp_handler.containsShapefile()){ * File rezip_folder = new File("~/folder_for_rezipping"); - * boolean rezip_success = shp_handler.rezipShapefileSets(shp_file_input_stream, rezip_folder ); + * boolean rezip_success = shp_handler.rezipShapefileSets(rezip_folder ); * if (!rezip_success){ * // rezip failed, should be an error message (String) available System.out.println(shp_handler.error_message); @@ -74,7 +71,8 @@ public class ShapefileHandler{ public final static String SHP_XML_EXTENSION = "shp.xml"; public final static String BLANK_EXTENSION = "__PLACEHOLDER-FOR-BLANK-EXTENSION__"; public final static List SHAPEFILE_ALL_EXTENSIONS = Arrays.asList("shp", "shx", "dbf", "prj", "sbn", "sbx", "fbn", "fbx", "ain", "aih", "ixs", "mxs", "atx", "cpg", "qpj", "qmd", SHP_XML_EXTENSION); - + private final File zipFile; + public boolean DEBUG = false; private boolean zipFileProcessed = false; @@ -97,9 +95,6 @@ public class ShapefileHandler{ private Map> fileGroups = new HashMap<>(); private List finalRezippedFiles = new ArrayList<>(); - - private String outputFolder = "unzipped"; - private String rezippedFolder = "rezipped"; // Debug helper private void msg(String s){ @@ -116,40 +111,28 @@ private void msgt(String s){ } /* - Constructor, start with filename - */ - public ShapefileHandler(String filename){ - - if (filename==null){ - this.addErrorMessage("The filename was null"); - return; - } - - FileInputStream zip_file_stream; - try { - zip_file_stream = new FileInputStream(new File(filename)); - } catch (FileNotFoundException ex) { - this.addErrorMessage("The file was not found"); + Constructor, start with File + */ + public ShapefileHandler(File zip_file) throws IOException { + zipFile = zip_file; + if (zip_file == null) { + this.addErrorMessage("The file was null"); return; } - - this.examineZipfile(zip_file_stream); - } - - - /* - Constructor, start with FileInputStream - */ - public ShapefileHandler(FileInputStream zip_file_stream){ - - if (zip_file_stream==null){ - this.addErrorMessage("The zip_file_stream was null"); - return; + try (var zip_file_object = new ZipFile(zip_file)) { + this.examineZipfile(zip_file_object); + } + catch (FileNotFoundException ex) { + // While this constructor had a FileInputStream as argument: + // FileUtil.determineFileType threw this exception before calling the constructor with a FileInputStream + // IngestServiceShapefileHelper.processFile won´t call this constructor if the file is not valid hence does not exist. + // When the file would have disappeared in the meantime, it would have produced a slightly different error message. + logger.severe("File not found: " + zip_file.getAbsolutePath()); + throw ex; } - this.examineZipfile(zip_file_stream); } - + public List getFinalRezippedFiles(){ return this.finalRezippedFiles; } @@ -291,26 +274,19 @@ inside the uploaded zip file (issue #6873). To achieve this, we recreate subfolders in the FileMetadata of the newly created DataFiles. (-- L.A. 09/2020) */ - private boolean unzipFilesToDirectory(FileInputStream zipfile_input_stream, File target_directory){ + private boolean unzipFilesToDirectory(ZipFile zipfileInput, File target_directory){ logger.fine("unzipFilesToDirectory: " + target_directory.getAbsolutePath() ); - if (zipfile_input_stream== null){ - this.addErrorMessage("unzipFilesToDirectory. 
The zipfile_input_stream is null."); - return false; - } if (!target_directory.isDirectory()){ this.addErrorMessage("This directory does not exist: " + target_directory.getAbsolutePath()); return false; } - List unzippedFileNames = new ArrayList<>(); - - ZipInputStream zipStream = new ZipInputStream(zipfile_input_stream); + List unzippedFileNames = new ArrayList<>(); + - ZipEntry origEntry; - byte[] buffer = new byte[2048]; try { - while((origEntry = zipStream.getNextEntry())!=null){ + for(var origEntry : Collections.list(zipfileInput.entries())){ String zentryFileName = origEntry.getName(); logger.fine("\nOriginal entry name: " + origEntry); @@ -360,15 +336,10 @@ private boolean unzipFilesToDirectory(FileInputStream zipfile_input_stream, File unzippedFileNames.add(outpath); } logger.fine("Write zip file: " + outpath); - FileOutputStream fileOutputStream; - long fsize = 0; - fileOutputStream = new FileOutputStream(outpath); - int len;// = 0; - while ((len = zipStream.read(buffer)) > 0){ - fileOutputStream.write(buffer, 0, len); - fsize+=len; - } // end while - fileOutputStream.close(); + try(var inputStream = zipfileInput.getInputStream(origEntry)) { + Files.createDirectories(new File(outpath).getParentFile().toPath()); + Files.copy(inputStream, Path.of(outpath), StandardCopyOption.REPLACE_EXISTING); + } } // end outer while } catch (IOException ex) { for (StackTraceElement el : ex.getStackTrace()){ @@ -377,19 +348,13 @@ private boolean unzipFilesToDirectory(FileInputStream zipfile_input_stream, File this.addErrorMessage("Failed to open ZipInputStream entry" + ex.getMessage()); return false; } - - try { - zipStream.close(); - } catch (IOException ex) { - Logger.getLogger(ShapefileHandler.class.getName()).log(Level.SEVERE, null, ex); - } - return true; + return true; } /* Rezip the shapefile(s) into a given directory Assumes that the zipfile_input_stream has already been checked! */ - public boolean rezipShapefileSets(FileInputStream zipfile_input_stream, File rezippedFolder) throws IOException{ + public boolean rezipShapefileSets(File rezippedFolder) throws IOException{ logger.fine("rezipShapefileSets"); //msgt("rezipShapefileSets"); if (!this.zipFileProcessed){ @@ -400,10 +365,6 @@ public boolean rezipShapefileSets(FileInputStream zipfile_input_stream, File rez this.addErrorMessage("There are no shapefiles here!"); return false; } - if (zipfile_input_stream== null){ - this.addErrorMessage("The zipfile_input_stream is null."); - return false; - } if (rezippedFolder == null){ this.addErrorMessage("The rezippedFolder is null."); return false; @@ -433,9 +394,11 @@ public boolean rezipShapefileSets(FileInputStream zipfile_input_stream, File rez // Unzip files! - if (!this.unzipFilesToDirectory(zipfile_input_stream, dir_for_unzipping)){ - this.addErrorMessage("Failed to unzip files."); - return false; + try(var zipfileObject = new ZipFile(zipFile)) { + if (!this.unzipFilesToDirectory(zipfileObject, dir_for_unzipping)) { + this.addErrorMessage("Failed to unzip files."); + return false; + } } // Redistribute files! String target_dirname = rezippedFolder.getAbsolutePath(); @@ -681,27 +644,19 @@ private boolean isFileToSkip(String fname){ /************************************** * Iterate through the zip file contents. * Does it contain any shapefiles? 
- * - * @param FileInputStream zip_file_stream */ - private boolean examineZipfile(FileInputStream zip_file_stream){ + private boolean examineZipfile(ZipFile zip_file){ // msgt("examineZipfile"); - - if (zip_file_stream==null){ - this.addErrorMessage("The zip file stream was null"); - return false; - } - + // Clear out file lists this.filesListInDir.clear(); this.filesizeHash.clear(); this.fileGroups.clear(); - try{ - ZipInputStream zipStream = new ZipInputStream(zip_file_stream); - ZipEntry entry; - List hiddenDirectories = new ArrayList<>(); - while((entry = zipStream.getNextEntry())!=null){ + try{ + List hiddenDirectories = new ArrayList<>(); + for(var entry : Collections.list(zip_file.entries())){ + String zentryFileName = entry.getName(); boolean isDirectory = entry.isDirectory(); @@ -748,8 +703,6 @@ private boolean examineZipfile(FileInputStream zip_file_stream){ this.filesizeHash.put(unzipFilePath, entry.getSize()); } } // end while - - zipStream.close(); if (this.filesListInDir.isEmpty()){ errorMessage = "No files in zipStream"; @@ -759,13 +712,8 @@ private boolean examineZipfile(FileInputStream zip_file_stream){ this.zipFileProcessed = true; return true; - }catch(ZipException ex){ - this.addErrorMessage("ZipException"); - msgt("ZipException"); - return false; - }catch(IOException ex){ - //ex.printStackTrace(); + //ex.printStackTrace(); this.addErrorMessage("IOException File name"); msgt("IOException"); return false; @@ -773,9 +721,6 @@ private boolean examineZipfile(FileInputStream zip_file_stream){ this.addErrorMessage("IllegalArgumentException when parsing zipfile"); msgt("IllegalArgumentException when parsing zipfile"); return false; - - }finally{ - } } // end examineFile diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java new file mode 100644 index 00000000000..1262984eb27 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java @@ -0,0 +1,187 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit; +import edu.harvard.iq.dataverse.util.JhoveFileType; +import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; +import org.jetbrains.annotations.NotNull; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.MockedStatic; +import org.mockito.Mockito; + +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.PrintStream; +import java.nio.file.NoSuchFileException; +import java.nio.file.Path; + +import static edu.harvard.iq.dataverse.DataFile.ChecksumType.MD5; +import static org.apache.commons.io.file.FilesUncheck.createDirectories; +import static org.apache.commons.io.file.PathUtils.deleteDirectory; +import static org.assertj.core.api.Assertions.assertThat; +import static 
org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; + + +@LocalJvmSettings +public class CreateNewDataFilesTest { + // TODO keep constants for annotations in sync with class name + Path testDir = Path.of("target/test/").resolve(getClass().getSimpleName()); + PrintStream original_stderr; + + @BeforeEach + public void cleanTmpDir() throws IOException { + original_stderr = System.err; + if(testDir.toFile().exists()) + deleteDirectory(testDir); + } + + @AfterEach void restoreStderr() { + System.setErr(original_stderr); + } + + @Test + @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "target/test/CreateNewDataFilesTest/tmp") + public void execute_fails_to_upload_when_tmp_does_not_exist() throws FileNotFoundException { + + mockTmpLookup(); + var cmd = createCmd("scripts/search/data/shape/shapefile.zip", mockDatasetVersion(), 1000L, 500L); + var ctxt = mockCommandContext(mockSysConfig(true, 0L, MD5, 10)); + + assertThatThrownBy(() -> cmd.execute(ctxt)) + .isInstanceOf(CommandException.class) + .hasMessageContaining("Failed to save the upload as a temp file (temp disk space?)") + .hasRootCauseInstanceOf(NoSuchFileException.class) + .getRootCause() + .hasMessageStartingWith("target/test/CreateNewDataFilesTest/tmp/temp/tmp"); + } + + @Test + @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "target/test/CreateNewDataFilesTest/tmp") + public void execute_fails_on_size_limit() throws Exception { + createDirectories(Path.of("target/test/CreateNewDataFilesTest/tmp/temp")); + + mockTmpLookup(); + var cmd = createCmd("scripts/search/data/binary/3files.zip", mockDatasetVersion(), 1000L, 500L); + var ctxt = mockCommandContext(mockSysConfig(true, 50L, MD5, 0)); + try (var mockedStatic = Mockito.mockStatic(JhoveFileType.class)) { + mockedStatic.when(JhoveFileType::getJhoveConfigFile).thenReturn("conf/jhove/jhove.conf"); + + assertThatThrownBy(() -> cmd.execute(ctxt)) + .isInstanceOf(CommandException.class) + .hasMessage("This file size (462 B) exceeds the size limit of 50 B."); + } + } + + @Test + @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "target/test/CreateNewDataFilesTest/tmp") + public void execute_loads_individual_files_from_uploaded_zip() throws Exception { + var tempDir = testDir.resolve("tmp/temp"); + createDirectories(tempDir); + + mockTmpLookup(); + var cmd = createCmd("src/test/resources/own-cloud-downloads/greetings.zip", mockDatasetVersion(), 1000L, 500L); + var ctxt = mockCommandContext(mockSysConfig(false, 1000000L, MD5, 10)); + try (MockedStatic mockedStatic = Mockito.mockStatic(JhoveFileType.class)) { + mockedStatic.when(JhoveFileType::getJhoveConfigFile).thenReturn("conf/jhove/jhove.conf"); + + // the test + var result = cmd.execute(ctxt); + + assertThat(result.getErrors()).hasSize(0); + assertThat(result.getDataFiles().stream().map(dataFile -> + dataFile.getFileMetadata().getDirectoryLabel() + "/" + dataFile.getDisplayName() + )).containsExactlyInAnyOrder( + "DD-1576/goodbye.txt", "DD-1576/hello.txt" + ); + var storageIds = result.getDataFiles().stream().map(DataFile::getStorageIdentifier).toList(); + assertThat(tempDir.toFile().list()) + .containsExactlyInAnyOrderElementsOf(storageIds); + } + } + + @Test + @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "target/test/CreateNewDataFilesTest/tmp") + public void execute_rezips_sets_of_shape_files_from_uploaded_zip() throws Exception { + var tempDir = testDir.resolve("tmp/temp"); + createDirectories(tempDir); + + mockTmpLookup(); + var cmd = 
createCmd("src/test/resources/own-cloud-downloads/shapes.zip", mockDatasetVersion(), 1000L, 500L); + var ctxt = mockCommandContext(mockSysConfig(false, 100000000L, MD5, 10)); + try (var mockedJHoveFileType = Mockito.mockStatic(JhoveFileType.class)) { + mockedJHoveFileType.when(JhoveFileType::getJhoveConfigFile).thenReturn("conf/jhove/jhove.conf"); + + // the test + var result = cmd.execute(ctxt); + + assertThat(result.getErrors()).hasSize(0); + assertThat(result.getDataFiles().stream().map(dataFile -> + (dataFile.getFileMetadata().getDirectoryLabel() + "/" + dataFile.getDisplayName()) + .replaceAll(".*temp/shp_[-0-9]*/", "") + )).containsExactlyInAnyOrder( + "dataDir/shape1.zip", + "dataDir/shape2/shape2", + "dataDir/shape2/shape2.pdf", + "dataDir/shape2/shape2.txt", + "dataDir/shape2/shape2.zip", + "dataDir/extra/shp_dictionary.xls", + "dataDir/extra/notes", + "dataDir/extra/README.MD" + ); + var storageIds = result.getDataFiles().stream().map(DataFile::getStorageIdentifier).toList(); + assertThat(tempDir.toFile().list()) + .containsExactlyInAnyOrderElementsOf(storageIds); + } + } + + private static @NotNull CreateNewDataFilesCommand createCmd(String name, DatasetVersion dsVersion, long allocatedQuotaLimit, long usedQuotaLimit) throws FileNotFoundException { + return new CreateNewDataFilesCommand( + Mockito.mock(DataverseRequest.class), + dsVersion, + new FileInputStream(name), + "example.zip", + "application/zip", + null, + new UploadSessionQuotaLimit(allocatedQuotaLimit, usedQuotaLimit), + "sha"); + } + + private static @NotNull CommandContext mockCommandContext(SystemConfig sysCfg) { + var ctxt = Mockito.mock(CommandContext.class); + Mockito.when(ctxt.systemConfig()).thenReturn(sysCfg); + return ctxt; + } + + private static @NotNull SystemConfig mockSysConfig(boolean isStorageQuataEnforced, long maxFileUploadSizeForStore, DataFile.ChecksumType checksumType, int zipUploadFilesLimit) { + var sysCfg = Mockito.mock(SystemConfig.class); + Mockito.when(sysCfg.isStorageQuotasEnforced()).thenReturn(isStorageQuataEnforced); + Mockito.when(sysCfg.getMaxFileUploadSizeForStore(any())).thenReturn(maxFileUploadSizeForStore); + Mockito.when(sysCfg.getFileFixityChecksumAlgorithm()).thenReturn(checksumType); + Mockito.when(sysCfg.getZipUploadFilesLimit()).thenReturn(zipUploadFilesLimit); + return sysCfg; + } + + private static void mockTmpLookup() { + JvmSettings mockFilesDirectory = Mockito.mock(JvmSettings.class); + Mockito.when(mockFilesDirectory.lookup()).thenReturn("/mocked/path"); + } + + private static @NotNull DatasetVersion mockDatasetVersion() { + var dsVersion = Mockito.mock(DatasetVersion.class); + Mockito.when(dsVersion.getDataset()).thenReturn(Mockito.mock(Dataset.class)); + return dsVersion; + } + +} diff --git a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java index 3c5b4797b0a..c4ee4547ed7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java @@ -63,22 +63,22 @@ private File createBlankFile(String filename) throws IOException { } return Files.createFile(tempFolder.resolve(filename)).toFile(); } - + private FileInputStream createZipReturnFilestream(List file_names, String zipfile_name) throws IOException{ - + File zip_file_obj = this.createAndZipFiles(file_names, zipfile_name); if (zip_file_obj == null){ return null; } - + FileInputStream 
file_input_stream = new FileInputStream(zip_file_obj); return file_input_stream; - + } - + /* - Convenience class to create .zip file and return a FileInputStream + Convenience method to create .zip file and return a File @param List file_names - List of filenames to add to .zip. These names will be used to create 0 length files @param String zipfile_name - Name of .zip file to create @@ -98,13 +98,13 @@ private File createAndZipFiles(List file_names, String zipfile_name) thr } Path zip_file_obj = this.tempFolder.resolve(zipfile_name); - ZipOutputStream zip_stream = new ZipOutputStream(new FileOutputStream(zip_file_obj.toFile())); + try (ZipOutputStream zip_stream = new ZipOutputStream(new FileOutputStream(zip_file_obj.toFile()))) { - // Iterate through File objects and add them to the ZipOutputStream - for (File file_obj : fileCollection) { - this.addToZipFile(file_obj.getName(), file_obj, zip_stream); + // Iterate through File objects and add them to the ZipOutputStream + for (File file_obj : fileCollection) { + this.addToZipFile(file_obj.getName(), file_obj, zip_stream); + } } - /* ----------------------------------- Cleanup: Delete single files that were added to .zip ----------------------------------- */ @@ -126,7 +126,7 @@ public void testCreateZippedNonShapefile() throws IOException{ File zipfile_obj = createAndZipFiles(file_names, "not-quite-a-shape.zip"); // Pass the .zip to the ShapefileHandler - ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(zipfile_obj)); + ShapefileHandler shp_handler = new ShapefileHandler(zipfile_obj); shp_handler.DEBUG= true; // Contains shapefile? @@ -157,7 +157,7 @@ public void testShapefileWithQpjAndQmd() throws IOException { File zipFile = createAndZipFiles(fileNames, "testShapeWithNewExtensions.zip"); // Pass the zip to the ShapefileHandler - ShapefileHandler shpHandler = new ShapefileHandler(new FileInputStream(zipFile)); + ShapefileHandler shpHandler = new ShapefileHandler(zipFile); shpHandler.DEBUG = true; // Check if it is recognized as a shapefile @@ -191,7 +191,7 @@ public void testZippedTwoShapefiles() throws IOException{ File zipfile_obj = createAndZipFiles(file_names, "two-shapes.zip"); // Pass the .zip to the ShapefileHandler - ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(zipfile_obj)); + ShapefileHandler shp_handler = new ShapefileHandler(zipfile_obj); shp_handler.DEBUG= true; assertTrue(shp_handler.containsShapefile(), "verify shapefile existance"); @@ -217,7 +217,7 @@ public void testZippedTwoShapefiles() throws IOException{ // Rezip/Reorder the files File test_unzip_folder = Files.createDirectory(this.tempFolder.resolve("test_unzip")).toFile(); //File test_unzip_folder = new File("/Users/rmp553/Desktop/blah"); - shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), test_unzip_folder ); + shp_handler.rezipShapefileSets(test_unzip_folder ); // Does the re-ordering do what we wanted? 
@@ -244,7 +244,7 @@ public void testZippedShapefileWithExtraFiles() throws IOException{ File zipfile_obj = createAndZipFiles(file_names, "shape-plus.zip"); // Pass the .zip to the ShapefileHandler - ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(zipfile_obj)); + ShapefileHandler shp_handler = new ShapefileHandler(zipfile_obj); shp_handler.DEBUG= true; assertTrue(shp_handler.containsShapefile(), "verify shapefile existance"); @@ -264,7 +264,7 @@ public void testZippedShapefileWithExtraFiles() throws IOException{ File unzip2Folder = Files.createDirectory(this.tempFolder.resolve("test_unzip2")).toFile(); // Rezip/Reorder the files - shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), unzip2Folder); + shp_handler.rezipShapefileSets(unzip2Folder); //shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), new File("/Users/rmp553/Desktop/blah")); @@ -284,9 +284,9 @@ public void testZippedShapefileWithExtraFiles() throws IOException{ } @Test - public void testHiddenFiles() { + public void testHiddenFiles() throws IOException { // test with shapefiles in hidden directory - ShapefileHandler shp_handler = new ShapefileHandler("src/test/resources/hiddenShapefiles.zip"); + ShapefileHandler shp_handler = new ShapefileHandler(new File("src/test/resources/hiddenShapefiles.zip")); shp_handler.DEBUG= true; assertFalse(shp_handler.containsShapefile()); } diff --git a/src/test/resources/own-cloud-downloads/greetings.zip b/src/test/resources/own-cloud-downloads/greetings.zip new file mode 100644 index 0000000000000000000000000000000000000000..6e166d385d18dce3b63eac9b742c40fea5dddae9 GIT binary patch literal 679 zcmWIWW@gc400HJ?!$=VQ9||}a6c}7wbPY|-&GZ==WI$3lK>*kk4xl`1BC09ENT#Ic z=cgo9rs|bclwdQ^4as}OAjq@OSojYL zK!~Xf3`-i3O+^b%Sg62g=vMe94rK2G;+uVc(bwT01W{F uSm=P*%pd{?V6+$`gbz$lU;xy|4q~`6LHHoy0B=?{kkgofx?oOZU;qG5YmhPk literal 0 HcmV?d00001 diff --git a/src/test/resources/own-cloud-downloads/shapes.zip b/src/test/resources/own-cloud-downloads/shapes.zip new file mode 100644 index 0000000000000000000000000000000000000000..99d5f36c8952950869f77b3316e3c9d5b65c72fe GIT binary patch literal 4336 zcmbVOJ%|%Q7~NcQxm=^s5cLj=Ac7!wIg&!fLWtz9Sxy!$L{KETY)Je`Lb65tDHIgJ zLR9cLEd)_38wCXm3vCn|wX(FZ6S1=s(fP9Tk}s2)u$yfj+1+{X&6oN1dyXBAEn0LP zcXKBQ|H&(1WvoicD;4YQrHHkY9vLsk$g+vCxLvPZ(sHdZW}hn|?z}xV>}dN4*U{xd zadkPnTGSc5%Sf@2{({U}v*Ec7&#`9`_a5bM&&ij8W*4au7mcRpc61JQL)K(nu{%vYyY zYPq>=rCik&B|9}*l)(z_;7#t*{dxH^FqDI8xuJ9L-0VQC;11lqpD#ig7}7yk=O7<7 z)}UJMat9B$q@O|=7|KDbeMaZtrP+a4!5z&0{OTnCTqsiy3PmR&WRJ?IT~I z3=HW&`!Bp6=V0hx`1WJ{Jd}aH1KNZ7<=m|HLSFmPJ2*WkfO$LVQL_$0(lNf1Lv|pM#W+P$+H*)#G zmHjXPoXCj2+2&`A6;Yb)!$#~63;-uFV(7WYW+O_^CH7o93j@H(jA&bLf#$oslrz$) z4YgkK`pGXa0Gz~#*Y$KA^U7>Qtl(#)`Tg$!7yvqUWNI;L&QFx4EGudKDp07xMs+?= z48n1&l&AEpLDQA*82O5_4-y~cE1%o4o?PXQeeQmEq75J@0eRZO1|UKa2O<=o#)G*m8}7$8qUDnF%;!2B}f zfEUO!j>=D&A_RZ0fx!#p2}b3o#1NQY<`eJ&c|KA3DGvnZmtR8g0(tsS`DxmR;MdlN zXAAI)gQCp}{`pbA68_u{kQ{~^nua^fQ+610IP*(`B!L_NRDmhU!^+4B+%J|mH+1m{?AY=6f`8k>T&15Wc@OBwLLZv-ABc6mJGYYl?}z}$o{p2n KlvoX`mh~UjzepMY literal 0 HcmV?d00001 From 97a38eadd9ecfee2a9eefaaec7dcf67bb35b211e Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 1 Oct 2024 12:05:31 -0400 Subject: [PATCH 080/402] Fix Deaccessioned edition. 
--- src/main/java/edu/harvard/iq/dataverse/Dataset.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 52cb7d6f2dc..3bcfbcb0d5e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -335,7 +335,7 @@ public DatasetVersion getLatestVersion() { public DatasetVersion getLatestVersionForCopy() { for (DatasetVersion testDsv : getVersions()) { - if (testDsv.isReleased() || testDsv.isArchived()) { + if (testDsv.isReleased() || testDsv.isArchived() || testDsv.isDeaccessioned()) { return testDsv; } } From 2aad633b85df87775ac978da1bcc35e612507d10 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 1 Oct 2024 14:36:27 -0400 Subject: [PATCH 081/402] Patch notes --- doc/release-notes/10901deaccessioned file edit fix.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10901deaccessioned file edit fix.md diff --git a/doc/release-notes/10901deaccessioned file edit fix.md b/doc/release-notes/10901deaccessioned file edit fix.md new file mode 100644 index 00000000000..db12b1fc978 --- /dev/null +++ b/doc/release-notes/10901deaccessioned file edit fix.md @@ -0,0 +1 @@ +When a dataset was deaccessioned and was the only previous version it will cause an error when trying to update the files. \ No newline at end of file From db1a066c411f9d25a782ecb55e99c2813aadf55a Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 1 Oct 2024 14:55:46 -0400 Subject: [PATCH 082/402] Added a signature of the method to receive includeDeaccessioned param --- src/main/java/edu/harvard/iq/dataverse/Dataset.java | 9 +++++++-- .../engine/command/impl/UpdateDatasetVersionCommand.java | 2 +- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 3bcfbcb0d5e..40ed491a302 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -333,15 +333,20 @@ public DatasetVersion getLatestVersion() { return getVersions().get(0); } - public DatasetVersion getLatestVersionForCopy() { + public DatasetVersion getLatestVersionForCopy(boolean includeDeaccessioned) { for (DatasetVersion testDsv : getVersions()) { - if (testDsv.isReleased() || testDsv.isArchived() || testDsv.isDeaccessioned()) { + if (testDsv.isReleased() || testDsv.isArchived() + || (testDsv.isDeaccessioned() && includeDeaccessioned)) { return testDsv; } } return getVersions().get(0); } + public DatasetVersion getLatestVersionForCopy(){ + return getLatestVersionForCopy(false); + } + public List getVersions() { return versions; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java index bb5f5a71e24..f501094ac92 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -115,7 +115,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { */ if(persistedVersion==null) { Long id = getDataset().getLatestVersion().getId(); - persistedVersion = ctxt.datasetVersion().find(id!=null ? 
id: getDataset().getLatestVersionForCopy().getId()); + persistedVersion = ctxt.datasetVersion().find(id!=null ? id : getDataset().getLatestVersionForCopy().getId()); } //Will throw an IllegalCommandException if a system metadatablock is changed and the appropriate key is not supplied. From c2e1e157ad96275f5ae0801320f7f3fcd8fe6ac6 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Tue, 1 Oct 2024 14:58:15 -0400 Subject: [PATCH 083/402] Missing call to the new method --- .../engine/command/impl/UpdateDatasetVersionCommand.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java index f501094ac92..dc8884405ef 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -115,7 +115,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { */ if(persistedVersion==null) { Long id = getDataset().getLatestVersion().getId(); - persistedVersion = ctxt.datasetVersion().find(id!=null ? id : getDataset().getLatestVersionForCopy().getId()); + persistedVersion = ctxt.datasetVersion().find(id!=null ? id : getDataset().getLatestVersionForCopy(true).getId()); } //Will throw an IllegalCommandException if a system metadatablock is changed and the appropriate key is not supplied. From c46e0ce83969a6c678314a158205f1d133781167 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 2 Oct 2024 13:57:34 -0400 Subject: [PATCH 084/402] Test added --- .../harvard/iq/dataverse/api/DatasetsIT.java | 51 +++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index f52aa4fe9bd..e3ef11266ff 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -667,6 +667,57 @@ public void testCreatePublishDestroyDataset() { deleteDatasetResponse.prettyPrint(); assertEquals(200, deleteDatasetResponse.getStatusCode()); + // Start of deaccession test. + + // Create Dataset for deaccession test. + Response deaccessionTestDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + deaccessionTestDataset.prettyPrint(); + deaccessionTestDataset.then().assertThat().statusCode(CREATED.getStatusCode()); + Integer deaccessionTestDatasetId = UtilIT.getDatasetIdFromResponse(deaccessionTestDataset); + + // File upload for deaccession test. + String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(deaccessionTestDatasetId.toString(), pathToFile, apiToken); + uploadResponse.prettyPrint(); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + Integer deaccessionTestFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); + + // Publish Dataset for deaccession test. + Response deaccessionTestPublishResponse = UtilIT.publishDatasetViaNativeApi(deaccessionTestDatasetId, "major", apiToken); + deaccessionTestPublishResponse.prettyPrint(); + + // Deaccession Dataset for deaccession test. 
+ Response deaccessionTestDatasetResponse = UtilIT.deaccessionDataset(deaccessionTestDatasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); + deaccessionTestDatasetResponse.prettyPrint(); + deaccessionTestDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Version check for deaccession test - Deaccessioned. + Response deaccessionTestVersions = UtilIT.getDatasetVersions(deaccessionTestDatasetId.toString(), apiToken); + deaccessionTestVersions.prettyPrint(); + deaccessionTestVersions.then().assertThat() + .body("data[0].latestVersionPublishingState", equalTo("DEACCESSIONED")) + .statusCode(OK.getStatusCode()); + + // File deletion / Draft creation due deltigion check for deaccession test. + Response deaccessionTestDeleteFile = UtilIT.deleteFileInDataset(deaccessionTestFileId, apiToken); + deaccessionTestDeleteFile.prettyPrint(); + + // Version check for deaccession test - Draft. + deaccessionTestVersions = UtilIT.getDatasetVersions(deaccessionTestDatasetId.toString(), apiToken); + deaccessionTestVersions.prettyPrint(); + deaccessionTestVersions.then().assertThat() + .body("data[0].latestVersionPublishingState", equalTo("DRAFT")) + .statusCode(OK.getStatusCode()); + + // Deleting Dataset for deaccession test. + Response deaccessionTestDelete = UtilIT.destroyDataset(deaccessionTestDatasetId, apiToken); + deaccessionTestDelete.prettyPrint(); + deaccessionTestDelete.then() + .assertThat() + .statusCode(OK.getStatusCode()); + + // End of deaccession test. + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken); deleteDataverseResponse.prettyPrint(); assertEquals(200, deleteDataverseResponse.getStatusCode()); From 2f4a84cefbedc1e3cd0ded56c7b11a2ab5348a99 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 2 Oct 2024 13:58:08 -0400 Subject: [PATCH 085/402] Typo --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index e3ef11266ff..d6db7a6a6c6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -667,7 +667,7 @@ public void testCreatePublishDestroyDataset() { deleteDatasetResponse.prettyPrint(); assertEquals(200, deleteDatasetResponse.getStatusCode()); - // Start of deaccession test. + // Start of deaccession test. // Create Dataset for deaccession test. Response deaccessionTestDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); From cb6e44f0d58f72aea6bae7476d85fa79ca2b31b7 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 2 Oct 2024 15:15:25 -0400 Subject: [PATCH 086/402] tweak release note #10875 --- .../10886-update-to-conditions-to-display-image_url.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/10886-update-to-conditions-to-display-image_url.md b/doc/release-notes/10886-update-to-conditions-to-display-image_url.md index a7adda11840..6dfe8eb9f2d 100644 --- a/doc/release-notes/10886-update-to-conditions-to-display-image_url.md +++ b/doc/release-notes/10886-update-to-conditions-to-display-image_url.md @@ -3,4 +3,6 @@ Search API (/api/search) responses for Datafiles include image_url for the thumb 2. A Thumbnail is available for the Datafile 3. If the Datafile is Restricted then the caller must have Download File Permission for the Datafile 4. 
The Datafile is NOT actively embargoed -5. The Datafile's retention has NOT expired +5. The Datafile's retention period has NOT expired + +See also #10875 and #10886. From e57f834558952d61f0efdd447dcdee49c7691927 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 2 Oct 2024 17:18:41 -0400 Subject: [PATCH 087/402] Remove of clone from the API file deletion. --- src/main/java/edu/harvard/iq/dataverse/api/Files.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index d786aab35a8..c8bc8420944 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -343,10 +343,9 @@ public Response deleteFileInDataset(@Context ContainerRequestContext crc, @PathP DataFile dataFile = findDataFileOrDie(fileIdOrPersistentId); FileMetadata fileToDelete = dataFile.getLatestFileMetadata(); Dataset dataset = dataFile.getOwner(); - DatasetVersion v = dataset.getOrCreateEditVersion(); deletePhysicalFile = !dataFile.isReleased(); - UpdateDatasetVersionCommand update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, Arrays.asList(fileToDelete), v); + UpdateDatasetVersionCommand update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, Arrays.asList(fileToDelete)); update_cmd.setValidateLenient(true); try { From 8accad72c693a2f92aa1a6198a80101623d07bcb Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 2 Oct 2024 18:13:51 -0400 Subject: [PATCH 088/402] Copy still needs to be created but not sent as a parameter to the command. --- src/main/java/edu/harvard/iq/dataverse/api/Files.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index c8bc8420944..633d420c527 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -343,6 +343,7 @@ public Response deleteFileInDataset(@Context ContainerRequestContext crc, @PathP DataFile dataFile = findDataFileOrDie(fileIdOrPersistentId); FileMetadata fileToDelete = dataFile.getLatestFileMetadata(); Dataset dataset = dataFile.getOwner(); + dataset.getOrCreateEditVersion(); deletePhysicalFile = !dataFile.isReleased(); UpdateDatasetVersionCommand update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, Arrays.asList(fileToDelete)); From 70a2651bc94a15e7a3bc5ad2423aa9edf2c73a99 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Wed, 2 Oct 2024 18:15:38 -0400 Subject: [PATCH 089/402] File upload assertion --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index d6db7a6a6c6..6a854b08023 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -701,7 +701,10 @@ public void testCreatePublishDestroyDataset() { // File deletion / Draft creation due deltigion check for deaccession test. Response deaccessionTestDeleteFile = UtilIT.deleteFileInDataset(deaccessionTestFileId, apiToken); deaccessionTestDeleteFile.prettyPrint(); - + deaccessionTestDeleteFile + .then().assertThat() + .statusCode(OK.getStatusCode()); + // Version check for deaccession test - Draft. 
deaccessionTestVersions = UtilIT.getDatasetVersions(deaccessionTestDatasetId.toString(), apiToken); deaccessionTestVersions.prettyPrint(); From 4e2bc1a0e5f37876d1c8defbeadfe7357c758fbf Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 3 Oct 2024 10:11:51 -0400 Subject: [PATCH 090/402] Update DatasetsIT.java --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 6a854b08023..d51ce010dab 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -667,7 +667,7 @@ public void testCreatePublishDestroyDataset() { deleteDatasetResponse.prettyPrint(); assertEquals(200, deleteDatasetResponse.getStatusCode()); - // Start of deaccession test. + // Start of test of deleting a file from a deaccessioned version. // Create Dataset for deaccession test. Response deaccessionTestDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); @@ -698,7 +698,7 @@ public void testCreatePublishDestroyDataset() { .body("data[0].latestVersionPublishingState", equalTo("DEACCESSIONED")) .statusCode(OK.getStatusCode()); - // File deletion / Draft creation due deltigion check for deaccession test. + // File deletion / Draft creation due diligence check for deaccession test. Response deaccessionTestDeleteFile = UtilIT.deleteFileInDataset(deaccessionTestFileId, apiToken); deaccessionTestDeleteFile.prettyPrint(); deaccessionTestDeleteFile From d50c484892519e342c2de440985fc554c92b3c4b Mon Sep 17 00:00:00 2001 From: jeromeroucou Date: Fri, 4 Oct 2024 17:40:46 +0200 Subject: [PATCH 091/402] Facets filter labels not translated in result block (#10158) * indentation * add comments et remove old "dead" code * Add friendly name for value from filter query * Add release note --- ...s-labels-not-translated-in-result-block.md | 7 +++ .../search/SearchIncludeFragment.java | 48 +++++++++++-------- 2 files changed, 34 insertions(+), 21 deletions(-) create mode 100644 doc/release-notes/9408-fix-facets-labels-not-translated-in-result-block.md diff --git a/doc/release-notes/9408-fix-facets-labels-not-translated-in-result-block.md b/doc/release-notes/9408-fix-facets-labels-not-translated-in-result-block.md new file mode 100644 index 00000000000..344859e2dbd --- /dev/null +++ b/doc/release-notes/9408-fix-facets-labels-not-translated-in-result-block.md @@ -0,0 +1,7 @@ +## Fix facets filter labels not translated in result block + +On the main page, it's possible to filter results using search facets. If internationalization (i18n) has been activated in the Dataverse installation, allowing pages to be displayed in several languages, the facets are translated in the filter column. However, they aren't translated in the search results and remain in the default language, English. + +This version of Dataverse fix this, and includes internationalization in the facets visible in the search results section. 
+ +For more information, see issue [#9408](https://github.com/IQSS/dataverse/issues/9408) and pull request [#10158](https://github.com/IQSS/dataverse/pull/10158) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 4f3f6e46e48..9328dd03ca2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -34,6 +34,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.MissingResourceException; import java.util.Optional; import java.util.Set; import java.util.logging.Logger; @@ -1231,40 +1232,33 @@ public String getTypeFromFilterQuery(String filterQuery) { } public List getFriendlyNamesFromFilterQuery(String filterQuery) { - - - if ((filterQuery == null)|| - (datasetfieldFriendlyNamesBySolrField == null)|| - (staticSolrFieldFriendlyNamesBySolrField==null)){ + + if ((filterQuery == null) || + (datasetfieldFriendlyNamesBySolrField == null) || + (staticSolrFieldFriendlyNamesBySolrField == null)) { return null; } - - if(!filterQuery.contains(":")) { + + if (!filterQuery.contains(":")) { return null; } - + int index = filterQuery.indexOf(":"); String key = filterQuery.substring(0,index); String value = filterQuery.substring(index+1); - List friendlyNames = new ArrayList<>(); + // friendlyNames get 2 entries : key and value + List friendlyNames = new ArrayList<>(2); + // Get dataset field friendly name from default ressource bundle file String datasetfieldFriendyName = datasetfieldFriendlyNamesBySolrField.get(key); if (datasetfieldFriendyName != null) { friendlyNames.add(datasetfieldFriendyName); } else { + // Get non dataset field friendly name from "staticSearchFields" ressource bundle file String nonDatasetSolrField = staticSolrFieldFriendlyNamesBySolrField.get(key); if (nonDatasetSolrField != null) { friendlyNames.add(nonDatasetSolrField); - } else if (key.equals(SearchFields.PUBLICATION_STATUS)) { - /** - * @todo Refactor this quick fix for - * https://github.com/IQSS/dataverse/issues/618 . We really need - * to get rid of all the reflection that's happening with - * solrQueryResponse.getStaticSolrFieldFriendlyNamesBySolrField() - * and - */ - friendlyNames.add("Publication Status"); } else { // meh. 
better than nuthin' friendlyNames.add(key); @@ -1276,9 +1270,13 @@ public List getFriendlyNamesFromFilterQuery(String filterQuery) { String valueWithoutQuotes = noTrailingQuote; if (key.equals(SearchFields.METADATA_TYPES) && getDataverse() != null && getDataverse().getMetadataBlockFacets() != null) { - Optional friendlyName = getDataverse().getMetadataBlockFacets().stream().filter(block -> block.getMetadataBlock().getName().equals(valueWithoutQuotes)).findFirst().map(block -> block.getMetadataBlock().getLocaleDisplayFacet()); + Optional friendlyName = getDataverse().getMetadataBlockFacets() + .stream() + .filter(block -> block.getMetadataBlock().getName().equals(valueWithoutQuotes)) + .findFirst() + .map(block -> block.getMetadataBlock().getLocaleDisplayFacet()); logger.fine(String.format("action=getFriendlyNamesFromFilterQuery key=%s value=%s friendlyName=%s", key, value, friendlyName)); - if(friendlyName.isPresent()) { + if (friendlyName.isPresent()) { friendlyNames.add(friendlyName.get()); return friendlyNames; } @@ -1290,7 +1288,15 @@ public List getFriendlyNamesFromFilterQuery(String filterQuery) { } } - friendlyNames.add(valueWithoutQuotes); + // Get value friendly name from default ressource bundle file + String valueFriendlyName; + try { + valueFriendlyName = BundleUtil.getStringFromPropertyFile(noTrailingQuote, "Bundle"); + } catch (MissingResourceException e) { + valueFriendlyName = noTrailingQuote; + } + + friendlyNames.add(valueFriendlyName); return friendlyNames; } From 1a5ca4ba1b8f275fc44c7b1f67bf5d4509aa237f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 4 Oct 2024 11:41:54 -0400 Subject: [PATCH 092/402] PR template, add hyphen to show issue title --- .github/PULL_REQUEST_TEMPLATE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index b57aa23fc0f..f2a779bbf21 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -2,7 +2,7 @@ **Which issue(s) this PR closes**: -Closes # +- Closes # **Special notes for your reviewer**: From 7d3ab225af18650b67d9e018bb37806229f69bf1 Mon Sep 17 00:00:00 2001 From: jo-pol Date: Mon, 7 Oct 2024 09:51:24 +0200 Subject: [PATCH 093/402] open zip once and reuse list of entries --- .../command/impl/CreateNewDataFilesCommand.java | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index e543606e039..76939751899 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -317,7 +317,8 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException */ try (var zipFile = openZipFile(tempFile, charset)) { - for (var entry : filteredZipEntries(zipFile)) { + var zipEntries = filteredZipEntries(zipFile); + for (var entry : zipEntries) { logger.fine("inside first zip pass; this entry: " + entry.getName()); numberOfUnpackableFiles++; if (numberOfUnpackableFiles > fileNumberLimit) { @@ -349,15 +350,12 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } } } - } + // OK we're still here - that means we can proceed unzipping. - // OK we're still here - that means we can proceed unzipping. 
- - // reset: - combinedUnzippedFileSize = 0L; + // reset: + combinedUnzippedFileSize = 0L; - try (var zipFile = openZipFile(tempFile, charset)) { - for (var entry : filteredZipEntries(zipFile)) { + for (var entry : zipEntries) { if (datafiles.size() > fileNumberLimit) { logger.warning("Zip upload - too many files."); warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit From aeb8f371ee4361073976015800c36ee5b3d47642 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Tue, 8 Oct 2024 09:14:54 -0400 Subject: [PATCH 094/402] #10889 bump to Postgres 17, Flyway 10.19 --- modules/dataverse-parent/pom.xml | 2 +- pom.xml | 11 ++++++++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 5abf2763128..9442b55d622 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -149,7 +149,7 @@ 6.2024.6 - 42.7.2 + 42.7.4 9.4.1 1.12.748 26.30.0 diff --git a/pom.xml b/pom.xml index edf72067976..b59b69ce765 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ war 1.2.18.4 - 9.22.1 + 10.19.0 1.20.1 5.2.1 2.4.1 @@ -177,6 +177,11 @@ flyway-core ${flyway.version} + + org.flywaydb + flyway-database-postgresql + ${flyway.version} + org.eclipse.persistence @@ -993,7 +998,7 @@ true docker-build - 16 + 17 gdcc/dataverse:${app.image.tag} unstable @@ -1127,4 +1132,4 @@ - \ No newline at end of file + From cd2fa36dfc75544b4b05a6c51684699faf9baf22 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 8 Oct 2024 13:21:23 -0400 Subject: [PATCH 095/402] #8184 ui-bundle changes --- .../iq/dataverse/privateurl/package-info.java | 60 +++++++++---------- src/main/java/propertyFiles/Bundle.properties | 32 +++++----- .../webapp/previewurl-popup-fragment.xhtml | 15 +++++ 3 files changed, 61 insertions(+), 46 deletions(-) create mode 100644 src/main/webapp/previewurl-popup-fragment.xhtml diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java index 6e939c1bb6d..1310e0eb199 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java @@ -1,19 +1,19 @@ /** - * Private URL for unpublished datasets. + * Preview URL for unpublished datasets. *

- * The Private URL feature has been implemented as a specialized role assignment + * The Preview (formerly Private) URL feature has been implemented as a specialized role assignment * with an associated token that permits read-only access to the metadata and * all files (regardless of if the files are restricted or not) of a draft * version of a dataset. *

- * As of this note, a second option - to create a Private URL that provides an + * As of this note, a second option - to create a Preview URL that provides an * anonymized view of the dataset has been added. This option works the same as * the original except that it hides author names in the citation block, hides * the values for an admin specified list of metadata fields, disables citation * downloads, and disables API access (except for file and file thumbnail * downloads which are used by the UI). *

- * The primary use case for a Private URL is for journal editors to send a link + * The primary use case for a Preview URL is for journal editors to send a link * to reviewers of a dataset before publication. In most cases, these journal * editors do not permit depositors to publish on their own, which is to say * they only allow depositors to have the "Contributor" role on the datasets @@ -24,42 +24,42 @@ * the depositor, who is in charge of both the security of the dataset and the * timing of when the dataset is published. *

- * A secondary use case for a Private URL is for depositors who have the ability + * A secondary use case for a Preview URL is for depositors who have the ability * to manage permissions on their dataset (depositors who have the "Curator" or * "Admin" role, which grants much more power than the "Contributor" role) to * send a link to coauthors or other trusted parties to preview the dataset * before the depositors publish the dataset on their own. For better security, * these depositors could ask their coauthors to create Dataverse accounts and - * assign roles to them directly, rather than using a Private URL which requires + * assign roles to them directly, rather than using a Preview URL which requires * no username or password. *

* As of this note, a second option aimed specifically at the review use case - - * to create a Private URL that provides an anonymized view of the dataset - has + * to create a Preview URL that provides an anonymized view of the dataset - has * been added. This option works the same as the original except that it hides * author names in the citation block, hides the values for an admin specified * list of metadata fields, disables citation downloads, and disables API access * (except for file and file thumbnail downloads which are used by the UI). *

- * The token associated with the Private URL role assignment that can be used + * The token associated with the Preview URL role assignment that can be used * either in the GUI or, for the non-anonymized-access option, via the API to * elevate privileges beyond what a "Guest" can see. The ability to use a - * Private URL token via API was added mostly to facilitate automated testing of - * the feature but the far more common case is expected to be use of the Private + * Preview URL token via API was added mostly to facilitate automated testing of + * the feature but the far more common case is expected to be use of the Preview * URL token in a link that is clicked to open a browser, similar to links * shared via Dropbox, Google, etc. *

- * When reviewers click a Private URL their browser sessions are set to the + * When reviewers click a Preview URL their browser sessions are set to the * "{@link edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser}" that * has the "Member" role only on the dataset in question and redirected to that * dataset, where they will see an indication in blue at the top of the page * that they are viewing an unpublished dataset. If the reviewer happens to be * logged into Dataverse already, clicking the link will log them out because * the review is meant to be blind. Because the dataset is always in draft when - * a Private URL is in effect, no downloads or any other activity by the - * reviewer are logged to the guestbook. All reviewers click the same Private + * a Preview URL is in effect, no downloads or any other activity by the + * reviewer are logged to the guestbook. All reviewers click the same Preview * URL containing the same token, and with the exception of an IP address being * logged, it should be impossible to trace which reviewers have clicked a - * Private URL. If the reviewer navigates to the home page, the session is set + * Preview URL. If the reviewer navigates to the home page, the session is set * to the Guest user and they will see what a Guest would see. *

* The "Member" role is used because it contains the necessary read-only @@ -76,51 +76,51 @@ * version. A Member can also download restricted files that have been deleted * from previously published versions. *

- * Likewise, when a Private URL token is used via API, commands are executed + * Likewise, when a Preview URL token is used via API, commands are executed * using the "PrivateUrlUser" that has the "Member" role only on the dataset in * question. This means that read-only operations such as downloads of the - * dataset's files are permitted. The Search API does not respect the Private + * dataset's files are permitted. The Search API does not respect the Preview * URL token but you can download files using the Access API, and, with the * non-anonymized-access option, download unpublished metadata using the Native * API. *

- * A Private URL cannot be created for a published version of a dataset. In the + * A Preview URL cannot be created for a published version of a dataset. In the * GUI, you will be reminded of this fact with a popup. The API will explain * this as well. *

- * An anonymized-access Private URL can't be created if any published dataset + * An anonymized-access Preview URL can't be created if any published dataset * version exists. The primary reason for this is that, since datasets have * DOIs, the full metadata about published versions is available directly from * the DOI provider. (While the metadata for that version could be somewhat * different, in practice it would probably provide a means of identifying * some/all of the authors). *

- * If a draft dataset containing a Private URL is - * published, the Private URL is deleted. This means that reviewers who click + * If a draft dataset containing a Preview URL is + * published, the Preview URL is deleted. This means that reviewers who click * the link after publication will see a 404. *

- * If a post-publication draft containing a Private URL is deleted, the Private + * If a post-publication draft containing a Preview URL is deleted, the Preview * URL is deleted. This is to ensure that if a new draft is created in the * future, a new token will be used. *

- * The creation and deletion of a Private URL are limited to the "Curator" and + * The creation and deletion of a Preview URL are limited to the "Curator" and * "Admin" roles because only those roles have the permission called * "ManageDatasetPermissions", which is the permission used by the * "AssignRoleCommand" and "RevokeRoleCommand" commands. If you have the - * permission to create or delete a Private URL, the fact that a Private URL is + * permission to create or delete a Preview URL, the fact that a Preview URL is * enabled for a dataset will be indicated in blue at the top of the page. * Success messages are shown at the top of the page when you create or delete a - * Private URL. In the GUI, deleting a Private URL is called "disabling" and you + * Preview URL. In the GUI, deleting a Preview URL is called "disabling" and you * will be prompted for a confirmation. No matter what you call it the role is - * revoked. You can also delete a Private URL by revoking the role. + * revoked. You can also delete a Preview URL by revoking the role. *

* A "Contributor" does not have the "ManageDatasetPermissions" permission and - * cannot see "Permissions" nor "Private URL" under the "Edit" menu of their - * dataset. When a Curator or Admin has enabled a Private URL on a Contributor's - * dataset, the Contributor does not see a visual indication that a Private URL + * cannot see "Permissions" nor "Preview URL" under the "Edit" menu of their + * dataset. When a Curator or Admin has enabled a Preview URL on a Contributor's + * dataset, the Contributor does not see a visual indication that a Preview URL * has been enabled for their dataset. *

- * There is no way for an "Admin" or "Curator" to see when a Private URL was + * There is no way for an "Admin" or "Curator" to see when a Preview URL was * created or deleted for a dataset but someone who has access to the database * can see that the following commands are logged to the "actionlogrecord" * database table: @@ -129,7 +129,7 @@ *

 * <li>{@link edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand}</li>
 * <li>{@link edu.harvard.iq.dataverse.engine.command.impl.DeletePrivateUrlCommand}</li>
  • * - * See also the Private URL To Unpublished Dataset BRD at * https://docs.google.com/document/d/1FT47QkZKcmjSgRnePaJO2g1nzcotLyN3Yb2ORvBr6cs/edit?usp=sharing */ diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 5f3e4c33e0b..3ee017e06de 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1457,7 +1457,7 @@ dataset.editBtn.itemLabel.metadata=Metadata dataset.editBtn.itemLabel.terms=Terms dataset.editBtn.itemLabel.permissions=Permissions dataset.editBtn.itemLabel.thumbnailsAndWidgets=Thumbnails + Widgets -dataset.editBtn.itemLabel.privateUrl=Private URL +dataset.editBtn.itemLabel.privateUrl=Preview URL dataset.editBtn.itemLabel.permissionsDataset=Dataset dataset.editBtn.itemLabel.permissionsFile=Restricted Files dataset.editBtn.itemLabel.deleteDataset=Delete Dataset @@ -1722,22 +1722,22 @@ dataset.requestAccessToRestrictedFiles=You may request access to any restricted dataset.requestAccessToRestrictedFilesWithEmbargo=Embargoed files cannot be accessed during the embargo period. If your selection contains restricted files, you may request access to them by clicking the Request Access button. dataset.privateurl.infoMessageAuthor=Privately share this dataset before it is published: {0} dataset.privateurl.infoMessageReviewer=This unpublished dataset is being privately shared. -dataset.privateurl.header=Unpublished Dataset Private URL -dataset.privateurl.tip=Use a Private URL to allow those without Dataverse accounts to access your unpublished dataset. For more information about the Private URL feature, please refer to the User Guide. -dataset.privateurl.absent=Private URL has not been created. -dataset.privateurl.createPrivateUrl=Create Private URL +dataset.privateurl.header=Unpublished Dataset Preview URL +dataset.privateurl.tip=Use a Preview URL to allow those without Dataverse accounts to access your unpublished dataset. For more information about the Preview URL feature, please refer to the User Guide. +dataset.privateurl.absent=Preview URL has not been created. +dataset.privateurl.createPrivateUrl=Create Preview URL dataset.privateurl.createPrivateUrl.anonymized=Create URL for Anonymized Access dataset.privateurl.createPrivateUrl.anonymized.unavailable=Anonymized Access is not available once a version of the dataset has been published -dataset.privateurl.disablePrivateUrl=Disable Private URL -dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Private URL -dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Private URL? If you have shared the Private URL with others they will no longer be able to use it to access your unpublished dataset. -dataset.privateurl.cannotCreate=Private URL can only be used with unpublished versions of datasets. -dataset.privateurl.roleassigeeTitle=Private URL Enabled +dataset.privateurl.disablePrivateUrl=Disable Preview URL +dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Preview URL +dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Preview URL? If you have shared the Preview URL with others they will no longer be able to use it to access your unpublished dataset. +dataset.privateurl.cannotCreate=Preview URL can only be used with unpublished versions of datasets. +dataset.privateurl.roleassigeeTitle=Preview URL Enabled dataset.privateurl.createdSuccess=Success! 
-dataset.privateurl.full=This Private URL provides full read access to the dataset -dataset.privateurl.anonymized=This Private URL provides access to the anonymized dataset -dataset.privateurl.disabledSuccess=You have successfully disabled the Private URL for this unpublished dataset. -dataset.privateurl.noPermToCreate=To create a Private URL you must have the following permissions: {0}. +dataset.privateurl.full=This Preview URL provides full read access to the dataset +dataset.privateurl.anonymized=This Preview URL provides access to the anonymized dataset +dataset.privateurl.disabledSuccess=You have successfully disabled the Preview URL for this unpublished dataset. +dataset.privateurl.noPermToCreate=To create a Preview URL you must have the following permissions: {0}. dataset.externalstatus.header=Curation Status Changed dataset.externalstatus.removed=Curation Status Removed dataset.externalstatus.info=Curation Status is now "{0}" @@ -2719,8 +2719,8 @@ datasets.api.grant.role.assignee.has.role.error=User already has this role for t datasets.api.revoke.role.not.found.error="Role assignment {0} not found" datasets.api.revoke.role.success=Role {0} revoked for assignee {1} in {2} datasets.api.privateurl.error.datasetnotfound=Could not find dataset. -datasets.api.privateurl.error.alreadyexists=Private URL already exists for this dataset. -datasets.api.privateurl.error.notdraft=Can't create Private URL because the latest version of this dataset is not a draft. +datasets.api.privateurl.error.alreadyexists=Preview URL already exists for this dataset. +datasets.api.privateurl.error.notdraft=Can't create Preview URL because the latest version of this dataset is not a draft. datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymized access because this dataset has been published. datasets.api.creationdate=Date Created datasets.api.modificationdate=Last Modified Date diff --git a/src/main/webapp/previewurl-popup-fragment.xhtml b/src/main/webapp/previewurl-popup-fragment.xhtml new file mode 100644 index 00000000000..f3963ad899e --- /dev/null +++ b/src/main/webapp/previewurl-popup-fragment.xhtml @@ -0,0 +1,15 @@ + + + + + + TODO supply a title + + + +
    TODO write content
    + + From 7cc81e0ec33ba854ffb581f0da7cf0019b2e6798 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 25 Sep 2024 15:13:55 -0400 Subject: [PATCH 096/402] add isRelease field to isPartOf --- .../edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 1 + src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 7 +++++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 34c8fc5c6a6..1bdee48b14d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -340,6 +340,7 @@ private static JsonObjectBuilder addEmbeddedOwnerObject(DvObject dvo, JsonObject ownerObject.add("type", "DATAVERSE"); Dataverse in = (Dataverse) dvo; ownerObject.add("identifier", in.getAlias()); + ownerObject.add("isReleased", in.isReleased()); } if (dvo.isInstanceofDataset()) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 8e38a96fc97..93f1024ae7a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -2134,8 +2134,11 @@ public void testGetDatasetOwners() { Response getDatasetWithOwners = UtilIT.getDatasetWithOwners(persistentId, apiToken, true); getDatasetWithOwners.prettyPrint(); - getDatasetWithOwners.then().assertThat().body("data.isPartOf.identifier", equalTo(dataverseAlias)); - + getDatasetWithOwners.then().assertThat().body("data.isPartOf.identifier", equalTo(dataverseAlias)); + getDatasetWithOwners.then().assertThat().body("data.isPartOf.isReleased", equalTo(false)); + getDatasetWithOwners.then().assertThat().body("data.isPartOf.isPartOf.identifier", equalTo("root")); + getDatasetWithOwners.then().assertThat().body("data.isPartOf.isPartOf.isReleased", equalTo(true)); + Response destroyDatasetResponse = UtilIT.destroyDataset(datasetId, apiToken); assertEquals(200, destroyDatasetResponse.getStatusCode()); From 06d4fa50ddcdd11679654032e898e8e450f669fc Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 8 Oct 2024 14:27:00 -0400 Subject: [PATCH 097/402] #8184 fix integration test text --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 8e38a96fc97..24168ff4d76 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1676,7 +1676,7 @@ public void testPrivateUrl() { List assignments = with(roleAssignments.body().asString()).param("member", "member").getJsonObject("data.findAll { data -> data._roleAlias == member }"); assertEquals(1, assignments.size()); PrivateUrlUser privateUrlUser = new PrivateUrlUser(datasetId); - assertEquals("Private URL Enabled", privateUrlUser.getDisplayInfo().getTitle()); + assertEquals("Preview URL Enabled", privateUrlUser.getDisplayInfo().getTitle()); List assigneeShouldExistForPrivateUrlUser = with(roleAssignments.body().asString()).param("assigneeString", privateUrlUser.getIdentifier()).getJsonObject("data.findAll { data -> data.assignee == assigneeString }"); logger.info(assigneeShouldExistForPrivateUrlUser + " found for " + 
privateUrlUser.getIdentifier()); assertEquals(1, assigneeShouldExistForPrivateUrlUser.size()); From bc8e88b26d20e66969f8c37f5a506f16f02f08b7 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 8 Oct 2024 16:58:23 -0400 Subject: [PATCH 098/402] Delete previewurl-popup-fragment.xhtml --- src/main/webapp/previewurl-popup-fragment.xhtml | 15 --------------- 1 file changed, 15 deletions(-) delete mode 100644 src/main/webapp/previewurl-popup-fragment.xhtml diff --git a/src/main/webapp/previewurl-popup-fragment.xhtml b/src/main/webapp/previewurl-popup-fragment.xhtml deleted file mode 100644 index f3963ad899e..00000000000 --- a/src/main/webapp/previewurl-popup-fragment.xhtml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - TODO supply a title - - - -
    TODO write content
    - - From 36d15a501f50a46e940e6cc54c5cdf7619f4027d Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 31 Oct 2023 11:13:35 +0100 Subject: [PATCH 099/402] Reorder modifiers to static final --- .../iq/dataverse/DatasetFieldConstant.java | 313 +++++++++--------- .../harvard/iq/dataverse/WidgetWrapper.java | 4 +- .../AbstractOAuth2AuthenticationProvider.java | 2 +- .../providers/oauth2/impl/OrcidOAuth2AP.java | 2 +- .../engine/command/DataverseRequest.java | 4 +- .../iq/dataverse/util/ShapefileHandler.java | 12 +- .../iq/dataverse/util/SystemConfig.java | 4 +- .../PasswordValidatorServiceBean.java | 2 +- .../AuthenticationProviderTest.java | 2 +- 9 files changed, 172 insertions(+), 173 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java index abb812d1ba3..71e339a6fca 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java @@ -16,165 +16,164 @@ @Named("dfc") @Dependent public class DatasetFieldConstant implements java.io.Serializable { - - public final static String publication = "publication"; - public final static String otherId = "otherId"; - public final static String author = "author"; - public final static String authorFirstName = "authorFirstName"; - public final static String authorLastName = "authorLastName"; - public final static String producer = "producer"; - public final static String software = "software"; - public final static String grantNumber = "grantNumber"; - public final static String distributor = "distributor"; - public final static String datasetContact = "datasetContact"; - public final static String datasetContactEmail = "datasetContactEmail"; - public final static String datasetContactName = "datasetContactName"; - public final static String datasetContactAffiliation = "datasetContactAffiliation"; - public final static String series = "series"; - public final static String datasetVersion = "datasetVersion"; - - public final static String description = "dsDescription"; - public final static String keyword = "keyword"; - public final static String topicClassification = "topicClassification"; - public final static String geographicBoundingBox = "geographicBoundingBox"; - - public final static String note = "note"; - - public final static String publicationRelationType = "publicationRelationType"; - public final static String publicationCitation = "publicationCitation"; - public final static String publicationIDType = "publicationIDType"; - public final static String publicationIDNumber = "publicationIDNumber"; - public final static String publicationURL = "publicationURL"; - public final static String publicationReplicationData = "publicationReplicationData"; - - - public final static String title = "title"; - public final static String subTitle="subtitle"; //SEK 6-7-2016 to match what is in DB - public final static String alternativeTitle="alternativeTitle"; //missing from class - public final static String datasetId = "datasetId"; - public final static String authorName ="authorName"; - public final static String authorAffiliation = "authorAffiliation"; - public final static String authorIdType = "authorIdentifierScheme"; - public final static String authorIdValue = "authorIdentifier"; - public final static String otherIdValue="otherIdValue"; - public final static String otherIdAgency= "otherIdAgency"; - - public final static String producerName="producerName"; - 
public final static String producerURL="producerURL"; - public final static String producerLogo="producerLogoURL"; - public final static String producerAffiliation="producerAffiliation"; - public final static String producerAbbreviation= "producerAbbreviation"; - public final static String productionDate="productionDate"; - public final static String productionPlace="productionPlace"; - public final static String softwareName="softwareName"; - public final static String softwareVersion="softwareVersion"; - public final static String fundingAgency="fundingAgency"; - public final static String grantNumberValue="grantNumberValue"; - public final static String grantNumberAgency="grantNumberAgency"; - public final static String distributorName="distributorName"; - public final static String distributorURL="distributorURL"; - public final static String distributorLogo="distributorLogoURL"; - public final static String distributionDate="distributionDate"; - public final static String distributorContactName="distributorContactName"; - public final static String distributorContactAffiliation="distributorContactAffiliation"; - public final static String distributorContactEmail="distributorContactEmail"; - public final static String distributorAffiliation="distributorAffiliation"; - public final static String distributorAbbreviation="distributorAbbreviation"; - - public final static String contributor="contributor"; //SEK added for Dublin Core 6/22 - public final static String contributorType="contributorType"; - public final static String contributorName="contributorName"; - - public final static String depositor="depositor"; - public final static String dateOfDeposit="dateOfDeposit"; - public final static String seriesName="seriesName"; - public final static String seriesInformation="seriesInformation"; - public final static String datasetVersionValue="datasetVersionValue"; - public final static String versionDate="versionDate"; - public final static String keywordValue="keywordValue"; - public final static String keywordTermURI="keywordTermURI"; - public final static String keywordVocab="keywordVocabulary"; - public final static String keywordVocabURI="keywordVocabularyURI"; - public final static String topicClassValue="topicClassValue"; - public final static String topicClassVocab="topicClassVocab"; - public final static String topicClassVocabURI="topicClassVocabURI"; - public final static String descriptionText="dsDescriptionValue"; - public final static String descriptionDate="dsDescriptionDate"; - public final static String timePeriodCovered="timePeriodCovered"; // SEK added 6/13/2016 - public final static String timePeriodCoveredStart="timePeriodCoveredStart"; - public final static String timePeriodCoveredEnd="timePeriodCoveredEnd"; - public final static String dateOfCollection="dateOfCollection"; // SEK added 6/13/2016 - public final static String dateOfCollectionStart="dateOfCollectionStart"; - public final static String dateOfCollectionEnd="dateOfCollectionEnd"; - public final static String country="country"; - public final static String geographicCoverage="geographicCoverage"; - public final static String otherGeographicCoverage="otherGeographicCoverage"; - public final static String city="city"; // SEK added 6/13/2016 - public final static String state="state"; // SEK added 6/13/2016 - public final static String geographicUnit="geographicUnit"; - public final static String westLongitude="westLongitude"; - public final static String eastLongitude="eastLongitude"; - public final static 
String northLatitude="northLatitude"; - public final static String southLatitude="southLatitude"; - public final static String unitOfAnalysis="unitOfAnalysis"; - public final static String universe="universe"; - public final static String kindOfData="kindOfData"; - public final static String timeMethod="timeMethod"; - public final static String dataCollector="dataCollector"; - public final static String collectorTraining="collectorTraining"; - public final static String frequencyOfDataCollection="frequencyOfDataCollection"; - public final static String samplingProcedure="samplingProcedure"; - public final static String targetSampleSize = "targetSampleSize"; - public final static String targetSampleActualSize = "targetSampleActualSize"; - public final static String targetSampleSizeFormula = "targetSampleSizeFormula"; - public final static String deviationsFromSampleDesign="deviationsFromSampleDesign"; - public final static String collectionMode="collectionMode"; - public final static String researchInstrument="researchInstrument"; - public final static String dataSources="dataSources"; - public final static String originOfSources="originOfSources"; - public final static String characteristicOfSources="characteristicOfSources"; - public final static String accessToSources="accessToSources"; - public final static String dataCollectionSituation="dataCollectionSituation"; - public final static String actionsToMinimizeLoss="actionsToMinimizeLoss"; - public final static String controlOperations="controlOperations"; - public final static String weighting="weighting"; - public final static String cleaningOperations="cleaningOperations"; - public final static String datasetLevelErrorNotes="datasetLevelErrorNotes"; - public final static String responseRate="responseRate"; - public final static String samplingErrorEstimates="samplingErrorEstimates"; - - public final static String socialScienceNotes = "socialScienceNotes"; - public final static String socialScienceNotesType = "socialScienceNotesType"; - public final static String socialScienceNotesSubject = "socialScienceNotesSubject"; - public final static String socialScienceNotesText = "socialScienceNotesText"; - - public final static String otherDataAppraisal="otherDataAppraisal"; - public final static String placeOfAccess="placeOfAccess"; - public final static String originalArchive="originalArchive"; - public final static String availabilityStatus="availabilityStatus"; - public final static String collectionSize="collectionSize"; - public final static String datasetCompletion="datasetCompletion"; - public final static String numberOfFiles="numberOfFiles"; - public final static String confidentialityDeclaration="confidentialityDeclaration"; - public final static String specialPermissions="specialPermissions"; - public final static String restrictions="restrictions"; + + public static final String publication = "publication"; + public static final String otherId = "otherId"; + public static final String author = "author"; + public static final String authorFirstName = "authorFirstName"; + public static final String authorLastName = "authorLastName"; + public static final String producer = "producer"; + public static final String software = "software"; + public static final String grantNumber = "grantNumber"; + public static final String distributor = "distributor"; + public static final String datasetContact = "datasetContact"; + public static final String datasetContactEmail = "datasetContactEmail"; + public static final String datasetContactName 
= "datasetContactName"; + public static final String datasetContactAffiliation = "datasetContactAffiliation"; + public static final String series = "series"; + public static final String datasetVersion = "datasetVersion"; + + public static final String description = "dsDescription"; + public static final String keyword = "keyword"; + public static final String topicClassification = "topicClassification"; + public static final String geographicBoundingBox = "geographicBoundingBox"; + + public static final String note = "note"; + + public static final String publicationRelationType = "publicationRelationType"; + public static final String publicationCitation = "publicationCitation"; + public static final String publicationIDType = "publicationIDType"; + public static final String publicationIDNumber = "publicationIDNumber"; + public static final String publicationURL = "publicationURL"; + public static final String publicationReplicationData = "publicationReplicationData"; + + public static final String title = "title"; + public static final String subTitle="subtitle"; //SEK 6-7-2016 to match what is in DB + public static final String alternativeTitle="alternativeTitle"; //missing from class + public static final String datasetId = "datasetId"; + public static final String authorName ="authorName"; + public static final String authorAffiliation = "authorAffiliation"; + public static final String authorIdType = "authorIdentifierScheme"; + public static final String authorIdValue = "authorIdentifier"; + public static final String otherIdValue="otherIdValue"; + public static final String otherIdAgency= "otherIdAgency"; + + public static final String producerName="producerName"; + public static final String producerURL="producerURL"; + public static final String producerLogo="producerLogoURL"; + public static final String producerAffiliation="producerAffiliation"; + public static final String producerAbbreviation= "producerAbbreviation"; + public static final String productionDate="productionDate"; + public static final String productionPlace="productionPlace"; + public static final String softwareName="softwareName"; + public static final String softwareVersion="softwareVersion"; + public static final String fundingAgency="fundingAgency"; + public static final String grantNumberValue="grantNumberValue"; + public static final String grantNumberAgency="grantNumberAgency"; + public static final String distributorName="distributorName"; + public static final String distributorURL="distributorURL"; + public static final String distributorLogo="distributorLogoURL"; + public static final String distributionDate="distributionDate"; + public static final String distributorContactName="distributorContactName"; + public static final String distributorContactAffiliation="distributorContactAffiliation"; + public static final String distributorContactEmail="distributorContactEmail"; + public static final String distributorAffiliation="distributorAffiliation"; + public static final String distributorAbbreviation="distributorAbbreviation"; + + public static final String contributor="contributor"; //SEK added for Dublin Core 6/22 + public static final String contributorType="contributorType"; + public static final String contributorName="contributorName"; + + public static final String depositor="depositor"; + public static final String dateOfDeposit="dateOfDeposit"; + public static final String seriesName="seriesName"; + public static final String seriesInformation="seriesInformation"; + public static final 
String datasetVersionValue="datasetVersionValue"; + public static final String versionDate="versionDate"; + public static final String keywordValue="keywordValue"; + public static final String keywordTermURI="keywordTermURI"; + public static final String keywordVocab="keywordVocabulary"; + public static final String keywordVocabURI="keywordVocabularyURI"; + public static final String topicClassValue="topicClassValue"; + public static final String topicClassVocab="topicClassVocab"; + public static final String topicClassVocabURI="topicClassVocabURI"; + public static final String descriptionText="dsDescriptionValue"; + public static final String descriptionDate="dsDescriptionDate"; + public static final String timePeriodCovered="timePeriodCovered"; // SEK added 6/13/2016 + public static final String timePeriodCoveredStart="timePeriodCoveredStart"; + public static final String timePeriodCoveredEnd="timePeriodCoveredEnd"; + public static final String dateOfCollection="dateOfCollection"; // SEK added 6/13/2016 + public static final String dateOfCollectionStart="dateOfCollectionStart"; + public static final String dateOfCollectionEnd="dateOfCollectionEnd"; + public static final String country="country"; + public static final String geographicCoverage="geographicCoverage"; + public static final String otherGeographicCoverage="otherGeographicCoverage"; + public static final String city="city"; // SEK added 6/13/2016 + public static final String state="state"; // SEK added 6/13/2016 + public static final String geographicUnit="geographicUnit"; + public static final String westLongitude="westLongitude"; + public static final String eastLongitude="eastLongitude"; + public static final String northLatitude="northLatitude"; + public static final String southLatitude="southLatitude"; + public static final String unitOfAnalysis="unitOfAnalysis"; + public static final String universe="universe"; + public static final String kindOfData="kindOfData"; + public static final String timeMethod="timeMethod"; + public static final String dataCollector="dataCollector"; + public static final String collectorTraining="collectorTraining"; + public static final String frequencyOfDataCollection="frequencyOfDataCollection"; + public static final String samplingProcedure="samplingProcedure"; + public static final String targetSampleSize = "targetSampleSize"; + public static final String targetSampleActualSize = "targetSampleActualSize"; + public static final String targetSampleSizeFormula = "targetSampleSizeFormula"; + public static final String deviationsFromSampleDesign="deviationsFromSampleDesign"; + public static final String collectionMode="collectionMode"; + public static final String researchInstrument="researchInstrument"; + public static final String dataSources="dataSources"; + public static final String originOfSources="originOfSources"; + public static final String characteristicOfSources="characteristicOfSources"; + public static final String accessToSources="accessToSources"; + public static final String dataCollectionSituation="dataCollectionSituation"; + public static final String actionsToMinimizeLoss="actionsToMinimizeLoss"; + public static final String controlOperations="controlOperations"; + public static final String weighting="weighting"; + public static final String cleaningOperations="cleaningOperations"; + public static final String datasetLevelErrorNotes="datasetLevelErrorNotes"; + public static final String responseRate="responseRate"; + public static final String 
samplingErrorEstimates="samplingErrorEstimates"; + + public static final String socialScienceNotes = "socialScienceNotes"; + public static final String socialScienceNotesType = "socialScienceNotesType"; + public static final String socialScienceNotesSubject = "socialScienceNotesSubject"; + public static final String socialScienceNotesText = "socialScienceNotesText"; + + public static final String otherDataAppraisal="otherDataAppraisal"; + public static final String placeOfAccess="placeOfAccess"; + public static final String originalArchive="originalArchive"; + public static final String availabilityStatus="availabilityStatus"; + public static final String collectionSize="collectionSize"; + public static final String datasetCompletion="datasetCompletion"; + public static final String numberOfFiles="numberOfFiles"; + public static final String confidentialityDeclaration="confidentialityDeclaration"; + public static final String specialPermissions="specialPermissions"; + public static final String restrictions="restrictions"; @Deprecated //Doesn't appear to be used and is not datasetContact - public final static String contact="contact"; - public final static String citationRequirements="citationRequirements"; - public final static String depositorRequirements="depositorRequirements"; - public final static String conditions="conditions"; - public final static String disclaimer="disclaimer"; - public final static String relatedMaterial="relatedMaterial"; - //public final static String replicationFor="replicationFor"; - //public final static String relatedPublications="relatedPublications"; - public final static String relatedDatasets="relatedDatasets"; - public final static String otherReferences="otherReferences"; - public final static String notesText="notesText"; - public final static String language="language"; - public final static String noteInformationType="noteInformationType"; - public final static String notesInformationSubject="notesInformationSubject"; - public final static String subject="subject"; + public static final String contact="contact"; + public static final String citationRequirements="citationRequirements"; + public static final String depositorRequirements="depositorRequirements"; + public static final String conditions="conditions"; + public static final String disclaimer="disclaimer"; + public static final String relatedMaterial="relatedMaterial"; + //public static final String replicationFor="replicationFor"; + //public static final String relatedPublications="relatedPublications"; + public static final String relatedDatasets="relatedDatasets"; + public static final String otherReferences="otherReferences"; + public static final String notesText="notesText"; + public static final String language="language"; + public static final String noteInformationType="noteInformationType"; + public static final String notesInformationSubject="notesInformationSubject"; + public static final String subject="subject"; /* * The following getters are needed so we can use them as properties in JSP */ diff --git a/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java b/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java index a8ea5fabde4..c51903e2ed4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java @@ -18,8 +18,8 @@ @Named public class WidgetWrapper implements java.io.Serializable { - private final static String WIDGET_PARAMETER = "widget"; - private final static char WIDGET_SEPARATOR = 
'@'; + private static final String WIDGET_PARAMETER = "widget"; + private static final char WIDGET_SEPARATOR = '@'; private Boolean widgetView; private String widgetHome; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java index 7fd7bf3e885..a6b7c1b9d49 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java @@ -30,7 +30,7 @@ */ public abstract class AbstractOAuth2AuthenticationProvider implements AuthenticationProvider { - final static Logger logger = Logger.getLogger(AbstractOAuth2AuthenticationProvider.class.getName()); + static final Logger logger = Logger.getLogger(AbstractOAuth2AuthenticationProvider.class.getName()); protected static class ParsedUserResponse { public final AuthenticatedUserDisplayInfo displayInfo; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java index 089ca40e164..323c78ab47a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java @@ -49,7 +49,7 @@ */ public class OrcidOAuth2AP extends AbstractOAuth2AuthenticationProvider { - final static Logger logger = Logger.getLogger(OrcidOAuth2AP.class.getName()); + static final Logger logger = Logger.getLogger(OrcidOAuth2AP.class.getName()); public static final String PROVIDER_ID_PRODUCTION = "orcid"; public static final String PROVIDER_ID_SANDBOX = "orcid-sandbox"; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java index d792b616a0c..4d3ec2842a1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java @@ -26,9 +26,9 @@ public class DataverseRequest { private final String invocationId; private final HttpServletRequest httpServletRequest; - private final static String undefined = "0.0.0.0"; + private static final String undefined = "0.0.0.0"; - private final static String MDKEY_PREFIX="mdkey."; + private static final String MDKEY_PREFIX="mdkey."; private static final Logger logger = Logger.getLogger(DataverseRequest.class.getName()); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java b/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java index f1440cc3c02..bb916cc3906 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java @@ -68,12 +68,12 @@ public class ShapefileHandler{ private static final Logger logger = Logger.getLogger(ShapefileHandler.class.getCanonicalName()); // Reference for these extensions: http://en.wikipedia.org/wiki/Shapefile - public final static String SHAPEFILE_FILE_TYPE = "application/zipped-shapefile"; - public final static String SHAPEFILE_FILE_TYPE_FRIENDLY_NAME = "Shapefile as ZIP Archive"; - public final static List SHAPEFILE_MANDATORY_EXTENSIONS = Arrays.asList("shp", "shx", 
"dbf", "prj"); - public final static String SHP_XML_EXTENSION = "shp.xml"; - public final static String BLANK_EXTENSION = "__PLACEHOLDER-FOR-BLANK-EXTENSION__"; - public final static List SHAPEFILE_ALL_EXTENSIONS = Arrays.asList("shp", "shx", "dbf", "prj", "sbn", "sbx", "fbn", "fbx", "ain", "aih", "ixs", "mxs", "atx", "cpg", "qpj", "qmd", SHP_XML_EXTENSION); + public static final String SHAPEFILE_FILE_TYPE = "application/zipped-shapefile"; + public static final String SHAPEFILE_FILE_TYPE_FRIENDLY_NAME = "Shapefile as ZIP Archive"; + public static final List SHAPEFILE_MANDATORY_EXTENSIONS = Arrays.asList("shp", "shx", "dbf", "prj"); + public static final String SHP_XML_EXTENSION = "shp.xml"; + public static final String BLANK_EXTENSION = "__PLACEHOLDER-FOR-BLANK-EXTENSION__"; + public static final List SHAPEFILE_ALL_EXTENSIONS = Arrays.asList("shp", "shx", "dbf", "prj", "sbn", "sbx", "fbn", "fbx", "ain", "aih", "ixs", "mxs", "atx", "cpg", "qpj", "qmd", SHP_XML_EXTENSION); public boolean DEBUG = false; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 60967b13131..2e1f7cb5cb3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -87,8 +87,8 @@ public class SystemConfig { private static final long DEFAULT_THUMBNAIL_SIZE_LIMIT_IMAGE = 3000000L; // 3 MB private static final long DEFAULT_THUMBNAIL_SIZE_LIMIT_PDF = 1000000L; // 1 MB - public final static String DEFAULTCURATIONLABELSET = "DEFAULT"; - public final static String CURATIONLABELSDISABLED = "DISABLED"; + public static final String DEFAULTCURATIONLABELSET = "DEFAULT"; + public static final String CURATIONLABELSDISABLED = "DISABLED"; public String getVersion() { return getVersion(false); diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java index 41e7f1b8b22..92eafa5b856 100644 --- a/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java @@ -84,7 +84,7 @@ private enum ValidatorTypes { } @SuppressWarnings("unchecked") - private final static LinkedHashMap validators = new LinkedHashMap(2); + private static final LinkedHashMap validators = new LinkedHashMap(2); private int goodStrength; private int maxLength; private int minLength; diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticationProviderTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticationProviderTest.java index eac9a605c9e..d4d7b6fa69d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticationProviderTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticationProviderTest.java @@ -15,7 +15,7 @@ public class AuthenticationProviderTest { - private final static String[] authProviders = {"null", "builtin", "github", "google", "orcid", "orcid-sandbox", "shib"}; + private static final String[] authProviders = {"null", "builtin", "github", "google", "orcid", "orcid-sandbox", "shib"}; private static Map bundleTestMap; @BeforeAll From ba35f992a30cc63f14f8144bb9fa611719337134 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 31 Oct 2023 11:14:41 +0100 Subject: [PATCH 100/402] Use Map type and diamond operator --- 
.../dataverse/validation/PasswordValidatorServiceBean.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java index 92eafa5b856..bbe7d135e0f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java @@ -13,6 +13,7 @@ import java.util.Date; import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.Properties; import java.util.logging.Level; import java.util.logging.Logger; @@ -83,8 +84,7 @@ private enum ValidatorTypes { GoodStrengthValidator, StandardValidator } - @SuppressWarnings("unchecked") - private static final LinkedHashMap validators = new LinkedHashMap(2); + private static final Map validators = new LinkedHashMap<>(2); private int goodStrength; private int maxLength; private int minLength; @@ -100,7 +100,7 @@ private enum ValidatorTypes { public PasswordValidatorServiceBean() { final Properties properties = PropertiesMessageResolver.getDefaultProperties(); properties.setProperty(GoodStrengthRule.ERROR_CODE_GOODSTRENGTH, GoodStrengthRule.ERROR_MESSAGE_GOODSTRENGTH); - messageResolver = new PropertiesMessageResolver(properties); + messageResolver = new PropertiesMessageResolver(properties); } public PasswordValidatorServiceBean(List characterRules) { From a32b9ed685f3973044916c53dafecd136eecc942 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 31 Oct 2023 11:15:52 +0100 Subject: [PATCH 101/402] Replace deprecated constructors with valueOf(String) --- .../edu/harvard/iq/dataverse/util/SystemConfig.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 2e1f7cb5cb3..434b3bd8f8f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -473,7 +473,7 @@ public Integer getSearchHighlightFragmentSize() { String fragSize = settingsService.getValueForKey(SettingsServiceBean.Key.SearchHighlightFragmentSize); if (fragSize != null) { try { - return new Integer(fragSize); + return Integer.valueOf(fragSize); } catch (NumberFormatException nfe) { logger.info("Could not convert " + SettingsServiceBean.Key.SearchHighlightFragmentSize + " to int: " + nfe); } @@ -490,7 +490,7 @@ public long getTabularIngestSizeLimit() { if (limitEntry != null) { try { - Long sizeOption = new Long(limitEntry); + Long sizeOption = Long.valueOf(limitEntry); return sizeOption; } catch (NumberFormatException nfe) { logger.warning("Invalid value for TabularIngestSizeLimit option? - " + limitEntry); @@ -515,7 +515,7 @@ public long getTabularIngestSizeLimit(String formatName) { if (limitEntry != null) { try { - Long sizeOption = new Long(limitEntry); + Long sizeOption = Long.valueOf(limitEntry); return sizeOption; } catch (NumberFormatException nfe) { logger.warning("Invalid value for TabularIngestSizeLimit:" + formatName + "? 
- " + limitEntry ); @@ -1061,7 +1061,7 @@ public long getDatasetValidationSizeLimit() { if (limitEntry != null) { try { - Long sizeOption = new Long(limitEntry); + Long sizeOption = Long.valueOf(limitEntry); return sizeOption; } catch (NumberFormatException nfe) { logger.warning("Invalid value for DatasetValidationSizeLimit option? - " + limitEntry); @@ -1076,7 +1076,7 @@ public long getFileValidationSizeLimit() { if (limitEntry != null) { try { - Long sizeOption = new Long(limitEntry); + Long sizeOption = Long.valueOf(limitEntry); return sizeOption; } catch (NumberFormatException nfe) { logger.warning("Invalid value for FileValidationSizeLimit option? - " + limitEntry); From 8e956625205c8b0266fc002e1edf11decda2a3f0 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Thu, 10 Oct 2024 11:01:12 -0400 Subject: [PATCH 102/402] #10889 bump container POSTGRES_VERSION to 17 --- .env | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.env b/.env index d5cffcec0aa..9d604630073 100644 --- a/.env +++ b/.env @@ -1,5 +1,5 @@ APP_IMAGE=gdcc/dataverse:unstable -POSTGRES_VERSION=16 +POSTGRES_VERSION=17 DATAVERSE_DB_USER=dataverse SOLR_VERSION=9.3.0 -SKIP_DEPLOY=0 \ No newline at end of file +SKIP_DEPLOY=0 From 0f8f267b0fb50d1a03fb71cfb00ce2639ef82644 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Thu, 10 Oct 2024 11:20:33 -0400 Subject: [PATCH 103/402] #10889 add Postgres/FlyWay release notes --- doc/release-notes/10889_bump_PG17_FlyWay10.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/10889_bump_PG17_FlyWay10.md diff --git a/doc/release-notes/10889_bump_PG17_FlyWay10.md b/doc/release-notes/10889_bump_PG17_FlyWay10.md new file mode 100644 index 00000000000..012627bd43c --- /dev/null +++ b/doc/release-notes/10889_bump_PG17_FlyWay10.md @@ -0,0 +1,3 @@ +This release bumps both the Postgres JDBC driver and Flyway versions. This should better support Postgres version 17, and as of version 10 Flyway no longer requires a paid subscription to support older versions of Postgres. + +While we don't encourage the use of older Postgres versions, this flexibility may benefit some of our long-standing installations in their upgrade paths. From 9e53e0f7046bba58466fdee466a3ffa463836874 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Fri, 11 Oct 2024 14:40:58 -0400 Subject: [PATCH 104/402] #10889 update Docker PG version, state version used in automated testing --- doc/release-notes/10889_bump_PG17_FlyWay10.md | 2 +- docker/compose/demo/compose.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/10889_bump_PG17_FlyWay10.md b/doc/release-notes/10889_bump_PG17_FlyWay10.md index 012627bd43c..0f74568e5cd 100644 --- a/doc/release-notes/10889_bump_PG17_FlyWay10.md +++ b/doc/release-notes/10889_bump_PG17_FlyWay10.md @@ -1,3 +1,3 @@ This release bumps both the Postgres JDBC driver and Flyway versions. This should better support Postgres version 17, and as of version 10 Flyway no longer requires a paid subscription to support older versions of Postgres. -While we don't encourage the use of older Postgres versions, this flexibility may benefit some of our long-standing installations in their upgrade paths. +While we don't encourage the use of older Postgres versions, this flexibility may benefit some of our long-standing installations in their upgrade paths. Postgres 13 remains the version used with automated testing. 
diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index 33e7b52004b..62444706950 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -76,7 +76,7 @@ services: postgres: container_name: "postgres" hostname: postgres - image: postgres:13 + image: postgres:17 restart: on-failure environment: - POSTGRES_USER=dataverse From 8c4cd4c59be741b797e04dc59574522dacabfb72 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 11 Oct 2024 14:42:22 -0400 Subject: [PATCH 105/402] fix if statement body --- .../iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java index a74a9f34bc9..8199b7d9c9f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java @@ -1317,8 +1317,8 @@ private void writeDescriptions(XMLStreamWriter xmlw, DvObject dvObject, boolean } if (StringUtils.isNotBlank(softwareName)) { if (StringUtils.isNotBlank(softwareVersion)) { + softwareName = softwareName + ", " + softwareVersion; } - softwareName = softwareName + ", " + softwareVersion; descriptionsWritten = XmlWriterUtil.writeOpenTagIfNeeded(xmlw, "descriptions", descriptionsWritten); XmlWriterUtil.writeFullElementWithAttributes(xmlw, "description", attributes, softwareName); } From f1380b1b760a3d1d90bc3c3e1de83cec142b1d35 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 11 Oct 2024 15:05:59 -0400 Subject: [PATCH 106/402] release note --- doc/release-notes/10919-minor-DataCiteXML-bugfix.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10919-minor-DataCiteXML-bugfix.md diff --git a/doc/release-notes/10919-minor-DataCiteXML-bugfix.md b/doc/release-notes/10919-minor-DataCiteXML-bugfix.md new file mode 100644 index 00000000000..4fa0c1142b1 --- /dev/null +++ b/doc/release-notes/10919-minor-DataCiteXML-bugfix.md @@ -0,0 +1 @@ +A minor bug fix was made to avoid sending a useless ", null" in the DataCiteXML sent to DataCite and in the DataCite export when a dataset has a metadata entry for "Software Name" and no entry for "Software Version". The bug fix will update datasets upon publication. Existing published datasets with this problem can be fixed by [pushing updated metadata to DataCite for affected datasets](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-metadata-for-a-published-dataset-at-the-pid-provider) and [re-exporting the dataset metadata](https://guides.dataverse.org/en/6.4/admin/metadataexport.html#batch-exports-through-the-api), or by following steps 9 and 10 in the v6.4 release notes to update and re-export all datasets.
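(Illustration: a minimal standalone sketch of the corrected "Software Name"/"Software Version" handling from the two patches above. It assumes org.apache.commons.lang3.StringUtils on the classpath; the class and method names are made up for the example, and only the guard logic mirrors the XmlMetadataTemplate fix.)

    import org.apache.commons.lang3.StringUtils;

    public class SoftwareDescriptionSketch {
        // Append the version only when it is non-blank, so a missing
        // "Software Version" no longer produces a trailing ", null".
        static String describe(String softwareName, String softwareVersion) {
            if (StringUtils.isNotBlank(softwareName) && StringUtils.isNotBlank(softwareVersion)) {
                return softwareName + ", " + softwareVersion;
            }
            return softwareName;
        }

        public static void main(String[] args) {
            System.out.println(describe("MyTool", "2.1")); // MyTool, 2.1
            System.out.println(describe("MyTool", null));  // MyTool (was "MyTool, null" before the fix)
        }
    }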
From 212df5e7f9c7100bdc58b9179b3827c9b0c2a342 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A9ter=20Kir=C3=A1ly?= Date: Sat, 12 Oct 2024 09:25:04 +0200 Subject: [PATCH 107/402] #10920 fixing skomos typo --- doc/sphinx-guides/source/admin/metadatacustomization.rst | 2 +- .../java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index 8752f11c1e5..e5326efebef 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -579,7 +579,7 @@ In general, the external vocabulary support mechanism may be a better choice for The specifics of the user interface for entering/selecting a vocabulary term and how that term is then displayed are managed by third-party Javascripts. The initial Javascripts that have been created provide auto-completion, displaying a list of choices that match what the user has typed so far, but other interfaces, such as displaying a tree of options for a hierarchical vocabulary, are possible. Similarly, existing scripts do relatively simple things for displaying a term - showing the term's name in the appropriate language and providing a link to an external URL with more information, but more sophisticated displays are possible. -Scripts supporting use of vocabularies from services supporting the SKOMOS protocol (see https://skosmos.org), retrieving ORCIDs (from https://orcid.org), services based on Ontoportal product (see https://ontoportal.org/), and using ROR (https://ror.org/) are available https://github.com/gdcc/dataverse-external-vocab-support. (Custom scripts can also be used and community members are encouraged to share new scripts through the dataverse-external-vocab-support repository.) +Scripts supporting use of vocabularies from services supporting the SKOSMOS protocol (see https://skosmos.org), retrieving ORCIDs (from https://orcid.org), services based on Ontoportal product (see https://ontoportal.org/), and using ROR (https://ror.org/) are available https://github.com/gdcc/dataverse-external-vocab-support. (Custom scripts can also be used and community members are encouraged to share new scripts through the dataverse-external-vocab-support repository.) Configuration involves specifying which fields are to be mapped, to which Solr field they should be indexed, whether free-text entries are allowed, which vocabulary(ies) should be used, what languages those vocabulary(ies) are available in, and several service protocol and service instance specific parameters, including the ability to send HTTP headers on calls to the service. These are all defined in the :ref:`:CVocConf <:CVocConf>` setting as a JSON array. Details about the required elements as well as example JSON arrays are available at https://github.com/gdcc/dataverse-external-vocab-support, along with an example metadata block that can be used for testing. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index ff78b0c83ec..91150b79505 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -424,7 +424,7 @@ public Set getIndexableStringsByTermUri(String termUri, JsonObject cvocE for (int i = 0; i < jarr.size(); i++) { if (jarr.get(i).getValueType().equals(JsonValue.ValueType.STRING)) { strings.add(jarr.getString(i)); - } else if (jarr.get(i).getValueType().equals(ValueType.OBJECT)) { // This condition handles SKOMOS format like [{"lang": "en","value": "non-apis bee"},{"lang": "fr","value": "abeille non apis"}] + } else if (jarr.get(i).getValueType().equals(ValueType.OBJECT)) { // This condition handles SKOSMOS format like [{"lang": "en","value": "non-apis bee"},{"lang": "fr","value": "abeille non apis"}] JsonObject entry = jarr.getJsonObject(i); if (entry.containsKey("value")) { logger.fine("adding " + entry.getString("value") + " for " + termUri); From a242d14c75db789044792b5f5649de6aeed541af Mon Sep 17 00:00:00 2001 From: jo-pol Date: Mon, 14 Oct 2024 10:56:19 +0200 Subject: [PATCH 108/402] mime type of m4a uploaded in zip --- .../propertyFiles/MimeTypeDetectionByFileExtension.properties | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties index 630539d912e..05e61a40c17 100644 --- a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties +++ b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties @@ -15,6 +15,7 @@ m=text/x-matlab mat=application/matlab-mat md=text/markdown mp3=audio/mp3 +m4a=audio/x-m4a nii=image/nii nc=application/netcdf ods=application/vnd.oasis.opendocument.spreadsheet From cce22a281465959d776cd64c759b728a19cb3721 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 14 Oct 2024 11:54:53 +0100 Subject: [PATCH 109/402] Changed: users/token GET endpoint to support all available auth mechanisms --- .../edu/harvard/iq/dataverse/api/Users.java | 21 ++++++++++++------- .../edu/harvard/iq/dataverse/api/UsersIT.java | 3 +-- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Users.java b/src/main/java/edu/harvard/iq/dataverse/api/Users.java index c1a7c95dbff..ecf7839e616 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Users.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Users.java @@ -137,15 +137,20 @@ public Response deleteToken(@Context ContainerRequestContext crc) { @Path("token") @AuthRequired @GET - public Response getTokenExpirationDate() { - ApiToken token = authSvc.findApiToken(getRequestApiKey()); - - if (token == null) { - return notFound("Token " + getRequestApiKey() + " not found."); + public Response getTokenExpirationDate(@Context ContainerRequestContext crc) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + ApiToken token = authSvc.findApiTokenByUser(user); + + if (token == null) { + return notFound("Token not found."); + } + + return ok(String.format("Token %s expires on %s", token.getTokenString(), token.getExpireTime())); + + } catch (WrappedResponse wr) { + return wr.getResponse(); } - - return ok("Token " + getRequestApiKey() + " expires on " + token.getExpireTime()); - } @Path("token/recreate") diff --git 
a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java index 1003c1a990c..ce3b8bf75ff 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java @@ -405,7 +405,6 @@ public void testAPITokenEndpoints() { */ createUser = UtilIT.createRandomUser(); - String username = UtilIT.getUsernameFromResponse(createUser); String apiToken = UtilIT.getApiTokenFromResponse(createUser); Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); createDataverseResponse.prettyPrint(); @@ -428,7 +427,7 @@ public void testAPITokenEndpoints() { getExpiration = UtilIT.getTokenExpiration(tokenForPrivateUrlUser); getExpiration.prettyPrint(); getExpiration.then().assertThat() - .statusCode(NOT_FOUND.getStatusCode()); + .statusCode(UNAUTHORIZED.getStatusCode()); createUser = UtilIT.createRandomUser(); assertEquals(OK.getStatusCode(), createUser.getStatusCode()); From 129c80c768a7c1e4fa2ec55dcacade723de24f94 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 14 Oct 2024 12:09:40 +0100 Subject: [PATCH 110/402] Added: release notes for #10914 --- doc/release-notes/10914-users-token-api-credentials.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/10914-users-token-api-credentials.md diff --git a/doc/release-notes/10914-users-token-api-credentials.md b/doc/release-notes/10914-users-token-api-credentials.md new file mode 100644 index 00000000000..888214481f6 --- /dev/null +++ b/doc/release-notes/10914-users-token-api-credentials.md @@ -0,0 +1,3 @@ +Extended the users/token GET endpoint to support any auth mechanism for retrieving the token information. + +Previously, this endpoint only accepted an API token to retrieve its information. Now, it accepts any authentication mechanism and returns the associated API token information. 
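(Illustration: a minimal client-side sketch of calling the extended token endpoint with Java 11+ java.net.http. The server URL, environment variable, and class name are placeholders; the /api/users/token path assumes the Users resource is mounted at /api/users, and X-Dataverse-key is the usual Dataverse API-token header, though per the change above other supported auth mechanisms should work as well.)

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class TokenInfoSketch {
        public static void main(String[] args) throws Exception {
            String serverUrl = "https://demo.dataverse.org";   // placeholder installation
            String apiToken = System.getenv("API_TOKEN");       // placeholder credential

            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create(serverUrl + "/api/users/token"))
                    .header("X-Dataverse-key", apiToken)
                    .GET()
                    .build();

            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());

            // Expected data.message (see getTokenExpirationDate() above):
            // "Token <token> expires on <expiration date>"
            System.out.println(response.body());
        }
    }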
From 7f5b0bea1670b5c2ec84651b45820211b9df2988 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 15 Oct 2024 13:34:34 +0100 Subject: [PATCH 111/402] Added: updateDataverse endpoint with addDataverse refactoring --- .../harvard/iq/dataverse/api/Dataverses.java | 160 ++++++++++---- .../command/impl/CreateDataverseCommand.java | 6 +- .../command/impl/UpdateDataverseCommand.java | 204 ++++++++++-------- 3 files changed, 231 insertions(+), 139 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 0ee146ed99b..b85ee0afc8f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -127,73 +127,145 @@ public Response addRoot(@Context ContainerRequestContext crc, String body) { @Path("{identifier}") public Response addDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String parentIdtf) { Dataverse newDataverse; - JsonObject newDataverseJson; try { - newDataverseJson = JsonUtil.getJsonObject(body); - newDataverse = jsonParser().parseDataverse(newDataverseJson); + newDataverse = parseAndValidateDataverse(body); } catch (JsonParsingException jpe) { - logger.log(Level.SEVERE, "Json: {0}", body); return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { - logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparsetodataverse"), ex.getMessage())); } try { - JsonObject metadataBlocksJson = newDataverseJson.getJsonObject("metadataBlocks"); - List inputLevels = null; - List metadataBlocks = null; - List facetList = null; - if (metadataBlocksJson != null) { - JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels"); - inputLevels = inputLevelsArray != null ? parseInputLevels(inputLevelsArray, newDataverse) : null; - - JsonArray metadataBlockNamesArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); - metadataBlocks = metadataBlockNamesArray != null ? parseNewDataverseMetadataBlocks(metadataBlockNamesArray) : null; - - JsonArray facetIdsArray = metadataBlocksJson.getJsonArray("facetIds"); - facetList = facetIdsArray != null ? 
parseFacets(facetIdsArray) : null; - } + List inputLevels = parseInputLevels(body, newDataverse); + List metadataBlocks = parseMetadataBlocks(body); + List facets = parseFacets(body); if (!parentIdtf.isEmpty()) { Dataverse owner = findDataverseOrDie(parentIdtf); newDataverse.setOwner(owner); } - // set the dataverse - contact relationship in the contacts - for (DataverseContact dc : newDataverse.getDataverseContacts()) { - dc.setDataverse(newDataverse); - } - AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facetList, inputLevels, metadataBlocks)); + newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facets, inputLevels, metadataBlocks)); return created("/dataverses/" + newDataverse.getAlias(), json(newDataverse)); - } catch (WrappedResponse ww) { - - String error = ConstraintViolationUtil.getErrorStringForConstraintViolations(ww.getCause()); - if (!error.isEmpty()) { - logger.log(Level.INFO, error); - return ww.refineResponse(error); - } - return ww.getResponse(); + } catch (WrappedResponse ww) { + return handleWrappedResponse(ww); } catch (EJBException ex) { - Throwable cause = ex; - StringBuilder sb = new StringBuilder(); - sb.append("Error creating dataverse."); - while (cause.getCause() != null) { - cause = cause.getCause(); - if (cause instanceof ConstraintViolationException) { - sb.append(ConstraintViolationUtil.getErrorStringForConstraintViolations(cause)); - } - } - logger.log(Level.SEVERE, sb.toString()); - return error(Response.Status.INTERNAL_SERVER_ERROR, "Error creating dataverse: " + sb.toString()); + return handleEJBException(ex, "Error creating dataverse."); } catch (Exception ex) { logger.log(Level.SEVERE, "Error creating dataverse", ex); return error(Response.Status.INTERNAL_SERVER_ERROR, "Error creating dataverse: " + ex.getMessage()); + } + } + + @PUT + @AuthRequired + @Path("{identifier}") + public Response updateDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String identifier) { + Dataverse originalDataverse; + try { + originalDataverse = findDataverseOrDie(identifier); + } catch (WrappedResponse e) { + return e.getResponse(); + } + + Dataverse updatedDataverse; + try { + updatedDataverse = parseAndValidateDataverse(body); + } catch (JsonParsingException jpe) { + return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); + } catch (JsonParseException ex) { + return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparsetodataverse"), ex.getMessage())); + } + + try { + List inputLevels = parseInputLevels(body, originalDataverse); + List metadataBlocks = parseMetadataBlocks(body); + List facets = parseFacets(body); + + updatedDataverse.setId(originalDataverse.getId()); + AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); + updatedDataverse = execCommand(new UpdateDataverseCommand(updatedDataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks)); + return created("/dataverses/" + updatedDataverse.getAlias(), json(updatedDataverse)); + + } catch (WrappedResponse ww) { + return handleWrappedResponse(ww); + } catch (EJBException ex) { + return handleEJBException(ex, "Error updating dataverse."); + } catch (Exception ex) { + logger.log(Level.SEVERE, "Error updating dataverse", ex); + return 
error(Response.Status.INTERNAL_SERVER_ERROR, "Error updating dataverse: " + ex.getMessage()); + } + } + + private Dataverse parseAndValidateDataverse(String body) throws JsonParsingException, JsonParseException { + try { + JsonObject dataverseJson = JsonUtil.getJsonObject(body); + return jsonParser().parseDataverse(dataverseJson); + } catch (JsonParsingException jpe) { + logger.log(Level.SEVERE, "Json: {0}", body); + throw jpe; + } catch (JsonParseException ex) { + logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); + throw ex; + } + } + + private List parseInputLevels(String body, Dataverse dataverse) throws WrappedResponse { + JsonObject metadataBlocksJson = getMetadataBlocksJson(body); + if (metadataBlocksJson == null) { + return null; + } + JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels"); + return inputLevelsArray != null ? parseInputLevels(inputLevelsArray, dataverse) : null; + } + + private List parseMetadataBlocks(String body) throws WrappedResponse { + JsonObject metadataBlocksJson = getMetadataBlocksJson(body); + if (metadataBlocksJson == null) { + return null; + } + JsonArray metadataBlocksArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); + return metadataBlocksArray != null ? parseNewDataverseMetadataBlocks(metadataBlocksArray) : null; + } + + private List parseFacets(String body) throws WrappedResponse { + JsonObject metadataBlocksJson = getMetadataBlocksJson(body); + if (metadataBlocksJson == null) { + return null; + } + JsonArray facetsArray = metadataBlocksJson.getJsonArray("facetIds"); + return facetsArray != null ? parseFacets(facetsArray) : null; + } + + private JsonObject getMetadataBlocksJson(String body) { + JsonObject dataverseJson = JsonUtil.getJsonObject(body); + return dataverseJson.getJsonObject("metadataBlocks"); + } + + private Response handleWrappedResponse(WrappedResponse ww) { + String error = ConstraintViolationUtil.getErrorStringForConstraintViolations(ww.getCause()); + if (!error.isEmpty()) { + logger.log(Level.INFO, error); + return ww.refineResponse(error); + } + return ww.getResponse(); + } + + private Response handleEJBException(EJBException ex, String action) { + Throwable cause = ex; + StringBuilder sb = new StringBuilder(); + sb.append(action); + while (cause.getCause() != null) { + cause = cause.getCause(); + if (cause instanceof ConstraintViolationException) { + sb.append(ConstraintViolationUtil.getErrorStringForConstraintViolations(cause)); + } } + logger.log(Level.SEVERE, sb.toString()); + return error(Response.Status.INTERNAL_SERVER_ERROR, sb.toString()); } private List parseNewDataverseMetadataBlocks(JsonArray metadataBlockNamesArray) throws WrappedResponse { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 489b36e7cef..2ce16a86297 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -67,7 +67,6 @@ public CreateDataverseCommand(Dataverse created, @Override public Dataverse execute(CommandContext ctxt) throws CommandException { - Dataverse owner = created.getOwner(); if (owner == null) { if (ctxt.dataverses().isRootDataverseExists()) { @@ -75,6 +74,10 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } } + for (DataverseContact dc : 
created.getDataverseContacts()) { + dc.setDataverse(created); + } + if (metadataBlocks != null && !metadataBlocks.isEmpty()) { created.setMetadataBlockRoot(true); created.setMetadataBlocks(metadataBlocks); @@ -194,5 +197,4 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { public boolean onSuccess(CommandContext ctxt, Object r) { return ctxt.dataverses().index((Dataverse) r); } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index bdb69dc918f..b1670a264bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -1,13 +1,11 @@ package edu.harvard.iq.dataverse.engine.command.impl; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetFieldType; -import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.Dataverse.DataverseType; -import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel; import edu.harvard.iq.dataverse.authorization.Permission; import static edu.harvard.iq.dataverse.dataverse.DataverseUtil.validateDataverseMetadataExternally; + import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -21,121 +19,141 @@ /** * Update an existing dataverse. + * * @author michael */ -@RequiredPermissions( Permission.EditDataverse ) +@RequiredPermissions(Permission.EditDataverse) public class UpdateDataverseCommand extends AbstractCommand { - private static final Logger logger = Logger.getLogger(UpdateDataverseCommand.class.getName()); - - private final Dataverse editedDv; - private final List facetList; + private static final Logger logger = Logger.getLogger(UpdateDataverseCommand.class.getName()); + + private final Dataverse editedDv; + private final List facetList; private final List featuredDataverseList; private final List inputLevelList; + private final List metadataBlocks; private boolean datasetsReindexRequired = false; - public UpdateDataverseCommand(Dataverse editedDv, List facetList, List featuredDataverseList, - DataverseRequest aRequest, List inputLevelList ) { - super(aRequest, editedDv); - this.editedDv = editedDv; - // add update template uses this command but does not - // update facet list or featured dataverses - if (facetList != null){ - this.facetList = new ArrayList<>(facetList); - } else { - this.facetList = null; - } - if (featuredDataverseList != null){ - this.featuredDataverseList = new ArrayList<>(featuredDataverseList); - } else { - this.featuredDataverseList = null; - } - if (inputLevelList != null){ - this.inputLevelList = new ArrayList<>(inputLevelList); - } else { - this.inputLevelList = null; - } - } - - @Override - public Dataverse execute(CommandContext ctxt) throws CommandException { - logger.fine("Entering update dataverse command"); - - // Perform any optional validation steps, if defined: - if (ctxt.systemConfig().isExternalDataverseValidationEnabled()) { - // For admins, an override of the external validation step may be enabled: - if (!(getUser().isSuperuser() && ctxt.systemConfig().isExternalValidationAdminOverrideEnabled())) { - String executable = ctxt.systemConfig().getDataverseValidationExecutable(); - boolean result = 
validateDataverseMetadataExternally(editedDv, executable, getRequest()); - - if (!result) { - String rejectionMessage = ctxt.systemConfig().getDataverseUpdateValidationFailureMsg(); - throw new IllegalCommandException(rejectionMessage, this); - } + public UpdateDataverseCommand(Dataverse editedDv, + List facetList, + List featuredDataverseList, + DataverseRequest aRequest, + List inputLevelList) { + this(editedDv, facetList, featuredDataverseList, aRequest, inputLevelList, null); + } + + public UpdateDataverseCommand(Dataverse editedDv, + List facetList, + List featuredDataverseList, + DataverseRequest aRequest, + List inputLevelList, + List metadataBlocks) { + super(aRequest, editedDv); + this.editedDv = editedDv; + // add update template uses this command but does not + // update facet list or featured dataverses + if (facetList != null) { + this.facetList = new ArrayList<>(facetList); + } else { + this.facetList = null; + } + if (featuredDataverseList != null) { + this.featuredDataverseList = new ArrayList<>(featuredDataverseList); + } else { + this.featuredDataverseList = null; + } + if (inputLevelList != null) { + this.inputLevelList = new ArrayList<>(inputLevelList); + } else { + this.inputLevelList = null; + } + if (metadataBlocks != null) { + this.metadataBlocks = new ArrayList<>(metadataBlocks); + } else { + this.metadataBlocks = null; + } + } + + @Override + public Dataverse execute(CommandContext ctxt) throws CommandException { + logger.fine("Entering update dataverse command"); + + // Perform any optional validation steps, if defined: + if (ctxt.systemConfig().isExternalDataverseValidationEnabled()) { + // For admins, an override of the external validation step may be enabled: + if (!(getUser().isSuperuser() && ctxt.systemConfig().isExternalValidationAdminOverrideEnabled())) { + String executable = ctxt.systemConfig().getDataverseValidationExecutable(); + boolean result = validateDataverseMetadataExternally(editedDv, executable, getRequest()); + + if (!result) { + String rejectionMessage = ctxt.systemConfig().getDataverseUpdateValidationFailureMsg(); + throw new IllegalCommandException(rejectionMessage, this); } } - - Dataverse oldDv = ctxt.dataverses().find(editedDv.getId()); - - DataverseType oldDvType = oldDv.getDataverseType(); - String oldDvAlias = oldDv.getAlias(); - String oldDvName = oldDv.getName(); - oldDv = null; - - Dataverse result = ctxt.dataverses().save(editedDv); - - if ( facetList != null ) { - ctxt.facets().deleteFacetsFor(result); - int i=0; - for ( DatasetFieldType df : facetList ) { - ctxt.facets().create(i++, df.getId(), result.getId()); - } + } + + for (DataverseContact dc : editedDv.getDataverseContacts()) { + dc.setDataverse(editedDv); + } + + Dataverse oldDv = ctxt.dataverses().find(editedDv.getId()); + + DataverseType oldDvType = oldDv.getDataverseType(); + String oldDvAlias = oldDv.getAlias(); + String oldDvName = oldDv.getName(); + + Dataverse result = ctxt.dataverses().save(editedDv); + + if (facetList != null) { + ctxt.facets().deleteFacetsFor(result); + int i = 0; + for (DatasetFieldType df : facetList) { + ctxt.facets().create(i++, df.getId(), result.getId()); } - if ( featuredDataverseList != null ) { - ctxt.featuredDataverses().deleteFeaturedDataversesFor(result); - int i=0; - for ( Object obj : featuredDataverseList ) { - Dataverse dv = (Dataverse) obj; - ctxt.featuredDataverses().create(i++, dv.getId(), result.getId()); - } + } + if (featuredDataverseList != null) { + ctxt.featuredDataverses().deleteFeaturedDataversesFor(result); + int i = 
0; + for (Object obj : featuredDataverseList) { + Dataverse dv = (Dataverse) obj; + ctxt.featuredDataverses().create(i++, dv.getId(), result.getId()); } - if ( inputLevelList != null ) { - ctxt.fieldTypeInputLevels().deleteFacetsFor(result); - for ( DataverseFieldTypeInputLevel obj : inputLevelList ) { - ctxt.fieldTypeInputLevels().create(obj); - } + } + if (inputLevelList != null) { + ctxt.fieldTypeInputLevels().deleteFacetsFor(result); + for (DataverseFieldTypeInputLevel obj : inputLevelList) { + ctxt.fieldTypeInputLevels().create(obj); } - - // We don't want to reindex the children datasets unnecessarily: - // When these values are changed we need to reindex all children datasets - // This check is not recursive as all the values just report the immediate parent - if (!oldDvType.equals(editedDv.getDataverseType()) + } + + // We don't want to reindex the children datasets unnecessarily: + // When these values are changed we need to reindex all children datasets + // This check is not recursive as all the values just report the immediate parent + if (!oldDvType.equals(editedDv.getDataverseType()) || !oldDvName.equals(editedDv.getName()) || !oldDvAlias.equals(editedDv.getAlias())) { - datasetsReindexRequired = true; - } - - return result; - } - + datasetsReindexRequired = true; + } + + return result; + } + @Override public boolean onSuccess(CommandContext ctxt, Object r) { - + // first kick of async index of datasets // TODO: is this actually needed? Is there a better way to handle // It appears that we at some point lost some extra logic here, where // we only reindex the underlying datasets if one or more of the specific set - // of fields have been changed (since these values are included in the + // of fields have been changed (since these values are included in the // indexed solr documents for dataasets). So I'm putting that back. -L.A. 
Dataverse result = (Dataverse) r; - + if (datasetsReindexRequired) { List datasets = ctxt.datasets().findByOwnerId(result.getId()); ctxt.index().asyncIndexDatasetList(datasets, true); } - - return ctxt.dataverses().index((Dataverse) r); - } + return ctxt.dataverses().index((Dataverse) r); + } } - From 73dd0dd34064cca50304586588eef749053b9637 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 15 Oct 2024 17:11:43 -0400 Subject: [PATCH 112/402] fix relationType display value bug --- src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 0433c425fd2..ac5923b95bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -1390,7 +1390,10 @@ public List getRelatedPublications() { relatedPublication.setIdNumber(subField.getDisplayValue()); break; case DatasetFieldConstant.publicationRelationType: - relatedPublication.setRelationType(subField.getDisplayValue()); + List values = subField.getValues_nondisplay(); + if (!values.isEmpty()) { + relatedPublication.setRelationType(values.get(0)); //only one value allowed + } break; } } From d039a108f992f170d5323a26a6d6f7dafb14029b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 16 Oct 2024 08:59:11 -0400 Subject: [PATCH 113/402] IQSS/10697 - Improve batch permission indexing (#10698) * reindex batches of 20 files instead of all at once * Also only keep 100 files in list at a time * release note * Just do collections/datasets as you go Avoids keeping everything in memory, also helps in tracking progress as you can see the permissionindextime getting updated per dataset. * fix merge issues, add logging * put comments back to how they were #10697 * reduce logging #10697 * rename release note and add PR number #10697 * fix logging - finest for per file, space in message * adding a space in log message - per review --------- Co-authored-by: Philip Durbin --- .../10697-improve-permission-indexing.md | 7 + .../search/SolrIndexServiceBean.java | 151 ++++++++++-------- 2 files changed, 91 insertions(+), 67 deletions(-) create mode 100644 doc/release-notes/10697-improve-permission-indexing.md diff --git a/doc/release-notes/10697-improve-permission-indexing.md b/doc/release-notes/10697-improve-permission-indexing.md new file mode 100644 index 00000000000..b232b1c4d3c --- /dev/null +++ b/doc/release-notes/10697-improve-permission-indexing.md @@ -0,0 +1,7 @@ +### Reindexing after a role assignment is less memory intensive + +Adding/removing a user from a role on a collection, particularly the root collection, could lead to a significant increase in memory use resulting in Dataverse itself failing with an out-of-memory condition. Such changes now consume much less memory. + +If you have experienced out-of-memory failures in Dataverse in the past that could have been caused by this problem, you may wish to run a [reindex in place](https://guides.dataverse.org/en/latest/admin/solr-search-index.html#reindex-in-place) to update any out-of-date information. + +For more information, see #10697 and #10698. 
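The "reindex in place" linked above is normally a single call to the admin API rather than a clear-and-rebuild. A minimal sketch, assuming a stock installation listening on localhost:8080 (see the Admin Guide for the authoritative steps):

```shell
# Kick off an in-place reindex of all dvObjects; the existing index is not cleared first
curl http://localhost:8080/api/admin/index

# Report objects and permissions that are stale in, or missing from, Solr
curl http://localhost:8080/api/admin/index/status
```

The status call reports the same stale-permission information that findPermissionsInDatabaseButStaleInOrMissingFromSolr (touched in the diff below) computes.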
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java index cfe29ea08c7..e4d885276d0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java @@ -34,7 +34,7 @@ public class SolrIndexServiceBean { private static final Logger logger = Logger.getLogger(SolrIndexServiceBean.class.getCanonicalName()); - + @EJB DvObjectServiceBean dvObjectService; @EJB @@ -149,7 +149,7 @@ private List constructDatasetSolrDocs(Dataset dataset) { return solrDocs; } -// private List constructDatafileSolrDocs(DataFile dataFile) { + // private List constructDatafileSolrDocs(DataFile dataFile) { private List constructDatafileSolrDocs(DataFile dataFile, Map> permStringByDatasetVersion) { List datafileSolrDocs = new ArrayList<>(); Map desiredCards = searchPermissionsService.getDesiredCards(dataFile.getOwner()); @@ -166,14 +166,14 @@ private List constructDatafileSolrDocs(DataFile dataFile, Map constructDatafileSolrDocsFromDataset(Dataset datas } else { perms = searchPermissionsService.findDatasetVersionPerms(datasetVersionFileIsAttachedTo); } + for (FileMetadata fileMetadata : datasetVersionFileIsAttachedTo.getFileMetadatas()) { Long fileId = fileMetadata.getDataFile().getId(); String solrIdStart = IndexServiceBean.solrDocIdentifierFile + fileId; String solrIdEnd = getDatasetOrDataFileSolrEnding(datasetVersionFileIsAttachedTo.getVersionState()); String solrId = solrIdStart + solrIdEnd; DvObjectSolrDoc dataFileSolrDoc = new DvObjectSolrDoc(fileId.toString(), solrId, datasetVersionFileIsAttachedTo.getId(), fileMetadata.getLabel(), perms); - logger.fine("adding fileid " + fileId); + logger.finest("adding fileid " + fileId); datafileSolrDocs.add(dataFileSolrDoc); } } @@ -361,20 +362,19 @@ private void persistToSolr(Collection docs) throws SolrServer public IndexResponse indexPermissionsOnSelfAndChildren(long definitionPointId) { DvObject definitionPoint = dvObjectService.findDvObject(definitionPointId); - if ( definitionPoint == null ) { + if (definitionPoint == null) { logger.log(Level.WARNING, "Cannot find a DvOpbject with id of {0}", definitionPointId); return null; } else { return indexPermissionsOnSelfAndChildren(definitionPoint); } } - + /** * We use the database to determine direct children since there is no * inheritance */ public IndexResponse indexPermissionsOnSelfAndChildren(DvObject definitionPoint) { - List dvObjectsToReindexPermissionsFor = new ArrayList<>(); List filesToReindexAsBatch = new ArrayList<>(); /** * @todo Re-indexing the definition point itself seems to be necessary @@ -383,27 +383,47 @@ public IndexResponse indexPermissionsOnSelfAndChildren(DvObject definitionPoint) // We don't create a Solr "primary/content" doc for the root dataverse // so don't create a Solr "permission" doc either. 
+ int i = 0; + int numObjects = 0; if (definitionPoint.isInstanceofDataverse()) { Dataverse selfDataverse = (Dataverse) definitionPoint; if (!selfDataverse.equals(dataverseService.findRootDataverse())) { - dvObjectsToReindexPermissionsFor.add(definitionPoint); + indexPermissionsForOneDvObject(definitionPoint); + numObjects++; } List directChildDatasetsOfDvDefPoint = datasetService.findByOwnerId(selfDataverse.getId()); for (Dataset dataset : directChildDatasetsOfDvDefPoint) { - dvObjectsToReindexPermissionsFor.add(dataset); + indexPermissionsForOneDvObject(dataset); + numObjects++; for (DataFile datafile : filesToReIndexPermissionsFor(dataset)) { filesToReindexAsBatch.add(datafile); + i++; + if (i % 100 == 0) { + reindexFilesInBatches(filesToReindexAsBatch); + filesToReindexAsBatch.clear(); + } + if (i % 1000 == 0) { + logger.fine("Progress: " +i + " files permissions reindexed"); + } } + logger.fine("Progress : dataset " + dataset.getId() + " permissions reindexed"); } } else if (definitionPoint.isInstanceofDataset()) { - dvObjectsToReindexPermissionsFor.add(definitionPoint); + indexPermissionsForOneDvObject(definitionPoint); + numObjects++; // index files Dataset dataset = (Dataset) definitionPoint; for (DataFile datafile : filesToReIndexPermissionsFor(dataset)) { filesToReindexAsBatch.add(datafile); + i++; + if (i % 100 == 0) { + reindexFilesInBatches(filesToReindexAsBatch); + filesToReindexAsBatch.clear(); + } } } else { - dvObjectsToReindexPermissionsFor.add(definitionPoint); + indexPermissionsForOneDvObject(definitionPoint); + numObjects++; } /** @@ -412,64 +432,64 @@ public IndexResponse indexPermissionsOnSelfAndChildren(DvObject definitionPoint) * @todo Should update timestamps, probably, even thought these are * files, see https://github.com/IQSS/dataverse/issues/2421 */ - String response = reindexFilesInBatches(filesToReindexAsBatch); - - for (DvObject dvObject : dvObjectsToReindexPermissionsFor) { - /** - * @todo do something with this response - */ - IndexResponse indexResponse = indexPermissionsForOneDvObject(dvObject); - } - + reindexFilesInBatches(filesToReindexAsBatch); + logger.fine("Reindexed permissions for " + i + " files and " + numObjects + " datasets/collections"); return new IndexResponse("Number of dvObject permissions indexed for " + definitionPoint - + ": " + dvObjectsToReindexPermissionsFor.size() - ); + + ": " + numObjects); } private String reindexFilesInBatches(List filesToReindexPermissionsFor) { List docs = new ArrayList<>(); Map> byParentId = new HashMap<>(); Map> permStringByDatasetVersion = new HashMap<>(); - for (DataFile file : filesToReindexPermissionsFor) { - Dataset dataset = (Dataset) file.getOwner(); - Map desiredCards = searchPermissionsService.getDesiredCards(dataset); - for (DatasetVersion datasetVersionFileIsAttachedTo : datasetVersionsToBuildCardsFor(dataset)) { - boolean cardShouldExist = desiredCards.get(datasetVersionFileIsAttachedTo.getVersionState()); - if (cardShouldExist) { - List cachedPermission = permStringByDatasetVersion.get(datasetVersionFileIsAttachedTo.getId()); - if (cachedPermission == null) { - logger.fine("no cached permission! 
Looking it up..."); - List fileSolrDocs = constructDatafileSolrDocs((DataFile) file, permStringByDatasetVersion); - for (DvObjectSolrDoc fileSolrDoc : fileSolrDocs) { - Long datasetVersionId = fileSolrDoc.getDatasetVersionId(); - if (datasetVersionId != null) { - permStringByDatasetVersion.put(datasetVersionId, fileSolrDoc.getPermissions()); + int i = 0; + try { + for (DataFile file : filesToReindexPermissionsFor) { + Dataset dataset = (Dataset) file.getOwner(); + Map desiredCards = searchPermissionsService.getDesiredCards(dataset); + for (DatasetVersion datasetVersionFileIsAttachedTo : datasetVersionsToBuildCardsFor(dataset)) { + boolean cardShouldExist = desiredCards.get(datasetVersionFileIsAttachedTo.getVersionState()); + if (cardShouldExist) { + List cachedPermission = permStringByDatasetVersion.get(datasetVersionFileIsAttachedTo.getId()); + if (cachedPermission == null) { + logger.finest("no cached permission! Looking it up..."); + List fileSolrDocs = constructDatafileSolrDocs((DataFile) file, permStringByDatasetVersion); + for (DvObjectSolrDoc fileSolrDoc : fileSolrDocs) { + Long datasetVersionId = fileSolrDoc.getDatasetVersionId(); + if (datasetVersionId != null) { + permStringByDatasetVersion.put(datasetVersionId, fileSolrDoc.getPermissions()); + SolrInputDocument solrDoc = SearchUtil.createSolrDoc(fileSolrDoc); + docs.add(solrDoc); + i++; + } + } + } else { + logger.finest("cached permission is " + cachedPermission); + List fileSolrDocsBasedOnCachedPermissions = constructDatafileSolrDocs((DataFile) file, permStringByDatasetVersion); + for (DvObjectSolrDoc fileSolrDoc : fileSolrDocsBasedOnCachedPermissions) { SolrInputDocument solrDoc = SearchUtil.createSolrDoc(fileSolrDoc); docs.add(solrDoc); + i++; } } - } else { - logger.fine("cached permission is " + cachedPermission); - List fileSolrDocsBasedOnCachedPermissions = constructDatafileSolrDocs((DataFile) file, permStringByDatasetVersion); - for (DvObjectSolrDoc fileSolrDoc : fileSolrDocsBasedOnCachedPermissions) { - SolrInputDocument solrDoc = SearchUtil.createSolrDoc(fileSolrDoc); - docs.add(solrDoc); + if (i % 20 == 0) { + persistToSolr(docs); + docs = new ArrayList<>(); } } } + Long parent = file.getOwner().getId(); + List existingList = byParentId.get(parent); + if (existingList == null) { + List empty = new ArrayList<>(); + byParentId.put(parent, empty); + } else { + List updatedList = existingList; + updatedList.add(file.getId()); + byParentId.put(parent, updatedList); + } } - Long parent = file.getOwner().getId(); - List existingList = byParentId.get(parent); - if (existingList == null) { - List empty = new ArrayList<>(); - byParentId.put(parent, empty); - } else { - List updatedList = existingList; - updatedList.add(file.getId()); - byParentId.put(parent, updatedList); - } - } - try { + persistToSolr(docs); return " " + filesToReindexPermissionsFor.size() + " files indexed across " + docs.size() + " Solr documents "; } catch (SolrServerException | IOException ex) { @@ -517,29 +537,26 @@ public JsonObjectBuilder deleteAllFromSolrAndResetIndexTimes() throws SolrServer } /** - * - * * @return A list of dvobject ids that should have their permissions - * re-indexed because Solr was down when a permission was added. The permission - * should be added to Solr. The id of the permission contains the type of - * DvObject and the primary key of the dvObject. 
- * DvObjects of type DataFile are currently skipped because their index - * time isn't stored in the database, since they are indexed along - * with their parent dataset (this may change). + * re-indexed because Solr was down when a permission was added. The + * permission should be added to Solr. The id of the permission contains the + * type of DvObject and the primary key of the dvObject. DvObjects of type + * DataFile are currently skipped because their index time isn't stored in + * the database, since they are indexed along with their parent dataset + * (this may change). */ public List findPermissionsInDatabaseButStaleInOrMissingFromSolr() { List indexingRequired = new ArrayList<>(); long rootDvId = dataverseService.findRootDataverse().getId(); List missingDataversePermissionIds = dataverseService.findIdStalePermission(); List missingDatasetPermissionIds = datasetService.findIdStalePermission(); - for (Long id : missingDataversePermissionIds) { + for (Long id : missingDataversePermissionIds) { if (!id.equals(rootDvId)) { - indexingRequired.add(id); + indexingRequired.add(id); } } indexingRequired.addAll(missingDatasetPermissionIds); return indexingRequired; } - } From e85c4228524620d3e5db94b266a8967e10e0be0f Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 16 Oct 2024 09:44:29 -0400 Subject: [PATCH 114/402] #8184 update popup and Bundle --- src/main/java/propertyFiles/Bundle.properties | 11 +++- src/main/webapp/dataset.xhtml | 64 ++++++++++++++++--- 2 files changed, 64 insertions(+), 11 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 3ee017e06de..ce4d53a5eb9 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1723,9 +1723,18 @@ dataset.requestAccessToRestrictedFilesWithEmbargo=Embargoed files cannot be acce dataset.privateurl.infoMessageAuthor=Privately share this dataset before it is published: {0} dataset.privateurl.infoMessageReviewer=This unpublished dataset is being privately shared. dataset.privateurl.header=Unpublished Dataset Preview URL -dataset.privateurl.tip=Use a Preview URL to allow those without Dataverse accounts to access your unpublished dataset. For more information about the Preview URL feature, please refer to the User Guide. +dataset.privateurl.tip=To cite this data in publications, use the dataset's persistent ID instead of this URL. For more information about the Preview URL feature, please refer to the User Guide. +dataset.privateurl.onlyone=Only one Preview URL can be active for a single dataset. dataset.privateurl.absent=Preview URL has not been created. +dataset.privateurl.general.button.label=Create General Preview URL +dataset.privateurl.general.description=Create a URL that others can use to review this dataset version before it is published. they will be able to access all files in the dataset and see all metadata, including metadata that may identify the dataset's authors. +dataset.privateurl.general.title=General Preview +dataset.privateurl.anonymous.title=Anonymous Preview +dataset.privateurl.anonymous.button.label=Create Anonymous Preview URL +dataset.privateurl.anonymous.description=Create a URL that others can use to access an anonymized view of this unpublished dataset version. Metadata that could identify the dataset author will not be displayed. Non-identifying metadata will be visible. 
+dataset.privateurl.anonymous.description.paragraph.two=The dataset's files are not changed and will be accessible if they're not restricted. Users of the preview URL will be able to see the name of the repository and the name of the collection that this dataset is in, which may expose the dataset author's identities. dataset.privateurl.createPrivateUrl=Create Preview URL +dataset.privateurl.introduction=You can create a Preview URL to copy and share with others who will not need a repository account to review this unpublished dataset version. Once the dataset is published or if the URL is disabled, the URL will no longer work and will point to a "Page not found" page. dataset.privateurl.createPrivateUrl.anonymized=Create URL for Anonymized Access dataset.privateurl.createPrivateUrl.anonymized.unavailable=Anonymized Access is not available once a version of the dataset has been published dataset.privateurl.disablePrivateUrl=Disable Preview URL diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 6de0f00e94e..9629b3bbc85 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1178,11 +1178,61 @@

[XHTML markup lost in extraction for this hunk and for the second dataset.xhtml hunk (@@ -1200,17 +1250,11 @@); only diff markers and message keys survive. The recoverable structure of the reworked Preview URL popup is: an introduction (dataset.privateurl.introduction), a note that only one Preview URL can be active at a time (dataset.privateurl.onlyone), a General Preview URL panel (dataset.privateurl.general.description), an Anonymous Preview URL panel (dataset.privateurl.anonymous.description, dataset.privateurl.anonymous.description.paragraph.two, dataset.privateurl.createPrivateUrl.anonymized.unavailable), and the existing dataset.privateurl.absent and dataset.privateurl.cannotCreate messages.]
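The popup strings added above front the same preview (private) URL machinery that is also exposed through the native API. As a rough illustration only (not part of this patch; endpoints per the Native API guide, with host, dataset id, and token as placeholders):

```shell
export SERVER_URL=https://demo.dataverse.org
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export ID=24

# Create a general preview URL for the draft version
curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/privateUrl"

# Create an anonymized-access preview URL instead
curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/privateUrl?anonymizedAccess=true"

# Disable the preview URL again
curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/privateUrl"
```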

    From 8084bc66cbd8a9c47dbfb6354dd4ee997aeda1e5 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 16 Oct 2024 10:33:35 -0400 Subject: [PATCH 115/402] #8184 change nominal url add redirect --- .../iq/dataverse/privateurl/PrivateUrl.java | 2 +- src/main/webapp/dataset.xhtml | 2 +- src/main/webapp/previewurl.xhtml | 24 +++++++++++++++++++ 3 files changed, 26 insertions(+), 2 deletions(-) create mode 100644 src/main/webapp/previewurl.xhtml diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java index beb676f60d1..63b5bf03ea7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java @@ -30,7 +30,7 @@ public class PrivateUrl { public PrivateUrl(RoleAssignment roleAssignment, Dataset dataset, String dataverseSiteUrl) { this.token = roleAssignment.getPrivateUrlToken(); - this.link = dataverseSiteUrl + "/privateurl.xhtml?token=" + token; + this.link = dataverseSiteUrl + "/previewurl.xhtml?token=" + token; this.dataset = dataset; this.roleAssignment = roleAssignment; } diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 9629b3bbc85..ec23a9cdcfd 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1190,7 +1190,7 @@

[XHTML markup lost in extraction: a one-line change (one line removed, one line added) inside the Preview URL popup markup of dataset.xhtml.]

    #{bundle['dataset.privateurl.general.description']}

    + + + + + + + + + + + + + + + + + + \ No newline at end of file From bc798344ccabb9bfeacd047e7444ed83670e9cec Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 16 Oct 2024 10:48:19 -0400 Subject: [PATCH 116/402] #8184 revert redirect url --- .../java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java index 63b5bf03ea7..beb676f60d1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java @@ -30,7 +30,7 @@ public class PrivateUrl { public PrivateUrl(RoleAssignment roleAssignment, Dataset dataset, String dataverseSiteUrl) { this.token = roleAssignment.getPrivateUrlToken(); - this.link = dataverseSiteUrl + "/previewurl.xhtml?token=" + token; + this.link = dataverseSiteUrl + "/privateurl.xhtml?token=" + token; this.dataset = dataset; this.roleAssignment = roleAssignment; } From 53e2f0e87bc76fc2767ab72a15d0877241228787 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 16 Oct 2024 11:17:04 -0400 Subject: [PATCH 117/402] #8184 fix unit tests --- .../iq/dataverse/authorization/users/PrivateUrlUserTest.java | 2 +- .../engine/command/impl/CreatePrivateUrlCommandTest.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java index a8dda2f6a7e..d3c5cdca470 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java @@ -38,7 +38,7 @@ void getIdentifier() { @Test void testGetDisplayInfo() { RoleAssigneeDisplayInfo displayInfo = privateUrlUser.getDisplayInfo(); - assertEquals("Private URL Enabled", displayInfo.getTitle()); + assertEquals("Preview URL Enabled", displayInfo.getTitle()); assertNull(displayInfo.getEmailAddress()); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java index 508eac46cb4..73cc867cf24 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java @@ -171,7 +171,7 @@ public void testCreatePrivateUrlSuccessfully() throws CommandException { assertEquals(expectedUser.getIdentifier(), privateUrl.getRoleAssignment().getAssigneeIdentifier()); assertEquals(expectedUser.isSuperuser(), false); assertEquals(expectedUser.isAuthenticated(), false); - assertEquals(expectedUser.getDisplayInfo().getTitle(), "Private URL Enabled"); + assertEquals(expectedUser.getDisplayInfo().getTitle(), "Preview URL Enabled"); assertNotNull(privateUrl.getToken()); assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); } @@ -188,7 +188,7 @@ public void testCreateAnonymizedAccessPrivateUrlSuccessfully() throws CommandExc assertEquals(expectedUser.getIdentifier(), privateUrl.getRoleAssignment().getAssigneeIdentifier()); assertEquals(expectedUser.isSuperuser(), false); assertEquals(expectedUser.isAuthenticated(), false); - 
assertEquals(expectedUser.getDisplayInfo().getTitle(), "Private URL Enabled"); + assertEquals(expectedUser.getDisplayInfo().getTitle(), "Preview URL Enabled"); assertNotNull(privateUrl.getToken()); assertTrue(privateUrl.isAnonymizedAccess()); assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); From 19c8a12b32a502ee43f46916248a7d4691928aa6 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 16 Oct 2024 16:30:00 +0100 Subject: [PATCH 118/402] Changed: limiting the information to update in a dataverse through the new update endpoint --- .../harvard/iq/dataverse/api/Dataverses.java | 8 +- .../iq/dataverse/util/json/JsonParser.java | 79 ++++++++++++++----- 2 files changed, 62 insertions(+), 25 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index b85ee0afc8f..0bc389041c2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -128,7 +128,7 @@ public Response addRoot(@Context ContainerRequestContext crc, String body) { public Response addDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String parentIdtf) { Dataverse newDataverse; try { - newDataverse = parseAndValidateDataverse(body); + newDataverse = parseAndValidateDataverseRequestBody(body, null); } catch (JsonParsingException jpe) { return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { @@ -172,7 +172,7 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod Dataverse updatedDataverse; try { - updatedDataverse = parseAndValidateDataverse(body); + updatedDataverse = parseAndValidateDataverseRequestBody(body, originalDataverse); } catch (JsonParsingException jpe) { return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { @@ -200,10 +200,10 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod } } - private Dataverse parseAndValidateDataverse(String body) throws JsonParsingException, JsonParseException { + private Dataverse parseAndValidateDataverseRequestBody(String body, Dataverse dataverseToUpdate) throws JsonParsingException, JsonParseException { try { JsonObject dataverseJson = JsonUtil.getJsonObject(body); - return jsonParser().parseDataverse(dataverseJson); + return dataverseToUpdate != null ? 
jsonParser().parseDataverseUpdates(dataverseJson, dataverseToUpdate) : jsonParser().parseDataverse(dataverseJson); } catch (JsonParsingException jpe) { logger.log(Level.SEVERE, "Json: {0}", body); throw jpe; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 2f01c9bc2f2..f63e4c4fd9c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -50,6 +50,7 @@ import java.util.Set; import java.util.logging.Logger; import java.util.stream.Collectors; + import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; @@ -128,19 +129,8 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { dv.setPermissionRoot(jobj.getBoolean("permissionRoot", false)); dv.setFacetRoot(jobj.getBoolean("facetRoot", false)); dv.setAffiliation(jobj.getString("affiliation", null)); - - if (jobj.containsKey("dataverseContacts")) { - JsonArray dvContacts = jobj.getJsonArray("dataverseContacts"); - int i = 0; - List dvContactList = new LinkedList<>(); - for (JsonValue jsv : dvContacts) { - DataverseContact dvc = new DataverseContact(dv); - dvc.setContactEmail(getMandatoryString((JsonObject) jsv, "contactEmail")); - dvc.setDisplayOrder(i++); - dvContactList.add(dvc); - } - dv.setDataverseContacts(dvContactList); - } + + updateDataverseContacts(dv, jobj); if (jobj.containsKey("theme")) { DataverseTheme theme = parseDataverseTheme(jobj.getJsonObject("theme")); @@ -149,14 +139,8 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { } dv.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED); // default - if (jobj.containsKey("dataverseType")) { - for (Dataverse.DataverseType dvtype : Dataverse.DataverseType.values()) { - if (dvtype.name().equals(jobj.getString("dataverseType"))) { - dv.setDataverseType(dvtype); - } - } - } - + updateDataverseType(dv, jobj); + if (jobj.containsKey("filePIDsEnabled")) { dv.setFilePIDsEnabled(jobj.getBoolean("filePIDsEnabled")); } @@ -189,6 +173,59 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { return dv; } + + public Dataverse parseDataverseUpdates(JsonObject jsonObject, Dataverse dataverseToUpdate) throws JsonParseException { + String alias = jsonObject.getString("alias", null); + if (alias != null) { + dataverseToUpdate.setAlias(alias); + } + + String name = jsonObject.getString("name", null); + if (name != null) { + dataverseToUpdate.setName(name); + } + + String description = jsonObject.getString("description", null); + if (description != null) { + dataverseToUpdate.setDescription(description); + } + + String affiliation = jsonObject.getString("affiliation", null); + if (affiliation != null) { + dataverseToUpdate.setAffiliation(affiliation); + } + + updateDataverseType(dataverseToUpdate, jsonObject); + + updateDataverseContacts(dataverseToUpdate, jsonObject); + + return dataverseToUpdate; + } + + private void updateDataverseType(Dataverse dataverse, JsonObject jsonObject) { + String receivedDataverseType = jsonObject.getString("dataverseType", null); + if (receivedDataverseType != null) { + Arrays.stream(Dataverse.DataverseType.values()) + .filter(type -> type.name().equals(receivedDataverseType)) + .findFirst() + .ifPresent(dataverse::setDataverseType); + } + } + + private void updateDataverseContacts(Dataverse dataverse, JsonObject jsonObject) throws JsonParseException { + if 
(jsonObject.containsKey("dataverseContacts")) { + JsonArray dvContacts = jsonObject.getJsonArray("dataverseContacts"); + int i = 0; + List dvContactList = new LinkedList<>(); + for (JsonValue jsv : dvContacts) { + DataverseContact dvc = new DataverseContact(dataverse); + dvc.setContactEmail(getMandatoryString((JsonObject) jsv, "contactEmail")); + dvc.setDisplayOrder(i++); + dvContactList.add(dvc); + } + dataverse.setDataverseContacts(dvContactList); + } + } public DataverseTheme parseDataverseTheme(JsonObject obj) { From f4c3d2c9d9991edd2d02bd760d1fec86547a519c Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 16 Oct 2024 16:43:12 +0100 Subject: [PATCH 119/402] Removed: DataverseContact host dataverse re-set --- .../dataverse/engine/command/impl/CreateDataverseCommand.java | 4 ---- .../dataverse/engine/command/impl/UpdateDataverseCommand.java | 4 ---- 2 files changed, 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 2ce16a86297..6957dac416d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -74,10 +74,6 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } } - for (DataverseContact dc : created.getDataverseContacts()) { - dc.setDataverse(created); - } - if (metadataBlocks != null && !metadataBlocks.isEmpty()) { created.setMetadataBlockRoot(true); created.setMetadataBlocks(metadataBlocks); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index b1670a264bf..551f0ffdff7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -92,10 +92,6 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } } - for (DataverseContact dc : editedDv.getDataverseContacts()) { - dc.setDataverse(editedDv); - } - Dataverse oldDv = ctxt.dataverses().find(editedDv.getId()); DataverseType oldDvType = oldDv.getDataverseType(); From 57960c612bffa782fd4f75d9ed01e3f3307391e3 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 16 Oct 2024 14:44:39 -0400 Subject: [PATCH 120/402] #8184 fix existing test --- .../command/impl/CreatePrivateUrlCommandTest.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java index 73cc867cf24..e0060af924b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java @@ -18,7 +18,9 @@ import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.search.SolrIndexServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; +import java.sql.Timestamp; import java.util.ArrayList; +import java.util.Date; import java.util.List; import java.util.concurrent.Future; @@ -195,15 +197,17 @@ public void testCreateAnonymizedAccessPrivateUrlSuccessfully() throws CommandExc } @Test - public void 
testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() { + public void testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() throws CommandException { dataset = new Dataset(); List versions = new ArrayList<>(); + dataset.setPublicationDate(new Timestamp(new Date().getTime())); DatasetVersion datasetVersion = new DatasetVersion(); datasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED); DatasetVersion datasetVersion2 = new DatasetVersion(); - - versions.add(datasetVersion); + datasetVersion2.setVersionState(DatasetVersion.VersionState.DRAFT); + versions.add(datasetVersion2); + versions.add(datasetVersion); dataset.setVersions(versions); dataset.setId(versionIsReleased); PrivateUrl privateUrl = null; @@ -211,6 +215,7 @@ public void testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() { privateUrl = testEngine.submit(new CreatePrivateUrlCommand(null, dataset, true)); assertTrue(false); } catch (CommandException ex) { + } assertNull(privateUrl); } From 1b8a257adfe93bad1476bc325131d9206f488f66 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 16 Oct 2024 15:20:01 -0400 Subject: [PATCH 121/402] #8184 update constructor/test urls --- .../java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java | 2 +- .../engine/command/impl/CreatePrivateUrlCommandTest.java | 4 ++-- .../harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java | 2 +- .../edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java index beb676f60d1..63b5bf03ea7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java @@ -30,7 +30,7 @@ public class PrivateUrl { public PrivateUrl(RoleAssignment roleAssignment, Dataset dataset, String dataverseSiteUrl) { this.token = roleAssignment.getPrivateUrlToken(); - this.link = dataverseSiteUrl + "/privateurl.xhtml?token=" + token; + this.link = dataverseSiteUrl + "/previewurl.xhtml?token=" + token; this.dataset = dataset; this.roleAssignment = roleAssignment; } diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java index e0060af924b..0ba29f74774 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java @@ -175,7 +175,7 @@ public void testCreatePrivateUrlSuccessfully() throws CommandException { assertEquals(expectedUser.isAuthenticated(), false); assertEquals(expectedUser.getDisplayInfo().getTitle(), "Preview URL Enabled"); assertNotNull(privateUrl.getToken()); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); } @Test @@ -193,7 +193,7 @@ public void testCreateAnonymizedAccessPrivateUrlSuccessfully() throws CommandExc assertEquals(expectedUser.getDisplayInfo().getTitle(), "Preview URL Enabled"); assertNotNull(privateUrl.getToken()); assertTrue(privateUrl.isAnonymizedAccess()); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); + 
assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java index da94b288bee..f06be37578d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java @@ -277,7 +277,7 @@ public void testGetPrivateUrlFromRoleAssignmentSuccess() { PrivateUrl privateUrl = PrivateUrlUtil.getPrivateUrlFromRoleAssignment(ra, dataverseSiteUrl); assertNotNull(privateUrl); assertEquals(new Long(42), privateUrl.getDataset().getId()); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=cd71e9d7-73a7-4ec8-b890-3d00499e8693", privateUrl.getLink()); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=cd71e9d7-73a7-4ec8-b890-3d00499e8693", privateUrl.getLink()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java index 11da71e1980..30cef574a6a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java @@ -133,7 +133,7 @@ public void testJson_PrivateUrl() { assertNotNull(job); JsonObject jsonObject = job.build(); assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("token")); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("link")); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("link")); assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getJsonObject("roleAssignment").getString("privateUrlToken")); assertEquals(PrivateUrlUser.PREFIX + "42", jsonObject.getJsonObject("roleAssignment").getString("assignee")); } From 337ea789b82740174d1c4930f483261b3188f958 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 16 Oct 2024 16:15:04 -0400 Subject: [PATCH 122/402] update making releases page post 6.4 #10931 --- .../source/developers/making-releases.rst | 92 ++++++++++++++----- 1 file changed, 71 insertions(+), 21 deletions(-) diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index 4936e942389..2afdfd2eb2f 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ b/doc/sphinx-guides/source/developers/making-releases.rst @@ -10,10 +10,43 @@ Introduction This document is about releasing the main Dataverse app (https://github.com/IQSS/dataverse). See :doc:`making-library-releases` for how to release our various libraries. Other projects have their own release documentation. -The steps below describe making both regular releases and hotfix releases. - Below you'll see branches like "develop" and "master" mentioned. For more on our branching strategy, see :doc:`version-control`. +Regular or Hotfix? +------------------ + +Early on, make sure it's clear what type of release this is. The steps below describe making both regular releases and hotfix releases. + +- regular + + - e.g. 6.5 (minor) + - e.g. 7.0 (major) + +- hotfix + + - e.g. 6.4.1 (patch) + - e.g. 
7.0.1 (patch) + +Ensure Issues Have Been Created +------------------------------- + +In advance of a release, GitHub issues should have been created already that capture certain steps. See https://github.com/IQSS/dataverse-pm/issues/335 for examples. + +Declare a Code Freeze +--------------------- + +The following steps are made more difficult if code is changing in the "develop" branch. Declare a code freeze until the release is out. Do not allow pull requests to be merged. + +Conduct Performance Testing +--------------------------- + +See :doc:`/qa/performance-tests` for details. + +Conduct Smoke Testing +--------------------- + +See :doc:`/qa/testing-approach` for details. + .. _write-release-notes: Write Release Notes @@ -23,26 +56,46 @@ Developers express the need for an addition to release notes by creating a "rele The task at or near release time is to collect these snippets into a single file. -- Create an issue in GitHub to track the work of creating release notes for the upcoming release. +- Find the issue in GitHub that tracks the work of creating release notes for the upcoming release. - Create a branch, add a .md file for the release (ex. 5.10.1 Release Notes) in ``/doc/release-notes`` and write the release notes, making sure to pull content from the release note snippets mentioned above. Snippets may not include any issue number or pull request number in the text so be sure copy the number from the filename of the snippet into the final release note. - Delete (``git rm``) the release note snippets as the content is added to the main release notes file. - Include instructions describing the steps required to upgrade the application from the previous version. These must be customized for release numbers and special circumstances such as changes to metadata blocks and infrastructure. -- Take the release notes .md through the regular Code Review and QA process. That is, make a pull request. +- Take the release notes .md through the regular Code Review and QA process. That is, make a pull request. Here's an example: https://github.com/IQSS/dataverse/pull/10866 -Create a GitHub Issue and Branch for the Release ------------------------------------------------- +Deploy Release Candidate to Demo +-------------------------------- + +First, build the release candidate. + +ssh into the dataverse-internal server and undeploy the current war file. + +Go to https://jenkins.dataverse.org/job/IQSS_Dataverse_Internal/ and make the following adjustments to the config: + +- Repository URL: ``https://github.com/IQSS/dataverse.git`` +- Branch Specifier (blank for 'any'): ``*/develop`` +- Execute shell: Update version in filenames to ``dataverse-5.10.war`` (for example) + +Click "Save" then "Build Now". + +This will build the war file, and then automatically deploy it on dataverse-internal. Verify that the application has deployed successfully. + +You can scp the war file to the demo server or download it from https://jenkins.dataverse.org/job/IQSS_Dataverse_Internal/ws/target/ + +ssh into the demo server and follow the upgrade instructions in the release notes. + +Prepare Release Branch +---------------------- + +The release branch will have the final changes such as bumping the version number. Usually we branch from the "develop" branch to create the release branch. If we are creating a hotfix for a particular version (5.11, for example), we branch from the tag (e.g. ``v5.11``). -Use the GitHub issue number and the release tag for the name of the branch. (e.g. 
``8583-update-version-to-v5.10.1`` +Create a release branch named after the issue that tracks bumping the version with a descriptive name like "10852-bump-to-6.4" from https://github.com/IQSS/dataverse/pull/10871. **Note:** the changes below must be the very last commits merged into the develop branch before it is merged into master and tagged for the release! Make the following changes in the release branch. -Bump Version Numbers and Prepare Container Tags ------------------------------------------------ - Increment the version number to the milestone (e.g. 5.10.1) in the following two files: - modules/dataverse-parent/pom.xml -> ```` -> ```` (e.g. `pom.xml commit `_) @@ -58,14 +111,11 @@ Return to the parent pom and make the following change, which is necessary for p (Before you make this change the value should be ``${parsedVersion.majorVersion}.${parsedVersion.nextMinorVersion}``. Later on, after cutting a release, we'll change it back to that value.) -Check in the Changes Above into a Release Branch and Merge It -------------------------------------------------------------- - For a regular release, make the changes above in the release branch you created, make a pull request, and merge it into the "develop" branch. Like usual, you can safely delete the branch after the merge is complete. If you are making a hotfix release, make the pull request against the "master" branch. Do not delete the branch after merging because we will later merge it into the "develop" branch to pick up the hotfix. More on this later. -Either way, as usual, you should ensure that all tests are passing. Please note that you will need to bump the version in `jenkins.yml `_ in dataverse-ansible to get the tests to pass. Consider doing this before making the pull request. Alternatively, you can bump jenkins.yml after making the pull request and re-run the Jenkins job to make sure tests pass. +Either way, as usual, you should ensure that all tests are passing. Please note that you will need to bump the version in `jenkins.yml `_ in dataverse-ansible to get the tests to pass. Consider doing this before making the pull request. Alternatively, you can bump jenkins.yml after making the pull request and re-run the Jenkins job to make sure tests pass. Merge "develop" into "master" ----------------------------- @@ -94,7 +144,7 @@ After the "master" branch has been updated and the GitHub Action to build and pu To test these images against our API test suite, go to the "alpha" workflow at https://github.com/gdcc/api-test-runner/actions/workflows/alpha.yml and run it. -If there are failures, additional dependencies or settings may have been added to the "develop" workflow. Copy them over and try again. +Don't be surprised if there are failures. The test runner is a work in progress! Additional dependencies or settings may have been added to the "develop" workflow. Copy them over and try again. .. _build-guides: @@ -186,11 +236,6 @@ Upload the following artifacts to the draft release you created: - metadata block tsv files - config files -Deploy on Demo --------------- - -Now that you have the release ready to go, consider giving it one final test by deploying it on https://demo.dataverse.org. Note that this is also an opportunity to re-test the upgrade checklist as described in the release note. - Publish the Release ------------------- @@ -228,7 +273,12 @@ Create a new branch (any name is fine but ``prepare-next-iteration`` is suggeste Now create a pull request and merge it. 
-For more background, see :ref:`base-supported-image-tags`. +For more background, see :ref:`base-supported-image-tags`. For an example, see https://github.com/IQSS/dataverse/pull/10896 + +Deploy Final Release on Demo +---------------------------- + +Above you already did the hard work of deploying a release candidate to https://demo.dataverse.org. It should be relatively straighforward to undeploy the release candidate and deploy the final release. Add the Release to the Dataverse Roadmap ---------------------------------------- From 7f8f298c600782c15ae4374c6623c2a7e18ea4af Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 16 Oct 2024 17:00:16 -0400 Subject: [PATCH 123/402] typo #10931 --- doc/sphinx-guides/source/developers/making-releases.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index 2afdfd2eb2f..350f1fdcaf3 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ b/doc/sphinx-guides/source/developers/making-releases.rst @@ -278,7 +278,7 @@ For more background, see :ref:`base-supported-image-tags`. For an example, see h Deploy Final Release on Demo ---------------------------- -Above you already did the hard work of deploying a release candidate to https://demo.dataverse.org. It should be relatively straighforward to undeploy the release candidate and deploy the final release. +Above you already did the hard work of deploying a release candidate to https://demo.dataverse.org. It should be relatively straightforward to undeploy the release candidate and deploy the final release. Add the Release to the Dataverse Roadmap ---------------------------------------- From 330462e16a7b231cb5894fbb0f04fd8fd6cb2701 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 16 Oct 2024 17:01:15 -0400 Subject: [PATCH 124/402] Additional DataCiteXML testing with more fields --- .../doi/datacite/XmlMetadataTemplateTest.java | 289 +++++++++++++++++- 1 file changed, 287 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java index c03146904de..f282e681175 100644 --- a/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java @@ -1,15 +1,21 @@ package edu.harvard.iq.dataverse.pidproviders.doi.datacite; +import edu.harvard.iq.dataverse.ControlledVocabularyValue; +import edu.harvard.iq.dataverse.DataCitation; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetAuthor; import edu.harvard.iq.dataverse.DatasetField; +import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; import edu.harvard.iq.dataverse.DatasetFieldConstant; import edu.harvard.iq.dataverse.DatasetFieldType; +import edu.harvard.iq.dataverse.DatasetFieldValue; import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.DatasetVersion.VersionState; +import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.MetadataBlock; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.branding.BrandingUtil; import 
edu.harvard.iq.dataverse.dataset.DatasetType; @@ -20,16 +26,30 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.json.CompoundVocabularyException; +import edu.harvard.iq.dataverse.util.json.ControlledVocabularyException; +import edu.harvard.iq.dataverse.util.json.JsonParseException; +import edu.harvard.iq.dataverse.util.json.JsonParser; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.testing.JvmSetting; import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import edu.harvard.iq.dataverse.util.xml.XmlValidator; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonString; +import java.io.File; import java.io.IOException; import java.io.StringReader; import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Set; import javax.xml.transform.stream.StreamSource; @@ -72,7 +92,7 @@ public static void setupMocks() { } - /** + /** A minimal example to assure that the XMLMetadataTemplate generates output consistent with the DataCite XML v4.5 schema. */ @Test public void testDataCiteXMLCreation() throws IOException { @@ -135,7 +155,61 @@ public void testDataCiteXMLCreation() throws IOException { d.setDatasetType(dType); String xml = template.generateXML(d); - System.out.println("Output is " + xml); + System.out.println("Output from minimal example is " + xml); + try { + StreamSource source = new StreamSource(new StringReader(xml)); + source.setSystemId("DataCite XML for test dataset"); + assertTrue(XmlValidator.validateXmlSchema(source, new URL("https://schema.datacite.org/meta/kernel-4/metadata.xsd"))); + } catch (SAXException e) { + System.out.println("Invalid schema: " + e.getMessage()); + } + + } + + /** + * This tests a more complete example based off of the dataset-all-defaults + * file, again checking for conformance of the result with the DataCite XML v4.5 + * schema. 
+ */ + @Test + public void testDataCiteXMLCreationAllFields() throws IOException { + Dataverse collection = new Dataverse(); + collection.setCitationDatasetFieldTypes(new ArrayList<>()); + Dataset d = new Dataset(); + d.setOwner(collection); + DatasetVersion dv = new DatasetVersion(); + TermsOfUseAndAccess toa = new TermsOfUseAndAccess(); + toa.setTermsOfUse("Some terms"); + dv.setTermsOfUseAndAccess(toa); + dv.setDataset(d); + DatasetFieldType primitiveDSFType = new DatasetFieldType(DatasetFieldConstant.title, + DatasetFieldType.FieldType.TEXT, false); + DatasetField testDatasetField = new DatasetField(); + + dv.setVersionState(VersionState.DRAFT); + + testDatasetField.setDatasetVersion(dv); + + File datasetVersionJson = new File("src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt"); + String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath()))); + JsonObject datasetJson = JsonUtil.getJsonObject(datasetVersionAsJson); + + GlobalId doi = new GlobalId("doi", datasetJson.getString("authority"), datasetJson.getString("identifier"), null, null, null); + d.setGlobalId(doi); + + + List fields = assertDoesNotThrow(() -> XmlMetadataTemplateTest.parseMetadataBlocks(datasetJson.getJsonObject("datasetVersion").getJsonObject("metadataBlocks"))); + dv.setDatasetFields(fields); + + ArrayList dsvs = new ArrayList<>(); + dsvs.add(0, dv); + d.setVersions(dsvs); + DatasetType dType = new DatasetType(); + dType.setName(DatasetType.DATASET_TYPE_DATASET); + d.setDatasetType(dType); + String xml = DOIDataCiteRegisterService.getMetadataFromDvObject( + dv.getDataset().getGlobalId().asString(), new DataCitation(dv).getDataCiteMetadata(), dv.getDataset()); + System.out.println("Output from dataset-all-defaults is " + xml); try { StreamSource source = new StreamSource(new StringReader(xml)); source.setSystemId("DataCite XML for test dataset"); @@ -146,4 +220,215 @@ public void testDataCiteXMLCreation() throws IOException { } + + /** + * Mock Utility Methods - These methods support importing DatasetFields from the + * Dataverse JSON export format. They assume that any DatasetFieldType + * referenced exists, that any Controlled Vocabulary value exists, etc. which + * avoids having to do database lookups or read metadatablock tsv files. They + * are derived from the JsonParser methods of the same names with any db + * references and DatasetFieldType-related error checking removed. 
+ */ + public static List parseMetadataBlocks(JsonObject json) throws JsonParseException { + + Map existingTypes = new HashMap<>(); + + Set keys = json.keySet(); + List fields = new LinkedList<>(); + + for (String blockName : keys) { + MetadataBlock block = new MetadataBlock(); + block.setName(blockName); + JsonObject blockJson = json.getJsonObject(blockName); + JsonArray fieldsJson = blockJson.getJsonArray("fields"); + fields.addAll(parseFieldsFromArray(fieldsJson, true, block, existingTypes)); + } + return fields; + } + + private static List parseFieldsFromArray(JsonArray fieldsArray, Boolean testType, MetadataBlock block, + Map existingTypes) throws JsonParseException { + List fields = new LinkedList<>(); + for (JsonObject fieldJson : fieldsArray.getValuesAs(JsonObject.class)) { + + DatasetField field = parseField(fieldJson, testType, block, existingTypes); + if (field != null) { + fields.add(field); + } + + } + return fields; + + } + + + public static DatasetField parseField(JsonObject json, Boolean testType, MetadataBlock block, Map existingTypes) throws JsonParseException { + if (json == null) { + return null; + } + + DatasetField ret = new DatasetField(); + String fieldName = json.getString("typeName", ""); + String typeClass = json.getString("typeClass", ""); + if(!existingTypes.containsKey(fieldName)) { + boolean multiple = json.getBoolean("multiple"); + DatasetFieldType fieldType = new DatasetFieldType(); + fieldType.setName(fieldName); + fieldType.setAllowMultiples(multiple); + fieldType.setAllowControlledVocabulary(typeClass.equals("controlledVocabulary")); + fieldType.setFieldType(FieldType.TEXT); + fieldType.setMetadataBlock(block); + fieldType.setChildDatasetFieldTypes(new ArrayList<>()); + existingTypes.put(fieldName, fieldType); + } + DatasetFieldType type = existingTypes.get(fieldName); + ret.setDatasetFieldType(type); + + if (typeClass.equals("compound")) { + parseCompoundValue(ret, type, json, testType, block, existingTypes); + } else if (type.isControlledVocabulary()) { + parseControlledVocabularyValue(ret, type, json); + } else { + parsePrimitiveValue(ret, type, json); + } + + return ret; + } + + public static void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType, JsonObject json, Boolean testType, MetadataBlock block, Map existingTypes) throws JsonParseException { + List vocabExceptions = new ArrayList<>(); + List vals = new LinkedList<>(); + if (compoundType.isAllowMultiples()) { + int order = 0; + try { + json.getJsonArray("value").getValuesAs(JsonObject.class); + } catch (ClassCastException cce) { + throw new JsonParseException("Invalid values submitted for " + compoundType.getName() + ". 
It should be an array of values."); + } + for (JsonObject obj : json.getJsonArray("value").getValuesAs(JsonObject.class)) { + DatasetFieldCompoundValue cv = new DatasetFieldCompoundValue(); + List fields = new LinkedList<>(); + for (String fieldName : obj.keySet()) { + JsonObject childFieldJson = obj.getJsonObject(fieldName); + DatasetField f=null; + try { + f = parseField(childFieldJson, testType, block, existingTypes); + } catch(ControlledVocabularyException ex) { + vocabExceptions.add(ex); + } + + if (f!=null) { + f.setParentDatasetFieldCompoundValue(cv); + fields.add(f); + } + } + if (!fields.isEmpty()) { + cv.setChildDatasetFields(fields); + cv.setDisplayOrder(order); + vals.add(cv); + } + order++; + } + + + + } else { + + DatasetFieldCompoundValue cv = new DatasetFieldCompoundValue(); + List fields = new LinkedList<>(); + JsonObject value = json.getJsonObject("value"); + for (String key : value.keySet()) { + JsonObject childFieldJson = value.getJsonObject(key); + DatasetField f = null; + try { + f=parseField(childFieldJson, testType, block, existingTypes); + } catch(ControlledVocabularyException ex ) { + vocabExceptions.add(ex); + } + if (f!=null) { + f.setParentDatasetFieldCompoundValue(cv); + fields.add(f); + } + } + if (!fields.isEmpty()) { + cv.setChildDatasetFields(fields); + vals.add(cv); + } + + } + if (!vocabExceptions.isEmpty()) { + throw new CompoundVocabularyException( "Invalid controlled vocabulary in compound field ", vocabExceptions, vals); + } + + for (DatasetFieldCompoundValue dsfcv : vals) { + dsfcv.setParentDatasetField(dsf); + } + dsf.setDatasetFieldCompoundValues(vals); + } + + public static void parsePrimitiveValue(DatasetField dsf, DatasetFieldType dft , JsonObject json) throws JsonParseException { + List vals = new LinkedList<>(); + if (dft.isAllowMultiples()) { + try { + json.getJsonArray("value").getValuesAs(JsonObject.class); + } catch (ClassCastException cce) { + throw new JsonParseException("Invalid values submitted for " + dft.getName() + ". It should be an array of values."); + } + for (JsonString val : json.getJsonArray("value").getValuesAs(JsonString.class)) { + DatasetFieldValue datasetFieldValue = new DatasetFieldValue(dsf); + datasetFieldValue.setDisplayOrder(vals.size() - 1); + datasetFieldValue.setValue(val.getString().trim()); + vals.add(datasetFieldValue); + } + + } else { + try {json.getString("value");} + catch (ClassCastException cce) { + throw new JsonParseException("Invalid value submitted for " + dft.getName() + ". It should be a single value."); + } + DatasetFieldValue datasetFieldValue = new DatasetFieldValue(); + datasetFieldValue.setValue(json.getString("value", "").trim()); + datasetFieldValue.setDatasetField(dsf); + vals.add(datasetFieldValue); + } + + dsf.setDatasetFieldValues(vals); + } + + public static void parseControlledVocabularyValue(DatasetField dsf, DatasetFieldType cvvType, JsonObject json) throws JsonParseException { + List vals = new LinkedList<>(); + try { + if (cvvType.isAllowMultiples()) { + try { + json.getJsonArray("value").getValuesAs(JsonObject.class); + } catch (ClassCastException cce) { + throw new JsonParseException("Invalid values submitted for " + cvvType.getName() + ". 
It should be an array of values."); + } + for (JsonString strVal : json.getJsonArray("value").getValuesAs(JsonString.class)) { + String strValue = strVal.getString(); + ControlledVocabularyValue cvv = new ControlledVocabularyValue(); + cvv.setDatasetFieldType(cvvType); + cvv.setStrValue(strVal.getString()); + vals.add(cvv); + } + + } else { + try { + json.getString("value"); + } catch (ClassCastException cce) { + throw new JsonParseException("Invalid value submitted for " + cvvType.getName() + ". It should be a single value."); + } + String strValue = json.getString("value", ""); + ControlledVocabularyValue cvv = new ControlledVocabularyValue(); + cvv.setDatasetFieldType(cvvType); + cvv.setStrValue(strValue); + vals.add(cvv); + } + } catch (ClassCastException cce) { + throw new JsonParseException("Invalid values submitted for " + cvvType.getName()); + } + + dsf.setControlledVocabularyValues(vals); + } + } From 816b7047e4f6209c112c6f2bba3c9b22d05a1ca4 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 16 Oct 2024 17:08:13 -0400 Subject: [PATCH 125/402] formatting after review dog notice --- .../doi/datacite/XmlMetadataTemplateTest.java | 113 ++++++++++-------- 1 file changed, 63 insertions(+), 50 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java index f282e681175..2bd6818821d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java @@ -92,7 +92,9 @@ public static void setupMocks() { } - /** A minimal example to assure that the XMLMetadataTemplate generates output consistent with the DataCite XML v4.5 schema. + /** + * A minimal example to assure that the XMLMetadataTemplate generates output + * consistent with the DataCite XML v4.5 schema. 
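+ *
+ * The serialized result is roughly of the following abridged shape (element
+ * names follow the DataCite kernel-4 schema the test validates against; values
+ * other than the DOI and publisher configured below are placeholders):
+ *
+ *   <resource xmlns="http://datacite.org/schema/kernel-4">
+ *     <identifier identifierType="DOI">10.5072/FK2/ABCDEF</identifier>
+ *     <creators><creator><creatorName>...</creatorName></creator></creators>
+ *     <titles><title>...</title></titles>
+ *     <publisher>Dataverse</publisher>
+ *     <publicationYear>...</publicationYear>
+ *     <resourceType resourceTypeGeneral="Dataset"/>
+ *   </resource>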
*/ @Test public void testDataCiteXMLCreation() throws IOException { @@ -126,7 +128,7 @@ public void testDataCiteXMLCreation() throws IOException { doiMetadata.setAuthors(authors); doiMetadata.setPublisher("Dataverse"); XmlMetadataTemplate template = new XmlMetadataTemplate(doiMetadata); - + Dataset d = new Dataset(); GlobalId doi = new GlobalId("doi", "10.5072", "FK2/ABCDEF", null, null, null); d.setGlobalId(doi); @@ -159,11 +161,12 @@ public void testDataCiteXMLCreation() throws IOException { try { StreamSource source = new StreamSource(new StringReader(xml)); source.setSystemId("DataCite XML for test dataset"); - assertTrue(XmlValidator.validateXmlSchema(source, new URL("https://schema.datacite.org/meta/kernel-4/metadata.xsd"))); + assertTrue(XmlValidator.validateXmlSchema(source, + new URL("https://schema.datacite.org/meta/kernel-4/metadata.xsd"))); } catch (SAXException e) { System.out.println("Invalid schema: " + e.getMessage()); } - + } /** @@ -189,38 +192,39 @@ public void testDataCiteXMLCreationAllFields() throws IOException { dv.setVersionState(VersionState.DRAFT); testDatasetField.setDatasetVersion(dv); - + File datasetVersionJson = new File("src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt"); String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath()))); JsonObject datasetJson = JsonUtil.getJsonObject(datasetVersionAsJson); - - GlobalId doi = new GlobalId("doi", datasetJson.getString("authority"), datasetJson.getString("identifier"), null, null, null); + + GlobalId doi = new GlobalId("doi", datasetJson.getString("authority"), datasetJson.getString("identifier"), + null, null, null); d.setGlobalId(doi); - - List fields = assertDoesNotThrow(() -> XmlMetadataTemplateTest.parseMetadataBlocks(datasetJson.getJsonObject("datasetVersion").getJsonObject("metadataBlocks"))); + List fields = assertDoesNotThrow(() -> XmlMetadataTemplateTest + .parseMetadataBlocks(datasetJson.getJsonObject("datasetVersion").getJsonObject("metadataBlocks"))); dv.setDatasetFields(fields); - + ArrayList dsvs = new ArrayList<>(); dsvs.add(0, dv); d.setVersions(dsvs); DatasetType dType = new DatasetType(); dType.setName(DatasetType.DATASET_TYPE_DATASET); d.setDatasetType(dType); - String xml = DOIDataCiteRegisterService.getMetadataFromDvObject( - dv.getDataset().getGlobalId().asString(), new DataCitation(dv).getDataCiteMetadata(), dv.getDataset()); + String xml = DOIDataCiteRegisterService.getMetadataFromDvObject(dv.getDataset().getGlobalId().asString(), + new DataCitation(dv).getDataCiteMetadata(), dv.getDataset()); System.out.println("Output from dataset-all-defaults is " + xml); try { StreamSource source = new StreamSource(new StringReader(xml)); source.setSystemId("DataCite XML for test dataset"); - assertTrue(XmlValidator.validateXmlSchema(source, new URL("https://schema.datacite.org/meta/kernel-4/metadata.xsd"))); + assertTrue(XmlValidator.validateXmlSchema(source, + new URL("https://schema.datacite.org/meta/kernel-4/metadata.xsd"))); } catch (SAXException e) { System.out.println("Invalid schema: " + e.getMessage()); } - + } - /** * Mock Utility Methods - These methods support importing DatasetFields from the * Dataverse JSON export format. They assume that any DatasetFieldType @@ -230,9 +234,9 @@ public void testDataCiteXMLCreationAllFields() throws IOException { * references and DatasetFieldType-related error checking removed. 
*/ public static List parseMetadataBlocks(JsonObject json) throws JsonParseException { - + Map existingTypes = new HashMap<>(); - + Set keys = json.keySet(); List fields = new LinkedList<>(); @@ -259,10 +263,10 @@ private static List parseFieldsFromArray(JsonArray fieldsArray, Bo } return fields; - } + } - - public static DatasetField parseField(JsonObject json, Boolean testType, MetadataBlock block, Map existingTypes) throws JsonParseException { + public static DatasetField parseField(JsonObject json, Boolean testType, MetadataBlock block, + Map existingTypes) throws JsonParseException { if (json == null) { return null; } @@ -270,7 +274,7 @@ public static DatasetField parseField(JsonObject json, Boolean testType, Metad DatasetField ret = new DatasetField(); String fieldName = json.getString("typeName", ""); String typeClass = json.getString("typeClass", ""); - if(!existingTypes.containsKey(fieldName)) { + if (!existingTypes.containsKey(fieldName)) { boolean multiple = json.getBoolean("multiple"); DatasetFieldType fieldType = new DatasetFieldType(); fieldType.setName(fieldName); @@ -294,8 +298,10 @@ public static DatasetField parseField(JsonObject json, Boolean testType, Metad return ret; } - - public static void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType, JsonObject json, Boolean testType, MetadataBlock block, Map existingTypes) throws JsonParseException { + + public static void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType, JsonObject json, + Boolean testType, MetadataBlock block, Map existingTypes) + throws JsonParseException { List vocabExceptions = new ArrayList<>(); List vals = new LinkedList<>(); if (compoundType.isAllowMultiples()) { @@ -303,23 +309,24 @@ public static void parseCompoundValue(DatasetField dsf, DatasetFieldType compoun try { json.getJsonArray("value").getValuesAs(JsonObject.class); } catch (ClassCastException cce) { - throw new JsonParseException("Invalid values submitted for " + compoundType.getName() + ". It should be an array of values."); + throw new JsonParseException("Invalid values submitted for " + compoundType.getName() + + ". 
It should be an array of values."); } for (JsonObject obj : json.getJsonArray("value").getValuesAs(JsonObject.class)) { DatasetFieldCompoundValue cv = new DatasetFieldCompoundValue(); List fields = new LinkedList<>(); for (String fieldName : obj.keySet()) { JsonObject childFieldJson = obj.getJsonObject(fieldName); - DatasetField f=null; + DatasetField f = null; try { f = parseField(childFieldJson, testType, block, existingTypes); - } catch(ControlledVocabularyException ex) { + } catch (ControlledVocabularyException ex) { vocabExceptions.add(ex); } - - if (f!=null) { + + if (f != null) { f.setParentDatasetFieldCompoundValue(cv); - fields.add(f); + fields.add(f); } } if (!fields.isEmpty()) { @@ -330,10 +337,8 @@ public static void parseCompoundValue(DatasetField dsf, DatasetFieldType compoun order++; } - - } else { - + DatasetFieldCompoundValue cv = new DatasetFieldCompoundValue(); List fields = new LinkedList<>(); JsonObject value = json.getJsonObject("value"); @@ -341,11 +346,11 @@ public static void parseCompoundValue(DatasetField dsf, DatasetFieldType compoun JsonObject childFieldJson = value.getJsonObject(key); DatasetField f = null; try { - f=parseField(childFieldJson, testType, block, existingTypes); - } catch(ControlledVocabularyException ex ) { + f = parseField(childFieldJson, testType, block, existingTypes); + } catch (ControlledVocabularyException ex) { vocabExceptions.add(ex); } - if (f!=null) { + if (f != null) { f.setParentDatasetFieldCompoundValue(cv); fields.add(f); } @@ -354,10 +359,11 @@ public static void parseCompoundValue(DatasetField dsf, DatasetFieldType compoun cv.setChildDatasetFields(fields); vals.add(cv); } - - } + + } if (!vocabExceptions.isEmpty()) { - throw new CompoundVocabularyException( "Invalid controlled vocabulary in compound field ", vocabExceptions, vals); + throw new CompoundVocabularyException("Invalid controlled vocabulary in compound field ", vocabExceptions, + vals); } for (DatasetFieldCompoundValue dsfcv : vals) { @@ -366,13 +372,15 @@ public static void parseCompoundValue(DatasetField dsf, DatasetFieldType compoun dsf.setDatasetFieldCompoundValues(vals); } - public static void parsePrimitiveValue(DatasetField dsf, DatasetFieldType dft , JsonObject json) throws JsonParseException { + public static void parsePrimitiveValue(DatasetField dsf, DatasetFieldType dft, JsonObject json) + throws JsonParseException { List vals = new LinkedList<>(); if (dft.isAllowMultiples()) { - try { - json.getJsonArray("value").getValuesAs(JsonObject.class); + try { + json.getJsonArray("value").getValuesAs(JsonObject.class); } catch (ClassCastException cce) { - throw new JsonParseException("Invalid values submitted for " + dft.getName() + ". It should be an array of values."); + throw new JsonParseException( + "Invalid values submitted for " + dft.getName() + ". It should be an array of values."); } for (JsonString val : json.getJsonArray("value").getValuesAs(JsonString.class)) { DatasetFieldValue datasetFieldValue = new DatasetFieldValue(dsf); @@ -382,10 +390,12 @@ public static void parsePrimitiveValue(DatasetField dsf, DatasetFieldType dft , } } else { - try {json.getString("value");} - catch (ClassCastException cce) { - throw new JsonParseException("Invalid value submitted for " + dft.getName() + ". It should be a single value."); - } + try { + json.getString("value"); + } catch (ClassCastException cce) { + throw new JsonParseException( + "Invalid value submitted for " + dft.getName() + ". 
It should be a single value."); + } DatasetFieldValue datasetFieldValue = new DatasetFieldValue(); datasetFieldValue.setValue(json.getString("value", "").trim()); datasetFieldValue.setDatasetField(dsf); @@ -394,15 +404,17 @@ public static void parsePrimitiveValue(DatasetField dsf, DatasetFieldType dft , dsf.setDatasetFieldValues(vals); } - - public static void parseControlledVocabularyValue(DatasetField dsf, DatasetFieldType cvvType, JsonObject json) throws JsonParseException { + + public static void parseControlledVocabularyValue(DatasetField dsf, DatasetFieldType cvvType, JsonObject json) + throws JsonParseException { List vals = new LinkedList<>(); try { if (cvvType.isAllowMultiples()) { try { json.getJsonArray("value").getValuesAs(JsonObject.class); } catch (ClassCastException cce) { - throw new JsonParseException("Invalid values submitted for " + cvvType.getName() + ". It should be an array of values."); + throw new JsonParseException( + "Invalid values submitted for " + cvvType.getName() + ". It should be an array of values."); } for (JsonString strVal : json.getJsonArray("value").getValuesAs(JsonString.class)) { String strValue = strVal.getString(); @@ -416,7 +428,8 @@ public static void parseControlledVocabularyValue(DatasetField dsf, DatasetField try { json.getString("value"); } catch (ClassCastException cce) { - throw new JsonParseException("Invalid value submitted for " + cvvType.getName() + ". It should be a single value."); + throw new JsonParseException( + "Invalid value submitted for " + cvvType.getName() + ". It should be a single value."); } String strValue = json.getString("value", ""); ControlledVocabularyValue cvv = new ControlledVocabularyValue(); From 8ef8cfd2c70d34f458d5bad33d7b790c3150b409 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 13:35:44 +0100 Subject: [PATCH 126/402] Added: parseDataverseUpdates unit test --- .../dataverse/util/json/JsonParserTest.java | 54 ++++++++++++------- 1 file changed, 35 insertions(+), 19 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index 59e175f30c1..1a1d836f6a0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -4,17 +4,9 @@ package edu.harvard.iq.dataverse.util.json; -import edu.harvard.iq.dataverse.ControlledVocabularyValue; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetField; -import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; -import edu.harvard.iq.dataverse.DatasetFieldType; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; -import edu.harvard.iq.dataverse.DatasetFieldValue; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseTheme.Alignment; -import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.UserNotification.Type; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroupProvider; @@ -50,16 +42,7 @@ import java.io.StringReader; import java.math.BigDecimal; import java.text.ParseException; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Collections; -import java.util.Date; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedList; -import 
java.util.List; -import java.util.Set; -import java.util.TimeZone; +import java.util.*; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.*; @@ -281,6 +264,39 @@ public void testParseCompleteDataverse() throws JsonParseException { throw new JsonParseException("Couldn't read test file", ioe); } } + + /** + * TODO + * @throws JsonParseException when this test is broken. + */ + @Test + public void parseDataverseUpdates() throws JsonParseException { + Dataverse dataverse = new Dataverse(); + dataverse.setName("Name to update"); + dataverse.setAlias("aliasToUpdate"); + dataverse.setAffiliation("Affiliation to update"); + dataverse.setDescription("Description to update"); + dataverse.setDataverseType(Dataverse.DataverseType.DEPARTMENT); + List originalContacts = new ArrayList<>(); + originalContacts.add(new DataverseContact(dataverse, "updatethis@example.edu")); + dataverse.setDataverseContacts(originalContacts); + JsonObject dvJson; + try (FileReader reader = new FileReader("doc/sphinx-guides/source/_static/api/dataverse-complete.json")) { + dvJson = Json.createReader(reader).readObject(); + Dataverse actual = sut.parseDataverseUpdates(dvJson, dataverse); + assertEquals("Scientific Research", actual.getName()); + assertEquals("science", actual.getAlias()); + assertEquals("Scientific Research University", actual.getAffiliation()); + assertEquals("We do all the science.", actual.getDescription()); + assertEquals("LABORATORY", actual.getDataverseType().toString()); + assertEquals(2, actual.getDataverseContacts().size()); + assertEquals("pi@example.edu,student@example.edu", actual.getContactEmails()); + assertEquals(0, actual.getDataverseContacts().get(0).getDisplayOrder()); + assertEquals(1, actual.getDataverseContacts().get(1).getDisplayOrder()); + } catch (IOException ioe) { + throw new JsonParseException("Couldn't read test file", ioe); + } + } @Test public void testParseThemeDataverse() throws JsonParseException { From 62df2a7d534b87cd8975fd01317d3d1a05576e4a Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:36:27 +0100 Subject: [PATCH 127/402] Changed: reordered logic in UpdateDataverseCommand for further refactoring --- .../command/impl/UpdateDataverseCommand.java | 56 +++++++++++-------- 1 file changed, 34 insertions(+), 22 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 551f0ffdff7..16b93debb6d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -78,6 +78,11 @@ public UpdateDataverseCommand(Dataverse editedDv, public Dataverse execute(CommandContext ctxt) throws CommandException { logger.fine("Entering update dataverse command"); + if (metadataBlocks != null && !metadataBlocks.isEmpty()) { + editedDv.setMetadataBlockRoot(true); + editedDv.setMetadataBlocks(metadataBlocks); + } + // Perform any optional validation steps, if defined: if (ctxt.systemConfig().isExternalDataverseValidationEnabled()) { // For admins, an override of the external validation step may be enabled: @@ -98,39 +103,46 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { String oldDvAlias = oldDv.getAlias(); String oldDvName = oldDv.getName(); - Dataverse result = ctxt.dataverses().save(editedDv); - - if (facetList != null) { - 
ctxt.facets().deleteFacetsFor(result); - int i = 0; - for (DatasetFieldType df : facetList) { - ctxt.facets().create(i++, df.getId(), result.getId()); - } + // We don't want to reindex the children datasets unnecessarily: + // When these values are changed we need to reindex all children datasets + // This check is not recursive as all the values just report the immediate parent + if (!oldDvType.equals(editedDv.getDataverseType()) + || !oldDvName.equals(editedDv.getName()) + || !oldDvAlias.equals(editedDv.getAlias())) { + datasetsReindexRequired = true; } + if (featuredDataverseList != null) { - ctxt.featuredDataverses().deleteFeaturedDataversesFor(result); + ctxt.featuredDataverses().deleteFeaturedDataversesFor(editedDv); int i = 0; for (Object obj : featuredDataverseList) { Dataverse dv = (Dataverse) obj; - ctxt.featuredDataverses().create(i++, dv.getId(), result.getId()); + ctxt.featuredDataverses().create(i++, dv.getId(), editedDv.getId()); } } - if (inputLevelList != null) { - ctxt.fieldTypeInputLevels().deleteFacetsFor(result); - for (DataverseFieldTypeInputLevel obj : inputLevelList) { - ctxt.fieldTypeInputLevels().create(obj); + + if (facetList != null) { + ctxt.facets().deleteFacetsFor(editedDv); + if (!facetList.isEmpty()) { + editedDv.setFacetRoot(true); + } + int i = 0; + for (DatasetFieldType df : facetList) { + ctxt.facets().create(i++, df, editedDv); } } - - // We don't want to reindex the children datasets unnecessarily: - // When these values are changed we need to reindex all children datasets - // This check is not recursive as all the values just report the immediate parent - if (!oldDvType.equals(editedDv.getDataverseType()) - || !oldDvName.equals(editedDv.getName()) - || !oldDvAlias.equals(editedDv.getAlias())) { - datasetsReindexRequired = true; + if (inputLevelList != null) { + if (!inputLevelList.isEmpty()) { + editedDv.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList); + } + ctxt.fieldTypeInputLevels().deleteFacetsFor(editedDv); + for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) { + inputLevel.setDataverse(editedDv); + ctxt.fieldTypeInputLevels().create(inputLevel); + } } + Dataverse result = ctxt.dataverses().save(editedDv); return result; } From 6ccbb4ae53ee6bdd573b686e98c964ecf4e8d2db Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:37:05 +0100 Subject: [PATCH 128/402] Changed: updateDataverse return code --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 0bc389041c2..d8bd2b8cb4b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -188,7 +188,7 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); updatedDataverse = execCommand(new UpdateDataverseCommand(updatedDataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks)); - return created("/dataverses/" + updatedDataverse.getAlias(), json(updatedDataverse)); + return ok(json(updatedDataverse)); } catch (WrappedResponse ww) { return handleWrappedResponse(ww); From 5c1703906dfeb205604dd8608b286ee706295e2d Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:37:47 +0100 Subject: [PATCH 129/402] Added: IT for updateDataverse endpoint --- 
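For reference, the JSON body consumed by the updated PUT /api/dataverses/{alias} endpoint, as assembled by the UtilIT.updateDataverse helper added below, looks roughly like the following sketch. The alias, token, and server are placeholders; the other field values mirror the ones used in the new integration test.

    import static io.restassured.RestAssured.given;

    public class UpdateDataverseSketch {
        public static void main(String[] args) {
            String alias = "myDataverseAlias";   // placeholder
            String apiToken = "xxxxxxxx";        // placeholder
            String body = """
                    {
                      "alias": "science-updated",
                      "name": "New Test Dataverse Name",
                      "affiliation": "New Test Dataverse Affiliation",
                      "dataverseType": "TEACHING_COURSES",
                      "dataverseContacts": [ { "contactEmail": "new_email@dataverse.com" } ],
                      "metadataBlocks": {
                        "metadataBlockNames": [ "citation", "geospatial", "biomedical" ],
                        "facetIds": [ "contributorName" ],
                        "inputLevels": [
                          { "datasetFieldTypeName": "geographicCoverage", "required": true, "include": true }
                        ]
                      }
                    }
                    """;
            // Mirrors UtilIT.updateDataverse: PUT the JSON to /api/dataverses/{alias}
            given().body(body).contentType("application/json")
                    .when().put("/api/dataverses/" + alias + "?key=" + apiToken)
                    .then().statusCode(200);
        }
    }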
.../iq/dataverse/api/DataversesIT.java | 43 +++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 62 +++++++++++++++++++ 2 files changed, 105 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 8c6a8244af1..7abc35d536a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1253,6 +1253,49 @@ public void testAddDataverse() { .body("message", equalTo("Invalid metadata block name: \"" + invalidMetadataBlockName + "\"")); } + @Test + public void testUpdateDataverse() { + Response createUser = UtilIT.createRandomUser(); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + String testAliasSuffix = "-update-dataverse"; + + String testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + Response createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root"); + createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + String newAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + String newName = "New Test Dataverse Name"; + String newAffiliation = "New Test Dataverse Affiliation"; + String newDataverseType = Dataverse.DataverseType.TEACHING_COURSES.toString(); + String[] newContactEmails = new String[] {"new_email@dataverse.com"}; + String[] newInputLevelNames = new String[] {"geographicCoverage"}; + String[] newFacetIds = new String[] {"contributorName"}; + String[] newMetadataBlockNames = new String[] {"citation", "geospatial", "biomedical"}; + + Response updateDataverseResponse = UtilIT.updateDataverse( + testDataverseAlias, + newAlias, + newName, + newAffiliation, + newDataverseType, + newContactEmails, + newInputLevelNames, + newFacetIds, + newMetadataBlockNames, + apiToken + ); + + updateDataverseResponse.prettyPrint(); + updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // TODO add more assertions and cases + + // The alias has been changed, so we should not be able to do any operation using the old one + String oldDataverseAlias = testDataverseAlias; + Response getDataverseResponse = UtilIT.listDataverseFacets(oldDataverseAlias, apiToken); + getDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + } + @Test public void testListFacets() { Response createUserResponse = UtilIT.createRandomUser(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 70f49d81b35..eb40a85f10c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import io.restassured.http.ContentType; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; @@ -12,6 +13,7 @@ import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObject; +import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import static jakarta.ws.rs.core.Response.Status.CREATED; import java.nio.charset.StandardCharsets; @@ -428,6 +430,66 @@ static Response createSubDataverse(String alias, String category, String apiToke return createDataverseResponse; } + static Response updateDataverse(String alias, + String newAlias, + String newName, + String newAffiliation, + String 
newDataverseType, + String[] newContactEmails, + String[] newInputLevelNames, + String[] newFacetIds, + String[] newMetadataBlockNames, + String apiToken) { + JsonArrayBuilder contactArrayBuilder = Json.createArrayBuilder(); + for(String contactEmail : newContactEmails) { + contactArrayBuilder.add(Json.createObjectBuilder().add("contactEmail", contactEmail)); + } + NullSafeJsonBuilder jsonBuilder = jsonObjectBuilder() + .add("alias", newAlias) + .add("name", newName) + .add("affiliation", newAffiliation) + .add("dataverseContacts", contactArrayBuilder) + .add("dataverseType", newDataverseType) + .add("affiliation", newAffiliation); + + JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder(); + + if (newInputLevelNames != null) { + JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); + for(String inputLevelName : newInputLevelNames) { + inputLevelsArrayBuilder.add(Json.createObjectBuilder() + .add("datasetFieldTypeName", inputLevelName) + .add("required", true) + .add("include", true) + ); + } + metadataBlocksObjectBuilder.add("inputLevels", inputLevelsArrayBuilder); + } + + if (newMetadataBlockNames != null) { + JsonArrayBuilder metadataBlockNamesArrayBuilder = Json.createArrayBuilder(); + for(String metadataBlockName : newMetadataBlockNames) { + metadataBlockNamesArrayBuilder.add(metadataBlockName); + } + metadataBlocksObjectBuilder.add("metadataBlockNames", metadataBlockNamesArrayBuilder); + } + + if (newFacetIds != null) { + JsonArrayBuilder facetIdsArrayBuilder = Json.createArrayBuilder(); + for(String facetId : newFacetIds) { + facetIdsArrayBuilder.add(facetId); + } + metadataBlocksObjectBuilder.add("facetIds", facetIdsArrayBuilder); + } + + jsonBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); + + JsonObject dvData = jsonBuilder.build(); + return given() + .body(dvData.toString()).contentType(ContentType.JSON) + .when().put("/api/dataverses/" + alias + "?key=" + apiToken); + } + static Response createDataverse(JsonObject dvData, String apiToken) { Response createDataverseResponse = given() .body(dvData.toString()).contentType(ContentType.JSON) From e5cdb106e22064fe4fc84fa834dae2bf984525ff Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:52:12 +0100 Subject: [PATCH 130/402] Refactor: UtilIT duplication on dataverse write operations --- .../edu/harvard/iq/dataverse/api/UtilIT.java | 66 ++++++------------- 1 file changed, 21 insertions(+), 45 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index eb40a85f10c..502f1ecb0a8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -391,43 +391,12 @@ static Response createSubDataverse(String alias, String category, String apiToke objectBuilder.add("affiliation", affiliation); } - JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder(); - - if (inputLevelNames != null) { - JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); - for(String inputLevelName : inputLevelNames) { - inputLevelsArrayBuilder.add(Json.createObjectBuilder() - .add("datasetFieldTypeName", inputLevelName) - .add("required", true) - .add("include", true) - ); - } - metadataBlocksObjectBuilder.add("inputLevels", inputLevelsArrayBuilder); - } - - if (metadataBlockNames != null) { - JsonArrayBuilder metadataBlockNamesArrayBuilder = Json.createArrayBuilder(); - for(String metadataBlockName : metadataBlockNames) { - 
metadataBlockNamesArrayBuilder.add(metadataBlockName); - } - metadataBlocksObjectBuilder.add("metadataBlockNames", metadataBlockNamesArrayBuilder); - } - - if (facetIds != null) { - JsonArrayBuilder facetIdsArrayBuilder = Json.createArrayBuilder(); - for(String facetId : facetIds) { - facetIdsArrayBuilder.add(facetId); - } - metadataBlocksObjectBuilder.add("facetIds", facetIdsArrayBuilder); - } - - objectBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); + updateDataverseRequestJsonWithMetadataBlocksConfiguration(inputLevelNames, facetIds, metadataBlockNames, objectBuilder); JsonObject dvData = objectBuilder.build(); - Response createDataverseResponse = given() + return given() .body(dvData.toString()).contentType(ContentType.JSON) .when().post("/api/dataverses/" + parentDV + "?key=" + apiToken); - return createDataverseResponse; } static Response updateDataverse(String alias, @@ -452,11 +421,23 @@ static Response updateDataverse(String alias, .add("dataverseType", newDataverseType) .add("affiliation", newAffiliation); + updateDataverseRequestJsonWithMetadataBlocksConfiguration(newInputLevelNames, newFacetIds, newMetadataBlockNames, jsonBuilder); + + JsonObject dvData = jsonBuilder.build(); + return given() + .body(dvData.toString()).contentType(ContentType.JSON) + .when().put("/api/dataverses/" + alias + "?key=" + apiToken); + } + + private static void updateDataverseRequestJsonWithMetadataBlocksConfiguration(String[] inputLevelNames, + String[] facetIds, + String[] metadataBlockNames, + JsonObjectBuilder objectBuilder) { JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder(); - if (newInputLevelNames != null) { + if (inputLevelNames != null) { JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); - for(String inputLevelName : newInputLevelNames) { + for(String inputLevelName : inputLevelNames) { inputLevelsArrayBuilder.add(Json.createObjectBuilder() .add("datasetFieldTypeName", inputLevelName) .add("required", true) @@ -466,28 +447,23 @@ static Response updateDataverse(String alias, metadataBlocksObjectBuilder.add("inputLevels", inputLevelsArrayBuilder); } - if (newMetadataBlockNames != null) { + if (metadataBlockNames != null) { JsonArrayBuilder metadataBlockNamesArrayBuilder = Json.createArrayBuilder(); - for(String metadataBlockName : newMetadataBlockNames) { + for(String metadataBlockName : metadataBlockNames) { metadataBlockNamesArrayBuilder.add(metadataBlockName); } metadataBlocksObjectBuilder.add("metadataBlockNames", metadataBlockNamesArrayBuilder); } - if (newFacetIds != null) { + if (facetIds != null) { JsonArrayBuilder facetIdsArrayBuilder = Json.createArrayBuilder(); - for(String facetId : newFacetIds) { + for(String facetId : facetIds) { facetIdsArrayBuilder.add(facetId); } metadataBlocksObjectBuilder.add("facetIds", facetIdsArrayBuilder); } - jsonBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); - - JsonObject dvData = jsonBuilder.build(); - return given() - .body(dvData.toString()).contentType(ContentType.JSON) - .when().put("/api/dataverses/" + alias + "?key=" + apiToken); + objectBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); } static Response createDataverse(JsonObject dvData, String apiToken) { From 8020d50c26a3d41bef41403984495bad535dbad2 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:56:24 +0100 Subject: [PATCH 131/402] Added: pending doc comment to JsonParserTest method --- .../edu/harvard/iq/dataverse/util/json/JsonParserTest.java | 3 ++- 1 file changed, 2 insertions(+), 1 
deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index 1a1d836f6a0..2cffa7d921c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -266,7 +266,8 @@ public void testParseCompleteDataverse() throws JsonParseException { } /** - * TODO + * Test that a JSON object passed for a complete Dataverse update is correctly parsed. + * This checks that all properties are parsed into the correct dataverse properties. * @throws JsonParseException when this test is broken. */ @Test From 9285c926e90ca7fec98777654630a36eb89550f7 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 17 Oct 2024 10:44:53 -0400 Subject: [PATCH 132/402] Update dataset-management.rst --- .../source/user/dataset-management.rst | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 2e5d84748a8..39fe0128dfd 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -169,7 +169,7 @@ Certain file types in the Dataverse installation are supported by additional fun File Previews ------------- -Dataverse installations can add previewers for common file types uploaded by their research communities. The previews appear on the file page. If a preview tool for a specific file type is available, the preview will be created and will display automatically, after terms have been agreed to or a guestbook entry has been made, if necessary. File previews are not available for restricted files unless they are being accessed using a Private URL. See also :ref:`privateurl`. +Dataverse installations can add previewers for common file types uploaded by their research communities. The previews appear on the file page. If a preview tool for a specific file type is available, the preview will be created and will display automatically, after terms have been agreed to or a guestbook entry has been made, if necessary. File previews are not available for restricted files unless they are being accessed using a Preview URL. See also :ref:`privateurl`. Previewers are available for the following file types: @@ -678,21 +678,21 @@ If you have a Contributor role (can edit metadata, upload files, and edit files, .. _privateurl: -Private URL to Review Unpublished Dataset +Preview URL to Review Unpublished Dataset ========================================= -Creating a Private URL for your dataset allows you to share your dataset (for viewing and downloading of files) before it is published to a wide group of individuals who may not have a user account on the Dataverse installation. Anyone you send the Private URL to will not have to log into the Dataverse installation to view the dataset. +Creating a Preview URL for your dataset allows you to share your dataset (for viewing and downloading of files) before it is published to a wide group of individuals who may not have a user account on the Dataverse installation. Anyone you send the Preview URL to will not have to log into the Dataverse installation to view the dataset. -**Note:** To create a Private URL, you must have the *ManageDatasetPermissions* permission for your dataset, usually given by the :ref:`roles ` *Curator* or *Administrator*. 
+**Note:** To create a Preview URL, you must have the *ManageDatasetPermissions* permission for your dataset, usually given by the :ref:`roles ` *Curator* or *Administrator*. #. Go to your unpublished dataset #. Select the “Edit” button -#. Select “Private URL” in the dropdown menu -#. In the pop-up select “Create Private URL” or "Create URL for Anonymized Access". The latter supports anonymous review by removing author names and other potentially identifying information from citations, version history tables, and some metadata fields (as configured by the administrator). -#. Copy the Private URL which has been created for this dataset and it can now be shared with anyone you wish to have access to view or download files in your unpublished dataset. +#. Select “Preview URL” in the dropdown menu +#. In the pop-up select “Create General Preview URL” or "Create URL for Anonymized Access". The latter supports anonymous review by removing author names and other potentially identifying information from citations, version history tables, and some metadata fields (as configured by the administrator). +#. Copy the Preview URL which has been created for this dataset and it can now be shared with anyone you wish to have access to view or download files in your unpublished dataset. -To disable a Private URL and to revoke access, follow the same steps as above until step #3 when you return to the popup, click the “Disable Private URL” button. -Note that only one PrivateURL (normal or with anonymized access) can be configured per dataset at a time. +To disable a Preview URL and to revoke access, follow the same steps as above until step #3 when you return to the popup, click the “Disable Preview URL” button. +Note that only one Preview URL (normal or with anonymized access) can be configured per dataset at a time. 
Embargoes ========= From 2d10f22de0a20ab73ef146d6b90f1fb587672f2a Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 15:49:36 +0100 Subject: [PATCH 133/402] Added: missing IT for updateDataverse endpoint --- .../iq/dataverse/api/DataversesIT.java | 63 ++++++++++++++++--- 1 file changed, 55 insertions(+), 8 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 7abc35d536a..c311fa1016e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1,12 +1,15 @@ package edu.harvard.iq.dataverse.api; import io.restassured.RestAssured; + import static io.restassured.RestAssured.given; import static io.restassured.path.json.JsonPath.with; + import io.restassured.response.Response; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; + import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; @@ -14,6 +17,7 @@ import java.util.Arrays; import java.util.List; import java.util.logging.Logger; + import jakarta.json.Json; import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; @@ -31,6 +35,7 @@ import static org.junit.jupiter.api.Assertions.*; import java.nio.file.Files; + import io.restassured.path.json.JsonPath; import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; @@ -43,7 +48,7 @@ public class DataversesIT { public static void setUpClass() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); } - + @AfterAll public static void afterClass() { Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport); @@ -1267,10 +1272,10 @@ public void testUpdateDataverse() { String newName = "New Test Dataverse Name"; String newAffiliation = "New Test Dataverse Affiliation"; String newDataverseType = Dataverse.DataverseType.TEACHING_COURSES.toString(); - String[] newContactEmails = new String[] {"new_email@dataverse.com"}; - String[] newInputLevelNames = new String[] {"geographicCoverage"}; - String[] newFacetIds = new String[] {"contributorName"}; - String[] newMetadataBlockNames = new String[] {"citation", "geospatial", "biomedical"}; + String[] newContactEmails = new String[]{"new_email@dataverse.com"}; + String[] newInputLevelNames = new String[]{"geographicCoverage"}; + String[] newFacetIds = new String[]{"contributorName"}; + String[] newMetadataBlockNames = new String[]{"citation", "geospatial", "biomedical"}; Response updateDataverseResponse = UtilIT.updateDataverse( testDataverseAlias, @@ -1285,15 +1290,57 @@ public void testUpdateDataverse() { apiToken ); - updateDataverseResponse.prettyPrint(); + // Assert dataverse properties are updated updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // TODO add more assertions and cases + String actualDataverseAlias = updateDataverseResponse.then().extract().path("data.alias"); + assertEquals(newAlias, actualDataverseAlias); + String actualDataverseName = updateDataverseResponse.then().extract().path("data.name"); + assertEquals(newName, actualDataverseName); + String actualDataverseAffiliation = updateDataverseResponse.then().extract().path("data.affiliation"); + assertEquals(newAffiliation, actualDataverseAffiliation); + String actualDataverseType = updateDataverseResponse.then().extract().path("data.dataverseType"); + 
assertEquals(newDataverseType, actualDataverseType); + String actualContactEmail = updateDataverseResponse.then().extract().path("data.dataverseContacts[0].contactEmail"); + assertEquals("new_email@dataverse.com", actualContactEmail); + + // Assert metadata blocks are updated + Response listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + String actualDataverseMetadataBlock1 = listMetadataBlocksResponse.then().extract().path("data[0].name"); + String actualDataverseMetadataBlock2 = listMetadataBlocksResponse.then().extract().path("data[1].name"); + String actualDataverseMetadataBlock3 = listMetadataBlocksResponse.then().extract().path("data[2].name"); + assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock1)); + assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock2)); + assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock3)); + + // Assert custom facets are updated + Response listDataverseFacetsResponse = UtilIT.listDataverseFacets(newAlias, apiToken); + String actualFacetName = listDataverseFacetsResponse.then().extract().path("data[0]"); + assertThat(newFacetIds, hasItemInArray(actualFacetName)); + + // Assert input levels are updated + Response listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(newAlias, apiToken); + String actualInputLevelName = listDataverseInputLevelsResponse.then().extract().path("data[0].datasetFieldTypeName"); + assertThat(newInputLevelNames, hasItemInArray(actualInputLevelName)); // The alias has been changed, so we should not be able to do any operation using the old one String oldDataverseAlias = testDataverseAlias; Response getDataverseResponse = UtilIT.listDataverseFacets(oldDataverseAlias, apiToken); getDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // Should return error when the dataverse to edit does not exist + updateDataverseResponse = UtilIT.updateDataverse( + "unexistingDataverseAlias", + newAlias, + newName, + newAffiliation, + newDataverseType, + newContactEmails, + newInputLevelNames, + newFacetIds, + newMetadataBlockNames, + apiToken + ); + updateDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } @Test From 8af5b1c8ae262d9cdc05756e01f8f8ba1e76166b Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 17 Oct 2024 14:43:54 -0400 Subject: [PATCH 134/402] #8184 add updated api endpoints - deprecate private url --- .../harvard/iq/dataverse/api/Datasets.java | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 369a22fe8d7..3eee734877b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2171,6 +2171,7 @@ public Response getAssignments(@Context ContainerRequestContext crc, @PathParam( @GET @AuthRequired + @Deprecated(forRemoval = true, since = "2024-10-17") @Path("{id}/privateUrl") public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { return response( req -> { @@ -2182,6 +2183,7 @@ public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathPar @POST @AuthRequired + @Deprecated(forRemoval = true, since = "2024-10-17") @Path("{id}/privateUrl") public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String 
idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) { if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) { @@ -2194,6 +2196,7 @@ public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathPara @DELETE @AuthRequired + @Deprecated(forRemoval = true, since = "2024-10-17") @Path("{id}/privateUrl") public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { return response( req -> { @@ -2207,6 +2210,46 @@ public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathPara } }, getRequestUser(crc)); } + + @GET + @AuthRequired + @Path("{id}/previewUrl") + public Response getPreviewUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { + return response( req -> { + PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied))); + return (privateUrl != null) ? ok(json(privateUrl)) + : error(Response.Status.NOT_FOUND, "Private URL not found."); + }, getRequestUser(crc)); + } + + @POST + @AuthRequired + @Path("{id}/previewUrl") + public Response createPreviewUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) { + if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) { + throw new NotAcceptableException("Anonymized Access not enabled"); + } + return response(req -> + ok(json(execCommand( + new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied), anonymizedAccess)))), getRequestUser(crc)); + } + + @DELETE + @AuthRequired + @Path("{id}/previewUrl") + public Response deletePreviewUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { + return response( req -> { + Dataset dataset = findDatasetOrDie(idSupplied); + PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset)); + if (privateUrl != null) { + execCommand(new DeletePrivateUrlCommand(req, dataset)); + return ok("Private URL deleted."); + } else { + return notFound("No Private URL to delete."); + } + }, getRequestUser(crc)); + } + @GET @AuthRequired From 84e0fadf631865e08787e59428d455d1a9a86683 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 18 Oct 2024 13:39:17 -0400 Subject: [PATCH 135/402] adding description info to the fileDsc seciton in DDI CodeBook. 
#5051 --- .../export/DDIExportServiceBean.java | 38 +++++++------------ .../dataverse/export/ddi/DdiExportUtil.java | 15 ++++++++ src/main/java/propertyFiles/Bundle.properties | 4 +- 3 files changed, 30 insertions(+), 27 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java index edd01ae98a3..d76020cb8d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java @@ -98,8 +98,10 @@ public class DDIExportServiceBean { public static final String LEVEL_FILE = "file"; public static final String NOTE_TYPE_UNF = "VDC:UNF"; public static final String NOTE_TYPE_TAG = "DATAVERSE:TAG"; + public static final String NOTE_TYPE_FILEDESCRIPTION = "DATAVERSE:FILEDESC"; public static final String NOTE_SUBJECT_UNF = "Universal Numeric Fingerprint"; public static final String NOTE_SUBJECT_TAG = "Data File Tag"; + public static final String NOTE_SUBJECT_FILEDESCRIPTION = "DataFile Description"; /* * Internal service objects: @@ -742,11 +744,6 @@ private void createFileDscr(XMLStreamWriter xmlw, Set excludedFieldSet, xmlw.writeEndElement(); // fileName } - /* - xmlw.writeStartElement("fileCont"); - xmlw.writeCharacters( df.getContentType() ); - xmlw.writeEndElement(); // fileCont - */ // dimensions if (checkField("dimensns", excludedFieldSet, includedFieldSet)) { if (dt.getCaseQuantity() != null || dt.getVarQuantity() != null || dt.getRecordsPerCase() != null) { @@ -801,26 +798,6 @@ private void createFileDscr(XMLStreamWriter xmlw, Set excludedFieldSet, xmlw.writeEndElement(); // notes } - /* - xmlw.writeStartElement("notes"); - writeAttribute( xmlw, "type", "vdc:category" ); - xmlw.writeCharacters( fm.getCategory() ); - xmlw.writeEndElement(); // notes - */ - // A special note for LOCKSS crawlers indicating the restricted - // status of the file: - - /* - if (tdf != null && isRestrictedFile(tdf)) { - xmlw.writeStartElement("notes"); - writeAttribute( xmlw, "type", NOTE_TYPE_LOCKSS_CRAWL ); - writeAttribute( xmlw, "level", LEVEL_FILE ); - writeAttribute( xmlw, "subject", NOTE_SUBJECT_LOCKSS_PERM ); - xmlw.writeCharacters( "restricted" ); - xmlw.writeEndElement(); // notes - - } - */ if (checkField("tags", excludedFieldSet, includedFieldSet) && df.getTags() != null) { for (int i = 0; i < df.getTags().size(); i++) { xmlw.writeStartElement("notes"); @@ -831,6 +808,17 @@ private void createFileDscr(XMLStreamWriter xmlw, Set excludedFieldSet, xmlw.writeEndElement(); // notes } } + + // A dedicated node for the Description entry + if (!StringUtilisEmpty(fm.getDescription())) { + xmlw.writeStartElement("notes"); + xmlw.writeAttribute("level", LEVEL_FILE); + xmlw.writeAttribute("type", NOTE_TYPE_FILEDESCRIPTION); + xmlw.writeAttribute("subject", NOTE_SUBJECT_FILEDESCRIPTION); + xmlw.writeCharacters(fm.getDescription()); + xmlw.writeEndElement(); // notes + } + xmlw.writeEndElement(); // fileDscr } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index f5efc448090..05ddbe83e78 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -14,8 +14,10 @@ import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.LEVEL_FILE; import static 
edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_TAG; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_UNF; +import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_FILEDESCRIPTION; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_TAG; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_UNF; +import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_FILEDESCRIPTION; import edu.harvard.iq.dataverse.export.DDIExporter; import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -1901,6 +1903,8 @@ private static void createFileDscr(XMLStreamWriter xmlw, JsonArray fileDetails) xmlw.writeEndElement(); // notes } + // If any tabular tags are present, each is formatted in a + // dedicated note: if (fileJson.containsKey("tabularTags")) { JsonArray tags = fileJson.getJsonArray("tabularTags"); for (int j = 0; j < tags.size(); j++) { @@ -1912,6 +1916,17 @@ private static void createFileDscr(XMLStreamWriter xmlw, JsonArray fileDetails) xmlw.writeEndElement(); // notes } } + + // Adding a dedicated node for the description entry (for + // non-tabular files we format it under the field) + if (fileJson.containsKey("description")) { + xmlw.writeStartElement("notes"); + xmlw.writeAttribute("level", LEVEL_FILE); + xmlw.writeAttribute("type", NOTE_TYPE_FILEDESCRIPTION); + xmlw.writeAttribute("subject", NOTE_SUBJECT_FILEDESCRIPTION); + xmlw.writeCharacters(fileJson.getString("description")); + xmlw.writeEndElement(); // notes + } // TODO: add the remaining fileDscr elements! xmlw.writeEndElement(); // fileDscr diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 5f3e4c33e0b..a355bcc379f 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1464,7 +1464,7 @@ dataset.editBtn.itemLabel.deleteDataset=Delete Dataset dataset.editBtn.itemLabel.deleteDraft=Delete Draft Version dataset.editBtn.itemLabel.deaccession=Deaccession Dataset dataset.exportBtn=Export Metadata -dataset.exportBtn.itemLabel.ddi=DDI +dataset.exportBtn.itemLabel.ddi=DDI Codebook v2 dataset.exportBtn.itemLabel.dublinCore=Dublin Core dataset.exportBtn.itemLabel.schemaDotOrg=Schema.org JSON-LD dataset.exportBtn.itemLabel.datacite=DataCite @@ -1934,7 +1934,7 @@ file.downloadBtn.format.all=All File Formats + Information file.downloadBtn.format.tab=Tab-Delimited file.downloadBtn.format.original={0} (Original File Format) file.downloadBtn.format.rdata=RData -file.downloadBtn.format.var=Variable Metadata +file.downloadBtn.format.var=DDI Codebook v2 file.downloadBtn.format.citation=Data File Citation file.download.filetype.unknown=Original File Format file.more.information.link=Link to more file information for From 08540194926f4e464515405703222e43d725db74 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 18 Oct 2024 15:45:32 -0400 Subject: [PATCH 136/402] how to set up languages in Docker #10939 Also, explain that Payara must be restarted after loading langs. 
--- doc/release-notes/10939-i18n-docker.md | 5 +++++ .../source/container/running/demo.rst | 17 +++++++++++++++++ .../source/installation/config.rst | 2 +- docker-compose-dev.yml | 1 + docker/compose/demo/compose.yml | 1 + 5 files changed, 25 insertions(+), 1 deletion(-) create mode 100644 doc/release-notes/10939-i18n-docker.md diff --git a/doc/release-notes/10939-i18n-docker.md b/doc/release-notes/10939-i18n-docker.md new file mode 100644 index 00000000000..d9887b684db --- /dev/null +++ b/doc/release-notes/10939-i18n-docker.md @@ -0,0 +1,5 @@ +## Multiple Languages in Docker + +Configuration and documentation have been added to explain how to set up multiple languages (e.g. English and French) in the tutorial for setting up Dataverse in Docker. + +See also #10939 diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst index f9642347558..2e404e7a09a 100644 --- a/doc/sphinx-guides/source/container/running/demo.rst +++ b/doc/sphinx-guides/source/container/running/demo.rst @@ -137,6 +137,23 @@ In the example below of configuring :ref:`:FooterCopyright` we use the default u Once you make this change it should be visible in the copyright in the bottom left of every page. +Multiple Languages +++++++++++++++++++ + +Generally speaking, you'll want to follow :ref:`i18n` in the Installation Guide to set up multiple languages such as English and French. + +To set up the toggle between English and French, we'll use a slight variation on the command in the instructions above, adding the unblock key we created above: + +``curl "http://localhost:8080/api/admin/settings/:Languages?unblock-key=unblockme" -X PUT -d '[{"locale":"en","title":"English"},{"locale":"fr","title":"Français"}]'`` + +Similarly, when loading the "languages.zip" file, we'll add the unblock key: + +``curl "http://localhost:8080/api/admin/datasetfield/loadpropertyfiles?unblock-key=unblockme" -X POST --upload-file /tmp/languages/languages.zip -H "Content-Type: application/zip"`` + +Stop and start the Dataverse container in order for the language toggle to work. + +Note that ``dataverse.lang.directory=/dv/lang`` has already been configured for you in the ``compose.yml`` file. The step where you loaded "languages.zip" should have populated the ``/dv/lang`` directory with files ending in ".properties". + Next Steps ---------- diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index e98ed8f5189..a2c27598b76 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1783,7 +1783,7 @@ Now that you have a "languages.zip" file, you can load it into your Dataverse in ``curl http://localhost:8080/api/admin/datasetfield/loadpropertyfiles -X POST --upload-file /tmp/languages/languages.zip -H "Content-Type: application/zip"`` -Click on the languages using the drop down in the header to try them out. +Stop and start Payara and then click on the languages using the drop down in the header to try them out. ..
_help-translate: diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 402a95c0e16..384b70b7a7b 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -57,6 +57,7 @@ services: -Ddataverse.pid.fake.label=FakeDOIProvider -Ddataverse.pid.fake.authority=10.5072 -Ddataverse.pid.fake.shoulder=FK2/ + -Ddataverse.lang.directory=/dv/lang ports: - "8080:8080" # HTTP (Dataverse Application) - "4949:4848" # HTTPS (Payara Admin Console) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index 33e7b52004b..d599967919e 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -26,6 +26,7 @@ services: -Ddataverse.pid.fake.label=FakeDOIProvider -Ddataverse.pid.fake.authority=10.5072 -Ddataverse.pid.fake.shoulder=FK2/ + -Ddataverse.lang.directory=/dv/lang ports: - "8080:8080" # HTTP (Dataverse Application) - "4848:4848" # HTTP (Payara Admin Console) From d334b689437e06cf674a6725600c705628845c47 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 09:41:55 +0200 Subject: [PATCH 137/402] Refactor: CreateDataverseCommand inheriting AbstractWriteDataverseCommand --- .../impl/AbstractWriteDataverseCommand.java | 84 +++++++++++++++ .../command/impl/CreateDataverseCommand.java | 102 +++++------------- 2 files changed, 110 insertions(+), 76 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java new file mode 100644 index 00000000000..577f877db41 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -0,0 +1,84 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; + +import java.util.ArrayList; +import java.util.List; + +/** + * TODO + */ +abstract class AbstractWriteDataverseCommand extends AbstractCommand { + + protected Dataverse dataverse; + private final List inputLevels; + private final List facets; + protected final List metadataBlocks; + + public AbstractWriteDataverseCommand(Dataverse dataverse, + DataverseRequest request, + List facets, + List inputLevels, + List metadataBlocks) { + super(request, dataverse.getOwner()); + this.dataverse = dataverse; + if (facets != null) { + this.facets = new ArrayList<>(facets); + } else { + this.facets = null; + } + if (inputLevels != null) { + this.inputLevels = new ArrayList<>(inputLevels); + } else { + this.inputLevels = null; + } + if (metadataBlocks != null) { + this.metadataBlocks = new ArrayList<>(metadataBlocks); + } else { + this.metadataBlocks = null; + } + } + + @Override + public Dataverse execute(CommandContext ctxt) throws CommandException { + dataverse = innerExecute(ctxt); + + if (metadataBlocks != null && !metadataBlocks.isEmpty()) { + dataverse.setMetadataBlockRoot(true); + dataverse.setMetadataBlocks(metadataBlocks); + } + + if (facets != null) { + ctxt.facets().deleteFacetsFor(dataverse); + + if (!facets.isEmpty()) { + 
dataverse.setFacetRoot(true); + } + + int i = 0; + for (DatasetFieldType df : facets) { + ctxt.facets().create(i++, df, dataverse); + } + } + + if (inputLevels != null) { + if (!inputLevels.isEmpty()) { + dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels); + } + ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); + for (DataverseFieldTypeInputLevel inputLevel : inputLevels) { + inputLevel.setDataverse(dataverse); + ctxt.fieldTypeInputLevels().create(inputLevel); + } + } + + return ctxt.dataverses().save(dataverse); + } + + abstract protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandException; +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 6957dac416d..ce922dc565d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -6,11 +6,9 @@ import edu.harvard.iq.dataverse.authorization.groups.Group; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -27,47 +25,26 @@ * @author michael */ @RequiredPermissions(Permission.AddDataverse) -public class CreateDataverseCommand extends AbstractCommand { - - private final Dataverse created; - private final List inputLevelList; - private final List facetList; - private final List metadataBlocks; +public class CreateDataverseCommand extends AbstractWriteDataverseCommand { public CreateDataverseCommand(Dataverse created, - DataverseRequest aRequest, - List facetList, - List inputLevelList) { - this(created, aRequest, facetList, inputLevelList, null); + DataverseRequest request, + List facets, + List inputLevels) { + this(created, request, facets, inputLevels, null); } public CreateDataverseCommand(Dataverse created, - DataverseRequest aRequest, - List facetList, - List inputLevelList, + DataverseRequest request, + List facets, + List inputLevels, List metadataBlocks) { - super(aRequest, created.getOwner()); - this.created = created; - if (facetList != null) { - this.facetList = new ArrayList<>(facetList); - } else { - this.facetList = null; - } - if (inputLevelList != null) { - this.inputLevelList = new ArrayList<>(inputLevelList); - } else { - this.inputLevelList = null; - } - if (metadataBlocks != null) { - this.metadataBlocks = new ArrayList<>(metadataBlocks); - } else { - this.metadataBlocks = null; - } + super(created, request, facets, inputLevels, metadataBlocks); } @Override - public Dataverse execute(CommandContext ctxt) throws CommandException { - Dataverse owner = created.getOwner(); + protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandException { + Dataverse owner = dataverse.getOwner(); if (owner == null) { if (ctxt.dataverses().isRootDataverseExists()) { throw new IllegalCommandException("Root Dataverse already exists. 
Cannot create another one", this); @@ -75,44 +52,44 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } if (metadataBlocks != null && !metadataBlocks.isEmpty()) { - created.setMetadataBlockRoot(true); - created.setMetadataBlocks(metadataBlocks); + dataverse.setMetadataBlockRoot(true); + dataverse.setMetadataBlocks(metadataBlocks); } - if (created.getCreateDate() == null) { - created.setCreateDate(new Timestamp(new Date().getTime())); + if (dataverse.getCreateDate() == null) { + dataverse.setCreateDate(new Timestamp(new Date().getTime())); } - if (created.getCreator() == null) { + if (dataverse.getCreator() == null) { final User user = getRequest().getUser(); if (user.isAuthenticated()) { - created.setCreator((AuthenticatedUser) user); + dataverse.setCreator((AuthenticatedUser) user); } else { throw new IllegalCommandException("Guest users cannot create a Dataverse.", this); } } - if (created.getDataverseType() == null) { - created.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED); + if (dataverse.getDataverseType() == null) { + dataverse.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED); } - if (created.getDefaultContributorRole() == null) { - created.setDefaultContributorRole(ctxt.roles().findBuiltinRoleByAlias(DataverseRole.EDITOR)); + if (dataverse.getDefaultContributorRole() == null) { + dataverse.setDefaultContributorRole(ctxt.roles().findBuiltinRoleByAlias(DataverseRole.EDITOR)); } // @todo for now we are saying all dataverses are permission root - created.setPermissionRoot(true); + dataverse.setPermissionRoot(true); - if (ctxt.dataverses().findByAlias(created.getAlias()) != null) { - throw new IllegalCommandException("A dataverse with alias " + created.getAlias() + " already exists", this); + if (ctxt.dataverses().findByAlias(dataverse.getAlias()) != null) { + throw new IllegalCommandException("A dataverse with alias " + dataverse.getAlias() + " already exists", this); } - if (created.getFilePIDsEnabled() != null && !ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) { + if (dataverse.getFilePIDsEnabled() != null && !ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) { throw new IllegalCommandException("File PIDs cannot be enabled per collection", this); } // Save the dataverse - Dataverse managedDv = ctxt.dataverses().save(created); + Dataverse managedDv = ctxt.dataverses().save(dataverse); // Find the built in admin role (currently by alias) DataverseRole adminRole = ctxt.roles().findBuiltinRoleByAlias(DataverseRole.ADMIN); @@ -159,33 +136,6 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } managedDv.setPermissionModificationTime(new Timestamp(new Date().getTime())); - - if (facetList != null) { - ctxt.facets().deleteFacetsFor(managedDv); - - if (!facetList.isEmpty()) { - managedDv.setFacetRoot(true); - } - - int i = 0; - for (DatasetFieldType df : facetList) { - ctxt.facets().create(i++, df, managedDv); - } - } - - if (inputLevelList != null) { - if (!inputLevelList.isEmpty()) { - managedDv.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList); - } - ctxt.fieldTypeInputLevels().deleteFacetsFor(managedDv); - for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) { - inputLevel.setDataverse(managedDv); - ctxt.fieldTypeInputLevels().create(inputLevel); - } - } - - // TODO: save is called here and above; we likely don't need both - managedDv = ctxt.dataverses().save(managedDv); return managedDv; } From 
e7782394b037fb6890f785cebd6f12869630c6c6 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 10:57:54 +0200 Subject: [PATCH 138/402] Refactor: UpdateDataverseCommand inheriting AbstractWriteDataverseCommand --- .../impl/AbstractWriteDataverseCommand.java | 5 +- .../command/impl/CreateDataverseCommand.java | 2 +- .../command/impl/UpdateDataverseCommand.java | 102 ++++-------------- 3 files changed, 27 insertions(+), 82 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java index 577f877db41..40c2abf5d21 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -11,7 +11,7 @@ import java.util.List; /** - * TODO + * An abstract base class for commands that perform write operations on {@link Dataverse}s. */ abstract class AbstractWriteDataverseCommand extends AbstractCommand { @@ -21,11 +21,12 @@ abstract class AbstractWriteDataverseCommand extends AbstractCommand protected final List metadataBlocks; public AbstractWriteDataverseCommand(Dataverse dataverse, + Dataverse affectedDataverse, DataverseRequest request, List facets, List inputLevels, List metadataBlocks) { - super(request, dataverse.getOwner()); + super(request, affectedDataverse); this.dataverse = dataverse; if (facets != null) { this.facets = new ArrayList<>(facets); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index ce922dc565d..145cfb6199c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -39,7 +39,7 @@ public CreateDataverseCommand(Dataverse created, List facets, List inputLevels, List metadataBlocks) { - super(created, request, facets, inputLevels, metadataBlocks); + super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 16b93debb6d..14d9e408be8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -6,16 +6,13 @@ import static edu.harvard.iq.dataverse.dataverse.DataverseUtil.validateDataverseMetadataExternally; -import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import java.util.ArrayList; import java.util.List; -import java.util.logging.Logger; /** * Update an existing dataverse. 
@@ -23,72 +20,41 @@ * @author michael */ @RequiredPermissions(Permission.EditDataverse) -public class UpdateDataverseCommand extends AbstractCommand { - private static final Logger logger = Logger.getLogger(UpdateDataverseCommand.class.getName()); - - private final Dataverse editedDv; - private final List facetList; +public class UpdateDataverseCommand extends AbstractWriteDataverseCommand { private final List featuredDataverseList; - private final List inputLevelList; - private final List metadataBlocks; private boolean datasetsReindexRequired = false; public UpdateDataverseCommand(Dataverse editedDv, - List facetList, - List featuredDataverseList, - DataverseRequest aRequest, - List inputLevelList) { - this(editedDv, facetList, featuredDataverseList, aRequest, inputLevelList, null); + List facets, + List featuredDataverses, + DataverseRequest request, + List inputLevels) { + this(editedDv, facets, featuredDataverses, request, inputLevels, null); } public UpdateDataverseCommand(Dataverse editedDv, - List facetList, - List featuredDataverseList, - DataverseRequest aRequest, - List inputLevelList, + List facets, + List featuredDataverses, + DataverseRequest request, + List inputLevels, List metadataBlocks) { - super(aRequest, editedDv); - this.editedDv = editedDv; - // add update template uses this command but does not - // update facet list or featured dataverses - if (facetList != null) { - this.facetList = new ArrayList<>(facetList); - } else { - this.facetList = null; - } - if (featuredDataverseList != null) { - this.featuredDataverseList = new ArrayList<>(featuredDataverseList); + super(editedDv, editedDv, request, facets, inputLevels, metadataBlocks); + if (featuredDataverses != null) { + this.featuredDataverseList = new ArrayList<>(featuredDataverses); } else { this.featuredDataverseList = null; } - if (inputLevelList != null) { - this.inputLevelList = new ArrayList<>(inputLevelList); - } else { - this.inputLevelList = null; - } - if (metadataBlocks != null) { - this.metadataBlocks = new ArrayList<>(metadataBlocks); - } else { - this.metadataBlocks = null; - } } @Override - public Dataverse execute(CommandContext ctxt) throws CommandException { - logger.fine("Entering update dataverse command"); - - if (metadataBlocks != null && !metadataBlocks.isEmpty()) { - editedDv.setMetadataBlockRoot(true); - editedDv.setMetadataBlocks(metadataBlocks); - } - + protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandException { // Perform any optional validation steps, if defined: if (ctxt.systemConfig().isExternalDataverseValidationEnabled()) { // For admins, an override of the external validation step may be enabled: if (!(getUser().isSuperuser() && ctxt.systemConfig().isExternalValidationAdminOverrideEnabled())) { String executable = ctxt.systemConfig().getDataverseValidationExecutable(); - boolean result = validateDataverseMetadataExternally(editedDv, executable, getRequest()); + boolean result = validateDataverseMetadataExternally(dataverse, executable, getRequest()); if (!result) { String rejectionMessage = ctxt.systemConfig().getDataverseUpdateValidationFailureMsg(); @@ -97,7 +63,7 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } } - Dataverse oldDv = ctxt.dataverses().find(editedDv.getId()); + Dataverse oldDv = ctxt.dataverses().find(dataverse.getId()); DataverseType oldDvType = oldDv.getDataverseType(); String oldDvAlias = oldDv.getAlias(); @@ -106,44 +72,22 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { // We 
don't want to reindex the children datasets unnecessarily: // When these values are changed we need to reindex all children datasets // This check is not recursive as all the values just report the immediate parent - if (!oldDvType.equals(editedDv.getDataverseType()) - || !oldDvName.equals(editedDv.getName()) - || !oldDvAlias.equals(editedDv.getAlias())) { + if (!oldDvType.equals(dataverse.getDataverseType()) + || !oldDvName.equals(dataverse.getName()) + || !oldDvAlias.equals(dataverse.getAlias())) { datasetsReindexRequired = true; } if (featuredDataverseList != null) { - ctxt.featuredDataverses().deleteFeaturedDataversesFor(editedDv); + ctxt.featuredDataverses().deleteFeaturedDataversesFor(dataverse); int i = 0; for (Object obj : featuredDataverseList) { Dataverse dv = (Dataverse) obj; - ctxt.featuredDataverses().create(i++, dv.getId(), editedDv.getId()); - } - } - - if (facetList != null) { - ctxt.facets().deleteFacetsFor(editedDv); - if (!facetList.isEmpty()) { - editedDv.setFacetRoot(true); - } - int i = 0; - for (DatasetFieldType df : facetList) { - ctxt.facets().create(i++, df, editedDv); - } - } - if (inputLevelList != null) { - if (!inputLevelList.isEmpty()) { - editedDv.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList); - } - ctxt.fieldTypeInputLevels().deleteFacetsFor(editedDv); - for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) { - inputLevel.setDataverse(editedDv); - ctxt.fieldTypeInputLevels().create(inputLevel); + ctxt.featuredDataverses().create(i++, dv.getId(), dataverse.getId()); } } - Dataverse result = ctxt.dataverses().save(editedDv); - return result; + return dataverse; } @Override @@ -154,7 +98,7 @@ public boolean onSuccess(CommandContext ctxt, Object r) { // It appears that we at some point lost some extra logic here, where // we only reindex the underlying datasets if one or more of the specific set // of fields have been changed (since these values are included in the - // indexed solr documents for dataasets). So I'm putting that back. -L.A. + // indexed solr documents for datasets). So I'm putting that back. -L.A. 
Dataverse result = (Dataverse) r; if (datasetsReindexRequired) { From 2a62c0460c28a3704b26289a65cd00437684a7c6 Mon Sep 17 00:00:00 2001 From: jo-pol Date: Mon, 21 Oct 2024 15:15:44 +0200 Subject: [PATCH 139/402] performance test --- .../command/impl/CreateNewDataFilesTest.java | 74 +++++++++++++++++++ 1 file changed, 74 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java index 1262984eb27..a956b473a4b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java @@ -13,18 +13,26 @@ import edu.harvard.iq.dataverse.util.testing.JvmSetting; import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.jetbrains.annotations.NotNull; +import org.joda.time.DateTime; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; import org.mockito.Mockito; import java.io.FileInputStream; import java.io.FileNotFoundException; +import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintStream; +import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; +import java.security.SecureRandom; +import java.text.MessageFormat; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; import static edu.harvard.iq.dataverse.DataFile.ChecksumType.MD5; import static org.apache.commons.io.file.FilesUncheck.createDirectories; @@ -146,6 +154,72 @@ public void execute_rezips_sets_of_shape_files_from_uploaded_zip() throws Except } } + @Disabled("Too slow. Intended for manual execution.") + @Test + @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "/tmp/test/CreateNewDataFilesTest/tmp") + public void extract_zip_performance() throws Exception { + /* + Developed to test performance difference between the old implementation with ZipInputStream and the new ZipFile implementation. 
+ Play with numbers depending on: + - the time you want to spend on this test + - how much system stress you want to examine + */ + var nrOfZipFiles = 20; + var avgNrOfFilesPerZip = 300; + var avgFileLength = 5000; + + var tmpUploadStorage = Path.of("/tmp/test/CreateNewDataFilesTest/tmp/temp"); + if(tmpUploadStorage.toFile().exists()) { + deleteDirectory(tmpUploadStorage); + } + createDirectories(tmpUploadStorage); // temp in target would choke intellij + + var chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + var random = new SecureRandom(); + var totalNrOfFiles = 0; + var totalFileSize = 0; + var tmp = Path.of(Files.createTempDirectory(null).toString()); + var ctxt = mockCommandContext(mockSysConfig(false, 100000000L, MD5, 10000)); + try (var mockedJHoveFileType = Mockito.mockStatic(JhoveFileType.class)) { + mockedJHoveFileType.when(JhoveFileType::getJhoveConfigFile).thenReturn("conf/jhove/jhove.conf"); + var before = DateTime.now(); + for (var zipNr = 1; zipNr <= nrOfZipFiles; zipNr++) { + // build the zip + var zip = tmp.resolve(zipNr + "-data.zip"); + var nrOfFilesInZip = random.nextInt(avgNrOfFilesPerZip * 2); + try (var zipStream = new ZipOutputStream(new FileOutputStream(zip.toFile()))) { + for (var fileInZipNr = 1; fileInZipNr <= nrOfFilesInZip; fileInZipNr++) { + // build content for a file + var stringLength = random.nextInt(avgFileLength * 2 -5); + StringBuilder sb = new StringBuilder(stringLength); + for (int i = 1; i <= stringLength; i++) {// zero length causes buffer underflow + sb.append(chars.charAt(random.nextInt(chars.length()))); + } + // add the file to the zip + zipStream.putNextEntry(new ZipEntry(fileInZipNr + ".txt")); + zipStream.write((sb.toString()).getBytes()); + zipStream.closeEntry(); + totalFileSize += stringLength; + } + } + + // upload the zip + var result = createCmd(zip.toString(), mockDatasetVersion(), 1000L, 500L) + .execute(ctxt); + assertThat(result.getErrors()).hasSize(0); + assertThat(result.getDataFiles()).hasSize(nrOfFilesInZip); + totalNrOfFiles += nrOfFilesInZip; + + // report after each zip to have some data even when aborting a test that takes too long + System.out.println(MessageFormat.format( + "Total time: {0}ms; nr of zips {1} total nr of files {2}; total file size {3}", + DateTime.now().getMillis() - before.getMillis(), zipNr, totalNrOfFiles, totalFileSize + )); + } + assertThat(tmpUploadStorage.toFile().list()).hasSize(totalNrOfFiles); + } + } + private static @NotNull CreateNewDataFilesCommand createCmd(String name, DatasetVersion dsVersion, long allocatedQuotaLimit, long usedQuotaLimit) throws FileNotFoundException { return new CreateNewDataFilesCommand( Mockito.mock(DataverseRequest.class), From f2430c57b367a39ead1f73cbbbd5dfb40d3ad648 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 21 Oct 2024 10:59:50 -0400 Subject: [PATCH 140/402] Add API endpoint for comparing Dataset Versions --- ...-add-api-for-comparing-dataset-versions.md | 15 +++ doc/sphinx-guides/source/api/native-api.rst | 19 +++ .../harvard/iq/dataverse/DatasetVersion.java | 4 + .../dataverse/DatasetVersionDifference.java | 123 +++++++++++++++--- .../harvard/iq/dataverse/api/Datasets.java | 17 +++ .../CuratePublishedDatasetVersionCommand.java | 2 +- .../DatasetVersionDifferenceTest.java | 90 +++++++++++++ .../harvard/iq/dataverse/api/DatasetsIT.java | 80 ++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 20 ++- 9 files changed, 349 insertions(+), 21 deletions(-) create mode 100644 
doc/release-notes/10888-add-api-for-comparing-dataset-versions.md create mode 100644 src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java diff --git a/doc/release-notes/10888-add-api-for-comparing-dataset-versions.md b/doc/release-notes/10888-add-api-for-comparing-dataset-versions.md new file mode 100644 index 00000000000..902bf1a4d02 --- /dev/null +++ b/doc/release-notes/10888-add-api-for-comparing-dataset-versions.md @@ -0,0 +1,15 @@ +The following API has been added: + +/api/datasets/{persistentId}/versions/{versionId0}/compare/{versionId1} + +This API lists the changes between 2 dataset versions. The JSON response shows the changes per field within the Metadata block and the Terms Of Access. Also listed are the files that have been added or removed. Files that have been modified will also display the new file data plus the fields that have been modified. + +Old and New values are represented by "0" and "1" respectively. +```json +[ + "ModifiedFieldName" = { + "0" : "old value", + "1" : "new value" + } +] +``` diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index f8b8620f121..e3aae7a122a 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1642,6 +1642,25 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/metadata/citation" +Compare Versions of a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Returns a list of fields that have changed between 2 Dataset versions within the Metadata and Terms of Access. Also includes the files that have been added or removed as well as files that have been modified. + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export VERSION0=1.0 + export VERSION1=:draft + + curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION0/compare/$VERSION1" + +The fully expanded example above (without environment variables) looks like this: + +..
code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/:latest-published/compare/:draft" + Update Metadata For a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 0433c425fd2..2a870c81767 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -102,6 +102,10 @@ public int compare(DatasetVersion o1, DatasetVersion o2) { } } }; + public static final JsonObjectBuilder compareVersions(DatasetVersion originalVersion, DatasetVersion newVersion) { + DatasetVersionDifference diff = new DatasetVersionDifference(newVersion, originalVersion); + return diff.compareVersionsAsJson(); + } // TODO: Determine the UI implications of various version states //IMPORTANT: If you add a new value to this enum, you will also have to modify the diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java index eca0c84ae84..dc0c342cb03 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java @@ -5,19 +5,17 @@ import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil; import edu.harvard.iq.dataverse.util.StringUtil; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Set; +import java.util.*; import java.util.logging.Logger; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; import org.apache.commons.lang3.StringUtils; import edu.harvard.iq.dataverse.util.BundleUtil; -import java.util.Arrays; -import java.util.Date; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedList; + +import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; /** * @@ -34,6 +32,7 @@ public final class DatasetVersionDifference { private List addedFiles = new ArrayList<>(); private List removedFiles = new ArrayList<>(); private List changedFileMetadata = new ArrayList<>(); + private Map>> changedFileMetadataDiff = new HashMap<>(); private List changedVariableMetadata = new ArrayList<>(); private List replacedFiles = new ArrayList<>(); private List changedTermsAccess = new ArrayList<>(); @@ -122,9 +121,12 @@ public DatasetVersionDifference(DatasetVersion newVersion, DatasetVersion origin for (FileMetadata fmdn : newVersion.getFileMetadatas()) { if (fmdo.getDataFile().equals(fmdn.getDataFile())) { deleted = false; - if (!compareFileMetadatas(fmdo, fmdn)) { + Map> fileMetadataDiff = compareFileMetadatas(fmdo, fmdn); + if (!fileMetadataDiff.isEmpty()) { changedFileMetadata.add(fmdo); changedFileMetadata.add(fmdn); + // TODO: find a better key for the map. 
needs to be something that doesn't change + changedFileMetadataDiff.put(fmdo, fileMetadataDiff); } if (!variableMetadataUtil.compareVariableMetadata(fmdo,fmdn) || !compareVarGroup(fmdo, fmdn)) { changedVariableMetadata.add(fmdo); @@ -551,25 +553,40 @@ private boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { } } - public static boolean compareFileMetadatas(FileMetadata fmdo, FileMetadata fmdn) { - + public static Map> compareFileMetadatas(FileMetadata fmdo, FileMetadata fmdn) { + Map> fileMetadataChanged = new HashMap<>(); + boolean equals = true; if (!StringUtils.equals(StringUtil.nullToEmpty(fmdo.getDescription()), StringUtil.nullToEmpty(fmdn.getDescription()))) { - return false; + equals = false; + fileMetadataChanged.put("Description", + List.of(StringUtil.nullToEmpty(fmdo.getDescription()), StringUtil.nullToEmpty(fmdn.getDescription()))); } if (!StringUtils.equals(fmdo.getCategoriesByName().toString(), fmdn.getCategoriesByName().toString())) { - return false; + equals = false; + fileMetadataChanged.put("Categories", + List.of(fmdo.getCategoriesByName().toString(), fmdn.getCategoriesByName().toString())); } if (!StringUtils.equals(fmdo.getLabel(), fmdn.getLabel())) { - return false; + equals = false; + fileMetadataChanged.put("Label", + List.of(fmdo.getLabel(), fmdn.getLabel())); } if (!StringUtils.equals(fmdo.getProvFreeForm(), fmdn.getProvFreeForm())) { - return false; + equals = false; + fileMetadataChanged.put("ProvFreeForm", + List.of(fmdo.getProvFreeForm(), fmdn.getProvFreeForm())); } - - return fmdo.isRestricted() == fmdn.isRestricted(); + + if (fmdo.isRestricted() != fmdn.isRestricted()) { + equals = false; + fileMetadataChanged.put("isRestricted", + List.of(String.valueOf(fmdo.isRestricted()), String.valueOf(fmdn.isRestricted()))); + } + + return fileMetadataChanged; } private void compareValues(DatasetField originalField, DatasetField newField, boolean compound) { @@ -1819,4 +1836,74 @@ private static boolean fieldsAreDifferent(DatasetField originalField, DatasetFie } return false; } + public JsonObjectBuilder compareVersionsAsJson() { + JsonObjectBuilder job = new NullSafeJsonBuilder(); + + JsonObjectBuilder jobMetadata = new NullSafeJsonBuilder(); + List> byBlock = getDetailDataByBlock(); + for (List l : byBlock) { + for (DatasetField[] dsfArray : l) { + JsonObjectBuilder jb = new NullSafeJsonBuilder(); + if (dsfArray[0].getDatasetFieldType().isPrimitive()) { + jb.add("0", dsfArray[0].getRawValue()); + } else { + jb.add("0", dsfArray[0].getCompoundRawValue()); + } + if (dsfArray[1].getDatasetFieldType().isPrimitive()) { + jb.add("1", dsfArray[1].getRawValue()); + } else { + jb.add("1", dsfArray[1].getCompoundRawValue()); + } + jobMetadata.add(dsfArray[0].getDatasetFieldType().getTitle(), jb); + } + } + if (!byBlock.isEmpty()) { + job.add("Metadata", jobMetadata); + } + + // Format added, removed, and modified files + JsonObjectBuilder jobFiles = new NullSafeJsonBuilder(); + if (!addedFiles.isEmpty()) { + JsonArrayBuilder jab = Json.createArrayBuilder(); + addedFiles.forEach(f -> jab.add(json(f))); + jobFiles.add("added", jab); + } + if (!removedFiles.isEmpty()) { + JsonArrayBuilder jab = Json.createArrayBuilder(); + removedFiles.forEach(f -> jab.add(json(f))); + jobFiles.add("removed", jab); + } + if (!changedFileMetadata.isEmpty()) { + JsonArrayBuilder jabDiffFiles = Json.createArrayBuilder(); + changedFileMetadataDiff.entrySet().forEach(entry -> { + JsonObjectBuilder jobDiffFiles = new NullSafeJsonBuilder(); + jobDiffFiles.add("fileMetadata", 
json(entry.getKey())); + entry.getValue().entrySet().forEach(e -> { + JsonObjectBuilder jobDiffField = new NullSafeJsonBuilder(); + jobDiffField.add("0",e.getValue().get(0)); + jobDiffField.add("1",e.getValue().get(1)); + jobDiffFiles.add(e.getKey(), jobDiffField); + }); + jabDiffFiles.add(jobDiffFiles); + }); + jobFiles.add("modified", jabDiffFiles); + } + if (!addedFiles.isEmpty() || !removedFiles.isEmpty() || !changedFileMetadata.isEmpty()) { + job.add("Files", jobFiles); + } + + // Format Terms Of Access changes + if (!changedTermsAccess.isEmpty()) { + JsonObjectBuilder jobTOA = new NullSafeJsonBuilder(); + changedTermsAccess.forEach(toa -> { + JsonObjectBuilder jobValue = new NullSafeJsonBuilder(); + jobValue.add("0",toa[1]); + jobValue.add("1",toa[2]); + jobTOA.add(toa[0], jobValue); + }); + job.add("TermsOfAccess", jobTOA); + } + + return job; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 369a22fe8d7..d7b0c78e611 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2992,6 +2992,23 @@ public Response cleanStorage(@Context ContainerRequestContext crc, @PathParam("i } + @GET + @AuthRequired + @Path("{id}/versions/{versionId1}/compare/{versionId2}") + public Response getCompareVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, + @PathParam("versionId1") String versionId1, + @PathParam("versionId2") String versionId2, + @Context UriInfo uriInfo, @Context HttpHeaders headers) { + try { + DataverseRequest req = createDataverseRequest(getRequestUser(crc)); + DatasetVersion dsv1 = getDatasetVersionOrDie(req, versionId1, findDatasetOrDie(id), uriInfo, headers); + DatasetVersion dsv2 = getDatasetVersionOrDie(req, versionId2, findDatasetOrDie(id), uriInfo, headers); + return ok(DatasetVersion.compareVersions(dsv1, dsv2)); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + private static Set getDatasetFilenames(Dataset dataset) { Set files = new HashSet<>(); for (DataFile dataFile: dataset.getFiles()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index e6e8279a314..e378e2e2ef7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -131,7 +131,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { throw new IllegalCommandException(BundleUtil.getStringFromBundle("datasetversion.update.failure"), this); } else { - metadataUpdated = DatasetVersionDifference.compareFileMetadatas(publishedFmd, draftFmd); + metadataUpdated = !DatasetVersionDifference.compareFileMetadatas(publishedFmd, draftFmd).isEmpty(); publishedFmd.setLabel(draftFmd.getLabel()); publishedFmd.setDescription(draftFmd.getDescription()); publishedFmd.setCategories(draftFmd.getCategories()); diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java new file mode 100644 index 00000000000..9457cdbdb4d --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java @@ -0,0 +1,90 @@ +package edu.harvard.iq.dataverse; + +import 
edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; +import edu.harvard.iq.dataverse.util.json.JsonUtil; +import io.restassured.path.json.JsonPath; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import org.junit.jupiter.api.Test; + +import java.util.ArrayList; +import java.util.List; + +import static org.assertj.core.util.DateUtil.now; +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class DatasetVersionDifferenceTest { + + @Test + public void testCompareVersionsAsJson() { + + Dataverse dv = new Dataverse(); + Dataset ds = new Dataset(); + ds.setOwner(dv); + ds.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL,"10.5072","FK2/BYM3IW", "/", AbstractDOIProvider.DOI_RESOLVER_URL, null)); + + DatasetVersion dv1 = initDatasetVersion(0L, ds, DatasetVersion.VersionState.RELEASED); + DatasetVersion dv2 = initDatasetVersion(1L, ds, DatasetVersion.VersionState.DRAFT); + ds.setVersions(List.of(dv1, dv2)); + + TermsOfUseAndAccess toa = new TermsOfUseAndAccess(); + toa.setDisclaimer("disclaimer"); + dv2.setTermsOfUseAndAccess(toa); + dv2.getFileMetadatas().remove(1); + DatasetField dsf = new DatasetField(); + dsf.setDatasetFieldType(new DatasetFieldType("Author", DatasetFieldType.FieldType.TEXT, true)); + dsf.setSingleValue("TEST"); + dv2.getDatasetFields().add(dsf); + dv2.getFileMetadatas().get(2).setRestricted(!dv2.getFileMetadatas().get(2).isRestricted()); + DatasetVersionDifference dvd = new DatasetVersionDifference(dv2, dv1); + + JsonObjectBuilder json = dvd.compareVersionsAsJson(); + JsonObject obj = json.build(); + System.out.println(JsonUtil.prettyPrint(obj)); + + JsonPath dataFile = JsonPath.from(JsonUtil.prettyPrint(obj)); + assertTrue("TEST".equalsIgnoreCase(dataFile.getString("Metadata.Author.1"))); + assertTrue("true".equalsIgnoreCase(dataFile.getString("Files.modified[0].isRestricted.1"))); + assertTrue("disclaimer".equalsIgnoreCase(dataFile.getString("TermsOfAccess.Disclaimer.1"))); + } + private DatasetVersion initDatasetVersion(Long id, Dataset ds, DatasetVersion.VersionState vs) { + DatasetVersion dv = new DatasetVersion(); + dv.setDataset(ds); + dv.setVersion(1L); + dv.setVersionState(vs); + dv.setMinorVersionNumber(0L); + if (vs == DatasetVersion.VersionState.RELEASED) { + dv.setVersionNumber(1L); + dv.setVersion(1L); + dv.setReleaseTime(now()); + } + dv.setId(id); + dv.setTermsOfUseAndAccess(new TermsOfUseAndAccess()); + dv.setFileMetadatas(initFiles(dv)); + return dv; + } + private List initFiles(DatasetVersion dsv) { + List fileMetadata = new ArrayList<>(); + for (int i=0; i < 4; i++) { + FileMetadata fm = new FileMetadata(); + fm.setDatasetVersion(dsv); + DataFile df = new DataFile(); + DataTable dt = new DataTable(); + dt.setOriginalFileName("filename"+i+".txt"); + df.setId(Long.valueOf(i)); + df.setDescription("Desc"+i); + df.setRestricted(false); + df.setFilesize(100 + i); + df.setChecksumType(DataFile.ChecksumType.MD5); + df.setChecksumValue("value"+i); + df.setDataTable(dt); + df.setOwner(dsv.getDataset()); + fm.setDataFile(df); + fm.setLabel("Label"+i); + fileMetadata.add(fm); + df.setFileMetadatas(fileMetadata); + + } + return fileMetadata; + } +} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 93f1024ae7a..af0f218bcf6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -5168,4 +5168,84 @@ public void 
testGetCanDownloadAtLeastOneFile() { Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getCanDownloadAtLeastOneFile("testInvalidId", DS_VERSION_LATEST, secondUserApiToken); getUserPermissionsOnDatasetInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); } + + @Test + public void testCompareDatasetVersionsAPI() { + + Response createUser = UtilIT.createRandomUser(); + assertEquals(200, createUser.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + Response makeSuperUser = UtilIT.makeSuperUser(username); + assertEquals(200, makeSuperUser.getStatusCode()); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + Response getDatasetJsonBeforePublishing = UtilIT.nativeGet(datasetId, apiToken); + String protocol = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.protocol"); + String authority = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.authority"); + String identifier = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.identifier"); + String datasetPersistentId = protocol + ":" + authority + "/" + identifier; + + String pathToFile = "src/main/webapp/resources/images/dataverse-icon-1200.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, apiToken); + uploadResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + Integer modifyFileId = UtilIT.getDataFileIdFromResponse(uploadResponse); + pathToFile = "src/main/webapp/resources/images/dataverseproject_logo.jpg"; + uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, apiToken); + uploadResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + Integer deleteFileId = UtilIT.getDataFileIdFromResponse(uploadResponse); + + Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); + assertEquals(200, publishDataverse.getStatusCode()); + + Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); + assertEquals(200, publishDataset.getStatusCode()); + + // post publish update to create DRAFT version + String pathToJsonFilePostPub = "doc/sphinx-guides/source/_static/api/dataset-add-metadata-after-pub.json"; + Response addDataToPublishedVersion = UtilIT.addDatasetMetadataViaNative(datasetPersistentId, pathToJsonFilePostPub, apiToken); + addDataToPublishedVersion.then().assertThat().statusCode(OK.getStatusCode()); + + // Test adding a file + pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; + uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, apiToken); + uploadResponse.prettyPrint(); + uploadResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Test removing a file + uploadResponse = UtilIT.deleteFile(deleteFileId, apiToken); + uploadResponse.prettyPrint(); + uploadResponse.then().assertThat() + .statusCode(NO_CONTENT.getStatusCode()); + + // Test modify by restricting the file + Response restrictResponse = UtilIT.restrictFile(modifyFileId.toString(), true, apiToken); + 
restrictResponse.prettyPrint(); + restrictResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Also test a terms of access change + String jsonLDTerms = "{\"https://dataverse.org/schema/core#fileTermsOfAccess\":{\"https://dataverse.org/schema/core#dataAccessPlace\":\"Somewhere\"}}"; + Response updateTerms = UtilIT.updateDatasetJsonLDMetadata(datasetId, apiToken, jsonLDTerms, true); + updateTerms.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response compareResponse = UtilIT.compareDatasetVersions(datasetPersistentId, ":latest-published", ":draft", apiToken); + compareResponse.prettyPrint(); + compareResponse.then().assertThat() + .body("data.Metadata.Author.1", CoreMatchers.containsString("Poe, Edgar Allen")) + .body("data.Files.added[0].label", CoreMatchers.equalTo("dataverseproject.png")) + .body("data.Files.removed[0].label", CoreMatchers.equalTo("dataverseproject_logo.jpg")) + .body("data.Files.modified[0].isRestricted.1", CoreMatchers.equalTo("true")) + .body("data.TermsOfAccess", CoreMatchers.notNullValue()) + .statusCode(OK.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 70f49d81b35..26382884dd5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -323,7 +323,14 @@ static Integer getDatasetIdFromResponse(Response createDatasetResponse) { logger.info("Id found in create dataset response: " + datasetId); return datasetId; } - + + static Integer getDataFileIdFromResponse(Response uploadDataFileResponse) { + JsonPath dataFile = JsonPath.from(uploadDataFileResponse.body().asString()); + int dataFileId = dataFile.getInt("data.files[0].dataFile.id"); + logger.info("Id found in upload DataFile response: " + dataFileId); + return dataFileId; + } + static Integer getSearchCountFromResponse(Response searchResponse) { JsonPath createdDataset = JsonPath.from(searchResponse.body().asString()); int searchCount = createdDataset.getInt("data.total_count"); @@ -1570,7 +1577,16 @@ static Response getDatasetVersion(String persistentId, String versionNumber, Str + persistentId + (excludeFiles ? 
"&excludeFiles=true" : "")); } - + static Response compareDatasetVersions(String persistentId, String versionNumber1, String versionNumber2, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/datasets/:persistentId/versions/" + + versionNumber1 + + "/compare/" + + versionNumber2 + + "?persistentId=" + + persistentId); + } static Response getDatasetWithOwners(String persistentId, String apiToken, boolean returnOwners) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) From 4e90d0c3fe8d501f5810a162c304ce4e3b43a891 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 16:40:43 +0100 Subject: [PATCH 141/402] Added: docs for #10904 --- doc/sphinx-guides/source/api/native-api.rst | 52 +++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index f8b8620f121..6254742eebb 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -74,6 +74,58 @@ The request JSON supports an optional ``metadataBlocks`` object, with the follow To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs. +.. _update-dataverse-api: + +Update a Dataverse Collection +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Updates an existing Dataverse collection using a JSON file following the same structure as the one used in the API for the creation. (see :ref:`create-dataverse-api`). + +The steps for updating a Dataverse collection are: + +- Prepare a JSON file containing the fields for the properties you want to update. You do not need to include all the properties, only the ones you want to update. +- Execute a curl command or equivalent. + +As an example, you can download :download:`dataverse-complete.json <../_static/api/dataverse-complete.json>` file and modify it to suit your needs. The controlled vocabulary for ``dataverseType`` is the following: + +- ``DEPARTMENT`` +- ``JOURNALS`` +- ``LABORATORY`` +- ``ORGANIZATIONS_INSTITUTIONS`` +- ``RESEARCHERS`` +- ``RESEARCH_GROUP`` +- ``RESEARCH_PROJECTS`` +- ``TEACHING_COURSES`` +- ``UNCATEGORIZED`` + +The curl command below assumes you are using the name "dataverse-complete.json" and that this file is in your current working directory. + +Next you need to figure out the alias or database id of the Dataverse collection you want to update. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export DV_ALIAS=dvAlias + + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/dataverses/$DV_ALIAS" --upload-file dataverse-complete.json + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/dataverses/dvAlias" --upload-file dataverse-complete.json + +You should expect an HTTP 200 response and JSON beginning with "status":"OK" followed by a representation of the updated Dataverse collection. + +Same as in :ref:`create-dataverse-api`, the request JSON supports an optional ``metadataBlocks`` object, with the following supported sub-objects: + +- ``metadataBlockNames``: The names of the metadata blocks you want to add to the Dataverse collection. 
+- ``inputLevels``: The names of the fields in each metadata block for which you want to add a custom configuration regarding their inclusion or requirement when creating and editing datasets in the new Dataverse collection. Note that if the corresponding metadata block names are not specified in the ``metadataBlockNames`` field, they will be added automatically to the Dataverse collection. +- ``facetIds``: The names of the fields to use as facets for browsing datasets and collections in the new Dataverse collection. Note that the order of the facets is defined by their order in the provided JSON array. + +To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs. + .. _view-dataverse: View a Dataverse Collection From 6aac751d55375e7433d01d500f38b8be83a7b5bc Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 16:44:09 +0100 Subject: [PATCH 142/402] Added: release notes for #10904 --- doc/release-notes/10904-edit-dataverse-collection-endpoint.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10904-edit-dataverse-collection-endpoint.md diff --git a/doc/release-notes/10904-edit-dataverse-collection-endpoint.md b/doc/release-notes/10904-edit-dataverse-collection-endpoint.md new file mode 100644 index 00000000000..b9256941eea --- /dev/null +++ b/doc/release-notes/10904-edit-dataverse-collection-endpoint.md @@ -0,0 +1 @@ +Adds a new endpoint (`PUT /api/dataverses/`) for updating an existing Dataverse collection using a JSON file following the same structure as the one used in the API for the creation. From 4f98be6a1bcec06ffcada8098e57baf4ea0dd9d2 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 17:37:26 +0100 Subject: [PATCH 143/402] Removed: unnecessary line in updateDataverse endpoint --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index d8bd2b8cb4b..895d073bb47 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -184,8 +184,6 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod List metadataBlocks = parseMetadataBlocks(body); List facets = parseFacets(body); - updatedDataverse.setId(originalDataverse.getId()); - AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); updatedDataverse = execCommand(new UpdateDataverseCommand(updatedDataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks)); return ok(json(updatedDataverse)); From 6393cc868930925592faa9ff0861890d735aaf93 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 21 Oct 2024 16:11:38 -0400 Subject: [PATCH 144/402] update schemaspy #10931 --- doc/sphinx-guides/source/developers/making-releases.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index 350f1fdcaf3..25297a23fca 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ b/doc/sphinx-guides/source/developers/making-releases.rst @@ -280,6 +280,15 @@ Deploy Final Release on Demo Above you already did the hard work of deploying a release candidate to https://demo.dataverse.org.
It should be relatively straightforward to undeploy the release candidate and deploy the final release. +Update SchemaSpy +---------------- + +We maintain SchemaSpy at URLs like https://guides.dataverse.org/en/6.3/schemaspy/index.html + +Get the attention of the core team and ask someone to update it for the new release. + +Consider updating `the thread `_ on the mailing list once the update is in place. + Add the Release to the Dataverse Roadmap ---------------------------------------- From c12932f0ca46582fcd5f2674db0bb9b895cb0b13 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 21 Oct 2024 16:42:07 -0400 Subject: [PATCH 145/402] updaing json format --- ...-add-api-for-comparing-dataset-versions.md | 18 ++-- .../dataverse/DatasetVersionDifference.java | 97 +++++++++++-------- .../DatasetVersionDifferenceTest.java | 10 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 16 ++- 4 files changed, 88 insertions(+), 53 deletions(-) diff --git a/doc/release-notes/10888-add-api-for-comparing-dataset-versions.md b/doc/release-notes/10888-add-api-for-comparing-dataset-versions.md index 902bf1a4d02..f9b3822d29d 100644 --- a/doc/release-notes/10888-add-api-for-comparing-dataset-versions.md +++ b/doc/release-notes/10888-add-api-for-comparing-dataset-versions.md @@ -4,12 +4,16 @@ The following API have been added: This API lists the changes between 2 dataset versions. The Json response shows the changes per field within the Metadata block and the Terms Of Access. Also listed are the files that have been added or removed. Files that have been modified will also display the new file data plus the fields that have been modified. -Old and New values are represented by "0" and "1" respectively. +Example of Metadata Block field change: ```json -[ - "ModifiedFieldName" = { - "0" : "old value", - "1" : "new value" - } -] +{ + "blockName": "Life Sciences Metadata", + "changed": [ + { + "fieldName": "Design Type", + "oldValue": "", + "newValue": "Parallel Group Design; Nested Case Control Design" + } + ] +} ``` diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java index dc0c342cb03..66542b18c8e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java @@ -5,6 +5,7 @@ import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil; import edu.harvard.iq.dataverse.util.StringUtil; +import java.text.SimpleDateFormat; import java.util.*; import java.util.logging.Logger; @@ -15,8 +16,6 @@ import org.apache.commons.lang3.StringUtils; import edu.harvard.iq.dataverse.util.BundleUtil; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; - /** * * @author skraffmiller @@ -1838,69 +1837,89 @@ private static boolean fieldsAreDifferent(DatasetField originalField, DatasetFie } public JsonObjectBuilder compareVersionsAsJson() { JsonObjectBuilder job = new NullSafeJsonBuilder(); + JsonObjectBuilder jobVersion = new NullSafeJsonBuilder(); + jobVersion.add("versionNumber", originalVersion.getFriendlyVersionNumber()); + jobVersion.add("createdDate", new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").format(originalVersion.getCreateTime())); + job.add("oldVersion", jobVersion); + jobVersion = new NullSafeJsonBuilder(); + jobVersion.add("versionNumber", newVersion.getFriendlyVersionNumber()); + jobVersion.add("createdDate", new 
SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").format(newVersion.getCreateTime())); + job.add("newVersion", jobVersion); - JsonObjectBuilder jobMetadata = new NullSafeJsonBuilder(); - List> byBlock = getDetailDataByBlock(); - for (List l : byBlock) { - for (DatasetField[] dsfArray : l) { - JsonObjectBuilder jb = new NullSafeJsonBuilder(); - if (dsfArray[0].getDatasetFieldType().isPrimitive()) { - jb.add("0", dsfArray[0].getRawValue()); - } else { - jb.add("0", dsfArray[0].getCompoundRawValue()); - } - if (dsfArray[1].getDatasetFieldType().isPrimitive()) { - jb.add("1", dsfArray[1].getRawValue()); - } else { - jb.add("1", dsfArray[1].getCompoundRawValue()); + if (!this.detailDataByBlock.isEmpty()) { + JsonArrayBuilder jabMetadata = Json.createArrayBuilder(); + for (List blocks : detailDataByBlock) { + JsonObjectBuilder jobMetadata = new NullSafeJsonBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + String blockDisplay = blocks.get(0)[0].getDatasetFieldType().getMetadataBlock().getDisplayName(); + for (DatasetField[] dsfArray : blocks) { + JsonObjectBuilder jb = new NullSafeJsonBuilder(); + jb.add("fieldName", dsfArray[0].getDatasetFieldType().getTitle()); + if (dsfArray[0].getDatasetFieldType().isPrimitive()) { + jb.add("oldValue", dsfArray[0].getRawValue()); + } else { + jb.add("oldValue", dsfArray[0].getCompoundRawValue()); + } + if (dsfArray[1].getDatasetFieldType().isPrimitive()) { + jb.add("newValue", dsfArray[1].getRawValue()); + } else { + jb.add("newValue", dsfArray[1].getCompoundRawValue()); + } + jab.add(jb); } - jobMetadata.add(dsfArray[0].getDatasetFieldType().getTitle(), jb); + jobMetadata.add("blockName", blockDisplay); + jobMetadata.add("changed", jab); + jabMetadata.add(jobMetadata); } - } - if (!byBlock.isEmpty()) { - job.add("Metadata", jobMetadata); + job.add("metadataChanges", jabMetadata); } // Format added, removed, and modified files - JsonObjectBuilder jobFiles = new NullSafeJsonBuilder(); + JsonArrayBuilder jabDiffFiles = Json.createArrayBuilder(); if (!addedFiles.isEmpty()) { JsonArrayBuilder jab = Json.createArrayBuilder(); - addedFiles.forEach(f -> jab.add(json(f))); - jobFiles.add("added", jab); + addedFiles.forEach(f -> { + jab.add(new NullSafeJsonBuilder().add("fileName", f.getDataFile().getDisplayName())); + }); + job.add("filesAdded", jab); } if (!removedFiles.isEmpty()) { JsonArrayBuilder jab = Json.createArrayBuilder(); - removedFiles.forEach(f -> jab.add(json(f))); - jobFiles.add("removed", jab); + removedFiles.forEach(f -> { + jab.add(new NullSafeJsonBuilder().add("fileName", f.getDataFile().getDisplayName())); + }); + job.add("filesRemoved", jab); } if (!changedFileMetadata.isEmpty()) { - JsonArrayBuilder jabDiffFiles = Json.createArrayBuilder(); changedFileMetadataDiff.entrySet().forEach(entry -> { - JsonObjectBuilder jobDiffFiles = new NullSafeJsonBuilder(); - jobDiffFiles.add("fileMetadata", json(entry.getKey())); + JsonArrayBuilder jab = Json.createArrayBuilder(); + JsonObjectBuilder jobChanges = new NullSafeJsonBuilder(); + jobChanges.add("fileName", entry.getKey().getDataFile().getOriginalFileName()); entry.getValue().entrySet().forEach(e -> { JsonObjectBuilder jobDiffField = new NullSafeJsonBuilder(); - jobDiffField.add("0",e.getValue().get(0)); - jobDiffField.add("1",e.getValue().get(1)); - jobDiffFiles.add(e.getKey(), jobDiffField); + jobDiffField.add("fieldName",e.getKey()); + jobDiffField.add("oldValue",e.getValue().get(0)); + jobDiffField.add("newValue",e.getValue().get(1)); + jab.add(jobDiffField); }); - 
jabDiffFiles.add(jobDiffFiles); + jobChanges.add("changes", jab); + jabDiffFiles.add(jobChanges); }); - jobFiles.add("modified", jabDiffFiles); - } - if (!addedFiles.isEmpty() || !removedFiles.isEmpty() || !changedFileMetadata.isEmpty()) { - job.add("Files", jobFiles); + job.add("fileChanges", jabDiffFiles); } // Format Terms Of Access changes if (!changedTermsAccess.isEmpty()) { JsonObjectBuilder jobTOA = new NullSafeJsonBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); changedTermsAccess.forEach(toa -> { JsonObjectBuilder jobValue = new NullSafeJsonBuilder(); - jobValue.add("0",toa[1]); - jobValue.add("1",toa[2]); - jobTOA.add(toa[0], jobValue); + jobValue.add("fieldName",toa[0]); + jobValue.add("oldValue",toa[1]); + jobValue.add("newValue",toa[2]); + jab.add(jobValue); }); + jobTOA.add("changed", jab); job.add("TermsOfAccess", jobTOA); } diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java index 9457cdbdb4d..4b901f99afe 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java @@ -33,6 +33,9 @@ public void testCompareVersionsAsJson() { dv2.getFileMetadatas().remove(1); DatasetField dsf = new DatasetField(); dsf.setDatasetFieldType(new DatasetFieldType("Author", DatasetFieldType.FieldType.TEXT, true)); + MetadataBlock mb = new MetadataBlock(); + mb.setDisplayName("testMetadataBlock"); + dsf.getDatasetFieldType().setMetadataBlock(mb); dsf.setSingleValue("TEST"); dv2.getDatasetFields().add(dsf); dv2.getFileMetadatas().get(2).setRestricted(!dv2.getFileMetadatas().get(2).isRestricted()); @@ -43,9 +46,9 @@ public void testCompareVersionsAsJson() { System.out.println(JsonUtil.prettyPrint(obj)); JsonPath dataFile = JsonPath.from(JsonUtil.prettyPrint(obj)); - assertTrue("TEST".equalsIgnoreCase(dataFile.getString("Metadata.Author.1"))); - assertTrue("true".equalsIgnoreCase(dataFile.getString("Files.modified[0].isRestricted.1"))); - assertTrue("disclaimer".equalsIgnoreCase(dataFile.getString("TermsOfAccess.Disclaimer.1"))); + assertTrue("TEST".equalsIgnoreCase(dataFile.getString("metadataChanges[0].changed[0].newValue"))); + assertTrue("true".equalsIgnoreCase(dataFile.getString("fileChanges[0].changes[0].newValue"))); + assertTrue("disclaimer".equalsIgnoreCase(dataFile.getString("TermsOfAccess.changed[0].newValue"))); } private DatasetVersion initDatasetVersion(Long id, Dataset ds, DatasetVersion.VersionState vs) { DatasetVersion dv = new DatasetVersion(); @@ -59,6 +62,7 @@ private DatasetVersion initDatasetVersion(Long id, Dataset ds, DatasetVersion.Ve dv.setReleaseTime(now()); } dv.setId(id); + dv.setCreateTime(now()); dv.setTermsOfUseAndAccess(new TermsOfUseAndAccess()); dv.setFileMetadatas(initFiles(dv)); return dv; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index af0f218bcf6..4b022b00cef 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -5241,10 +5241,18 @@ public void testCompareDatasetVersionsAPI() { Response compareResponse = UtilIT.compareDatasetVersions(datasetPersistentId, ":latest-published", ":draft", apiToken); compareResponse.prettyPrint(); compareResponse.then().assertThat() - .body("data.Metadata.Author.1", CoreMatchers.containsString("Poe, Edgar Allen")) - 
.body("data.Files.added[0].label", CoreMatchers.equalTo("dataverseproject.png")) - .body("data.Files.removed[0].label", CoreMatchers.equalTo("dataverseproject_logo.jpg")) - .body("data.Files.modified[0].isRestricted.1", CoreMatchers.equalTo("true")) + .body("data.oldVersion.versionNumber", CoreMatchers.equalTo("1.0")) + .body("data.newVersion.versionNumber", CoreMatchers.equalTo("DRAFT")) + .body("data.metadataChanges[0].blockName", CoreMatchers.equalTo("Citation Metadata")) + .body("data.metadataChanges[0].changed[0].fieldName", CoreMatchers.equalTo("Author")) + .body("data.metadataChanges[0].changed[0].oldValue", CoreMatchers.containsString("Finch, Fiona; (Birds Inc.)")) + .body("data.metadataChanges[1].blockName", CoreMatchers.equalTo("Life Sciences Metadata")) + .body("data.metadataChanges[1].changed[0].fieldName", CoreMatchers.equalTo("Design Type")) + .body("data.metadataChanges[1].changed[0].oldValue", CoreMatchers.containsString("")) + .body("data.metadataChanges[1].changed[0].newValue", CoreMatchers.containsString("Parallel Group Design; Nested Case Control Design")) + .body("data.filesAdded[0].fileName", CoreMatchers.equalTo("dataverseproject.png")) + .body("data.filesRemoved[0].fileName", CoreMatchers.equalTo("dataverseproject_logo.jpg")) + .body("data.fileChanges[0].changes[0].newValue", CoreMatchers.equalTo("true")) .body("data.TermsOfAccess", CoreMatchers.notNullValue()) .statusCode(OK.getStatusCode()); } From a8e1e80002010aa83b95add6923029c91a965c86 Mon Sep 17 00:00:00 2001 From: Thrinadh Manubothu Date: Tue, 22 Oct 2024 07:27:01 +0530 Subject: [PATCH 146/402] Fixed typo tombstone in Bundle.properties --- src/main/java/propertyFiles/Bundle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 5f3e4c33e0b..149e6a7e828 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2065,7 +2065,7 @@ file.deleteFileDialog.multiple.immediate=The file(s) will be deleted after you c file.deleteFileDialog.header=Delete Files file.deleteFileDialog.failed.tip=Files will not be removed from previously published versions of the dataset. file.deaccessionDialog.tip.permanent=Deaccession is permanent. -file.deaccessionDialog.tip=This dataset will no longer be public and a tumbstone will display the reason for deaccessioning.
    Please read the documentation if you have any questions. +file.deaccessionDialog.tip=This dataset will no longer be public and a tombstone will display the reason for deaccessioning.
    Please read the documentation if you have any questions. file.deaccessionDialog.version=Version file.deaccessionDialog.reason.question1=Which version(s) do you want to deaccession? file.deaccessionDialog.reason.question2=What is the reason for deaccession? From 255f4196ef053373205f632a460de392a592efb9 Mon Sep 17 00:00:00 2001 From: jo-pol Date: Tue, 22 Oct 2024 10:02:24 +0200 Subject: [PATCH 147/402] defensive assert; concise time measurement --- .../engine/command/impl/CreateNewDataFilesTest.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java index a956b473a4b..aa015dd68a6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java @@ -137,7 +137,7 @@ public void execute_rezips_sets_of_shape_files_from_uploaded_zip() throws Except assertThat(result.getErrors()).hasSize(0); assertThat(result.getDataFiles().stream().map(dataFile -> (dataFile.getFileMetadata().getDirectoryLabel() + "/" + dataFile.getDisplayName()) - .replaceAll(".*temp/shp_[-0-9]*/", "") + .replaceAll(".*temp/[-_shp0-9]*/", "") )).containsExactlyInAnyOrder( "dataDir/shape1.zip", "dataDir/shape2/shape2", @@ -178,11 +178,11 @@ public void extract_zip_performance() throws Exception { var random = new SecureRandom(); var totalNrOfFiles = 0; var totalFileSize = 0; + var totalTime = 0L; var tmp = Path.of(Files.createTempDirectory(null).toString()); var ctxt = mockCommandContext(mockSysConfig(false, 100000000L, MD5, 10000)); try (var mockedJHoveFileType = Mockito.mockStatic(JhoveFileType.class)) { mockedJHoveFileType.when(JhoveFileType::getJhoveConfigFile).thenReturn("conf/jhove/jhove.conf"); - var before = DateTime.now(); for (var zipNr = 1; zipNr <= nrOfZipFiles; zipNr++) { // build the zip var zip = tmp.resolve(zipNr + "-data.zip"); @@ -204,8 +204,11 @@ public void extract_zip_performance() throws Exception { } // upload the zip + var before = DateTime.now(); var result = createCmd(zip.toString(), mockDatasetVersion(), 1000L, 500L) .execute(ctxt); + totalTime += DateTime.now().getMillis() - before.getMillis(); + assertThat(result.getErrors()).hasSize(0); assertThat(result.getDataFiles()).hasSize(nrOfFilesInZip); totalNrOfFiles += nrOfFilesInZip; @@ -213,7 +216,7 @@ public void extract_zip_performance() throws Exception { // report after each zip to have some data even when aborting a test that takes too long System.out.println(MessageFormat.format( "Total time: {0}ms; nr of zips {1} total nr of files {2}; total file size {3}", - DateTime.now().getMillis() - before.getMillis(), zipNr, totalNrOfFiles, totalFileSize + totalTime, zipNr, totalNrOfFiles, totalFileSize )); } assertThat(tmpUploadStorage.toFile().list()).hasSize(totalNrOfFiles); From 580ca364305f8d97404a73724416e549553dcf05 Mon Sep 17 00:00:00 2001 From: jo-pol Date: Tue, 22 Oct 2024 14:10:17 +0200 Subject: [PATCH 148/402] more defensive assert Intellij shows directory labels like ewDataFilesTest/tmp/temp/shp_2024-10-22-01-57-21-833/dataDir/extra possibly different environments have different values --- .../command/impl/CreateNewDataFilesTest.java | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java 
b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java index aa015dd68a6..f49ebcea39c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java @@ -137,16 +137,16 @@ public void execute_rezips_sets_of_shape_files_from_uploaded_zip() throws Except assertThat(result.getErrors()).hasSize(0); assertThat(result.getDataFiles().stream().map(dataFile -> (dataFile.getFileMetadata().getDirectoryLabel() + "/" + dataFile.getDisplayName()) - .replaceAll(".*temp/[-_shp0-9]*/", "") + .replaceAll(".*/dataDir/", "") )).containsExactlyInAnyOrder( - "dataDir/shape1.zip", - "dataDir/shape2/shape2", - "dataDir/shape2/shape2.pdf", - "dataDir/shape2/shape2.txt", - "dataDir/shape2/shape2.zip", - "dataDir/extra/shp_dictionary.xls", - "dataDir/extra/notes", - "dataDir/extra/README.MD" + "shape1.zip", + "shape2/shape2", + "shape2/shape2.pdf", + "shape2/shape2.txt", + "shape2/shape2.zip", + "extra/shp_dictionary.xls", + "extra/notes", + "extra/README.MD" ); var storageIds = result.getDataFiles().stream().map(DataFile::getStorageIdentifier).toList(); assertThat(tempDir.toFile().list()) From c8b9b38c5e356afca7f323b94d1302170aeb80da Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 22 Oct 2024 09:48:41 -0400 Subject: [PATCH 149/402] #8184 display url in popup upon creation --- src/main/webapp/dataset.xhtml | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index ec23a9cdcfd..86bb6adb4b4 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1198,6 +1198,20 @@ disabled="#{(!empty(DatasetPage.privateUrl) and DatasetPage.anonymizedPrivateUrl)}" rendered="#{empty(DatasetPage.privateUrl) or (!empty(DatasetPage.privateUrl) and DatasetPage.anonymizedPrivateUrl) }"/> +
    +
    +

    #{bundle['dataset.privateurl.anonymized']}

    +

    #{bundle['dataset.privateurl.full']}

    + +

    + #{privateUrlLink} +

    +
    +
    -
    +
    #{bundle['mydataFragment.resultsByUserName']} - +
    @@ -150,4 +150,4 @@
    - \ No newline at end of file + diff --git a/src/main/webapp/resources/js/mydata.js b/src/main/webapp/resources/js/mydata.js index 899ba6637e2..c731d6772ac 100644 --- a/src/main/webapp/resources/js/mydata.js +++ b/src/main/webapp/resources/js/mydata.js @@ -391,7 +391,7 @@ function submit_my_data_search(){ // -------------------------------- // ah, but with the horribly coded xhtml page, we can't use form tags... //var formData = $('#mydata_filter_form').serialize(); - var formData = $("#my_data_filter_column :input").serialize() + '&' + $("#my_data_filter_column2 :input").serialize() ; + var formData = $("#my_data_filter_column :input").serialize() + '&' + $("#my_data_filter_column3 :input").serialize()+ '&' + $("#my_data_filter_column2 :input").serialize() ; // For debugging, show the search params if (MYDATA_DEBUG_ON){ From f6a65c5803645f8be34a0f0b9b90b8a15d1a6c2c Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 29 Oct 2024 14:23:13 -0400 Subject: [PATCH 183/402] add release note --- doc/release-notes/7239-mydata-results-by-username.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/7239-mydata-results-by-username.md diff --git a/doc/release-notes/7239-mydata-results-by-username.md b/doc/release-notes/7239-mydata-results-by-username.md new file mode 100644 index 00000000000..7530783661c --- /dev/null +++ b/doc/release-notes/7239-mydata-results-by-username.md @@ -0,0 +1,3 @@ +## Fix My Data filter results by username for Administrators + +The filtering for the username on the MyData page was not working. This is only available for Administrators. This fixes the "Results for Username" field to return the data for the desired user. From 0daa077413492bd00d2e9ce1b99db9ff69922426 Mon Sep 17 00:00:00 2001 From: paulboon Date: Wed, 30 Oct 2024 13:24:34 +0100 Subject: [PATCH 184/402] Fixed wrong character set conversion in Shib.getValueFromAssertion --- src/main/java/edu/harvard/iq/dataverse/Shib.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java index 38d732c6acd..121d03ef0c7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Shib.java +++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java @@ -422,7 +422,7 @@ private String getValueFromAssertion(String key) { if (attribute != null) { String attributeValue = attribute.toString(); if(systemConfig.isShibAttributeCharacterSetConversionEnabled()) { - attributeValue = new String(attributeValue.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.ISO_8859_1); + attributeValue = new String(attributeValue.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8); } String trimmedValue = attributeValue.trim(); if (!trimmedValue.isEmpty()) { From fa904b6138f1faf3c6cd5f012637ca1515f30c09 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 30 Oct 2024 10:15:17 -0400 Subject: [PATCH 185/402] #8184 acceptance testing --- src/main/java/propertyFiles/Bundle.properties | 4 ++-- src/main/webapp/dataset.xhtml | 8 ++------ 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 6924b927f0c..2911af8f247 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1727,12 +1727,12 @@ dataset.privateurl.tip=To cite this data in publications, use the dataset's pers 
dataset.privateurl.onlyone=Only one Preview URL can be active for a single dataset. dataset.privateurl.absent=Preview URL has not been created. dataset.privateurl.general.button.label=Create General Preview URL -dataset.privateurl.general.description=Create a URL that others can use to review this dataset version before it is published. they will be able to access all files in the dataset and see all metadata, including metadata that may identify the dataset's authors. +dataset.privateurl.general.description=Create a URL that others can use to review this dataset version before it is published. They will be able to access all files in the dataset and see all metadata, including metadata that may identify the dataset's authors. dataset.privateurl.general.title=General Preview dataset.privateurl.anonymous.title=Anonymous Preview dataset.privateurl.anonymous.button.label=Create Anonymous Preview URL dataset.privateurl.anonymous.description=Create a URL that others can use to access an anonymized view of this unpublished dataset version. Metadata that could identify the dataset author will not be displayed. Non-identifying metadata will be visible. -dataset.privateurl.anonymous.description.paragraph.two=The dataset's files are not changed and will be accessible if they're not restricted. Users of the preview URL will be able to see the name of the repository and the name of the collection that this dataset is in, which may expose the dataset author's identities. +dataset.privateurl.anonymous.description.paragraph.two=The dataset's files are not changed and will be accessible if they're not restricted. Users of the preview URL will be able to see the name of the repository but not the name of the collection that this dataset is in. dataset.privateurl.createPrivateUrl=Create Preview URL dataset.privateurl.introduction=You can create a Preview URL to copy and share with others who will not need a repository account to review this unpublished dataset version. Once the dataset is published ot if the URL is disabled, the URL will no longer work and will point to a "Page not found" page. dataset.privateurl.createPrivateUrl.anonymized=Create URL for Anonymized Access diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 94d14c055be..cb2f8361742 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1205,9 +1205,7 @@ } else { selectText(this); }"> -

    #{bundle['dataset.privateurl.anonymized']}

    -

    #{bundle['dataset.privateurl.full']}

    - +

    #{privateUrlLink}

    @@ -1246,9 +1244,7 @@ } else { selectText(this); }"> -

    #{bundle['dataset.privateurl.anonymized']}

    -

    #{bundle['dataset.privateurl.full']}

    - +

    #{privateUrlLink}

    From ad3505143e60e4b9cea896c52b3db782a77f2f07 Mon Sep 17 00:00:00 2001 From: Ludovic DANIEL Date: Wed, 30 Oct 2024 17:39:50 +0100 Subject: [PATCH 186/402] add release note --- doc/release-notes/10772-fix-importDDI-otherId.md | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 doc/release-notes/10772-fix-importDDI-otherId.md diff --git a/doc/release-notes/10772-fix-importDDI-otherId.md b/doc/release-notes/10772-fix-importDDI-otherId.md new file mode 100644 index 00000000000..d5a9018b2b2 --- /dev/null +++ b/doc/release-notes/10772-fix-importDDI-otherId.md @@ -0,0 +1,2 @@ +Bug Fix : +This PR fixes the `edu.harvard.iq.dataverse.util.json.JsonParseException: incorrect multiple for field otherId` error when DDI harvested data contains multiple ortherId. \ No newline at end of file From 93dd4231fa2d7d180412c93435b6f8e682816bc0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 30 Oct 2024 13:19:30 -0400 Subject: [PATCH 187/402] add test for returnDatasetFieldTypes #10984 --- .../java/edu/harvard/iq/dataverse/api/DataversesIT.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 6a040f27786..0eb2670b272 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -911,6 +911,15 @@ public void testListMetadataBlocks() { createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); + listMetadataBlocks.prettyPrint(); + listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); + listMetadataBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + // failing? "fields" is empty, showing {} + .body("data[0].fields.title.displayOnCreate", equalTo(true)); + Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); From 51794df26c00ce5ab9eb1fdbeb7d5aa91e8576fc Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 30 Oct 2024 16:45:57 -0400 Subject: [PATCH 188/402] Add FIXME about inheritence. Add "if #10984 fixed" to tests. --- .../iq/dataverse/DatasetFieldServiceBean.java | 2 +- .../iq/dataverse/api/DataversesIT.java | 29 ++++++++++++++----- 2 files changed, 22 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index 91150b79505..c977ae784bd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -947,7 +947,7 @@ public List findAllInMetadataBlockAndDataverse(MetadataBlock m criteriaQuery.where( criteriaBuilder.equal(dataverseRoot.get("id"), dataverse.getId()), // Match the Dataverse ID. criteriaBuilder.equal(metadataBlockRoot.get("id"), metadataBlock.getId()), // Match the MetadataBlock ID. - metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")), // Ensure the MetadataBlock is part of the Dataverse. + metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")), // Ensure the MetadataBlock is part of the Dataverse. 
FIXME: inherit blocks from parent datasetFieldTypeRoot.in(metadataBlockRoot.get("datasetFieldTypes")), // Ensure the DatasetFieldType is part of the MetadataBlock. criteriaBuilder.or(includedAsInputLevelPredicate, hasNoInputLevelPredicate), // Include DatasetFieldTypes based on the input level predicates. displayedOnCreatePredicate // Apply the display-on-create filter if necessary. diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 0eb2670b272..9e3555555e8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -911,14 +911,27 @@ public void testListMetadataBlocks() { createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); - listMetadataBlocks.prettyPrint(); - listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); - listMetadataBlocks.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].name", is("citation")) - // failing? "fields" is empty, showing {} - .body("data[0].fields.title.displayOnCreate", equalTo(true)); + boolean issue10984fixed = false; + // See https://github.com/IQSS/dataverse/issues/10984 + if (issue10984fixed) { + Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); + listMetadataBlocks.prettyPrint(); + listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); + listMetadataBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[0].fields.title.displayOnCreate", equalTo(true)); + + } else { + Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); + listMetadataBlocks.prettyPrint(); + listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); + listMetadataBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + // "fields" should be more like 28, not 0 + .body("data[0].fields.size()", is(0)); + } Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); From db181848cd9c94044222598a97a7cd891a16e8de Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 30 Oct 2024 17:46:56 -0400 Subject: [PATCH 189/402] fixes the validation method in harvesting import that got broken in 10836. #10989 --- .../api/imports/ImportServiceBean.java | 49 +++++++++++++------ 1 file changed, 33 insertions(+), 16 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index 66f48bfb872..b203738a9fd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -359,12 +359,7 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve if (harvestedVersion.getReleaseTime() == null) { harvestedVersion.setReleaseTime(oaiDateStamp); } - - // is this the right place to call tidyUpFields()? 
- // usually it is called within the body of the create/update commands - // later on. - DatasetFieldUtil.tidyUpFields(harvestedVersion.getDatasetFields(), true); - + // Check data against validation constraints. // Make an attempt to sanitize any invalid fields encountered - // missing required fields or invalid values, by filling the values @@ -382,7 +377,9 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve if (sanitized) { validateVersionMetadata(harvestedVersion, cleanupLog); } - + + DatasetFieldUtil.tidyUpFields(harvestedVersion.getDatasetFields(), true); + if (existingDataset != null) { importedDataset = engineSvc.submit(new UpdateHarvestedDatasetCommand(existingDataset, harvestedVersion, dataverseRequest)); } else { @@ -742,15 +739,35 @@ private boolean validateVersionMetadata(DatasetVersion version, boolean sanitize boolean fixed = false; Set invalidViolations = version.validate(); if (!invalidViolations.isEmpty()) { - for (ConstraintViolation v : invalidViolations) { - DatasetFieldValue f = v.getRootBean(); - - String msg = "Invalid metadata field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " - + "Invalid value: '" + f.getValue() + "'"; - if (sanitize) { - msg += ", replaced with '" + DatasetField.NA_VALUE + "'"; - f.setValue(DatasetField.NA_VALUE); - fixed = true; + for (ConstraintViolation v : invalidViolations) { + Object invalid = v.getRootBean(); + String msg = ""; + if (invalid instanceof DatasetField) { + DatasetField f = (DatasetField) invalid; + + msg += "Missing required field: " + f.getDatasetFieldType().getDisplayName() + ";"; + if (sanitize) { + msg += " populated with '" + DatasetField.NA_VALUE + "'"; + f.setSingleValue(DatasetField.NA_VALUE); + fixed = true; + } + } else if (invalid instanceof DatasetFieldValue) { + DatasetFieldValue fv = (DatasetFieldValue) invalid; + + msg += "Invalid metadata field: " + fv.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + + "Invalid value: '" + fv.getValue() + "'"; + if (sanitize) { + msg += ", replaced with '" + DatasetField.NA_VALUE + "'"; + fv.setValue(DatasetField.NA_VALUE); + fixed = true; + } + } else { + // DatasetVersion.validate() can also produce constraint violations + // in TermsOfUse and FileMetadata classes. + // We do not make any attempt to sanitize those. 
+ if (invalid != null) { + msg += "Invalid " + invalid.getClass().getName() + ": " + v.getMessage(); + } } cleanupLog.println(msg); From ed8a889bdc0df615d2f49a66c67ecfa41559099b Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 31 Oct 2024 12:49:26 +0000 Subject: [PATCH 190/402] Fixed: always querying the owner if the dataverse is not MetadataBlock root in findAllInMetadataBlockAndDataverse --- .../iq/dataverse/DatasetFieldServiceBean.java | 4 +++ .../iq/dataverse/api/DataversesIT.java | 29 +++++-------------- 2 files changed, 12 insertions(+), 21 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index c977ae784bd..e87d11dd7eb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -891,6 +891,10 @@ public List findAllDisplayedOnCreateInMetadataBlock(MetadataBl } public List findAllInMetadataBlockAndDataverse(MetadataBlock metadataBlock, Dataverse dataverse, boolean onlyDisplayedOnCreate) { + if (!dataverse.isMetadataBlockRoot() && dataverse.getOwner() != null) { + return findAllInMetadataBlockAndDataverse(metadataBlock, dataverse.getOwner(), onlyDisplayedOnCreate); + } + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(DatasetFieldType.class); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 9e3555555e8..f59d152f6be 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -911,27 +911,14 @@ public void testListMetadataBlocks() { createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - boolean issue10984fixed = false; - // See https://github.com/IQSS/dataverse/issues/10984 - if (issue10984fixed) { - Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); - listMetadataBlocks.prettyPrint(); - listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); - listMetadataBlocks.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].name", is("citation")) - .body("data[0].fields.title.displayOnCreate", equalTo(true)); - - } else { - Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); - listMetadataBlocks.prettyPrint(); - listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); - listMetadataBlocks.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].name", is("citation")) - // "fields" should be more like 28, not 0 - .body("data[0].fields.size()", is(0)); - } + Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); + listMetadataBlocks.prettyPrint(); + listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); + listMetadataBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[0].fields.title.displayOnCreate", equalTo(true)) + .body("data[0].fields.size()", is(28)); Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); 
setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); From cfe9dbddb720a34e3360a18ace8808942bf93d22 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 31 Oct 2024 12:55:52 +0000 Subject: [PATCH 191/402] Removed: FIXME comment --- .../java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index e87d11dd7eb..ded7c83de62 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -951,7 +951,7 @@ public List findAllInMetadataBlockAndDataverse(MetadataBlock m criteriaQuery.where( criteriaBuilder.equal(dataverseRoot.get("id"), dataverse.getId()), // Match the Dataverse ID. criteriaBuilder.equal(metadataBlockRoot.get("id"), metadataBlock.getId()), // Match the MetadataBlock ID. - metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")), // Ensure the MetadataBlock is part of the Dataverse. FIXME: inherit blocks from parent + metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")), // Ensure the MetadataBlock is part of the Dataverse. datasetFieldTypeRoot.in(metadataBlockRoot.get("datasetFieldTypes")), // Ensure the DatasetFieldType is part of the MetadataBlock. criteriaBuilder.or(includedAsInputLevelPredicate, hasNoInputLevelPredicate), // Include DatasetFieldTypes based on the input level predicates. displayedOnCreatePredicate // Apply the display-on-create filter if necessary. From a8dbae4e19e9d2ec963c6bacdfbb2f0f00c80482 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 31 Oct 2024 09:54:15 -0400 Subject: [PATCH 192/402] #8184 update disable button labels --- src/main/java/propertyFiles/Bundle.properties | 4 ++++ src/main/webapp/dataset.xhtml | 9 ++++++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 2911af8f247..b5739ef7633 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1738,7 +1738,11 @@ dataset.privateurl.introduction=You can create a Preview URL to copy and share w dataset.privateurl.createPrivateUrl.anonymized=Create URL for Anonymized Access dataset.privateurl.createPrivateUrl.anonymized.unavailable=Anonymized Access is not available once a version of the dataset has been published dataset.privateurl.disablePrivateUrl=Disable Preview URL +dataset.privateurl.disableGeneralPreviewUrl=Disable General Preview URL +dataset.privateurl.disableAnonPreviewUrl=Disable Anonymous Preview URL dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Preview URL +dataset.privateurl.disableGeneralPreviewUrlConfirm=Yes, Disable General Preview URL +dataset.privateurl.disableAnonPreviewUrlConfirm=Yes, Disable Anonymous Preview URL dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Preview URL? If you have shared the Preview URL with others they will no longer be able to use it to access your unpublished dataset. dataset.privateurl.cannotCreate=Preview URL can only be used with unpublished versions of datasets. 
dataset.privateurl.roleassigeeTitle=Preview URL Enabled diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index cb2f8361742..ee69f76cca2 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1215,7 +1215,7 @@ - +
    @@ -1254,7 +1254,7 @@ - + @@ -1293,7 +1293,10 @@

    #{bundle['dataset.privateurl.disableConfirmationText']}

    - + + + + From 42ac8c01a03deed86ef67722c2c77ebd7a49192e Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 31 Oct 2024 10:11:34 -0400 Subject: [PATCH 193/402] #8184 update popup message per Julian --- src/main/java/propertyFiles/Bundle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index b5739ef7633..d03b15937b8 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1732,7 +1732,7 @@ dataset.privateurl.general.title=General Preview dataset.privateurl.anonymous.title=Anonymous Preview dataset.privateurl.anonymous.button.label=Create Anonymous Preview URL dataset.privateurl.anonymous.description=Create a URL that others can use to access an anonymized view of this unpublished dataset version. Metadata that could identify the dataset author will not be displayed. Non-identifying metadata will be visible. -dataset.privateurl.anonymous.description.paragraph.two=The dataset's files are not changed and will be accessible if they're not restricted. Users of the preview URL will be able to see the name of the repository but not the name of the collection that this dataset is in. +dataset.privateurl.anonymous.description.paragraph.two=The dataset's files are not changed and will be accessible if they're not restricted. Users of the Anonymous Preview URL will not be able to see the name of the Dataverse that this dataset is in but will be able to see the name of the repository, which might expose the dataset authors' identities. dataset.privateurl.createPrivateUrl=Create Preview URL dataset.privateurl.introduction=You can create a Preview URL to copy and share with others who will not need a repository account to review this unpublished dataset version. Once the dataset is published ot if the URL is disabled, the URL will no longer work and will point to a "Page not found" page. 
dataset.privateurl.createPrivateUrl.anonymized=Create URL for Anonymized Access From f317ab005f241777fc4c8b4be531c7e17f9fdf54 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 31 Oct 2024 14:26:40 +0000 Subject: [PATCH 194/402] Added: tweaks to DataversesIT listMetadataBlocks --- src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index f59d152f6be..31a6c60bef9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -911,11 +911,13 @@ public void testListMetadataBlocks() { createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + // New Dataverse should return just the citation block and its displayOnCreate fields when onlyDisplayedOnCreate=true and returnDatasetFieldTypes=true Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); listMetadataBlocks.prettyPrint(); listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); listMetadataBlocks.then().assertThat() .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)) .body("data[0].name", is("citation")) .body("data[0].fields.title.displayOnCreate", equalTo(true)) .body("data[0].fields.size()", is(28)); From e7eace38ad31b2ab9995ac7f1485d477acd035c8 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 31 Oct 2024 10:52:12 -0400 Subject: [PATCH 195/402] Update FeatureFlags apiNotes While adding a flag I noticed a typo and missing apiNote --- .../edu/harvard/iq/dataverse/settings/FeatureFlags.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java index 33e828e619d..20632c170e4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java @@ -97,12 +97,16 @@ public enum FeatureFlags { * for the dataset. * * @apiNote Raise flag by setting - * "dataverse.feature.enable-dataset-thumbnail-autoselect" + * "dataverse.feature.disable-dataset-thumbnail-autoselect" * @since Dataverse 6.4 */ DISABLE_DATASET_THUMBNAIL_AUTOSELECT("disable-dataset-thumbnail-autoselect"), /** * Feature flag for the new Globus upload framework. 
+ * + * @apiNote Raise flag by setting + * "dataverse.feature.globus-use-experimental-async-framework" + * @since Dataverse 6.4 */ GLOBUS_USE_EXPERIMENTAL_ASYNC_FRAMEWORK("globus-use-experimental-async-framework"), ; From fea4b640a127786208b9dbe66d2b179c1c39b531 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Thu, 31 Oct 2024 15:52:37 -0400 Subject: [PATCH 196/402] Update doc/release-notes/7239-mydata-results-by-username.md Co-authored-by: Philip Durbin --- doc/release-notes/7239-mydata-results-by-username.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/7239-mydata-results-by-username.md b/doc/release-notes/7239-mydata-results-by-username.md index 7530783661c..fa1ce56d89e 100644 --- a/doc/release-notes/7239-mydata-results-by-username.md +++ b/doc/release-notes/7239-mydata-results-by-username.md @@ -1,3 +1,3 @@ ## Fix My Data filter results by username for Administrators -The filtering for the username on the MyData page was not working. This is only available for Administrators. This fixes the "Results for Username" field to return the data for the desired user. +The filtering for the username on the MyData page was not working. This is only available for superusers. This fixes the "Results for Username" field to return the data for the desired user. See also #7239 and #10980. From 5456803d4405d6e5dda2b9b8dd4c18e1605abb35 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Fri, 1 Nov 2024 10:31:33 -0400 Subject: [PATCH 197/402] #10889 Update doc/release-notes/10889_bump_PG17_FlyWay10.md Co-authored-by: Philip Durbin --- doc/release-notes/10889_bump_PG17_FlyWay10.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/release-notes/10889_bump_PG17_FlyWay10.md b/doc/release-notes/10889_bump_PG17_FlyWay10.md index 0f74568e5cd..7bb509886fb 100644 --- a/doc/release-notes/10889_bump_PG17_FlyWay10.md +++ b/doc/release-notes/10889_bump_PG17_FlyWay10.md @@ -1,3 +1,7 @@ This release bumps both the Postgres JDBC driver and Flyway versions. This should better support Postgres version 17, and as of version 10 Flyway no longer requires a paid subscription to support older versions of Postgres. While we don't encourage the use of older Postgres versions, this flexibility may benefit some of our long-standing installations in their upgrade paths. Postgres 13 remains the version used with automated testing. + +As part of this update, the containerized development environment now uses Postgres 17 instead of 16. Worst case, developers can start with a fresh database, if necessary. + +The Docker compose file used for [evaluations or demos](https://dataverse-guide--10912.org.readthedocs.build/en/10912/container/running/demo.html) has been upgraded from Postgres 13 to 17. From 5b2b35d16cc4e76038343401af848c87809403d0 Mon Sep 17 00:00:00 2001 From: Omer Fahim Date: Fri, 1 Nov 2024 12:59:59 -0400 Subject: [PATCH 198/402] Update dev-usage.rst - Add Details for Accessing and Saving Harvesting Logs to Local --- .../source/container/dev-usage.rst | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 6a1edcf7ebd..e481bcfd5ae 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -140,6 +140,48 @@ Alternatives: - If you used Docker Compose for running, you may use ``docker compose -f docker-compose-dev.yml logs ``. Options are the same. 
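For instance, a typical invocation that follows the most recent log output of the application server container could look like the sketch below; the service name ``dataverse`` and the tail length are assumptions based on the dev compose file, so adjust them to your setup:

.. code-block:: bash

   # Follow the last 100 lines of the "dataverse" service's log output
   docker compose -f docker-compose-dev.yml logs --follow --tail=100 dataverse
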
+Accessing Harvesting Log Files +------------------------------ + +1. Open a Terminal and Access Dataverse Container +Run the following command to access the Dataverse container (assuming your container is named dataverse-1): + +.. code-block:: +docker exec -it dataverse-1 bash +Code updated + +This command opens an interactive shell within the dataverse-1 container. + +2. Navigate to the Log Files Directory +Once inside the container, navigate to the directory where Dataverse logs are stored: + +.. code-block:: +cd /opt/payara/appserver/glassfish/domains/domain1/logs +Code updated + +This directory contains various log files, including those relevant to harvesting. + +3. Create a Directory for Copying Files +Create a directory where you’ll copy the files you want to access on your local machine: + +mkdir /dv/filesToCopy + +This will create a new folder named filesToCopy inside /dv. + +4. Copy the Files to the New Directory +Copy all files from the current directory to the newly created filesToCopy directory: + +cp * /dv/filesToCopy + +This command copies all files in the logs directory to /dv/filesToCopy. + +5. Access the Files on Your Mac +On your Mac, the copied files should appear in the following directory: + +docker-dev-volumes/app/data/filesToCopy + + + Redeploying ----------- From 90665f9ab705634d775656ab53f6b8c17823661e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 1 Nov 2024 13:02:21 -0400 Subject: [PATCH 199/402] Revert "Bump actions/download-artifact from 3 to 4.1.7 in /.github/workflows" (#10987) will check after the reverted code is merged --- .github/workflows/deploy_beta_testing.yml | 2 +- .github/workflows/maven_unit_test.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index efe3e0d8621..028f0140cc9 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -45,7 +45,7 @@ jobs: - uses: actions/checkout@v3 - name: Download war artifact - uses: actions/download-artifact@v4.1.7 + uses: actions/download-artifact@v3 with: name: built-app path: ./ diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 102fb1d5882..a94b17a67ba 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -107,7 +107,7 @@ jobs: cache: maven # Get the build output from the unit test job - - uses: actions/download-artifact@v4.1.7 + - uses: actions/download-artifact@v3 with: name: java-artifacts - run: | @@ -140,7 +140,7 @@ jobs: cache: maven # Get the build output from the integration test job - - uses: actions/download-artifact@v4.1.7 + - uses: actions/download-artifact@v3 with: name: java-reportdir - run: tar -xvf java-reportdir.tar From 899cc25996e25a0ddd1cbb514ae912cf927eea9a Mon Sep 17 00:00:00 2001 From: Omer Fahim Date: Fri, 1 Nov 2024 14:17:10 -0400 Subject: [PATCH 200/402] Update doc/sphinx-guides/source/container/dev-usage.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/container/dev-usage.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index e481bcfd5ae..7e4a640a45d 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -147,6 +147,7 @@ Accessing Harvesting Log Files Run the following command to access the Dataverse container (assuming your container is named 
dataverse-1): .. code-block:: + docker exec -it dataverse-1 bash Code updated From 356da8fc09c429d8776698b4bc8aa81333cb56c4 Mon Sep 17 00:00:00 2001 From: Omer Fahim Date: Fri, 1 Nov 2024 14:21:04 -0400 Subject: [PATCH 201/402] Update doc/sphinx-guides/source/container/dev-usage.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/container/dev-usage.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 7e4a640a45d..bd2ff6f0382 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -156,6 +156,7 @@ This command opens an interactive shell within the dataverse-1 container. 2. Navigate to the Log Files Directory Once inside the container, navigate to the directory where Dataverse logs are stored: + .. code-block:: cd /opt/payara/appserver/glassfish/domains/domain1/logs Code updated From 9119c4b4799fd4a14223ac9c07471ef8db34f095 Mon Sep 17 00:00:00 2001 From: ofahimIQSS Date: Fri, 1 Nov 2024 14:30:37 -0400 Subject: [PATCH 202/402] Update dev-usage.rst --- doc/sphinx-guides/source/container/dev-usage.rst | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index bd2ff6f0382..7b5274844f6 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -149,7 +149,6 @@ Run the following command to access the Dataverse container (assuming your conta .. code-block:: docker exec -it dataverse-1 bash -Code updated This command opens an interactive shell within the dataverse-1 container. @@ -159,28 +158,33 @@ Once inside the container, navigate to the directory where Dataverse logs are st .. code-block:: cd /opt/payara/appserver/glassfish/domains/domain1/logs -Code updated This directory contains various log files, including those relevant to harvesting. 3. Create a Directory for Copying Files Create a directory where you’ll copy the files you want to access on your local machine: -mkdir /dv/filesToCopy +.. code-block:: + + mkdir /dv/filesToCopy This will create a new folder named filesToCopy inside /dv. 4. Copy the Files to the New Directory Copy all files from the current directory to the newly created filesToCopy directory: -cp * /dv/filesToCopy +.. code-block:: + + cp * /dv/filesToCopy This command copies all files in the logs directory to /dv/filesToCopy. 5. Access the Files on Your Mac On your Mac, the copied files should appear in the following directory: -docker-dev-volumes/app/data/filesToCopy +.. code-block:: + + docker-dev-volumes/app/data/filesToCopy From 41a1d13288c31c938395bdab80601bc916391a15 Mon Sep 17 00:00:00 2001 From: ofahimIQSS Date: Fri, 1 Nov 2024 14:39:21 -0400 Subject: [PATCH 203/402] Update dev-usage.rst --- doc/sphinx-guides/source/container/dev-usage.rst | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 7b5274844f6..da7d6845bb5 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -148,16 +148,16 @@ Run the following command to access the Dataverse container (assuming your conta .. 
code-block:: -docker exec -it dataverse-1 bash + docker exec -it dataverse-1 bash This command opens an interactive shell within the dataverse-1 container. 2. Navigate to the Log Files Directory Once inside the container, navigate to the directory where Dataverse logs are stored: - .. code-block:: -cd /opt/payara/appserver/glassfish/domains/domain1/logs + + cd /opt/payara/appserver/glassfish/domains/domain1/logs This directory contains various log files, including those relevant to harvesting. @@ -186,9 +186,6 @@ On your Mac, the copied files should appear in the following directory: docker-dev-volumes/app/data/filesToCopy - - - Redeploying ----------- From 432feb0cba801a982b7d156b8d1d19a6625817a4 Mon Sep 17 00:00:00 2001 From: ofahimIQSS Date: Fri, 1 Nov 2024 14:48:05 -0400 Subject: [PATCH 204/402] Update dev-usage.rst --- doc/sphinx-guides/source/container/dev-usage.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index da7d6845bb5..bfad3d34cf2 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -143,7 +143,7 @@ Alternatives: Accessing Harvesting Log Files ------------------------------ -1. Open a Terminal and Access Dataverse Container +1. Open a terminal and access Dataverse container. Run the following command to access the Dataverse container (assuming your container is named dataverse-1): .. code-block:: @@ -152,7 +152,7 @@ Run the following command to access the Dataverse container (assuming your conta This command opens an interactive shell within the dataverse-1 container. -2. Navigate to the Log Files Directory +2. Navigate to the Log Files directory Once inside the container, navigate to the directory where Dataverse logs are stored: .. code-block:: From 0d59b16f8c05ddc190f38efaf1891cf6750f8107 Mon Sep 17 00:00:00 2001 From: ofahimIQSS Date: Fri, 1 Nov 2024 14:57:15 -0400 Subject: [PATCH 205/402] Update dev-usage.rst --- doc/sphinx-guides/source/container/dev-usage.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index bfad3d34cf2..27fbb28dd26 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -143,7 +143,7 @@ Alternatives: Accessing Harvesting Log Files ------------------------------ -1. Open a terminal and access Dataverse container. +1. Open a terminal and access the Dataverse container. Run the following command to access the Dataverse container (assuming your container is named dataverse-1): .. code-block:: @@ -152,7 +152,7 @@ Run the following command to access the Dataverse container (assuming your conta This command opens an interactive shell within the dataverse-1 container. -2. Navigate to the Log Files directory +2. Navigate to the Log files directory. Once inside the container, navigate to the directory where Dataverse logs are stored: .. code-block:: @@ -161,7 +161,7 @@ Once inside the container, navigate to the directory where Dataverse logs are st This directory contains various log files, including those relevant to harvesting. -3. Create a Directory for Copying Files +3. Create a directory for copying files. Create a directory where you’ll copy the files you want to access on your local machine: .. 
code-block:: @@ -170,7 +170,7 @@ Create a directory where you’ll copy the files you want to access on your loca This will create a new folder named filesToCopy inside /dv. -4. Copy the Files to the New Directory +4. Copy the files to the new directory. Copy all files from the current directory to the newly created filesToCopy directory: .. code-block:: @@ -179,7 +179,7 @@ Copy all files from the current directory to the newly created filesToCopy direc This command copies all files in the logs directory to /dv/filesToCopy. -5. Access the Files on Your Mac +5. Access the files on Your Mac On your Mac, the copied files should appear in the following directory: .. code-block:: From c76d62a6200382e6b584e220acde64bfd33716d2 Mon Sep 17 00:00:00 2001 From: ofahimIQSS Date: Fri, 1 Nov 2024 15:03:46 -0400 Subject: [PATCH 206/402] Update dev-usage.rst --- doc/sphinx-guides/source/container/dev-usage.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 27fbb28dd26..80e3cac989c 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -141,7 +141,7 @@ Alternatives: Options are the same. Accessing Harvesting Log Files ------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 1. Open a terminal and access the Dataverse container. Run the following command to access the Dataverse container (assuming your container is named dataverse-1): From e28b6d342eddc97e2632a53cdca17cf789d58022 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 1 Nov 2024 15:27:17 -0400 Subject: [PATCH 207/402] tweaks #10996 --- .../source/container/dev-usage.rst | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 80e3cac989c..c02c1d4010f 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -143,7 +143,8 @@ Alternatives: Accessing Harvesting Log Files ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -1. Open a terminal and access the Dataverse container. +\1. Open a terminal and access the Dataverse container. + Run the following command to access the Dataverse container (assuming your container is named dataverse-1): .. code-block:: @@ -152,7 +153,8 @@ Run the following command to access the Dataverse container (assuming your conta This command opens an interactive shell within the dataverse-1 container. -2. Navigate to the Log files directory. +\2. Navigate to the log files directory. + Once inside the container, navigate to the directory where Dataverse logs are stored: .. code-block:: @@ -161,8 +163,9 @@ Once inside the container, navigate to the directory where Dataverse logs are st This directory contains various log files, including those relevant to harvesting. -3. Create a directory for copying files. -Create a directory where you’ll copy the files you want to access on your local machine: +\3. Create a directory for copying files. + +Create a directory where you'll copy the files you want to access on your local machine: .. code-block:: @@ -170,7 +173,8 @@ Create a directory where you’ll copy the files you want to access on your loca This will create a new folder named filesToCopy inside /dv. -4. Copy the files to the new directory. +\4. Copy the files to the new directory. 
+ Copy all files from the current directory to the newly created filesToCopy directory: .. code-block:: @@ -179,8 +183,9 @@ Copy all files from the current directory to the newly created filesToCopy direc This command copies all files in the logs directory to /dv/filesToCopy. -5. Access the files on Your Mac -On your Mac, the copied files should appear in the following directory: +\5. Access the files on your local machine. + +On your local machine, the copied files should appear in the following directory: .. code-block:: From c094c8a6e45d02cac5c136e5b13bd5de113465bc Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 1 Nov 2024 16:37:39 -0400 Subject: [PATCH 208/402] github actions from v3 to v4 --- .github/workflows/container_app_pr.yml | 6 +++--- .github/workflows/container_app_push.yml | 2 +- .github/workflows/deploy_beta_testing.yml | 10 +++++----- .github/workflows/guides_build_sphinx.yml | 2 +- .github/workflows/maven_unit_test.yml | 22 +++++++++++----------- .github/workflows/reviewdog_checkstyle.yml | 2 +- .github/workflows/shellcheck.yml | 2 +- .github/workflows/shellspec.yml | 6 +++--- .github/workflows/spi_release.yml | 14 +++++++------- 9 files changed, 33 insertions(+), 33 deletions(-) diff --git a/.github/workflows/container_app_pr.yml b/.github/workflows/container_app_pr.yml index c86d284e74b..a3468cbc220 100644 --- a/.github/workflows/container_app_pr.yml +++ b/.github/workflows/container_app_pr.yml @@ -20,14 +20,14 @@ jobs: if: ${{ github.repository_owner == 'IQSS' }} steps: # Checkout the pull request code as when merged - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: 'refs/pull/${{ github.event.client_payload.pull_request.number }}/merge' - - uses: actions/setup-java@v3 + - uses: actions/setup-java@v4 with: java-version: "17" distribution: 'adopt' - - uses: actions/cache@v3 + - uses: actions/cache@v4 with: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} diff --git a/.github/workflows/container_app_push.yml b/.github/workflows/container_app_push.yml index 3b7ce066d73..184b69583a5 100644 --- a/.github/workflows/container_app_push.yml +++ b/.github/workflows/container_app_push.yml @@ -68,7 +68,7 @@ jobs: if: ${{ github.event_name != 'pull_request' && github.ref_name == 'develop' && github.repository_owner == 'IQSS' }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: peter-evans/dockerhub-description@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index 028f0140cc9..87eb6e8c150 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -10,9 +10,9 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4.1.7 - - uses: actions/setup-java@v3 + - uses: actions/setup-java@v4.1.7 with: distribution: 'zulu' java-version: '17' @@ -32,7 +32,7 @@ jobs: run: echo "war_file=$(ls *.war | head -1)">> $GITHUB_ENV - name: Upload war artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4.1.7 with: name: built-app path: ./target/${{ env.war_file }} @@ -42,10 +42,10 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4.1.7 - name: Download war artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4.1.7 with: name: built-app path: ./ diff --git 
a/.github/workflows/guides_build_sphinx.yml b/.github/workflows/guides_build_sphinx.yml index 86b59b11d35..fa3a876c418 100644 --- a/.github/workflows/guides_build_sphinx.yml +++ b/.github/workflows/guides_build_sphinx.yml @@ -10,7 +10,7 @@ jobs: docs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: uncch-rdmc/sphinx-action@master with: docs-folder: "doc/sphinx-guides/" diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index a94b17a67ba..5c7a154e2f5 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -32,9 +32,9 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v3 + - uses: actions/checkout@v4.1.7 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v3 + uses: actions/setup-java@v4.1.7 with: java-version: ${{ matrix.jdk }} distribution: temurin @@ -57,7 +57,7 @@ jobs: # Upload the built war file. For download, it will be wrapped in a ZIP by GitHub. # See also https://github.com/actions/upload-artifact#zipped-artifact-downloads - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4.1.7 with: name: dataverse-java${{ matrix.jdk }}.war path: target/dataverse*.war @@ -67,7 +67,7 @@ jobs: - run: | tar -cvf java-builddir.tar target tar -cvf java-m2-selection.tar ~/.m2/repository/io/gdcc/dataverse-* - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4.1.7 with: name: java-artifacts path: | @@ -98,16 +98,16 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v3 + - uses: actions/checkout@v4.1.7 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v3 + uses: actions/setup-java@v4.1.7 with: java-version: ${{ matrix.jdk }} distribution: temurin cache: maven # Get the build output from the unit test job - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4.1.7 with: name: java-artifacts - run: | @@ -119,7 +119,7 @@ jobs: # Wrap up and send to coverage job - run: tar -cvf java-reportdir.tar target/site - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4.1.7 with: name: java-reportdir path: java-reportdir.tar @@ -132,15 +132,15 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v3 - - uses: actions/setup-java@v3 + - uses: actions/checkout@v4.1.7 + - uses: actions/setup-java@v4.1.7 with: java-version: '17' distribution: temurin cache: maven # Get the build output from the integration test job - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4.1.7 with: name: java-reportdir - run: tar -xvf java-reportdir.tar diff --git a/.github/workflows/reviewdog_checkstyle.yml b/.github/workflows/reviewdog_checkstyle.yml index 90a0dd7d06b..804b04f696a 100644 --- a/.github/workflows/reviewdog_checkstyle.yml +++ b/.github/workflows/reviewdog_checkstyle.yml @@ -10,7 +10,7 @@ jobs: name: Checkstyle job steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Run check style uses: nikitasavinov/checkstyle-action@master with: diff --git a/.github/workflows/shellcheck.yml b/.github/workflows/shellcheck.yml index 56f7d648dc4..fb9cf5a0a1f 100644 --- a/.github/workflows/shellcheck.yml +++ b/.github/workflows/shellcheck.yml @@ -21,7 +21,7 @@ jobs: permissions: pull-requests: write steps: - - uses: 
actions/checkout@v3 + - uses: actions/checkout@v4 - name: shellcheck uses: reviewdog/action-shellcheck@v1 with: diff --git a/.github/workflows/shellspec.yml b/.github/workflows/shellspec.yml index 3320d9d08a4..cc09992edac 100644 --- a/.github/workflows/shellspec.yml +++ b/.github/workflows/shellspec.yml @@ -19,7 +19,7 @@ jobs: steps: - name: Install shellspec run: curl -fsSL https://git.io/shellspec | sh -s ${{ env.SHELLSPEC_VERSION }} --yes - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Run Shellspec run: | cd tests/shell @@ -30,7 +30,7 @@ jobs: container: image: rockylinux/rockylinux:9 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Install shellspec run: | curl -fsSL https://github.com/shellspec/shellspec/releases/download/${{ env.SHELLSPEC_VERSION }}/shellspec-dist.tar.gz | tar -xz -C /usr/share @@ -47,7 +47,7 @@ jobs: steps: - name: Install shellspec run: curl -fsSL https://git.io/shellspec | sh -s 0.28.1 --yes - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Run Shellspec run: | cd tests/shell diff --git a/.github/workflows/spi_release.yml b/.github/workflows/spi_release.yml index 8ad74b3e4bb..6398edca412 100644 --- a/.github/workflows/spi_release.yml +++ b/.github/workflows/spi_release.yml @@ -37,15 +37,15 @@ jobs: runs-on: ubuntu-latest if: github.event_name == 'pull_request' && needs.check-secrets.outputs.available == 'true' steps: - - uses: actions/checkout@v3 - - uses: actions/setup-java@v3 + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 with: java-version: '17' distribution: 'adopt' server-id: ossrh server-username: MAVEN_USERNAME server-password: MAVEN_PASSWORD - - uses: actions/cache@v2 + - uses: actions/cache@v4 with: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} @@ -63,12 +63,12 @@ jobs: runs-on: ubuntu-latest if: github.event_name == 'push' && needs.check-secrets.outputs.available == 'true' steps: - - uses: actions/checkout@v3 - - uses: actions/setup-java@v3 + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 with: java-version: '17' distribution: 'adopt' - - uses: actions/cache@v2 + - uses: actions/cache@v4 with: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} @@ -76,7 +76,7 @@ jobs: # Running setup-java again overwrites the settings.xml - IT'S MANDATORY TO DO THIS SECOND SETUP!!! - name: Set up Maven Central Repository - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: '17' distribution: 'adopt' From 12c1597c5f788f6d23ee1035d9f996af4749a003 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 1 Nov 2024 16:45:26 -0400 Subject: [PATCH 209/402] Modify test file to force workflow to build and test DO NOT MERGEgit add .git add . --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 93f1024ae7a..db1e6eb5169 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -70,7 +70,8 @@ public class DatasetsIT { @BeforeAll public static void setUpClass() { - + // !!!!!!!! 
DO NOT CHECK THIS FILE IN + logger.warning(">>>>>>>>>>> Just a line change to force GitHub actions to build and test Dataverse !!!!!!!"); RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); From 570d1285ba5f4a39596cec88295d8a45acc2e08c Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 1 Nov 2024 16:49:32 -0400 Subject: [PATCH 210/402] Modify test file to force workflow to build and test DO NOT MERGE --- .github/workflows/deploy_beta_testing.yml | 6 +++--- .github/workflows/maven_unit_test.yml | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index 87eb6e8c150..c6a8bd909df 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -10,9 +10,9 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4.1.7 + - uses: actions/checkout@v4 - - uses: actions/setup-java@v4.1.7 + - uses: actions/setup-java@v4 with: distribution: 'zulu' java-version: '17' @@ -42,7 +42,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4.1.7 + - uses: actions/checkout@v4 - name: Download war artifact uses: actions/download-artifact@v4.1.7 diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 5c7a154e2f5..67f20bbdc93 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -32,9 +32,9 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v4.1.7 + - uses: actions/checkout@v4 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v4.1.7 + uses: actions/setup-java@v4 with: java-version: ${{ matrix.jdk }} distribution: temurin @@ -98,9 +98,9 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v4.1.7 + - uses: actions/checkout@v4 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v4.1.7 + uses: actions/setup-java@v4 with: java-version: ${{ matrix.jdk }} distribution: temurin @@ -132,8 +132,8 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v4.1.7 - - uses: actions/setup-java@v4.1.7 + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 with: java-version: '17' distribution: temurin From 534ecdc0ab8834973d20c061fb84dc3a4e098de3 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 1 Nov 2024 16:55:09 -0400 Subject: [PATCH 211/402] Modify test file to force workflow to build and test DO NOT MERGE --- .github/workflows/deploy_beta_testing.yml | 2 +- .github/workflows/maven_unit_test.yml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index c6a8bd909df..c36a4dfab43 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -32,7 +32,7 @@ jobs: run: echo "war_file=$(ls *.war | head -1)">> $GITHUB_ENV - name: Upload war artifact - uses: actions/upload-artifact@v4.1.7 + uses: actions/upload-artifact@v4 with: name: built-app path: ./target/${{ env.war_file }} diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 67f20bbdc93..18a741e5ca5 100644 --- a/.github/workflows/maven_unit_test.yml +++ 
b/.github/workflows/maven_unit_test.yml @@ -57,7 +57,7 @@ jobs: # Upload the built war file. For download, it will be wrapped in a ZIP by GitHub. # See also https://github.com/actions/upload-artifact#zipped-artifact-downloads - - uses: actions/upload-artifact@v4.1.7 + - uses: actions/upload-artifact@v4 with: name: dataverse-java${{ matrix.jdk }}.war path: target/dataverse*.war @@ -67,7 +67,7 @@ jobs: - run: | tar -cvf java-builddir.tar target tar -cvf java-m2-selection.tar ~/.m2/repository/io/gdcc/dataverse-* - - uses: actions/upload-artifact@v4.1.7 + - uses: actions/upload-artifact@v4 with: name: java-artifacts path: | @@ -119,7 +119,7 @@ jobs: # Wrap up and send to coverage job - run: tar -cvf java-reportdir.tar target/site - - uses: actions/upload-artifact@v4.1.7 + - uses: actions/upload-artifact@v4 with: name: java-reportdir path: java-reportdir.tar From 5f19030e77861bf0ca92f9dc9ed8f91760ac21b8 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 1 Nov 2024 17:00:13 -0400 Subject: [PATCH 212/402] Modify test file to force workflow to build and test DO NOT MERGE --- conf/solr/update-fields.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/conf/solr/update-fields.sh b/conf/solr/update-fields.sh index 386c1ee4e87..5c896a98a04 100755 --- a/conf/solr/update-fields.sh +++ b/conf/solr/update-fields.sh @@ -22,6 +22,7 @@ COPY_FIELDS="" TRIGGER_CHAIN=0 ED_DELETE_FIELDS="'a+,'b-d" ED_DELETE_COPYFIELDS="'a+,'b-d" +FAKEDONOTMERGE=0 SOLR_SCHEMA_FIELD_BEGIN_MARK="SCHEMA-FIELDS::BEGIN" SOLR_SCHEMA_FIELD_END_MARK="SCHEMA-FIELDS::END" From caa8bdef67fb137a593b0ce0a490c5077b811b60 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 1 Nov 2024 17:03:03 -0400 Subject: [PATCH 213/402] Modify test file to force workflow to build and test DO NOT MERGE --- .github/workflows/shellcheck.yml | 2 +- .github/workflows/shellspec.yml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/shellcheck.yml b/.github/workflows/shellcheck.yml index fb9cf5a0a1f..d83c0a5315b 100644 --- a/.github/workflows/shellcheck.yml +++ b/.github/workflows/shellcheck.yml @@ -21,7 +21,7 @@ jobs: permissions: pull-requests: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v2 - name: shellcheck uses: reviewdog/action-shellcheck@v1 with: diff --git a/.github/workflows/shellspec.yml b/.github/workflows/shellspec.yml index cc09992edac..3320d9d08a4 100644 --- a/.github/workflows/shellspec.yml +++ b/.github/workflows/shellspec.yml @@ -19,7 +19,7 @@ jobs: steps: - name: Install shellspec run: curl -fsSL https://git.io/shellspec | sh -s ${{ env.SHELLSPEC_VERSION }} --yes - - uses: actions/checkout@v4 + - uses: actions/checkout@v2 - name: Run Shellspec run: | cd tests/shell @@ -30,7 +30,7 @@ jobs: container: image: rockylinux/rockylinux:9 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v2 - name: Install shellspec run: | curl -fsSL https://github.com/shellspec/shellspec/releases/download/${{ env.SHELLSPEC_VERSION }}/shellspec-dist.tar.gz | tar -xz -C /usr/share @@ -47,7 +47,7 @@ jobs: steps: - name: Install shellspec run: curl -fsSL https://git.io/shellspec | sh -s 0.28.1 --yes - - uses: actions/checkout@v4 + - uses: actions/checkout@v2 - name: Run Shellspec run: | cd tests/shell From 25d0caca81999859ad4cad65f71113dbfdbb9951 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 1 Nov 2024 17:03:40 -0400 Subject: 
[PATCH 214/402] Modify test file to force workflow to build and test DO NOT MERGE --- .github/workflows/shellcheck.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/shellcheck.yml b/.github/workflows/shellcheck.yml index d83c0a5315b..56f7d648dc4 100644 --- a/.github/workflows/shellcheck.yml +++ b/.github/workflows/shellcheck.yml @@ -21,7 +21,7 @@ jobs: permissions: pull-requests: write steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: shellcheck uses: reviewdog/action-shellcheck@v1 with: From 74fe482e7dc6d4663e08fa9756b7e76ed58c93a4 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 1 Nov 2024 17:05:53 -0400 Subject: [PATCH 215/402] Modify test file to force workflow to build and test DO NOT MERGE --- .github/workflows/spi_release.yml | 14 +++++++------- conf/solr/update-fields.sh | 1 - 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/.github/workflows/spi_release.yml b/.github/workflows/spi_release.yml index 6398edca412..8ad74b3e4bb 100644 --- a/.github/workflows/spi_release.yml +++ b/.github/workflows/spi_release.yml @@ -37,15 +37,15 @@ jobs: runs-on: ubuntu-latest if: github.event_name == 'pull_request' && needs.check-secrets.outputs.available == 'true' steps: - - uses: actions/checkout@v4 - - uses: actions/setup-java@v4 + - uses: actions/checkout@v3 + - uses: actions/setup-java@v3 with: java-version: '17' distribution: 'adopt' server-id: ossrh server-username: MAVEN_USERNAME server-password: MAVEN_PASSWORD - - uses: actions/cache@v4 + - uses: actions/cache@v2 with: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} @@ -63,12 +63,12 @@ jobs: runs-on: ubuntu-latest if: github.event_name == 'push' && needs.check-secrets.outputs.available == 'true' steps: - - uses: actions/checkout@v4 - - uses: actions/setup-java@v4 + - uses: actions/checkout@v3 + - uses: actions/setup-java@v3 with: java-version: '17' distribution: 'adopt' - - uses: actions/cache@v4 + - uses: actions/cache@v2 with: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} @@ -76,7 +76,7 @@ jobs: # Running setup-java again overwrites the settings.xml - IT'S MANDATORY TO DO THIS SECOND SETUP!!! 
- name: Set up Maven Central Repository - uses: actions/setup-java@v4 + uses: actions/setup-java@v3 with: java-version: '17' distribution: 'adopt' diff --git a/conf/solr/update-fields.sh b/conf/solr/update-fields.sh index 5c896a98a04..386c1ee4e87 100755 --- a/conf/solr/update-fields.sh +++ b/conf/solr/update-fields.sh @@ -22,7 +22,6 @@ COPY_FIELDS="" TRIGGER_CHAIN=0 ED_DELETE_FIELDS="'a+,'b-d" ED_DELETE_COPYFIELDS="'a+,'b-d" -FAKEDONOTMERGE=0 SOLR_SCHEMA_FIELD_BEGIN_MARK="SCHEMA-FIELDS::BEGIN" SOLR_SCHEMA_FIELD_END_MARK="SCHEMA-FIELDS::END" From ccee96af5021e31ff7eefaeeda655b93f3c26bab Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 1 Nov 2024 17:07:14 -0400 Subject: [PATCH 216/402] Modify test file to force workflow to build and test DO NOT MERGE --- .github/workflows/guides_build_sphinx.yml | 2 +- .github/workflows/reviewdog_checkstyle.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/guides_build_sphinx.yml b/.github/workflows/guides_build_sphinx.yml index fa3a876c418..86b59b11d35 100644 --- a/.github/workflows/guides_build_sphinx.yml +++ b/.github/workflows/guides_build_sphinx.yml @@ -10,7 +10,7 @@ jobs: docs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v2 - uses: uncch-rdmc/sphinx-action@master with: docs-folder: "doc/sphinx-guides/" diff --git a/.github/workflows/reviewdog_checkstyle.yml b/.github/workflows/reviewdog_checkstyle.yml index 804b04f696a..90a0dd7d06b 100644 --- a/.github/workflows/reviewdog_checkstyle.yml +++ b/.github/workflows/reviewdog_checkstyle.yml @@ -10,7 +10,7 @@ jobs: name: Checkstyle job steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v2 - name: Run check style uses: nikitasavinov/checkstyle-action@master with: From 9263b8a8aea13ad420a5b7a7db360e48ab1561c0 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 1 Nov 2024 17:08:45 -0400 Subject: [PATCH 217/402] Modify test file to force workflow to build and test DO NOT MERGE --- .github/workflows/container_app_pr.yml | 6 +++--- .github/workflows/container_app_push.yml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/container_app_pr.yml b/.github/workflows/container_app_pr.yml index a3468cbc220..c86d284e74b 100644 --- a/.github/workflows/container_app_pr.yml +++ b/.github/workflows/container_app_pr.yml @@ -20,14 +20,14 @@ jobs: if: ${{ github.repository_owner == 'IQSS' }} steps: # Checkout the pull request code as when merged - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 with: ref: 'refs/pull/${{ github.event.client_payload.pull_request.number }}/merge' - - uses: actions/setup-java@v4 + - uses: actions/setup-java@v3 with: java-version: "17" distribution: 'adopt' - - uses: actions/cache@v4 + - uses: actions/cache@v3 with: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} diff --git a/.github/workflows/container_app_push.yml b/.github/workflows/container_app_push.yml index 184b69583a5..3b7ce066d73 100644 --- a/.github/workflows/container_app_push.yml +++ b/.github/workflows/container_app_push.yml @@ -68,7 +68,7 @@ jobs: if: ${{ github.event_name != 'pull_request' && github.ref_name == 'develop' && github.repository_owner == 'IQSS' }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - uses: peter-evans/dockerhub-description@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} From 
d719dde6b3a7274621bcafc2257607e012acfb6c Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 1 Nov 2024 17:20:05 -0400 Subject: [PATCH 218/402] remove test file --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index db1e6eb5169..ce123b60b13 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -70,8 +70,6 @@ public class DatasetsIT { @BeforeAll public static void setUpClass() { - // !!!!!!!! DO NOT CHECK THIS FILE IN - logger.warning(">>>>>>>>>>> Just a line change to force GitHub actions to build and test Dataverse !!!!!!!"); RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); From 37f44b52a5435141c4dd43b8155222f0746802d9 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 1 Nov 2024 17:21:42 -0400 Subject: [PATCH 219/402] remove test file --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index ce123b60b13..c7184f0c78b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -70,7 +70,8 @@ public class DatasetsIT { @BeforeAll public static void setUpClass() { - + + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); Response removeIdentifierGenerationStyle = UtilIT.deleteSetting(SettingsServiceBean.Key.IdentifierGenerationStyle); From a094d81f36c12cdd65f0f48656ed7948d3245b0d Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 1 Nov 2024 17:27:01 -0400 Subject: [PATCH 220/402] remove test file --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index c7184f0c78b..93f1024ae7a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -70,8 +70,8 @@ public class DatasetsIT { @BeforeAll public static void setUpClass() { - - + + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); Response removeIdentifierGenerationStyle = UtilIT.deleteSetting(SettingsServiceBean.Key.IdentifierGenerationStyle); From 052262fe5badf98395704773f6ddfc4a179d9897 Mon Sep 17 00:00:00 2001 From: jo-pol Date: Mon, 4 Nov 2024 10:25:02 +0100 Subject: [PATCH 221/402] replaced deprecated mime type with mp4 --- .../propertyFiles/MimeTypeDetectionByFileExtension.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties index 05e61a40c17..4507c22fdf8 100644 --- a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties +++ b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties @@ -15,7 +15,7 @@ m=text/x-matlab mat=application/matlab-mat md=text/markdown mp3=audio/mp3 -m4a=audio/x-m4a +m4a=audio/mp4 nii=image/nii nc=application/netcdf 
ods=application/vnd.oasis.opendocument.spreadsheet From a55d31f19f9d422e2160d15b68d9519c9e29d394 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 4 Nov 2024 16:19:17 +0000 Subject: [PATCH 222/402] Fixed: unit test assertion in JsonParserTest --- .../java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index f241a5d1dda..236344a9200 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -283,7 +283,7 @@ public void parseDataverseDTO() throws JsonParseException { assertEquals("We do all the science.", actual.getDescription()); assertEquals("LABORATORY", actual.getDataverseType().toString()); assertEquals(2, actual.getDataverseContacts().size()); - assertEquals("pi@example.edu,student@example.edu", actual.getDataverseContacts().get(0).getContactEmail()); + assertEquals("pi@example.edu", actual.getDataverseContacts().get(0).getContactEmail()); assertEquals(0, actual.getDataverseContacts().get(0).getDisplayOrder()); assertEquals(1, actual.getDataverseContacts().get(1).getDisplayOrder()); } catch (IOException ioe) { From 1ed0d304307e8839493b00aa48cf1002ec8e5afa Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 4 Nov 2024 16:24:14 +0000 Subject: [PATCH 223/402] Added: assertion to JsonParserTest --- .../java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index 236344a9200..52e9c6de678 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -284,6 +284,7 @@ public void parseDataverseDTO() throws JsonParseException { assertEquals("LABORATORY", actual.getDataverseType().toString()); assertEquals(2, actual.getDataverseContacts().size()); assertEquals("pi@example.edu", actual.getDataverseContacts().get(0).getContactEmail()); + assertEquals("student@example.edu", actual.getDataverseContacts().get(1).getContactEmail()); assertEquals(0, actual.getDataverseContacts().get(0).getDisplayOrder()); assertEquals(1, actual.getDataverseContacts().get(1).getDisplayOrder()); } catch (IOException ioe) { From b1dcb00b8ad46549e7f74304b11e2dcc9d3a1e64 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 4 Nov 2024 16:25:50 +0000 Subject: [PATCH 224/402] Refactor: JsonParserTest.parseDataverseDTO --- .../iq/dataverse/util/json/JsonParserTest.java | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index 52e9c6de678..d1cb30e2bc3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -277,16 +277,17 @@ public void parseDataverseDTO() throws JsonParseException { try (FileReader reader = new FileReader("doc/sphinx-guides/source/_static/api/dataverse-complete.json")) { dvJson = Json.createReader(reader).readObject(); DataverseDTO actual = sut.parseDataverseDTO(dvJson); + List actualDataverseContacts = actual.getDataverseContacts(); 
assertEquals("Scientific Research", actual.getName()); assertEquals("science", actual.getAlias()); assertEquals("Scientific Research University", actual.getAffiliation()); assertEquals("We do all the science.", actual.getDescription()); assertEquals("LABORATORY", actual.getDataverseType().toString()); - assertEquals(2, actual.getDataverseContacts().size()); - assertEquals("pi@example.edu", actual.getDataverseContacts().get(0).getContactEmail()); - assertEquals("student@example.edu", actual.getDataverseContacts().get(1).getContactEmail()); - assertEquals(0, actual.getDataverseContacts().get(0).getDisplayOrder()); - assertEquals(1, actual.getDataverseContacts().get(1).getDisplayOrder()); + assertEquals(2, actualDataverseContacts.size()); + assertEquals("pi@example.edu", actualDataverseContacts.get(0).getContactEmail()); + assertEquals("student@example.edu", actualDataverseContacts.get(1).getContactEmail()); + assertEquals(0, actualDataverseContacts.get(0).getDisplayOrder()); + assertEquals(1, actualDataverseContacts.get(1).getDisplayOrder()); } catch (IOException ioe) { throw new JsonParseException("Couldn't read test file", ioe); } From cf1f18dc3cdb4d008ecdb4ba43bc05aa2ceee0ab Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:41:02 -0500 Subject: [PATCH 225/402] fix backwards newest oldest sort order --- src/main/webapp/filesFragment.xhtml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index 117710cfd53..154700f7cf4 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -301,24 +301,24 @@
  • From 391a249171736b9ac811da7b198af443d6f549bd Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 4 Nov 2024 13:42:40 -0500 Subject: [PATCH 226/402] adding release note --- doc/release-notes/10742-newest-oldest-sort-order-backwards.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/10742-newest-oldest-sort-order-backwards.md diff --git a/doc/release-notes/10742-newest-oldest-sort-order-backwards.md b/doc/release-notes/10742-newest-oldest-sort-order-backwards.md new file mode 100644 index 00000000000..0afaf45449d --- /dev/null +++ b/doc/release-notes/10742-newest-oldest-sort-order-backwards.md @@ -0,0 +1,3 @@ +## Minor bug fix to UI to fix the order of the files on the Dataset Files page when ordering by Date + +A fix was made to the ui to fix the ordering 'Newest' and 'Oldest' which were reversed From 40abc7e15230eac599b66d44c5f953973b391928 Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Mon, 4 Nov 2024 14:54:20 -0500 Subject: [PATCH 227/402] Update doc/release-notes/10889_bump_PG17_FlyWay10.md describe zapping database in Docker environment Co-authored-by: Philip Durbin --- doc/release-notes/10889_bump_PG17_FlyWay10.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10889_bump_PG17_FlyWay10.md b/doc/release-notes/10889_bump_PG17_FlyWay10.md index 7bb509886fb..c35b083fcd4 100644 --- a/doc/release-notes/10889_bump_PG17_FlyWay10.md +++ b/doc/release-notes/10889_bump_PG17_FlyWay10.md @@ -2,6 +2,6 @@ This release bumps both the Postgres JDBC driver and Flyway versions. This shoul While we don't encourage the use of older Postgres versions, this flexibility may benefit some of our long-standing installations in their upgrade paths. Postgres 13 remains the version used with automated testing. -As part of this update, the containerized development environment now uses Postgres 17 instead of 16. Worst case, developers can start with a fresh database, if necessary. +As part of this update, the containerized development environment now uses Postgres 17 instead of 16. Developers must delete their data (`rm -rf docker-dev-volumes`) and start an empty database. They can rerun the quickstart in the dev guide. The Docker compose file used for [evaluations or demos](https://dataverse-guide--10912.org.readthedocs.build/en/10912/container/running/demo.html) has been upgraded from Postgres 13 to 17. From 59be2a8ccb63874d89c1f82e456a1ee2c1da98a5 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 5 Nov 2024 08:34:45 -0500 Subject: [PATCH 228/402] Update doc/release-notes/10889_bump_PG17_FlyWay10.md --- doc/release-notes/10889_bump_PG17_FlyWay10.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10889_bump_PG17_FlyWay10.md b/doc/release-notes/10889_bump_PG17_FlyWay10.md index c35b083fcd4..932c06fbc3d 100644 --- a/doc/release-notes/10889_bump_PG17_FlyWay10.md +++ b/doc/release-notes/10889_bump_PG17_FlyWay10.md @@ -2,6 +2,6 @@ This release bumps both the Postgres JDBC driver and Flyway versions. This shoul While we don't encourage the use of older Postgres versions, this flexibility may benefit some of our long-standing installations in their upgrade paths. Postgres 13 remains the version used with automated testing. -As part of this update, the containerized development environment now uses Postgres 17 instead of 16. Developers must delete their data (`rm -rf docker-dev-volumes`) and start an empty database. 
They can rerun the quickstart in the dev guide. +As part of this update, the containerized development environment now uses Postgres 17 instead of 16. Developers must delete their data (`rm -rf docker-dev-volumes`) and start with an empty database. They can rerun the quickstart in the dev guide. The Docker compose file used for [evaluations or demos](https://dataverse-guide--10912.org.readthedocs.build/en/10912/container/running/demo.html) has been upgraded from Postgres 13 to 17. From 79c8cf38888563c8a7de40c99316a3ed8acbd192 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 5 Nov 2024 17:26:17 -0500 Subject: [PATCH 229/402] docs "Update a Dataverse Collection" vs. "Change Collection Attributes" #10904 --- doc/sphinx-guides/source/api/native-api.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 6254742eebb..b464b6df393 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -126,6 +126,8 @@ Same as in :ref:`create-dataverse-api`, the request JSON supports an optional `` To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs. +See also :ref:`collection-attributes-api`. + .. _view-dataverse: View a Dataverse Collection @@ -1058,6 +1060,8 @@ The following attributes are supported: * ``affiliation`` Affiliation * ``filePIDsEnabled`` ("true" or "false") Restricted to use by superusers and only when the :ref:`:AllowEnablingFilePIDsPerCollection <:AllowEnablingFilePIDsPerCollection>` setting is true. Enables or disables registration of file-level PIDs in datasets within the collection (overriding the instance-wide setting). +See also :ref:`update-dataverse-api`. + .. _collection-storage-quotas: Update Collection Input Levels From 6ea27e17ced9824a2b213880c81a04eaf7bb0c3d Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 6 Nov 2024 10:41:23 -0500 Subject: [PATCH 230/402] Use normalized version of PID Will use upper case form of DOI identifier to support case-insensitive retrieval --- .../edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index b203738a9fd..59b9f970f30 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -313,6 +313,8 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve // Creating a new dataset from scratch: harvestedDataset = parser.parseDataset(obj); + //Use normalized form (e.g. 
upper case DOI) + harvestedDataset.setGlobalId(globalId); harvestedDataset.setHarvestedFrom(harvestingClient); harvestedDataset.setHarvestIdentifier(harvestIdentifier); From fa0389d87992562822518878f2cb01065f483f76 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 6 Nov 2024 11:40:57 -0500 Subject: [PATCH 231/402] adding check for dataset order incorrect --- doc/sphinx-guides/source/api/native-api.rst | 3 +++ src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 3 +++ src/main/java/propertyFiles/Bundle.properties | 1 + src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 6 ++++++ 4 files changed, 13 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 5e4f9debe6b..371b1440d8d 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1698,6 +1698,9 @@ Compare Versions of a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Returns a list of fields that have changed between 2 Dataset versions within the Metadata and Terms of Access. Also includes the files that have been added or removed as well as files that have been modified. +When compare includes an unpublished/draft version the api token must be associated with a user having view unpublished privileges +An error will be returned if VERSION0 was not created before VERSION1 + .. code-block:: bash export SERVER_URL=https://demo.dataverse.org diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index d7b0c78e611..d60f797f35c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3003,6 +3003,9 @@ public Response getCompareVersions(@Context ContainerRequestContext crc, @PathPa DataverseRequest req = createDataverseRequest(getRequestUser(crc)); DatasetVersion dsv1 = getDatasetVersionOrDie(req, versionId1, findDatasetOrDie(id), uriInfo, headers); DatasetVersion dsv2 = getDatasetVersionOrDie(req, versionId2, findDatasetOrDie(id), uriInfo, headers); + if (dsv1.getCreateTime().getTime() > dsv2.getCreateTime().getTime()) { + return error(BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.version.compare.incorrect.order")); + } return ok(DatasetVersion.compareVersions(dsv1, dsv2)); } catch (WrappedResponse wr) { return wr.getResponse(); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 149e6a7e828..461de5c49de 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2498,6 +2498,7 @@ dataset.version.file.changed=Files (Changed File Metadata: {0} dataset.version.file.changed2=; Changed File Metadata: {0} dataset.version.variablemetadata.changed=Variable Metadata (Changed Variable Metadata: {0} dataset.version.variablemetadata.changed2=; Changed Variable Metadata: {0} +dataset.version.compare.incorrect.order=Compare requires the older dataset version to be listed first. 
#DataversePage.java dataverse.item.required=Required diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 61977f6446c..9397b2246ec 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -5291,5 +5291,11 @@ public void testCompareDatasetVersionsAPI() throws InterruptedException { .body("data.filesReplaced[0].newFile.fileName", CoreMatchers.equalTo("favicon-32x32.png")) .body("data.TermsOfAccess", CoreMatchers.notNullValue()) .statusCode(OK.getStatusCode()); + + compareResponse = UtilIT.compareDatasetVersions(datasetPersistentId, ":draft", ":latest-published", apiToken); + compareResponse.prettyPrint(); + compareResponse.then().assertThat() + .body("message", CoreMatchers.equalTo(BundleUtil.getStringFromBundle("dataset.version.compare.incorrect.order"))) + .statusCode(BAD_REQUEST.getStatusCode()); } } From 91fef44c0080a5c505a6e170489762d4982e4f43 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 6 Nov 2024 11:44:43 -0500 Subject: [PATCH 232/402] adding check for dataset order incorrect --- .../10888-add-api-for-comparing-dataset-versions.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release-notes/10888-add-api-for-comparing-dataset-versions.md b/doc/release-notes/10888-add-api-for-comparing-dataset-versions.md index f9b3822d29d..b82441ee11a 100644 --- a/doc/release-notes/10888-add-api-for-comparing-dataset-versions.md +++ b/doc/release-notes/10888-add-api-for-comparing-dataset-versions.md @@ -3,6 +3,8 @@ The following API have been added: /api/datasets/{persistentId}/versions/{versionId0}/compare/{versionId1} This API lists the changes between 2 dataset versions. The Json response shows the changes per field within the Metadata block and the Terms Of Access. Also listed are the files that have been added or removed. Files that have been modified will also display the new file data plus the fields that have been modified. +When compare includes an unpublished/draft version the api token must be associated with a user having view unpublished privileges +An error will be returned if VERSION0 was not created before VERSION1 Example of Metadata Block field change: ```json From b6df149b155cdfd37c27e3c5945cd201777f1303 Mon Sep 17 00:00:00 2001 From: ofahimIQSS Date: Wed, 6 Nov 2024 11:48:57 -0500 Subject: [PATCH 233/402] Update making-releases.rst --- doc/sphinx-guides/source/developers/making-releases.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index 25297a23fca..888a56d7001 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ b/doc/sphinx-guides/source/developers/making-releases.rst @@ -62,6 +62,11 @@ The task at or near release time is to collect these snippets into a single file - Include instructions describing the steps required to upgrade the application from the previous version. These must be customized for release numbers and special circumstances such as changes to metadata blocks and infrastructure. - Take the release notes .md through the regular Code Review and QA process. That is, make a pull request. 
Here's an example: https://github.com/IQSS/dataverse/pull/10866 +Upgrade Instructions for Internal +--------------------------------- +To upgrade internal, go to /doc/release-notes, open the release-notes.md file for the current release and perform all the steps under "Upgrade Instructions". + + Deploy Release Candidate to Demo -------------------------------- From 0105fc0b91aa4cfe7e8a03894908d1e37184e819 Mon Sep 17 00:00:00 2001 From: ofahimIQSS Date: Wed, 6 Nov 2024 11:55:18 -0500 Subject: [PATCH 234/402] Update making-releases.rst --- doc/sphinx-guides/source/developers/making-releases.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index 888a56d7001..58589d9debc 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ b/doc/sphinx-guides/source/developers/making-releases.rst @@ -64,6 +64,7 @@ The task at or near release time is to collect these snippets into a single file Upgrade Instructions for Internal --------------------------------- + To upgrade internal, go to /doc/release-notes, open the release-notes.md file for the current release and perform all the steps under "Upgrade Instructions". From 07e78c9207d5203c189c300a12fa7642260f157a Mon Sep 17 00:00:00 2001 From: ofahimIQSS Date: Wed, 6 Nov 2024 11:55:51 -0500 Subject: [PATCH 235/402] Update making-releases.rst --- doc/sphinx-guides/source/developers/making-releases.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index 58589d9debc..4b52b3ce922 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ b/doc/sphinx-guides/source/developers/making-releases.rst @@ -67,7 +67,6 @@ Upgrade Instructions for Internal To upgrade internal, go to /doc/release-notes, open the release-notes.md file for the current release and perform all the steps under "Upgrade Instructions". - Deploy Release Candidate to Demo -------------------------------- From 3266d882a8700d272c501d3fde166a993c03caef Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 7 Nov 2024 09:47:00 -0500 Subject: [PATCH 236/402] Update doc/release-notes/8184-rename-private-url.md Co-authored-by: Philip Durbin --- doc/release-notes/8184-rename-private-url.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/8184-rename-private-url.md b/doc/release-notes/8184-rename-private-url.md index 600e8dd228c..7c78080d4ea 100644 --- a/doc/release-notes/8184-rename-private-url.md +++ b/doc/release-notes/8184-rename-private-url.md @@ -4,4 +4,6 @@ With this release the name of the URL that may be used by dataset administrators Also, additional information about the creation of Preview URLs has been added to the popup accessed via edit menu of the Dataset Page. -Any Private URLs created in previous versions of Dataverse will continue to work as will the api for creation and deletion of Private URLs. +Any Private URLs created in previous versions of Dataverse will continue to work. + +The old "privateUrl" API endpoints for the creation and deletion of Preview (formerly Private) URLs have been deprecated. They will continue to work but please switch to the "previewUrl" equivalents that have been [documented](https://dataverse-guide--10961.org.readthedocs.build/en/10961/api/native-api.html#create-a-preview-url-for-a-dataset) in the API Guide. 
From d6001c92d7822dbf12d4645c56c6c289211c8c1c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 7 Nov 2024 09:59:30 -0500 Subject: [PATCH 237/402] #8184 remove deprecated code redundancy --- .../harvard/iq/dataverse/api/Datasets.java | 26 +++---------------- 1 file changed, 4 insertions(+), 22 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 52610e10323..0ce06e204c3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2174,24 +2174,15 @@ public Response getAssignments(@Context ContainerRequestContext crc, @PathParam( @Deprecated(forRemoval = true, since = "2024-10-17") @Path("{id}/privateUrl") public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { - return response( req -> { - PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied))); - return (privateUrl != null) ? ok(json(privateUrl)) - : error(Response.Status.NOT_FOUND, "Private URL not found."); - }, getRequestUser(crc)); + return getPreviewUrlData(crc, idSupplied); } @POST @AuthRequired @Deprecated(forRemoval = true, since = "2024-10-17") @Path("{id}/privateUrl") - public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) { - if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) { - throw new NotAcceptableException("Anonymized Access not enabled"); - } - return response(req -> - ok(json(execCommand( - new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied), anonymizedAccess)))), getRequestUser(crc)); + public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @DefaultValue("false") @QueryParam("anonymizedAccess") boolean anonymizedAccess) { + return createPreviewUrl(crc, idSupplied, anonymizedAccess); } @DELETE @@ -2199,16 +2190,7 @@ public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathPara @Deprecated(forRemoval = true, since = "2024-10-17") @Path("{id}/privateUrl") public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { - return response( req -> { - Dataset dataset = findDatasetOrDie(idSupplied); - PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset)); - if (privateUrl != null) { - execCommand(new DeletePrivateUrlCommand(req, dataset)); - return ok("Private URL deleted."); - } else { - return notFound("No Private URL to delete."); - } - }, getRequestUser(crc)); + return deletePreviewUrl(crc, idSupplied); } @GET From b64d44897011e3f61509688af79ec1d98928dd40 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 7 Nov 2024 10:32:01 -0500 Subject: [PATCH 238/402] Update native-api.rst --- doc/sphinx-guides/source/api/native-api.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 3cc5ac6c00b..d239d1cc0a1 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2028,6 +2028,7 @@ Setting anonymizedAccess=true in your call will create a PreviewURL that only al curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST 
"https://demo.dataverse.org/api/datasets/24/previewUrl?anonymizedAccess=true" +Note: Previous endpoints with privateUrl instead of previewUrl are deprecated, but supported. Get the Preview URL for a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From db46d8abb6bf5b121fe08f2f08c62acf6859b007 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 7 Nov 2024 10:51:03 -0500 Subject: [PATCH 239/402] Adding UPPER() to the named queries that search on persistent identifiers, making the searches case-insensitive and making it possible to look up any lower case or mixed-case persistent ids already in the database. #11003 --- src/main/java/edu/harvard/iq/dataverse/DvObject.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index a4882f772d6..30f45064582 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -27,9 +27,9 @@ @NamedQuery(name = "DvObject.ownedObjectsById", query="SELECT COUNT(obj) FROM DvObject obj WHERE obj.owner.id=:id"), @NamedQuery(name = "DvObject.findByGlobalId", - query = "SELECT o FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), + query = "SELECT o FROM DvObject o WHERE UPPER(o.identifier)=:identifier and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), @NamedQuery(name = "DvObject.findIdByGlobalId", - query = "SELECT o.id FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), + query = "SELECT o.id FROM DvObject o WHERE UPPER(o.identifier)=:identifier and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), @NamedQuery(name = "DvObject.findByAlternativeGlobalId", query = "SELECT o FROM DvObject o, AlternativePersistentIdentifier a WHERE o.id = a.dvObject.id and a.identifier=:identifier and a.authority=:authority and a.protocol=:protocol and o.dtype=:dtype"), @@ -37,7 +37,7 @@ query = "SELECT o.id FROM DvObject o, AlternativePersistentIdentifier a WHERE o.id = a.dvObject.id and a.identifier=:identifier and a.authority=:authority and a.protocol=:protocol and o.dtype=:dtype"), @NamedQuery(name = "DvObject.findByProtocolIdentifierAuthority", - query = "SELECT o FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol"), + query = "SELECT o FROM DvObject o WHERE UPPER(o.identifier)=:identifier and o.authority=:authority and o.protocol=:protocol"), @NamedQuery(name = "DvObject.findByOwnerId", query = "SELECT o FROM DvObject o WHERE o.owner.id=:ownerId order by o.dtype desc, o.id"), @NamedQuery(name = "DvObject.findByAuthenticatedUserId", From 0accbc465f7ee446c042acf585d8a6d257335512 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 7 Nov 2024 12:27:11 -0500 Subject: [PATCH 240/402] tweak tests for case insensitivity, update docs/notes for clarity --- doc/sphinx-guides/source/installation/config.rst | 4 ++++ .../pidproviders/handle/HandlePidProvider.java | 5 +++++ .../pidproviders/perma/PermaLinkPidProvider.java | 3 +++ .../iq/dataverse/pidproviders/PidUtilTest.java | 15 +++++++++++---- 4 files changed, 23 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index a2c27598b76..e3965e3cd7c 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ 
b/doc/sphinx-guides/source/installation/config.rst @@ -236,6 +236,10 @@ Dataverse automatically manages assigning PIDs and making them findable when dat allow updating the PID target URLs and metadata of already-published datasets manually if needed `, e.g. if a Dataverse instance is moved to a new URL or when the software is updated to generate additional metadata or address schema changes at the PID service. +Note that while some forms of PIDs (Handles, PermaLinks) are technically case sensitive, common practice is to avoid creating PIDs that differ only by case. +Dataverse treats PIDs of all types as case-insensitive (as DOIs are by definition). This means that Dataverse will find datasets (in search, to display dataset pages, etc.) +when the PIDs entered do not match the case of the original but will have a problem if two PIDs that differ only by case exist in one instance. + Testing PID Providers +++++++++++++++++++++ diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java index 9d61663d034..1f03d8a6cfb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java @@ -59,6 +59,11 @@ * service. * As of now, it only does the registration updates, to accommodate * the modifyRegistration datasets API sub-command. + * + * Note that while Handles are nominally case sensitive, handle.net is + * configured to be case-insensitive and Dataverse makes case-insensitve + * database look-ups to find Handles (See #11003). That said, database + * entries are stored in the case matching the configuration of the provider. */ public class HandlePidProvider extends AbstractPidProvider { diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/perma/PermaLinkPidProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/perma/PermaLinkPidProvider.java index 7b55292350f..2cc0d41ede7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/perma/PermaLinkPidProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/perma/PermaLinkPidProvider.java @@ -24,6 +24,9 @@ * overridable by a configurable parameter to support use of an external * resolver. * + * Note that while PermaLinks are nominally case sensitive, Dataverse makes + * case-insensitve database look-ups to find them (See #11003). That said, database + * entries are stored in the case matching the configuration of the provider. 
*/ public class PermaLinkPidProvider extends AbstractPidProvider { diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java index ecf18e6b1ca..bacb231b4d5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java @@ -99,7 +99,7 @@ @JvmSetting(key = JvmSettings.PID_PROVIDER_LABEL, value = "FAKE 1", varArgs = "fake1") @JvmSetting(key = JvmSettings.PID_PROVIDER_TYPE, value = FakeDOIProvider.TYPE, varArgs = "fake1") @JvmSetting(key = JvmSettings.PID_PROVIDER_AUTHORITY, value = "10.5074", varArgs = "fake1") -@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "FK", varArgs = "fake1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "fk", varArgs = "fake1") @JvmSetting(key = JvmSettings.PID_PROVIDER_MANAGED_LIST, value = "doi:10.5073/FK3ABCDEF", varArgs ="fake1") //HANDLE 1 @@ -315,6 +315,13 @@ public void testUnmanagedParsing() throws IOException { GlobalId pid6 = PidUtil.parseAsGlobalID(pid6String); assertEquals(pid6String, pid6.asString()); assertEquals(UnmanagedPermaLinkPidProvider.ID, pid6.getProviderId()); + + //Lowercase test for unmanaged DOIs + String pid7String = "doi:10.5281/zenodo.6381129"; + GlobalId pid7 = PidUtil.parseAsGlobalID(pid7String); + assertEquals(UnmanagedDOIProvider.ID, pid5.getProviderId()); + assertEquals(pid7String.toUpperCase().replace("DOI", "doi"), pid7.asString()); + } @@ -353,15 +360,15 @@ public void testExcludedSetParsing() throws IOException { @Test public void testManagedSetParsing() throws IOException { - String pid1String = "doi:10.5073/FK3ABCDEF"; + String pid1String = "doi:10.5073/fk3ABCDEF"; GlobalId pid2 = PidUtil.parseAsGlobalID(pid1String); - assertEquals(pid1String, pid2.asString()); + assertEquals(pid1String.toUpperCase().replace("DOI", "doi"), pid2.asString()); assertEquals("fake1", pid2.getProviderId()); assertEquals("https://doi.org/" + pid2.getAuthority() + PidUtil.getPidProvider(pid2.getProviderId()).getSeparator() + pid2.getIdentifier(),pid2.asURL()); assertEquals("10.5073", pid2.getAuthority()); assertEquals(AbstractDOIProvider.DOI_PROTOCOL, pid2.getProtocol()); GlobalId pid3 = PidUtil.parseAsGlobalID(pid2.asURL()); - assertEquals(pid1String, pid3.asString()); + assertEquals(pid1String.toUpperCase().replace("DOI", "doi"), pid3.asString()); assertEquals("fake1", pid3.getProviderId()); assertFalse(PidUtil.getPidProvider(pid3.getProviderId()).canCreatePidsLike(pid3)); From 4c05bce9c1277bba361a374e73aa5a752ae302f0 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 7 Nov 2024 12:53:07 -0500 Subject: [PATCH 241/402] store in original form so we can use it if the source is case-sensitive --- .../harvard/iq/dataverse/api/imports/ImportServiceBean.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index 59b9f970f30..ee4609a7c56 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -313,9 +313,7 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve // Creating a new dataset from scratch: harvestedDataset = parser.parseDataset(obj); - //Use normalized form (e.g. 
upper case DOI) - harvestedDataset.setGlobalId(globalId); - + harvestedDataset.setHarvestedFrom(harvestingClient); harvestedDataset.setHarvestIdentifier(harvestIdentifier); From 73d17a225056d83011285e2c4ecf3cec879814e7 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 7 Nov 2024 14:22:56 -0500 Subject: [PATCH 242/402] add UPPER for both sides of comparison --- src/main/java/edu/harvard/iq/dataverse/DvObject.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index 30f45064582..cc874937632 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -27,9 +27,9 @@ @NamedQuery(name = "DvObject.ownedObjectsById", query="SELECT COUNT(obj) FROM DvObject obj WHERE obj.owner.id=:id"), @NamedQuery(name = "DvObject.findByGlobalId", - query = "SELECT o FROM DvObject o WHERE UPPER(o.identifier)=:identifier and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), + query = "SELECT o FROM DvObject o WHERE UPPER(o.identifier)=UPPER(:identifier) and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), @NamedQuery(name = "DvObject.findIdByGlobalId", - query = "SELECT o.id FROM DvObject o WHERE UPPER(o.identifier)=:identifier and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), + query = "SELECT o.id FROM DvObject o WHERE UPPER(o.identifier)=UPPER(:identifier) and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), @NamedQuery(name = "DvObject.findByAlternativeGlobalId", query = "SELECT o FROM DvObject o, AlternativePersistentIdentifier a WHERE o.id = a.dvObject.id and a.identifier=:identifier and a.authority=:authority and a.protocol=:protocol and o.dtype=:dtype"), @@ -37,7 +37,7 @@ query = "SELECT o.id FROM DvObject o, AlternativePersistentIdentifier a WHERE o.id = a.dvObject.id and a.identifier=:identifier and a.authority=:authority and a.protocol=:protocol and o.dtype=:dtype"), @NamedQuery(name = "DvObject.findByProtocolIdentifierAuthority", - query = "SELECT o FROM DvObject o WHERE UPPER(o.identifier)=:identifier and o.authority=:authority and o.protocol=:protocol"), + query = "SELECT o FROM DvObject o WHERE UPPER(o.identifier)=UPPER(:identifier) and o.authority=:authority and o.protocol=:protocol"), @NamedQuery(name = "DvObject.findByOwnerId", query = "SELECT o FROM DvObject o WHERE o.owner.id=:ownerId order by o.dtype desc, o.id"), @NamedQuery(name = "DvObject.findByAuthenticatedUserId", From dae9287cfdfef6f153fc7c984692ea1167fdf805 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 7 Nov 2024 15:58:31 -0500 Subject: [PATCH 243/402] Add a new index with upper(identifier) --- src/main/java/edu/harvard/iq/dataverse/DvObject.java | 3 ++- src/main/resources/db/migration/V6.4.0.1.sql | 4 ++++ 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 src/main/resources/db/migration/V6.4.0.1.sql diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index cc874937632..5dab43fbdbd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -53,7 +53,8 @@ @Table(indexes = {@Index(columnList="dtype") , @Index(columnList="owner_id") , @Index(columnList="creator_id") - , @Index(columnList="releaseuser_id")}, + , @Index(columnList="releaseuser_id") + , @Index(columnList="authority,protocol, 
UPPER(identifier)", name="INDEX_DVOBJECT_authority_protocol_upper_identifier")}, uniqueConstraints = {@UniqueConstraint(columnNames = {"authority,protocol,identifier"}),@UniqueConstraint(columnNames = {"owner_id,storageidentifier"})}) public abstract class DvObject extends DataverseEntity implements java.io.Serializable { diff --git a/src/main/resources/db/migration/V6.4.0.1.sql b/src/main/resources/db/migration/V6.4.0.1.sql new file mode 100644 index 00000000000..0bcd87dd736 --- /dev/null +++ b/src/main/resources/db/migration/V6.4.0.1.sql @@ -0,0 +1,4 @@ +-- Adding a case-insensitive index related to #11003 +-- + +CREATE UNIQUE INDEX IF NOT EXISTS INDEX_DVOBJECT_authority_protocol_upper_identifier ON dvobject (authority, protocol, UPPER(identifier)); \ No newline at end of file From b8c0c405984ea36c1bc7dfb9b54ff6411ffd2dc2 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 7 Nov 2024 16:01:22 -0500 Subject: [PATCH 244/402] tweak prior release note --- .../10708 - MDC Citation and DOI parsing improvements.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10708 - MDC Citation and DOI parsing improvements.md b/doc/release-notes/10708 - MDC Citation and DOI parsing improvements.md index 1dcd293df77..86c1bb14d32 100644 --- a/doc/release-notes/10708 - MDC Citation and DOI parsing improvements.md +++ b/doc/release-notes/10708 - MDC Citation and DOI parsing improvements.md @@ -1,3 +1,3 @@ MDC Citation retrieval with the PID settings has been fixed. -DOI parsing in Dataverse is case insensitive, improving interaction with services that may change the case. +PID parsing in Dataverse is now case insensitive, improving interaction with services that may change the case of PIDs. Warnings related to managed/excluded PID lists for PID providers have been reduced From 0688783d39e58724820cce1c69019271f0129900 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 12 Nov 2024 12:18:34 +0000 Subject: [PATCH 245/402] Added: isMetadataBlockRoot and isFacetRoot to getDataverse json response --- .../edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 4 +++- .../java/edu/harvard/iq/dataverse/api/DataversesIT.java | 6 ++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 1bdee48b14d..f884d313d64 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -276,7 +276,9 @@ public static JsonObjectBuilder json(Dataverse dv, Boolean hideEmail, Boolean re } bld.add("permissionRoot", dv.isPermissionRoot()) .add("description", dv.getDescription()) - .add("dataverseType", dv.getDataverseType().name()); + .add("dataverseType", dv.getDataverseType().name()) + .add("isMetadataBlockRoot", dv.isMetadataBlockRoot()) + .add("isFacetRoot", dv.isFacetRoot()); if (dv.getOwner() != null) { bld.add("ownerId", dv.getOwner().getId()); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 01c02900158..9567cf3910a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -135,14 +135,16 @@ public void testDataverseCategory() { public void testMinimalDataverse() throws FileNotFoundException { Response createUser = UtilIT.createRandomUser(); createUser.prettyPrint(); - String username = 
UtilIT.getUsernameFromResponse(createUser); String apiToken = UtilIT.getApiTokenFromResponse(createUser); JsonObject dvJson; FileReader reader = new FileReader("doc/sphinx-guides/source/_static/api/dataverse-minimal.json"); dvJson = Json.createReader(reader).readObject(); Response create = UtilIT.createDataverse(dvJson, apiToken); create.prettyPrint(); - create.then().assertThat().statusCode(CREATED.getStatusCode()); + create.then().assertThat() + .body("data.isMetadataBlockRoot", equalTo(false)) + .body("data.isFacetRoot", equalTo(false)) + .statusCode(CREATED.getStatusCode()); Response deleteDataverse = UtilIT.deleteDataverse("science", apiToken); deleteDataverse.prettyPrint(); deleteDataverse.then().assertThat().statusCode(OK.getStatusCode()); From 5198a9f47b80e09f0a3f22a4a0af1279b679a2d0 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 12 Nov 2024 12:20:59 +0000 Subject: [PATCH 246/402] Added: release notes for #11012 --- doc/release-notes/11012-get-dataverse-api-ext.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/11012-get-dataverse-api-ext.md diff --git a/doc/release-notes/11012-get-dataverse-api-ext.md b/doc/release-notes/11012-get-dataverse-api-ext.md new file mode 100644 index 00000000000..641aa373174 --- /dev/null +++ b/doc/release-notes/11012-get-dataverse-api-ext.md @@ -0,0 +1 @@ +The JSON payload of the getDataverse endpoint has been extended to include properties isMetadataBlockRoot and isFacetRoot. From 373f0f7a79716106b42f76430e20b62487f320bb Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 12 Nov 2024 12:13:41 -0500 Subject: [PATCH 247/402] link to issues and PR in release note --- doc/release-notes/8184-rename-private-url.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release-notes/8184-rename-private-url.md b/doc/release-notes/8184-rename-private-url.md index 7c78080d4ea..7acb03fd735 100644 --- a/doc/release-notes/8184-rename-private-url.md +++ b/doc/release-notes/8184-rename-private-url.md @@ -7,3 +7,5 @@ Also, additional information about the creation of Preview URLs has been added t Any Private URLs created in previous versions of Dataverse will continue to work. The old "privateUrl" API endpoints for the creation and deletion of Preview (formerly Private) URLs have been deprecated. They will continue to work but please switch to the "previewUrl" equivalents that have been [documented](https://dataverse-guide--10961.org.readthedocs.build/en/10961/api/native-api.html#create-a-preview-url-for-a-dataset) in the API Guide. + +See also #8184, #8185, #10950, and #10961. 
From 9367eb1e8be85f91e0e0b015a084084671cb4b0e Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 12 Nov 2024 14:17:59 -0500 Subject: [PATCH 248/402] add comment to yml files as to reason for v4.1.7 --- .github/workflows/deploy_beta_testing.yml | 1 + .github/workflows/maven_unit_test.yml | 2 ++ 2 files changed, 3 insertions(+) diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index c36a4dfab43..2c684aa8ad1 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -45,6 +45,7 @@ jobs: - uses: actions/checkout@v4 - name: Download war artifact + # using v4.1.7 due to a bug in v4 uses: actions/download-artifact@v4.1.7 with: name: built-app diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 18a741e5ca5..2ce872cd55c 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -107,6 +107,7 @@ jobs: cache: maven # Get the build output from the unit test job + # using v4.1.7 due to a bug in v4 - uses: actions/download-artifact@v4.1.7 with: name: java-artifacts @@ -140,6 +141,7 @@ jobs: cache: maven # Get the build output from the integration test job + # using v4.1.7 due to a bug in v4 - uses: actions/download-artifact@v4.1.7 with: name: java-reportdir From 594acb7c0cdaa3815f0fc3861d97022e0df5a66e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 12 Nov 2024 14:29:58 -0500 Subject: [PATCH 249/402] remove Gson and test based on content, not string match #10739 --- .../edu/harvard/iq/dataverse/api/InfoIT.java | 21 +++-- src/test/resources/json/export-formats.json | 84 ++++++++++++++++++- 2 files changed, 95 insertions(+), 10 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java index 286f9789ed6..b198d2769a0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java @@ -3,17 +3,17 @@ import static io.restassured.RestAssured.given; import io.restassured.response.Response; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import com.google.gson.Gson; -import com.google.gson.JsonObject; import org.junit.jupiter.api.AfterAll; -import static org.junit.jupiter.api.Assertions.assertEquals; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; import static jakarta.ws.rs.core.Response.Status.NOT_FOUND; import static jakarta.ws.rs.core.Response.Status.OK; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Paths; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; +import org.skyscreamer.jsonassert.JSONAssert; public class InfoIT { @@ -85,16 +85,19 @@ public void testGetZipDownloadLimit() { } @Test - public void testGetExportFormats() { + public void testGetExportFormats() throws IOException { Response response = given().urlEncodingEnabled(false) .get("/api/info/exportFormats"); response.prettyPrint(); response.then().assertThat().statusCode(OK.getStatusCode()); - String expectedJson = UtilIT.getDatasetJson("src/test/resources/json/export-formats.json"); - JsonObject expectedJsonObject = new Gson().fromJson(expectedJson, JsonObject.class); - JsonObject actualJsonObject = new Gson().fromJson(response.getBody().asString(), 
JsonObject.class); - assertEquals(expectedJsonObject, actualJsonObject.get("data")); + String actual = response.getBody().asString(); + String expected = + java.nio.file.Files.readString( + Paths.get("src/test/resources/json/export-formats.json"), + StandardCharsets.UTF_8); + JSONAssert.assertEquals(expected, actual, true); + } diff --git a/src/test/resources/json/export-formats.json b/src/test/resources/json/export-formats.json index 9cf1984eb6a..0bca2314a8e 100644 --- a/src/test/resources/json/export-formats.json +++ b/src/test/resources/json/export-formats.json @@ -1 +1,83 @@ -{"OAI_ORE":{"displayName":"OAI_ORE","mediaType":"application/json","isHarvestable":false,"isVisibleInUserInterface":true},"Datacite":{"displayName":"DataCite","mediaType":"application/xml","isHarvestable":true,"isVisibleInUserInterface":true,"XMLNameSpace":"http://datacite.org/schema/kernel-3","XMLSchemaLocation":"http://datacite.org/schema/kernel-3 http://schema.datacite.org/meta/kernel-3/metadata.xsd","XMLSchemaVersion":"3.0"},"oai_dc":{"displayName":"Dublin Core","mediaType":"application/xml","isHarvestable":true,"isVisibleInUserInterface":false,"XMLNameSpace":"http://www.openarchives.org/OAI/2.0/oai_dc/","XMLSchemaLocation":"http://www.openarchives.org/OAI/2.0/oai_dc.xsd","XMLSchemaVersion":"2.0"},"oai_datacite":{"displayName":"OpenAIRE","mediaType":"application/xml","isHarvestable":true,"isVisibleInUserInterface":true,"XMLNameSpace":"http://datacite.org/schema/kernel-4","XMLSchemaLocation":"http://schema.datacite.org/meta/kernel-4.1/metadata.xsd","XMLSchemaVersion":"4.1"},"schema.org":{"displayName":"Schema.org JSON-LD","mediaType":"application/json","isHarvestable":false,"isVisibleInUserInterface":true},"ddi":{"displayName":"DDI","mediaType":"application/xml","isHarvestable":false,"isVisibleInUserInterface":true,"XMLNameSpace":"ddi:codebook:2_5","XMLSchemaLocation":"https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd","XMLSchemaVersion":"2.5"},"dcterms":{"displayName":"Dublin Core","mediaType":"application/xml","isHarvestable":false,"isVisibleInUserInterface":true,"XMLNameSpace":"http://purl.org/dc/terms/","XMLSchemaLocation":"http://dublincore.org/schemas/xmls/qdc/dcterms.xsd","XMLSchemaVersion":"2.0"},"html":{"displayName":"DDI HTML Codebook","mediaType":"text/html","isHarvestable":false,"isVisibleInUserInterface":true},"dataverse_json":{"displayName":"JSON","mediaType":"application/json","isHarvestable":true,"isVisibleInUserInterface":true},"oai_ddi":{"displayName":"DDI","mediaType":"application/xml","isHarvestable":true,"isVisibleInUserInterface":false,"XMLNameSpace":"ddi:codebook:2_5","XMLSchemaLocation":"https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd","XMLSchemaVersion":"2.5"}} \ No newline at end of file +{ + "status": "OK", + "data": { + "OAI_ORE": { + "displayName": "OAI_ORE", + "mediaType": "application/json", + "isHarvestable": false, + "isVisibleInUserInterface": true + }, + "Datacite": { + "displayName": "DataCite", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": true, + "XMLNameSpace": "http://datacite.org/schema/kernel-3", + "XMLSchemaLocation": "http://datacite.org/schema/kernel-3 http://schema.datacite.org/meta/kernel-3/metadata.xsd", + "XMLSchemaVersion": "3.0" + }, + "oai_dc": { + "displayName": "Dublin Core", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": false, + "XMLNameSpace": "http://www.openarchives.org/OAI/2.0/oai_dc/", + "XMLSchemaLocation": 
"http://www.openarchives.org/OAI/2.0/oai_dc.xsd", + "XMLSchemaVersion": "2.0" + }, + "oai_datacite": { + "displayName": "OpenAIRE", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": true, + "XMLNameSpace": "http://datacite.org/schema/kernel-4", + "XMLSchemaLocation": "http://schema.datacite.org/meta/kernel-4.1/metadata.xsd", + "XMLSchemaVersion": "4.1" + }, + "schema.org": { + "displayName": "Schema.org JSON-LD", + "mediaType": "application/json", + "isHarvestable": false, + "isVisibleInUserInterface": true + }, + "ddi": { + "displayName": "DDI", + "mediaType": "application/xml", + "isHarvestable": false, + "isVisibleInUserInterface": true, + "XMLNameSpace": "ddi:codebook:2_5", + "XMLSchemaLocation": "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd", + "XMLSchemaVersion": "2.5" + }, + "dcterms": { + "displayName": "Dublin Core", + "mediaType": "application/xml", + "isHarvestable": false, + "isVisibleInUserInterface": true, + "XMLNameSpace": "http://purl.org/dc/terms/", + "XMLSchemaLocation": "http://dublincore.org/schemas/xmls/qdc/dcterms.xsd", + "XMLSchemaVersion": "2.0" + }, + "html": { + "displayName": "DDI HTML Codebook", + "mediaType": "text/html", + "isHarvestable": false, + "isVisibleInUserInterface": true + }, + "dataverse_json": { + "displayName": "JSON", + "mediaType": "application/json", + "isHarvestable": true, + "isVisibleInUserInterface": true + }, + "oai_ddi": { + "displayName": "DDI", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": false, + "XMLNameSpace": "ddi:codebook:2_5", + "XMLSchemaLocation": "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd", + "XMLSchemaVersion": "2.5" + } + } +} From 60d6f92c6985f0b489a8f56dace2ad3d8b1628e5 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 13 Nov 2024 09:36:17 -0500 Subject: [PATCH 250/402] audit physical files --- .../220-harvard-edu-audit-files.md | 16 ++ doc/sphinx-guides/source/api/native-api.rst | 55 ++++++ .../edu/harvard/iq/dataverse/api/Admin.java | 161 +++++++++++++++--- .../iq/dataverse/dataaccess/S3AccessIO.java | 6 + .../edu/harvard/iq/dataverse/api/AdminIT.java | 49 +++++- .../edu/harvard/iq/dataverse/api/UtilIT.java | 16 ++ 6 files changed, 275 insertions(+), 28 deletions(-) create mode 100644 doc/release-notes/220-harvard-edu-audit-files.md diff --git a/doc/release-notes/220-harvard-edu-audit-files.md b/doc/release-notes/220-harvard-edu-audit-files.md new file mode 100644 index 00000000000..536554313cf --- /dev/null +++ b/doc/release-notes/220-harvard-edu-audit-files.md @@ -0,0 +1,16 @@ +### New API to Audit Datafiles across the database + +This is a superuser only tool to audit Datasets with DataFiles where the physical files are missing or the file metadata is missing. +The Datasets scanned can be limited by optional firstId and lastId query parameters, or a given CSV list of Dataset Identifiers. +Once the audit report is generated, an Administrator can either delete the missing file(s) from the Dataset or contact the author to re-upload the missing file(s). + +The Json response includes: +- List of files in each DataFile where the file exists in the database but the physical file is not on the file store. +- List of DataFiles where the FileMetadata is missing. 
+- Other failures found when trying to process the Datasets + +curl "http://localhost:8080/api/admin/datafiles/auditFiles" +curl "http://localhost:8080/api/admin/datafiles/auditFiles?firstId=0&lastId=1000" +curl "http://localhost:8080/api/admin/datafiles/auditFiles?DatasetIdentifierList=doi:10.5072/FK2/RVNT9Q,doi:10.5072/FK2/RVNT9Q" + +For more information, see issue [#220](https://github.com/IQSS/dataverse.harvard.edu/issues/220) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 6254742eebb..6fc10bdfa08 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6200,6 +6200,61 @@ Note that if you are attempting to validate a very large number of datasets in y asadmin set server-config.network-config.protocols.protocol.http-listener-1.http.request-timeout-seconds=3600 +Datafile Audit +~~~~~~~~~~~~~~ + +Produce an Audit report of missing files and FileMetadata for Datasets. +Scans the Datasets in the database and verifies that the stored files exist. If the files are missing or if the FileMetadata is missing this information is returned in a Json response:: + + curl "$SERVER_URL/api/admin/datafiles/auditFiles" + +Optional Parameters are available for filtering the Datasets scanned. + +For auditing the Datasets in a paged manner (firstId and lastId):: + + curl "$SERVER_URL/api/admin/datafiles/auditFiles?firstId=0&lastId=1000" + +Auditing specific Datasets (comma separated list):: + + curl "$SERVER_URL/api/admin/datafiles/auditFiles?DatasetIdentifierList=doi.org/10.5072/FK2/JXYBJS,doi.org/10.7910/DVN/MPU019" + +Sample Json Audit Response:: + + { + "status": "OK", + "data": { + "firstId": 0, + "lastId": 100, + "DatasetIdentifierList": [ + "doi.org/10.5072/FK2/XXXXXX", + "doi.org/10.5072/FK2/JXYBJS", + "doi.org/10.7910/DVN/MPU019" + ], + "datasetsChecked": 100, + "datasets": [ + { + "id": 6, + "identifier": "FK2/JXYBJS", + "persistentURL": "https://doi.org/10.5072/FK2/JXYBJS", + "missingFileMetadata": [ + "local://1930cce4f2d-855ccc51fcbb, DataFile Id:7" + ] + }, + { + "id": 47731, + "identifier": "DVN/MPU019", + "persistentURL": "https://doi.org/10.7910/DVN/MPU019", + "missingFiles": [ + "s3://dvn-cloud:298910, jihad_metadata_edited.csv" + ] + } + ], + "failures": [ + "DatasetIdentifier Not Found: doi.org/10.5072/FK2/XXXXXX" + ] + } + } + Workflows ~~~~~~~~~ diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 54e5eaf7b84..ecd9b71cc8d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1,28 +1,11 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.BannerMessage; -import edu.harvard.iq.dataverse.BannerMessageServiceBean; -import edu.harvard.iq.dataverse.BannerMessageText; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.DataFileServiceBean; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetServiceBean; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.DatasetVersionServiceBean; -import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DataverseRequestServiceBean; -import edu.harvard.iq.dataverse.DataverseServiceBean; -import edu.harvard.iq.dataverse.DataverseSession; -import edu.harvard.iq.dataverse.DvObject; -import edu.harvard.iq.dataverse.DvObjectServiceBean; +import edu.harvard.iq.dataverse.*; import
edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.StringUtil; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.validation.EMailValidator; -import edu.harvard.iq.dataverse.EjbDataverseEngine; -import edu.harvard.iq.dataverse.Template; -import edu.harvard.iq.dataverse.TemplateServiceBean; -import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.api.dto.RoleDTO; import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo; @@ -66,8 +49,9 @@ import java.io.InputStream; import java.io.StringReader; import java.nio.charset.StandardCharsets; -import java.util.Map; +import java.util.*; import java.util.Map.Entry; +import java.util.function.Predicate; import java.util.logging.Level; import java.util.logging.Logger; import jakarta.ejb.EJB; @@ -81,7 +65,6 @@ import org.apache.commons.io.IOUtils; -import java.util.List; import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationProvidersRegistrationServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; @@ -118,9 +101,7 @@ import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.rolesToJson; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; + import jakarta.inject.Inject; import jakarta.json.JsonArray; import jakarta.persistence.Query; @@ -128,7 +109,6 @@ import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.StreamingOutput; import java.nio.file.Paths; -import java.util.TreeMap; /** * Where the secure, setup API calls live. @@ -2541,4 +2521,135 @@ public Response getFeatureFlag(@PathParam("flag") String flagIn) { } } + @GET + @AuthRequired + @Path("/datafiles/auditFiles") + public Response getAuditFiles(@Context ContainerRequestContext crc, + @QueryParam("firstId") Long firstId, @QueryParam("lastId") Long lastId, + @QueryParam("DatasetIdentifierList") String DatasetIdentifierList) throws WrappedResponse { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + List failures = new ArrayList<>(); + int datasetsChecked = 0; + long startId = (firstId == null ? 0 : firstId); + long endId = (lastId == null ? 
Long.MAX_VALUE : lastId); + + List datasetIdentifiers; + if (DatasetIdentifierList == null || DatasetIdentifierList.isEmpty()) { + datasetIdentifiers = Collections.emptyList(); + } else { + startId = 0; + endId = Long.MAX_VALUE; + datasetIdentifiers = List.of(DatasetIdentifierList.split(",")); + } + if (endId < startId) { + return badRequest("Invalid Parameters: lastId must be equal to or greater than firstId"); + } + + NullSafeJsonBuilder jsonObjectBuilder = NullSafeJsonBuilder.jsonObjectBuilder(); + if (startId > 0) { + jsonObjectBuilder.add("firstId", startId); + } + if (endId < Long.MAX_VALUE) { + jsonObjectBuilder.add("lastId", endId); + } + + // compile the list of ids to process + List datasetIds; + if (datasetIdentifiers.isEmpty()) { + datasetIds = datasetService.findAllLocalDatasetIds(); + } else { + datasetIds = new ArrayList<>(datasetIdentifiers.size()); + JsonArrayBuilder jab = Json.createArrayBuilder(); + datasetIdentifiers.forEach(id -> { + String dId = id.trim(); + jab.add(dId); + Dataset d = datasetService.findByGlobalId(dId); + if (d != null) { + datasetIds.add(d.getId()); + } else { + failures.add("DatasetIdentifier Not Found: " + dId); + } + }); + jsonObjectBuilder.add("DatasetIdentifierList", jab); + } + + JsonArrayBuilder jsonDatasetsArrayBuilder = Json.createArrayBuilder(); + for (Long datasetId : datasetIds) { + if (datasetId < startId) { + continue; + } else if (datasetId > endId) { + break; + } + Dataset dataset; + try { + dataset = findDatasetOrDie(String.valueOf(datasetId)); + datasetsChecked++; + } catch (WrappedResponse ex) { + failures.add("DatasetId:" + datasetId + " Reason:" + ex.getMessage()); + continue; + } + + List missingFiles = new ArrayList<>(); + List missingFileMetadata = new ArrayList<>(); + try { + Predicate filter = s -> true; + StorageIO datasetIO = DataAccess.getStorageIO(dataset); + final List result = datasetIO.cleanUp(filter, true); + // add files that are in dataset files but not in cleanup result or DataFiles with missing FileMetadata + dataset.getFiles().forEach(df -> { + try { + StorageIO datafileIO = df.getStorageIO(); + String storageId = df.getStorageIdentifier(); + FileMetadata fm = df.getFileMetadata(); + if (!datafileIO.exists()) { + missingFiles.add(storageId + ", " + (fm != null ? 
fm.getLabel() : df.getContentType())); + } + if (fm == null) { + missingFileMetadata.add(storageId + ", DataFile Id:" + df.getId()); + } + } catch (IOException e) { + failures.add("DataFileId:" + df.getId() + ", " + e.getMessage()); + } + }); + } catch (IOException e) { + failures.add("DatasetId:" + datasetId + ", " + e.getMessage()); + } + + JsonObjectBuilder job = Json.createObjectBuilder(); + if (!missingFiles.isEmpty() || !missingFileMetadata.isEmpty()) { + job.add("id", dataset.getId()); + job.add("identifier", dataset.getIdentifier()); + job.add("persistentURL", dataset.getPersistentURL()); + if (!missingFileMetadata.isEmpty()) { + JsonArrayBuilder jabMissingFileMetadata = Json.createArrayBuilder(); + missingFileMetadata.forEach(jabMissingFileMetadata::add); + job.add("missingFileMetadata", jabMissingFileMetadata); + } + if (!missingFiles.isEmpty()) { + JsonArrayBuilder jabMissingFiles = Json.createArrayBuilder(); + missingFiles.forEach(jabMissingFiles::add); + job.add("missingFiles", jabMissingFiles); + } + jsonDatasetsArrayBuilder.add(job); + } + } + + jsonObjectBuilder.add("datasetsChecked", datasetsChecked); + jsonObjectBuilder.add("datasets", jsonDatasetsArrayBuilder); + if (!failures.isEmpty()) { + JsonArrayBuilder jsonFailuresArrayBuilder = Json.createArrayBuilder(); + failures.forEach(jsonFailuresArrayBuilder::add); + jsonObjectBuilder.add("failures", jsonFailuresArrayBuilder); + } + + return ok(jsonObjectBuilder); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index d2fdec7b323..5b9e496281f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -753,6 +753,12 @@ public Path getFileSystemPath() throws UnsupportedDataAccessOperationException { @Override public boolean exists() { + try { + key = getMainFileKey(); + } catch (IOException e) { + logger.warning("Caught an IOException in S3AccessIO.exists(): " + e.getMessage()); + return false; + } String destinationKey = null; if (dvObject instanceof DataFile) { destinationKey = key; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 6d7dd2eae29..e639a2f011d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -16,6 +16,8 @@ import java.util.HashMap; import java.util.List; +import jakarta.json.Json; +import jakarta.json.JsonArray; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.BeforeAll; @@ -26,13 +28,11 @@ import java.util.Map; import java.util.UUID; -import java.util.logging.Level; import java.util.logging.Logger; import static jakarta.ws.rs.core.Response.Status.*; +import static org.hamcrest.CoreMatchers.*; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.notNullValue; import static org.junit.jupiter.api.Assertions.assertTrue; public class AdminIT { @@ -901,6 +901,49 @@ public void testDownloadTmpFile() throws IOException { .body("message", equalTo("Path must begin with '/tmp' but after normalization was '/etc/passwd'.")); } + @Test + public void testFindMissingFiles() { + Response createUserResponse = UtilIT.createRandomUser(); + createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); + String 
username = UtilIT.getUsernameFromResponse(createUserResponse); + String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse); + UtilIT.setSuperuserStatus(username, true); + + String dataverseAlias = ":root"; + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.prettyPrint(); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + + // Upload file + Response uploadResponse = UtilIT.uploadRandomFile(datasetPersistentId, apiToken); + uploadResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + // Audit files + Response resp = UtilIT.auditFiles(apiToken, null, 100L, null); + resp.prettyPrint(); + JsonArray emptyArray = Json.createArrayBuilder().build(); + resp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.lastId", equalTo(100)); + + // Audit files with invalid parameters + resp = UtilIT.auditFiles(apiToken, 100L, 0L, null); + resp.prettyPrint(); + resp.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("status", equalTo("ERROR")) + .body("message", equalTo("Invalid Parameters: lastId must be equal to or greater than firstId")); + + // Audit files with list of dataset identifiers parameter + resp = UtilIT.auditFiles(apiToken, 1L, null, "bad/id, " + datasetPersistentId); + resp.prettyPrint(); + resp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.failures[0]", equalTo("DatasetIdentifier Not Found: bad/id")); + } + private String createTestNonSuperuserApiToken() { Response createUserResponse = UtilIT.createRandomUser(); createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 502f1ecb0a8..2fb205f1271 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -241,6 +241,22 @@ public static Response clearThumbnailFailureFlag(long fileId) { return response; } + public static Response auditFiles(String apiToken, Long firstId, Long lastId, String csvList) { + String params = ""; + if (firstId != null) { + params = "?firstId="+ firstId; + } + if (lastId != null) { + params = params + (params.isEmpty() ? "?" : "&") + "lastId="+ lastId; + } + if (csvList != null) { + params = params + (params.isEmpty() ? "?" 
: "&") + "DatasetIdentifierList="+ csvList; + } + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/admin/datafiles/auditFiles" + params); + } + private static String getAuthenticatedUserAsJsonString(String persistentUserId, String firstName, String lastName, String authenticationProviderId, String identifier) { JsonObjectBuilder builder = Json.createObjectBuilder(); builder.add("authenticationProviderId", authenticationProviderId); From c9b685aa942c156670aaa78355bf57b45accd20d Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 13 Nov 2024 11:40:22 -0500 Subject: [PATCH 251/402] Update flyway script --- ..._10017-failure-with-long-custom-question.sql => V6.4.0.2.sql} | 1 + 1 file changed, 1 insertion(+) rename src/main/resources/db/migration/{V6.1.0.6__10017-failure-with-long-custom-question.sql => V6.4.0.2.sql} (90%) diff --git a/src/main/resources/db/migration/V6.1.0.6__10017-failure-with-long-custom-question.sql b/src/main/resources/db/migration/V6.4.0.2.sql similarity index 90% rename from src/main/resources/db/migration/V6.1.0.6__10017-failure-with-long-custom-question.sql rename to src/main/resources/db/migration/V6.4.0.2.sql index 9a3002378b3..414e98975db 100644 --- a/src/main/resources/db/migration/V6.1.0.6__10017-failure-with-long-custom-question.sql +++ b/src/main/resources/db/migration/V6.4.0.2.sql @@ -1 +1,2 @@ +#10118 ALTER TABLE customquestion ALTER COLUMN questionstring TYPE text; From 89892a4862b780d47a90699c729c798726a2c507 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 13 Nov 2024 12:52:45 -0500 Subject: [PATCH 252/402] getget typo --- .../edu/harvard/iq/dataverse/DatasetVersionDifference.java | 2 +- .../harvard/iq/dataverse/DatasetVersionDifferenceTest.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java index 19c3b02f4ee..27868e3c7ed 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java @@ -1616,7 +1616,7 @@ private static boolean fieldsAreDifferent(DatasetField originalField, DatasetFie return false; } - List getgetChangedVariableMetadata() { + List getChangedVariableMetadata() { return changedVariableMetadata; } diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java index 44ac267abaf..8508c9ac34e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java @@ -221,7 +221,7 @@ private void compareResults(DatasetVersion datasetVersion, DatasetVersion datase assertEquals(addedFiles, diff.getAddedFiles()); assertEquals(removedFiles, diff.getRemovedFiles()); assertEquals(changedFileMetadata, diff.getChangedFileMetadata()); - assertEquals(changedVariableMetadata, diff.getgetChangedVariableMetadata()); + assertEquals(changedVariableMetadata, diff.getChangedVariableMetadata()); assertEquals(replacedFiles.size(), diff.getReplacedFiles().size()); for (int i = 0; i < replacedFiles.size(); i++) { assertEquals(replacedFiles.get(i)[0], diff.getReplacedFiles().get(i)[0]); @@ -233,7 +233,7 @@ private void compareResults(DatasetVersion datasetVersion, DatasetVersion datase assertEquals(expectedAddedFiles, diff.getAddedFiles()); assertEquals(expectedRemovedFiles, diff.getRemovedFiles()); 
assertEquals(expectedChangedFileMetadata, diff.getChangedFileMetadata()); - assertEquals(expectedChangedVariableMetadata, diff.getgetChangedVariableMetadata()); + assertEquals(expectedChangedVariableMetadata, diff.getChangedVariableMetadata()); assertEquals(expectedReplacedFiles.size(), diff.getReplacedFiles().size()); for (int i = 0; i < expectedReplacedFiles.size(); i++) { assertEquals(expectedReplacedFiles.get(i)[0], diff.getReplacedFiles().get(i)[0]); From a663ad443e44ae2b01c8fde66f4234763e73c3c1 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 13 Nov 2024 15:29:08 -0500 Subject: [PATCH 253/402] fix comment --- src/main/resources/db/migration/V6.4.0.2.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/resources/db/migration/V6.4.0.2.sql b/src/main/resources/db/migration/V6.4.0.2.sql index 414e98975db..bc4a85b278f 100644 --- a/src/main/resources/db/migration/V6.4.0.2.sql +++ b/src/main/resources/db/migration/V6.4.0.2.sql @@ -1,2 +1,2 @@ -#10118 +-- #10118 ALTER TABLE customquestion ALTER COLUMN questionstring TYPE text; From 646ebd5a4f408f56bc7bfdbefc53c17a09724b26 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20ROUCOU?= Date: Thu, 14 Nov 2024 10:59:02 +0100 Subject: [PATCH 254/402] Review made by pdurbin --- doc/sphinx-guides/source/api/native-api.rst | 4 +- .../iq/dataverse/api/SavedSearches.java | 2 +- .../iq/dataverse/api/SavedSearchIT.java | 58 +++++-------------- .../edu/harvard/iq/dataverse/api/UtilIT.java | 33 ++++++++++- 4 files changed, 48 insertions(+), 49 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 6254742eebb..54f9eed2703 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6079,7 +6079,7 @@ Saved Search ~~~~~~~~~~~~ The Saved Search, Linked Dataverses, and Linked Datasets features are only accessible to superusers except for linking a dataset. The following API endpoints were added to help people with access to the "admin" API make use of these features in their current form. Keep in mind that they are partially experimental. -The update of all saved search is run by a timer once a week (See :ref:`saved-search-timer`) so if you just created a saved search, you can run manually ``makelinks`` endpoint that will find new dataverses and datasets that match the saved search and then link the search results to the dataverse in which the saved search is defined. +The update of all saved search is run by a timer once a week (See :ref:`saved-search-timer`) so if you just created a saved search, you can run manually the ``makelinks`` endpoint that will find new dataverses and datasets that match the saved search and then link the search results to the dataverse in which the saved search is defined. List all saved searches. :: @@ -6091,7 +6091,7 @@ List a saved search by database id. :: Delete a saved search by database id. -The ``unlink=true`` query parameter unlinks all links (linked dataset or Dataverse collection) associated with the deleted saved search. Use of this parameter should be well considered as you cannot know if the links were created manually or by the saved search. After deleting a saved search with ``unlink=true``, we recommend running ``/makelinks/all`` just in case there was a dataset that was linked by another saved search. (Saved searches can link the same dataset.) 
Reindexing might be necessary as well.:: +The ``unlink=true`` query parameter unlinks all links (linked dataset or Dataverse collection) associated with the deleted saved search. Use of this parameter should be well considered as you cannot know if the links were created manually or by the saved search. After deleting a saved search with ``unlink=true``, we recommend running ``/makelinks/all`` just in case there was a dataset that was linked by another saved search. (Saved searches can link the same dataset.) Reindexing might be necessary as well. :: DELETE http://$SERVER/api/admin/savedsearches/$id?unlink=true diff --git a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java index 33a11a2df23..e6519c9ff36 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java @@ -181,7 +181,7 @@ public Response delete(@PathParam("id") long doomedId, @QueryParam("unlink") boo try { wasDeleted = savedSearchSvc.delete(doomedId, unlink); } catch (Exception e) { - return error(INTERNAL_SERVER_ERROR, "Problem while trying to unlink links of saved search id " + doomedId); + return error(INTERNAL_SERVER_ERROR, "Problem while trying to unlink links of saved search id " + doomedId + ". Exception: " + e.getLocalizedMessage()); } if (wasDeleted) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java index 90357596c25..08ebec31cd6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java @@ -22,7 +22,7 @@ public class SavedSearchIT { @BeforeAll public static void setUpClass() { - + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); } @AfterAll @@ -53,81 +53,55 @@ public void testSavedSearches() { Integer datasetId2 = UtilIT.getDatasetIdFromResponse(createDatasetResponse2); // missing body - Response resp = RestAssured.given() - .contentType("application/json") - .post("/api/admin/savedsearches"); + Response resp = UtilIT.setSavedSearch(); resp.prettyPrint(); resp.then().assertThat() .statusCode(INTERNAL_SERVER_ERROR.getStatusCode()); // creatorId null - resp = RestAssured.given() - .body(createSavedSearchJson("*", null, dataverseId, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", null, dataverseId, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // creatorId string - resp = RestAssured.given() - .body(createSavedSearchJson("*", "1", dataverseId.toString(), "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", "1", dataverseId.toString(), "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // creatorId not found - resp = RestAssured.given() - .body(createSavedSearchJson("*", 9999, dataverseId, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 9999, dataverseId, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); 
resp.then().assertThat() .statusCode(NOT_FOUND.getStatusCode()); // definitionPointId null - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, null, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, null, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // definitionPointId string - resp = RestAssured.given() - .body(createSavedSearchJson("*", "1", "9999", "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", "1", "9999", "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // definitionPointId not found - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, 9999, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, 9999, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(NOT_FOUND.getStatusCode()); // missing filter - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, dataverseId)) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, dataverseId)); resp.prettyPrint(); resp.then().assertThat() .statusCode(OK.getStatusCode()); // create a saved search as superuser : OK - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, dataverseId, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, dataverseId, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(OK.getStatusCode()); @@ -136,8 +110,7 @@ public void testSavedSearches() { Integer createdSavedSearchId = path.getInt("data.id"); // get list as non superuser : OK - Response getListReponse = RestAssured.given() - .get("/api/admin/savedsearches/list"); + Response getListReponse = UtilIT.getSavedSearchList(); getListReponse.prettyPrint(); getListReponse.then().assertThat() .statusCode(OK.getStatusCode()); @@ -146,22 +119,19 @@ public void testSavedSearches() { List listBeforeDelete = path2.getList("data.savedSearches"); // makelinks/all as non superuser : OK - Response makelinksAll = RestAssured.given() - .put("/api/admin/savedsearches/makelinks/all"); + Response makelinksAll = UtilIT.setSavedSearchMakelinksAll(); makelinksAll.prettyPrint(); makelinksAll.then().assertThat() .statusCode(OK.getStatusCode()); //delete a saved search as non superuser : OK - Response deleteReponse = RestAssured.given() - .delete("/api/admin/savedsearches/" + createdSavedSearchId); + Response deleteReponse = UtilIT.deleteSavedSearchById(createdSavedSearchId); deleteReponse.prettyPrint(); deleteReponse.then().assertThat() .statusCode(OK.getStatusCode()); // check list count minus 1 - getListReponse = RestAssured.given() - .get("/api/admin/savedsearches/list"); + getListReponse = UtilIT.getSavedSearchList(); getListReponse.prettyPrint(); JsonPath path3 = JsonPath.from(getListReponse.body().asString()); List listAfterDelete = 
path3.getList("data.savedSearches"); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 502f1ecb0a8..255263f4cd3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -4131,8 +4131,37 @@ static Response setDatasetStorageDriver(Integer datasetId, String driverLabel, S .body(driverLabel) .put("/api/datasets/" + datasetId + "/storageDriver"); } - - + + /** GET on /api/admin/savedsearches/list */ + static Response getSavedSearchList() { + return given().get("/api/admin/savedsearches/list"); + } + + /** POST on /api/admin/savedsearches without body */ + static Response setSavedSearch() { + return given() + .contentType("application/json") + .post("/api/admin/savedsearches"); + } + + /** POST on /api/admin/savedsearches with body */ + static Response setSavedSearch(String body) { + return given() + .body(body) + .contentType("application/json") + .post("/api/admin/savedsearches"); + } + + /** PUT on /api/admin/savedsearches/makelinks/all */ + static Response setSavedSearchMakelinksAll() { + return given().put("/api/admin/savedsearches/makelinks/all"); + } + + /** DELETE on /api/admin/savedsearches/{id} with identifier */ + static Response deleteSavedSearchById(Integer id) { + return given().delete("/api/admin/savedsearches/" + id); + } + //Globus Store related - not currently used static Response getDatasetGlobusUploadParameters(Integer datasetId, String locale, String apiToken) { From 1d2d77630e9b1a3929114757e86cfa700d61a3fa Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 14 Nov 2024 19:19:28 -0500 Subject: [PATCH 255/402] preliminary #10977 --- .../datasetutility/AddReplaceFileHelper.java | 27 ++++------ .../datasetutility/OptionalFileParams.java | 22 +++++++- .../impl/CreateNewDataFilesCommand.java | 6 +++ .../dataverse/globus/GlobusServiceBean.java | 52 ++++++++++++++++--- .../dataverse/ingest/IngestServiceBean.java | 25 ++++++--- 5 files changed, 101 insertions(+), 31 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index a470f08f736..3943e3ad7d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -138,7 +138,8 @@ public class AddReplaceFileHelper{ private String newStorageIdentifier; // step 30 private String newCheckSum; // step 30 private ChecksumType newCheckSumType; //step 30 - + private Long suppliedFileSize = null; + // -- Optional private DataFile fileToReplace; // step 25 @@ -610,11 +611,14 @@ private boolean runAddReplacePhase1(Dataset owner, return false; } - if(optionalFileParams != null) { - if(optionalFileParams.hasCheckSum()) { - newCheckSum = optionalFileParams.getCheckSum(); - newCheckSumType = optionalFileParams.getCheckSumType(); - } + if (optionalFileParams != null) { + if (optionalFileParams.hasCheckSum()) { + newCheckSum = optionalFileParams.getCheckSum(); + newCheckSumType = optionalFileParams.getCheckSumType(); + } + if (optionalFileParams.hasFileSize()) { + suppliedFileSize = optionalFileParams.getFileSize(); + } } msgt("step_030_createNewFilesViaIngest"); @@ -1204,20 +1208,11 @@ private boolean step_030_createNewFilesViaIngest(){ clone = workingVersion.cloneDatasetVersion(); } try { - /*CreateDataFileResult result = 
FileUtil.createDataFiles(workingVersion, - this.newFileInputStream, - this.newFileName, - this.newFileContentType, - this.newStorageIdentifier, - this.newCheckSum, - this.newCheckSumType, - this.systemConfig);*/ - UploadSessionQuotaLimit quota = null; if (systemConfig.isStorageQuotasEnforced()) { quota = fileService.getUploadSessionQuotaLimit(dataset); } - Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType); + Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, suppliedFileSize); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); initialFileList = createDataFilesResult.getDataFiles(); diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java index 959dbc4e262..c1be6424a84 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java @@ -76,6 +76,8 @@ public class OptionalFileParams { public static final String MIME_TYPE_ATTR_NAME = "mimeType"; private String checkSumValue; private ChecksumType checkSumType; + public static final String FILE_SIZE_ATTR_NAME = "fileSize"; + private Long fileSize; public static final String LEGACY_CHECKSUM_ATTR_NAME = "md5Hash"; public static final String CHECKSUM_OBJECT_NAME = "checksum"; public static final String CHECKSUM_OBJECT_TYPE = "@type"; @@ -268,6 +270,18 @@ public String getCheckSum() { public ChecksumType getCheckSumType() { return checkSumType; } + + public boolean hasFileSize() { + return fileSize != null; + } + + public Long getFileSize() { + return fileSize; + } + + public void setFileSize(long fileSize) { + this.fileSize = fileSize; + } /** * Set tags @@ -416,7 +430,13 @@ else if ((jsonObj.has(CHECKSUM_OBJECT_NAME)) && (!jsonObj.get(CHECKSUM_OBJECT_NA this.checkSumType = ChecksumType.fromString(((JsonObject) jsonObj.get(CHECKSUM_OBJECT_NAME)).get(CHECKSUM_OBJECT_TYPE).getAsString()); } - + // ------------------------------- + // get file size as a Long, if supplied + // ------------------------------- + if ((jsonObj.has(FILE_SIZE_ATTR_NAME)) && (!jsonObj.get(FILE_SIZE_ATTR_NAME).isJsonNull())){ + + this.fileSize = jsonObj.get(FILE_SIZE_ATTR_NAME).getAsLong(); + } // ------------------------------- // get tags // ------------------------------- diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 76939751899..172c92dc1fd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -93,6 +93,10 @@ public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion versi this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, null, null); } + public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, UploadSessionQuotaLimit quota, String newCheckSum, 
DataFile.ChecksumType newCheckSumType, Long newFileSize) { + this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, newFileSize, null); + } + // This version of the command must be used when files are created in the // context of creating a brand new dataset (from the Add Dataset page): @@ -636,6 +640,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException createIngestFailureReport(datafile, warningMessage); datafile.SetIngestProblem(); } + logger.info("datafile size: " + datafile.getFilesize()); if (datafile.getFilesize() < 0) { datafile.setFilesize(fileSize); } @@ -654,6 +659,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + fileSize); } + logger.info("datafile size (again): " + datafile.getFilesize()); return CreateDataFileResult.success(fileName, finalType, datafiles); } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index ac3c81622fc..3573b8e05df 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -284,6 +284,48 @@ private int makeDir(GlobusEndpoint endpoint, String dir) { return result.status; } + private Map lookupFileSizes(GlobusEndpoint endpoint, String dir) { + Map ret = new HashMap<>(); + + MakeRequestResponse result; + + try { + URL url = new URL( + "https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint.getId() + + "/ls?path=" + dir); + result = makeRequest(url, "Bearer", endpoint.getClientToken(), "GET", null); + + switch (result.status) { + case 200: + logger.fine("Looked up directory " + dir + " successfully."); + break; + default: + logger.warning("Status " + result.status + " received when looking up dir " + dir); + logger.fine("Response: " + result.jsonResponse); + } + } catch (MalformedURLException ex) { + // Misconfiguration + logger.warning("Failed to create dir on " + endpoint.getId()); + return null; + } + + JsonObject listObject = JsonUtil.getJsonObject(result.jsonResponse); + JsonArray dataArray = listObject.getJsonArray("DATA"); + + if (dataArray != null && !dataArray.isEmpty()) { + for (int i = 0; i < dataArray.size(); i++) { + String dataType = dataArray.getJsonObject(i).getString("DATA_TYPE", null); + if (dataType != null && dataType.equals("file")) { + String fileName = dataArray.getJsonObject(i).getString("name"); + long fileSize = dataArray.getJsonObject(i).getJsonNumber("size").longValueExact(); + ret.put(fileName, fileSize); + } + } + } + + return ret; + } + private int requestPermission(GlobusEndpoint endpoint, Dataset dataset, Permissions permissions) { Gson gson = new GsonBuilder().create(); MakeRequestResponse result = null; @@ -972,12 +1014,6 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut fileJsonObject = path.apply(fileJsonObject); addFilesJsonData.add(fileJsonObject); countSuccess++; - // } else { - // globusLogger.info(fileName - // + " will be skipped from adding to dataset by second API due to missing - // values "); - // countError++; - // } } else { myLogger.info(fileName + " will be skipped from adding to dataset in the final AddReplaceFileHelper.addFiles() call. 
"); @@ -1211,7 +1247,7 @@ private GlobusTaskState globusStatusCheck(GlobusEndpoint endpoint, String taskId return task; } - public JsonObject calculateMissingMetadataFields(List inputList, Logger globusLogger) + private JsonObject calculateMissingMetadataFields(List inputList, Logger globusLogger) throws InterruptedException, ExecutionException, IOException { List> hashvalueCompletableFutures = inputList.stream() @@ -1230,7 +1266,7 @@ public JsonObject calculateMissingMetadataFields(List inputList, Logger }); JsonArrayBuilder filesObject = (JsonArrayBuilder) completableFuture.get(); - + JsonObject output = Json.createObjectBuilder().add("files", filesObject).build(); return output; diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index b42fd950528..fad02c76c78 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -344,10 +344,20 @@ public List saveAndAddFilesToDataset(DatasetVersion version, try { StorageIO dataAccess = DataAccess.getStorageIO(dataFile); //Populate metadata - dataAccess.open(DataAccessOption.READ_ACCESS); - // (the .open() above makes a remote call to check if - // the file exists and obtains its size) - confirmedFileSize = dataAccess.getSize(); + + // There are direct upload sub-cases where the file size + // is already known at this point. For example, direct uploads + // to S3 that go through the jsf dataset page. Or the Globus + // uploads, where the file sizes are looked up in bulk on + // the completion of the remote upload task. + if (dataFile.getFilesize() > 0) { + confirmedFileSize = dataFile.getFilesize(); + } else { + dataAccess.open(DataAccessOption.READ_ACCESS); + // (the .open() above makes a remote call to check if + // the file exists and obtains its size) + confirmedFileSize = dataAccess.getSize(); + } // For directly-uploaded files, we will perform the file size // limit and quota checks here. Perform them *again*, in @@ -362,13 +372,16 @@ public List saveAndAddFilesToDataset(DatasetVersion version, if (fileSizeLimit == null || confirmedFileSize < fileSizeLimit) { //set file size - logger.fine("Setting file size: " + confirmedFileSize); - dataFile.setFilesize(confirmedFileSize); + if (dataFile.getFilesize() < 1) { + logger.fine("Setting file size: " + confirmedFileSize); + dataFile.setFilesize(confirmedFileSize); + } if (dataAccess instanceof S3AccessIO) { ((S3AccessIO) dataAccess).removeTempTag(); } savedSuccess = true; + logger.info("directly uploaded file successfully saved. 
file size: "+dataFile.getFilesize()); } } catch (IOException ioex) { logger.warning("Failed to get file size, storage id, or failed to remove the temp tag on the saved S3 object" + dataFile.getStorageIdentifier() + " (" From c645af208d3df7e805b7ec08545320c7fdf21bbb Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 15 Nov 2024 09:49:59 -0500 Subject: [PATCH 256/402] validate guestbook email field --- src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 1ea7d02791d..1eea1f3ac7d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -15,6 +15,8 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; + +import edu.harvard.iq.dataverse.validation.ValidateEmail; import jakarta.persistence.*; import jakarta.validation.constraints.Size; import java.util.Collections; @@ -80,8 +82,8 @@ public class GuestbookResponse implements Serializable { @Size(max = 255, message = "{guestbook.response.nameLength}") private String name; - // TODO: Consider using EMailValidator as well. @Size(max = 255, message = "{guestbook.response.nameLength}") + @ValidateEmail(message = "{user.invalidEmail}") private String email; @Size(max = 255, message = "{guestbook.response.nameLength}") From d324db8dae6a4476cd1e707c9c47200c8522451c Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 15 Nov 2024 10:02:16 -0500 Subject: [PATCH 257/402] add release note --- doc/release-notes/10661-guestbook-email-bug-fix.md | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 doc/release-notes/10661-guestbook-email-bug-fix.md diff --git a/doc/release-notes/10661-guestbook-email-bug-fix.md b/doc/release-notes/10661-guestbook-email-bug-fix.md new file mode 100644 index 00000000000..7b317c4dd61 --- /dev/null +++ b/doc/release-notes/10661-guestbook-email-bug-fix.md @@ -0,0 +1,4 @@ + +### Guestbook Email Validation Bug fixe + +Guestbook UI Form: Email address is now checked for valid email From 99ea2499bdbf7a6dbdd3ac8c2059abf3f1a49535 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 15 Nov 2024 10:02:43 -0500 Subject: [PATCH 258/402] add release note --- doc/release-notes/10661-guestbook-email-bug-fix.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10661-guestbook-email-bug-fix.md b/doc/release-notes/10661-guestbook-email-bug-fix.md index 7b317c4dd61..887b3ff86c7 100644 --- a/doc/release-notes/10661-guestbook-email-bug-fix.md +++ b/doc/release-notes/10661-guestbook-email-bug-fix.md @@ -1,4 +1,4 @@ -### Guestbook Email Validation Bug fixe +### Guestbook Email Validation Bug fix Guestbook UI Form: Email address is now checked for valid email From b2d57f53b28c516a1017ccc83626ec2ae4746723 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 15 Nov 2024 10:16:52 -0500 Subject: [PATCH 259/402] add release note --- doc/release-notes/10661-guestbook-email-bug-fix.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/10661-guestbook-email-bug-fix.md b/doc/release-notes/10661-guestbook-email-bug-fix.md index 887b3ff86c7..05e70c9762a 100644 --- 
a/doc/release-notes/10661-guestbook-email-bug-fix.md +++ b/doc/release-notes/10661-guestbook-email-bug-fix.md @@ -1,4 +1,4 @@ ### Guestbook Email Validation Bug fix -Guestbook UI Form: Email address is now checked for valid email +Guestbook UI Form: Email address is now checked for valid email format From a6d835c32b09593555614e79a9610bc1acec15c9 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Fri, 15 Nov 2024 14:20:03 -0500 Subject: [PATCH 260/402] fix failing email validation for api calls --- src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 1eea1f3ac7d..830c7740e34 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -200,7 +200,8 @@ public String getEmail() { } public void setEmail(String email) { - this.email = email; + // ValidateEmail requires NULL or valid email. Empty String will fail validation + this.email = (email == null || email.trim().isEmpty()) ? null : email; } public Guestbook getGuestbook() { From 5ef2d15986cbc862a52c7a3464d6b56af4a0804c Mon Sep 17 00:00:00 2001 From: Guido Schmutz Date: Fri, 15 Nov 2024 20:44:12 +0100 Subject: [PATCH 261/402] add previewers-provider and register-previewers container --- docker/compose/demo/compose.yml | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index a0a85ef5217..0da3bcaae6e 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -74,6 +74,34 @@ services: volumes: - ./data/app/data:/dv + dataverse-previewers-provider: + image: trivadis/dataverse-previewers-provider:latest + container_name: dataverse-previewers-provider + hostname: dataverse-previewers-provider + ports: + - 9000:80 + environment: + - NGINX_HTTP_PORT=80 + - PREVIEWERS_PROVIDER_URL=http://${PUBLIC_IP:-localhost}:9000 + - VERSIONS="v1.4,betatest" + restart: unless-stopped + + dataverse-register-previewers: + container_name: dataverse-register-previewers + hostname: dataverse-register-previewers + image: trivadis/dataverse-deploy-previewers:latest + environment: + - API_TOKEN=${API_TOKEN} + - DATAVERSE_URL=http://${PUBLIC_IP:-localhost}:8080 + - TIMEOUT=10m + - PREVIEWER_PROVIDER_URL=http://${PUBLIC_IP:-localhost}:9000 + - INCLUDE_PREVIEWERS=text,html,pdf,csv,comma-separated-values,tsv,tab-separated-values,jpeg,png,gif,markdown,x-markdown + - EXCLUDE_PREVIEWERS= + - REMOVE_EXISTING=True + command: + - deploy + restart: no + postgres: container_name: "postgres" hostname: postgres From 5b4d931c111e203b379841eea8ab8dcd5ac7d890 Mon Sep 17 00:00:00 2001 From: Guido Schmutz Date: Fri, 15 Nov 2024 20:53:25 +0100 Subject: [PATCH 262/402] Update compose.yml --- docker/compose/demo/compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index 0da3bcaae6e..ba413ff1d98 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -94,7 +94,7 @@ services: - API_TOKEN=${API_TOKEN} - DATAVERSE_URL=http://${PUBLIC_IP:-localhost}:8080 - TIMEOUT=10m - - PREVIEWER_PROVIDER_URL=http://${PUBLIC_IP:-localhost}:9000 + - PREVIEWERS_PROVIDER_URL=http://${PUBLIC_IP:-localhost}:9000 - 
INCLUDE_PREVIEWERS=text,html,pdf,csv,comma-separated-values,tsv,tab-separated-values,jpeg,png,gif,markdown,x-markdown - EXCLUDE_PREVIEWERS= - REMOVE_EXISTING=True From 22184aedbf940fff2b7e8e03490016a3dc2153a6 Mon Sep 17 00:00:00 2001 From: Guido Schmutz Date: Fri, 15 Nov 2024 20:57:12 +0100 Subject: [PATCH 263/402] Update compose.yml --- docker/compose/demo/compose.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index ba413ff1d98..979ad0aba2a 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -82,7 +82,7 @@ services: - 9000:80 environment: - NGINX_HTTP_PORT=80 - - PREVIEWERS_PROVIDER_URL=http://${PUBLIC_IP:-localhost}:9000 + - PREVIEWERS_PROVIDER_URL=http://${IP:-localhost}:9000 - VERSIONS="v1.4,betatest" restart: unless-stopped @@ -92,9 +92,9 @@ services: image: trivadis/dataverse-deploy-previewers:latest environment: - API_TOKEN=${API_TOKEN} - - DATAVERSE_URL=http://${PUBLIC_IP:-localhost}:8080 + - DATAVERSE_URL=http://${IP:-localhost}:8080 - TIMEOUT=10m - - PREVIEWERS_PROVIDER_URL=http://${PUBLIC_IP:-localhost}:9000 + - PREVIEWERS_PROVIDER_URL=http://${IP:-localhost}:9000 - INCLUDE_PREVIEWERS=text,html,pdf,csv,comma-separated-values,tsv,tab-separated-values,jpeg,png,gif,markdown,x-markdown - EXCLUDE_PREVIEWERS= - REMOVE_EXISTING=True From 799ba6c57bbe21823cca75d40ea4363aa2fa82b5 Mon Sep 17 00:00:00 2001 From: Guido Schmutz Date: Fri, 15 Nov 2024 20:59:25 +0100 Subject: [PATCH 264/402] Update compose.yml --- docker/compose/demo/compose.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index 979ad0aba2a..ba413ff1d98 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -82,7 +82,7 @@ services: - 9000:80 environment: - NGINX_HTTP_PORT=80 - - PREVIEWERS_PROVIDER_URL=http://${IP:-localhost}:9000 + - PREVIEWERS_PROVIDER_URL=http://${PUBLIC_IP:-localhost}:9000 - VERSIONS="v1.4,betatest" restart: unless-stopped @@ -92,9 +92,9 @@ services: image: trivadis/dataverse-deploy-previewers:latest environment: - API_TOKEN=${API_TOKEN} - - DATAVERSE_URL=http://${IP:-localhost}:8080 + - DATAVERSE_URL=http://${PUBLIC_IP:-localhost}:8080 - TIMEOUT=10m - - PREVIEWERS_PROVIDER_URL=http://${IP:-localhost}:9000 + - PREVIEWERS_PROVIDER_URL=http://${PUBLIC_IP:-localhost}:9000 - INCLUDE_PREVIEWERS=text,html,pdf,csv,comma-separated-values,tsv,tab-separated-values,jpeg,png,gif,markdown,x-markdown - EXCLUDE_PREVIEWERS= - REMOVE_EXISTING=True From b14a5eb469efbc48d98ce0a183a65ef8decf90b0 Mon Sep 17 00:00:00 2001 From: Guido Schmutz Date: Fri, 15 Nov 2024 21:01:18 +0100 Subject: [PATCH 265/402] Update compose.yml --- docker/compose/demo/compose.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index ba413ff1d98..19bd23f5bfb 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -82,7 +82,7 @@ services: - 9000:80 environment: - NGINX_HTTP_PORT=80 - - PREVIEWERS_PROVIDER_URL=http://${PUBLIC_IP:-localhost}:9000 + - PREVIEWERS_PROVIDER_URL=http://${IP:-localhost}:9000 - VERSIONS="v1.4,betatest" restart: unless-stopped @@ -92,9 +92,9 @@ services: image: trivadis/dataverse-deploy-previewers:latest environment: - API_TOKEN=${API_TOKEN} - - DATAVERSE_URL=http://${PUBLIC_IP:-localhost}:8080 + - DATAVERSE_URL=http://dataverse:8080 - TIMEOUT=10m - - 
PREVIEWERS_PROVIDER_URL=http://${PUBLIC_IP:-localhost}:9000 + - PREVIEWERS_PROVIDER_URL=http://${IP:-localhost}:9000 - INCLUDE_PREVIEWERS=text,html,pdf,csv,comma-separated-values,tsv,tab-separated-values,jpeg,png,gif,markdown,x-markdown - EXCLUDE_PREVIEWERS= - REMOVE_EXISTING=True From 0fdfd80fe6513f76c981cbcd6a04101efc7226c8 Mon Sep 17 00:00:00 2001 From: Guido Schmutz Date: Fri, 15 Nov 2024 21:11:45 +0100 Subject: [PATCH 266/402] Update compose.yml --- docker/compose/demo/compose.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index 19bd23f5bfb..1e89eb8d021 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -80,6 +80,8 @@ services: hostname: dataverse-previewers-provider ports: - 9000:80 + networks: + - dataverse environment: - NGINX_HTTP_PORT=80 - PREVIEWERS_PROVIDER_URL=http://${IP:-localhost}:9000 @@ -90,8 +92,9 @@ services: container_name: dataverse-register-previewers hostname: dataverse-register-previewers image: trivadis/dataverse-deploy-previewers:latest + networks: + - dataverse environment: - - API_TOKEN=${API_TOKEN} - DATAVERSE_URL=http://dataverse:8080 - TIMEOUT=10m - PREVIEWERS_PROVIDER_URL=http://${IP:-localhost}:9000 From 8401c779a51f36ee33f8c7acd1aaf68c6d388b61 Mon Sep 17 00:00:00 2001 From: Guido Schmutz Date: Fri, 15 Nov 2024 21:31:29 +0100 Subject: [PATCH 267/402] Update compose.yml --- docker/compose/demo/compose.yml | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index 1e89eb8d021..e2565545b0a 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -74,30 +74,29 @@ services: volumes: - ./data/app/data:/dv - dataverse-previewers-provider: + previewers-provider: image: trivadis/dataverse-previewers-provider:latest - container_name: dataverse-previewers-provider - hostname: dataverse-previewers-provider + container_name: previewers-provider + hostname: previewers-provider ports: - 9000:80 networks: - dataverse environment: - NGINX_HTTP_PORT=80 - - PREVIEWERS_PROVIDER_URL=http://${IP:-localhost}:9000 + - PREVIEWERS_PROVIDER_URL=http://${MACHINE_IP:-localhost}:9000 - VERSIONS="v1.4,betatest" - restart: unless-stopped - dataverse-register-previewers: - container_name: dataverse-register-previewers - hostname: dataverse-register-previewers + register-previewers: + container_name: register-previewers + hostname: register-previewers image: trivadis/dataverse-deploy-previewers:latest networks: - dataverse environment: - DATAVERSE_URL=http://dataverse:8080 - TIMEOUT=10m - - PREVIEWERS_PROVIDER_URL=http://${IP:-localhost}:9000 + - PREVIEWERS_PROVIDER_URL=http://${MACHINE_IP:-localhost}:9000 - INCLUDE_PREVIEWERS=text,html,pdf,csv,comma-separated-values,tsv,tab-separated-values,jpeg,png,gif,markdown,x-markdown - EXCLUDE_PREVIEWERS= - REMOVE_EXISTING=True From f54e51af476c895c8425907f3eaf96055b3aff5f Mon Sep 17 00:00:00 2001 From: Guido Schmutz Date: Fri, 15 Nov 2024 21:45:56 +0100 Subject: [PATCH 268/402] Update compose.yml --- docker/compose/demo/compose.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index e2565545b0a..c940fde1786 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -79,11 +79,11 @@ services: container_name: previewers-provider hostname: previewers-provider ports: - - 9000:80 + - 
9000:9000 networks: - dataverse environment: - - NGINX_HTTP_PORT=80 + - NGINX_HTTP_PORT=9000 - PREVIEWERS_PROVIDER_URL=http://${MACHINE_IP:-localhost}:9000 - VERSIONS="v1.4,betatest" From d84ac89d166052a774e6a697b4d978aa86810a65 Mon Sep 17 00:00:00 2001 From: Guido Schmutz Date: Fri, 15 Nov 2024 22:21:31 +0100 Subject: [PATCH 269/402] Update compose.yml --- docker/compose/demo/compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index c940fde1786..f03d81f5957 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -9,7 +9,7 @@ services: restart: on-failure user: payara environment: - DATAVERSE_SITEURL: "https://demo.example.org" + DATAVERSE_SITEURL: "http://${MACHINE_IP:-localhost}:8080" DATAVERSE_DB_HOST: postgres DATAVERSE_DB_PASSWORD: secret DATAVERSE_DB_USER: dataverse From 2664077dcb05e49b7ea67557903cc8165b81ac1f Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 18 Nov 2024 14:08:26 +0000 Subject: [PATCH 270/402] Added: new resetRelationsOnNullValues boolean flag to AbstractWriteDataverseCommand --- .../edu/harvard/iq/dataverse/Dataverse.java | 4 +++ .../harvard/iq/dataverse/api/Dataverses.java | 2 +- .../impl/AbstractWriteDataverseCommand.java | 36 ++++++++++++++----- .../command/impl/CreateDataverseCommand.java | 2 +- .../command/impl/UpdateDataverseCommand.java | 7 ++-- 5 files changed, 38 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index 86e2e0207c1..1f11725e581 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -595,6 +595,10 @@ public void setMetadataBlocks(List metadataBlocks) { this.metadataBlocks = new ArrayList<>(metadataBlocks); } + public void clearMetadataBlocks() { + this.metadataBlocks.clear(); + } + public List getCitationDatasetFieldTypes() { return citationDatasetFieldTypes; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index f05bba8830e..f864a5a9d1c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -195,7 +195,7 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod List facets = parseFacets(body); AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO)); + dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO, true)); return ok(json(dataverse)); } catch (WrappedResponse ww) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java index 40c2abf5d21..364e7bc6233 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -19,13 +19,15 @@ abstract class AbstractWriteDataverseCommand extends AbstractCommand private final List inputLevels; private final List facets; protected final List metadataBlocks; + private 
final boolean resetRelationsOnNullValues; public AbstractWriteDataverseCommand(Dataverse dataverse, Dataverse affectedDataverse, DataverseRequest request, List facets, List inputLevels, - List metadataBlocks) { + List metadataBlocks, + boolean resetRelationsOnNullValues) { super(request, affectedDataverse); this.dataverse = dataverse; if (facets != null) { @@ -43,17 +45,31 @@ public AbstractWriteDataverseCommand(Dataverse dataverse, } else { this.metadataBlocks = null; } + this.resetRelationsOnNullValues = resetRelationsOnNullValues; } @Override public Dataverse execute(CommandContext ctxt) throws CommandException { dataverse = innerExecute(ctxt); + processMetadataBlocks(); + processFacets(ctxt); + processInputLevels(ctxt); + + return ctxt.dataverses().save(dataverse); + } + + private void processMetadataBlocks() { if (metadataBlocks != null && !metadataBlocks.isEmpty()) { dataverse.setMetadataBlockRoot(true); dataverse.setMetadataBlocks(metadataBlocks); + } else if (resetRelationsOnNullValues) { + dataverse.setMetadataBlockRoot(false); + dataverse.clearMetadataBlocks(); } + } + private void processFacets(CommandContext ctxt) { if (facets != null) { ctxt.facets().deleteFacetsFor(dataverse); @@ -61,24 +77,28 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { dataverse.setFacetRoot(true); } - int i = 0; - for (DatasetFieldType df : facets) { - ctxt.facets().create(i++, df, dataverse); + for (int i = 0; i < facets.size(); i++) { + ctxt.facets().create(i, facets.get(i), dataverse); } + } else if (resetRelationsOnNullValues) { + ctxt.facets().deleteFacetsFor(dataverse); + dataverse.setFacetRoot(false); } + } + private void processInputLevels(CommandContext ctxt) { if (inputLevels != null) { if (!inputLevels.isEmpty()) { dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels); } ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); - for (DataverseFieldTypeInputLevel inputLevel : inputLevels) { + inputLevels.forEach(inputLevel -> { inputLevel.setDataverse(dataverse); ctxt.fieldTypeInputLevels().create(inputLevel); - } + }); + } else if (resetRelationsOnNullValues) { + ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); } - - return ctxt.dataverses().save(dataverse); } abstract protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandException; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 145cfb6199c..3728f3ee6ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -39,7 +39,7 @@ public CreateDataverseCommand(Dataverse created, List facets, List inputLevels, List metadataBlocks) { - super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks); + super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks, false); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 55cc3708097..6dc4ab4d00d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -32,7 +32,7 @@ public UpdateDataverseCommand(Dataverse dataverse, List 
featuredDataverses, DataverseRequest request, List inputLevels) { - this(dataverse, facets, featuredDataverses, request, inputLevels, null, null); + this(dataverse, facets, featuredDataverses, request, inputLevels, null, null, false); } public UpdateDataverseCommand(Dataverse dataverse, @@ -41,8 +41,9 @@ public UpdateDataverseCommand(Dataverse dataverse, DataverseRequest request, List inputLevels, List metadataBlocks, - DataverseDTO updatedDataverseDTO) { - super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks); + DataverseDTO updatedDataverseDTO, + boolean resetRelationsOnNullValues) { + super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks, resetRelationsOnNullValues); if (featuredDataverses != null) { this.featuredDataverseList = new ArrayList<>(featuredDataverses); } else { From 2e71045e4027042ef58a4b1bf89e84f191ad3254 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 18 Nov 2024 16:04:05 +0000 Subject: [PATCH 271/402] Added: IT cases for updateDataverse API endpoint --- .../iq/dataverse/api/DataversesIT.java | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 9567cf3910a..76bb515beb2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1379,6 +1379,48 @@ public void testUpdateDataverse() { Response getDataverseResponse = UtilIT.listDataverseFacets(oldDataverseAlias, apiToken); getDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + // Update the dataverse without setting metadata blocks, facets, or input levels + updateDataverseResponse = UtilIT.updateDataverse( + newAlias, + newAlias, + newName, + newAffiliation, + newDataverseType, + newContactEmails, + null, + null, + null, + apiToken + ); + updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that the metadata blocks are inherited from the parent + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + listMetadataBlocksResponse + .then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)) + .body("data[0].name", equalTo("citation")); + + // Assert that the facets are inherited from the parent + String[] rootFacetIds = new String[]{"authorName", "subject", "keywordValue", "dateOfDeposit"}; + listDataverseFacetsResponse = UtilIT.listDataverseFacets(newAlias, apiToken); + String actualFacetName1 = listDataverseFacetsResponse.then().extract().path("data[0]"); + String actualFacetName2 = listDataverseFacetsResponse.then().extract().path("data[1]"); + String actualFacetName3 = listDataverseFacetsResponse.then().extract().path("data[2]"); + String actualFacetName4 = listDataverseFacetsResponse.then().extract().path("data[3]"); + assertThat(rootFacetIds, hasItemInArray(actualFacetName1)); + assertThat(rootFacetIds, hasItemInArray(actualFacetName2)); + assertThat(rootFacetIds, hasItemInArray(actualFacetName3)); + assertThat(rootFacetIds, hasItemInArray(actualFacetName4)); + + // Assert that the dataverse should not have any input level + listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(newAlias, apiToken); + listDataverseInputLevelsResponse + .then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(0)); + // Should return error when the dataverse to edit does not exist 
updateDataverseResponse = UtilIT.updateDataverse( "unexistingDataverseAlias", From 6b908e8668409621b53ce96c262c17ab82021519 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 18 Nov 2024 16:30:22 +0000 Subject: [PATCH 272/402] Added: docs for #11018 --- doc/sphinx-guides/source/api/native-api.rst | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index b464b6df393..9ac6fe196ff 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -118,11 +118,18 @@ The fully expanded example above (without environment variables) looks like this You should expect an HTTP 200 response and JSON beginning with "status":"OK" followed by a representation of the updated Dataverse collection. -Same as in :ref:`create-dataverse-api`, the request JSON supports an optional ``metadataBlocks`` object, with the following supported sub-objects: +Same as in :ref:`create-dataverse-api`, the request JSON supports a ``metadataBlocks`` object, with the following supported sub-objects: -- ``metadataBlockNames``: The names of the metadata blocks you want to add to the Dataverse collection. -- ``inputLevels``: The names of the fields in each metadata block for which you want to add a custom configuration regarding their inclusion or requirement when creating and editing datasets in the new Dataverse collection. Note that if the corresponding metadata blocks names are not specified in the ``metadataBlockNames``` field, they will be added automatically to the Dataverse collection. -- ``facetIds``: The names of the fields to use as facets for browsing datasets and collections in the new Dataverse collection. Note that the order of the facets is defined by their order in the provided JSON array. +- ``metadataBlockNames``: The names of the metadata blocks to be assigned to the Dataverse collection. +- ``inputLevels``: The names of the fields in each metadata block for which you want to add a custom configuration regarding their inclusion or requirement when creating and editing datasets in the Dataverse collection. Note that if the corresponding metadata blocks names are not specified in the ``metadataBlockNames``` field, they will be added automatically to the Dataverse collection. +- ``facetIds``: The names of the fields to use as facets for browsing datasets and collections in the Dataverse collection. Note that the order of the facets is defined by their order in the provided JSON array. + +Note that setting any of these fields overwrites the previous configuration. + +When it comes to omitting these fields in the JSON: + +- Omitting ``facetIds`` or ``metadataBlockNames`` causes the Dataverse collection to inherit the corresponding configuration from its parent. +- Omitting ``inputLevels`` removes any existing input levels in the Dataverse collection. To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs. 
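For readers following the updateDataverse changes in the two patches above, here is a minimal, illustrative sketch of how a client might assemble the request body for the endpoint, using the same jakarta.json builders the integration tests rely on. The top-level fields and the metadataBlocks sub-objects (metadataBlockNames, facetIds, inputLevels) follow the documentation added in this series; the specific inputLevels keys (datasetFieldTypeName, include, required) and all sample values are assumptions for illustration, not an excerpt from the Dataverse codebase.

```java
import jakarta.json.Json;
import jakarta.json.JsonObject;

public class UpdateDataverseBodySketch {

    // Builds a request body for PUT /api/dataverses/$ALIAS following the fields
    // described in the docs above. Per those docs, omitting "metadataBlockNames"
    // or "facetIds" makes the collection inherit them from its parent, and
    // omitting "inputLevels" clears any existing input levels.
    public static JsonObject build() {
        return Json.createObjectBuilder()
                .add("name", "Updated Dataverse")
                .add("alias", "updatedDataverse")
                .add("affiliation", "Updated Affiliation")
                .add("dataverseType", "ORGANIZATIONS_INSTITUTIONS")
                .add("dataverseContacts", Json.createArrayBuilder()
                        .add(Json.createObjectBuilder().add("contactEmail", "somebody@example.org")))
                .add("metadataBlocks", Json.createObjectBuilder()
                        .add("metadataBlockNames", Json.createArrayBuilder()
                                .add("citation").add("geospatial"))
                        .add("facetIds", Json.createArrayBuilder()
                                .add("authorName").add("subject"))
                        .add("inputLevels", Json.createArrayBuilder()
                                .add(Json.createObjectBuilder()
                                        .add("datasetFieldTypeName", "geographicCoverage")
                                        .add("required", true)
                                        .add("include", true))))
                .build();
    }

    public static void main(String[] args) {
        // Prints the JSON body that would be sent with the update request.
        System.out.println(build());
    }
}
```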
From 8c3cdaac1be0fc7163f4dc327a54bf7ee1664f1d Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 18 Nov 2024 16:48:22 +0000 Subject: [PATCH 273/402] Added: release notes for #11018 --- .../11018-update-dataverse-endpoint-update.md | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 doc/release-notes/11018-update-dataverse-endpoint-update.md diff --git a/doc/release-notes/11018-update-dataverse-endpoint-update.md b/doc/release-notes/11018-update-dataverse-endpoint-update.md new file mode 100644 index 00000000000..dcd8eb0c90d --- /dev/null +++ b/doc/release-notes/11018-update-dataverse-endpoint-update.md @@ -0,0 +1,8 @@ +The updateDataverse API endpoint has been updated to support an "inherit from parent" configuration for metadata blocks, facets, and input levels. + +When it comes to omitting any of these fields in the request JSON: + +- Omitting ``facetIds`` or ``metadataBlockNames`` causes the Dataverse collection to inherit the corresponding configuration from its parent. +- Omitting ``inputLevels`` removes any existing input levels in the Dataverse collection. + +Previously, not setting these fields meant keeping the existing ones in the Dataverse. From 19cd329dc7ac94614014180e50e4be14af23041e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 18 Nov 2024 12:24:57 -0500 Subject: [PATCH 274/402] add "Alert Translators About the New Release" #10931 --- doc/sphinx-guides/source/developers/making-releases.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index 4b52b3ce922..aed174f60d4 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ b/doc/sphinx-guides/source/developers/making-releases.rst @@ -294,6 +294,13 @@ Get the attention of the core team and ask someone to update it for the new rele Consider updating `the thread `_ on the mailing list once the update is in place. +Alert Translators About the New Release +--------------------------------------- + +Create an issue at https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs/issues to say a new release is out and that we would love for the properties files for English to be added. + +For example, for 6.4 we wrote "Update en_US/Bundle.properties etc. 
for Dataverse 6.4" at https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs/issues/125 + Add the Release to the Dataverse Roadmap ---------------------------------------- From 9a580f8d683b60da0f4dad1f4f157db926620594 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 18 Nov 2024 14:29:31 -0500 Subject: [PATCH 275/402] add wait to async export call --- .../edu/harvard/iq/dataverse/api/DatasetsIT.java | 4 ++-- .../java/edu/harvard/iq/dataverse/api/UtilIT.java | 14 +++++++++++--- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 93f1024ae7a..10584f2df71 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -4242,7 +4242,7 @@ public void testCitationDate() throws IOException { .statusCode(OK.getStatusCode()) .body("data.message", is(expectedCitation)); - Response exportDatasetAsDublinCore = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken); + Response exportDatasetAsDublinCore = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken, true); exportDatasetAsDublinCore.prettyPrint(); exportDatasetAsDublinCore.then().assertThat() .body("oai_dc.type", equalTo("Dataset")) @@ -4259,7 +4259,7 @@ public void testCitationDate() throws IOException { rexport.then().assertThat().statusCode(OK.getStatusCode()); String todayDate = LocalDate.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd")); - Response exportPostClear = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken); + Response exportPostClear = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken, true); exportPostClear.prettyPrint(); exportPostClear.then().assertThat() .body("oai_dc.type", equalTo("Dataset")) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 502f1ecb0a8..56a680966ba 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2161,20 +2161,28 @@ static Response uploadProvFreeForm(String idOrPersistentId, JsonObject jsonObjec // return requestSpecification.delete("/api/files/" + idInPath + "/prov-freeform" + optionalQueryParam); // } static Response exportDataset(String datasetPersistentId, String exporter) { - return exportDataset(datasetPersistentId, exporter, null); + return exportDataset(datasetPersistentId, exporter, null, false); } - static Response exportDataset(String datasetPersistentId, String exporter, String apiToken) { + return exportDataset(datasetPersistentId, exporter, apiToken, false); + } + static Response exportDataset(String datasetPersistentId, String exporter, String apiToken, boolean wait) { // http://localhost:8080/api/datasets/export?exporter=dataverse_json&persistentId=doi%3A10.5072/FK2/W6WIMQ RequestSpecification requestSpecification = given(); if (apiToken != null) { requestSpecification = given() .header(UtilIT.API_TOKEN_HTTP_HEADER, apiToken); } - return requestSpecification + Response resp = requestSpecification // .header(API_TOKEN_HTTP_HEADER, apiToken) // .get("/api/datasets/:persistentId/export" + "?persistentId=" + datasetPersistentId + "&exporter=" + exporter); .get("/api/datasets/export" + "?persistentId=" + datasetPersistentId + "&exporter=" + exporter); + // Wait for the Async call to finish to get the updated data + if (wait) { + 
sleepForReexport(datasetPersistentId, apiToken, 10); + resp = requestSpecification.get("/api/datasets/export" + "?persistentId=" + datasetPersistentId + "&exporter=" + exporter); + } + return resp; } static Response reexportDatasetAllFormats(String idOrPersistentId) { From a04cdabe224843cadabe4cbdd62ac4c4ec46cc42 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 18 Nov 2024 14:59:11 -0500 Subject: [PATCH 276/402] add wait to async export call --- .../java/edu/harvard/iq/dataverse/api/UtilIT.java | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 56a680966ba..232e8132c5c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2167,22 +2167,17 @@ static Response exportDataset(String datasetPersistentId, String exporter, Strin return exportDataset(datasetPersistentId, exporter, apiToken, false); } static Response exportDataset(String datasetPersistentId, String exporter, String apiToken, boolean wait) { -// http://localhost:8080/api/datasets/export?exporter=dataverse_json&persistentId=doi%3A10.5072/FK2/W6WIMQ + // Wait for the Async call to finish to get the updated data + if (wait) { + sleepForReexport(datasetPersistentId, apiToken, 10); + } RequestSpecification requestSpecification = given(); if (apiToken != null) { requestSpecification = given() .header(UtilIT.API_TOKEN_HTTP_HEADER, apiToken); } - Response resp = requestSpecification - // .header(API_TOKEN_HTTP_HEADER, apiToken) - // .get("/api/datasets/:persistentId/export" + "?persistentId=" + datasetPersistentId + "&exporter=" + exporter); + return requestSpecification .get("/api/datasets/export" + "?persistentId=" + datasetPersistentId + "&exporter=" + exporter); - // Wait for the Async call to finish to get the updated data - if (wait) { - sleepForReexport(datasetPersistentId, apiToken, 10); - resp = requestSpecification.get("/api/datasets/export" + "?persistentId=" + datasetPersistentId + "&exporter=" + exporter); - } - return resp; } static Response reexportDatasetAllFormats(String idOrPersistentId) { From eef0d22ff277152409870484e781069ebf10cab5 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 18 Nov 2024 17:37:34 -0500 Subject: [PATCH 277/402] more incremental changes #10977 --- .../dataaccess/GlobusOverlayAccessIO.java | 2 +- .../dataverse/globus/GlobusServiceBean.java | 26 ++++++++++++++++--- 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index 3bf2107e52b..d0da66c38e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -215,7 +215,7 @@ public long retrieveSizeFromMedia() { JsonArray dataArray = responseJson.getJsonArray("DATA"); if (dataArray != null && dataArray.size() != 0) { //File found - return (long) responseJson.getJsonArray("DATA").getJsonObject(0).getJsonNumber("size").longValueExact(); + return (long) dataArray.getJsonObject(0).getJsonNumber("size").longValueExact(); } } else { logger.warning("Response from " + get.getURI().toString() + " was " diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java 
b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 3573b8e05df..013fefd1e34 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -74,6 +74,7 @@ import edu.harvard.iq.dataverse.util.URLTokenUtil; import edu.harvard.iq.dataverse.util.UrlSignerUtil; import edu.harvard.iq.dataverse.util.json.JsonUtil; +import jakarta.json.JsonNumber; import jakarta.json.JsonReader; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; @@ -980,9 +981,16 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut inputList.add(fileId + "IDsplit" + fullPath + "IDsplit" + fileName); } + + // Look up the sizes of all the files in the dataset folder, to avoid + // looking them up one by one later: + // @todo: we should only be doing this if this is a managed store, probably? + GlobusEndpoint endpoint = getGlobusEndpoint(dataset); + Map fileSizeMap = lookupFileSizes(endpoint, endpoint.getBasePath()); // calculateMissingMetadataFields: checksum, mimetype JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList, myLogger); + JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files"); logger.fine("Size: " + newfilesJsonArray.size()); logger.fine("Val: " + JsonUtil.prettyPrint(newfilesJsonArray.getJsonObject(0))); @@ -1006,13 +1014,23 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut if (newfileJsonObject != null) { logger.fine("List Size: " + newfileJsonObject.size()); // if (!newfileJsonObject.get(0).getString("hash").equalsIgnoreCase("null")) { - JsonPatch path = Json.createPatchBuilder() + JsonPatch patch = Json.createPatchBuilder() .add("/md5Hash", newfileJsonObject.get(0).getString("hash")).build(); - fileJsonObject = path.apply(fileJsonObject); - path = Json.createPatchBuilder() + fileJsonObject = patch.apply(fileJsonObject); + patch = Json.createPatchBuilder() .add("/mimeType", newfileJsonObject.get(0).getString("mime")).build(); - fileJsonObject = path.apply(fileJsonObject); + fileJsonObject = patch.apply(fileJsonObject); addFilesJsonData.add(fileJsonObject); + // If we already know the size of this file on the Globus end, + // we'll pass it to /addFiles, to avoid looking up file sizes + // one by one: + if (fileSizeMap != null && fileSizeMap.get(fileId) != null) { + Long uploadedFileSize = fileSizeMap.get(fileId); + myLogger.fine("Found size for file " + fileId + ": " + uploadedFileSize + " bytes"); + patch = Json.createPatchBuilder() + .add("/fileSize", Json.createValue(uploadedFileSize)).build(); + fileJsonObject = patch.apply(fileJsonObject); + } countSuccess++; } else { myLogger.info(fileName From 2b22f9f011c76a9c21a00dc11012c66a396d01e2 Mon Sep 17 00:00:00 2001 From: Florian Fritze Date: Wed, 23 Oct 2024 13:37:39 +0200 Subject: [PATCH 278/402] bugfix: metadataFragment.xhtml --- src/main/webapp/metadataFragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/metadataFragment.xhtml b/src/main/webapp/metadataFragment.xhtml index 723f95148cd..0a3ad249061 100755 --- a/src/main/webapp/metadataFragment.xhtml +++ b/src/main/webapp/metadataFragment.xhtml @@ -130,7 +130,7 @@ - + Date: Tue, 19 Nov 2024 10:43:54 +0100 Subject: [PATCH 279/402] added docu for the fix --- doc/release-notes/display_overview_fix.md | 1 + doc/sphinx-guides/source/api/changelog.rst | 4 ++++ 2 files changed, 5 insertions(+) create mode 100644 
doc/release-notes/display_overview_fix.md diff --git a/doc/release-notes/display_overview_fix.md b/doc/release-notes/display_overview_fix.md new file mode 100644 index 00000000000..73a01435caf --- /dev/null +++ b/doc/release-notes/display_overview_fix.md @@ -0,0 +1 @@ +This bugfix corrects an issue when there are duplicated entries on the metadata page. It is fixed by correcting an IF-clause in metadataFragment.xhtml. \ No newline at end of file diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 92cd4fc941b..e76990f13c5 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -7,6 +7,10 @@ This API changelog is experimental and we would love feedback on its usefulness. :local: :depth: 1 +v6.5 +--- +- duplicated entries are corrected on the metadata page + v6.4 ---- From 54677ce2a5411e46aafeb8d81a7daac5753f5e13 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 19 Nov 2024 08:49:54 -0500 Subject: [PATCH 280/402] file detection using first few bytes disabled on direct upload --- doc/sphinx-guides/source/api/native-api.rst | 1 + doc/sphinx-guides/source/developers/big-data-support.rst | 1 + 2 files changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index b464b6df393..4d6d6c28a49 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3617,6 +3617,7 @@ The fully expanded example above (without environment variables) looks like this Currently the following methods are used to detect file types: - The file type detected by the browser (or sent via API). +- Custom code that reads the first few bytes. As explained at :ref:`s3-direct-upload-features-disabled`, this code is disabled during direct upload to S3. However, this code is active when the "redetect" API is used. - JHOVE: https://jhove.openpreservation.org - The file extension (e.g. ".ipybn") is used, defined in a file called ``MimeTypeDetectionByFileExtension.properties``. - The file name (e.g. "Dockerfile") is used, defined in a file called ``MimeTypeDetectionByFileName.properties``. diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst index 759dd40413b..dad0af5ae87 100644 --- a/doc/sphinx-guides/source/developers/big-data-support.rst +++ b/doc/sphinx-guides/source/developers/big-data-support.rst @@ -44,6 +44,7 @@ Features that are Disabled if S3 Direct Upload is Enabled The following features are disabled when S3 direct upload is enabled. - Unzipping of zip files. (See :ref:`compressed-files`.) +- Detection of file type based on custom code that reads the first few bytes. (See :ref:`redetect-file-type`.) - Extraction of metadata from FITS files. (See :ref:`fits`.) - Creation of NcML auxiliary files (See :ref:`netcdf-and-hdf5`.) - Extraction of a geospatial bounding box from NetCDF and HDF5 files (see :ref:`netcdf-and-hdf5`) unless :ref:`dataverse.netcdf.geo-extract-s3-direct-upload` is set to true. 
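The documentation patch above notes that one of the file type detection methods reads the first few bytes of an uploaded file, and that this step cannot run for direct-to-S3 uploads. As a rough illustration of the idea only — a hypothetical magic-number check, not the project's actual FileUtil logic — such a check might look like this:

```java
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;

public class FirstBytesSketch {

    // Hypothetical magic-number check: reads the first bytes of a stream and
    // compares them against two well-known signatures. The real detection chain
    // described in the docs (browser-supplied type, JHOVE, extension, file name)
    // is much richer than this sketch.
    public static String guessType(InputStream in) throws IOException {
        byte[] header = in.readNBytes(8);
        if (startsWith(header, new byte[] {(byte) 0x89, 'P', 'N', 'G'})) {
            return "image/png";
        }
        if (startsWith(header, new byte[] {'P', 'K', 0x03, 0x04})) {
            return "application/zip";
        }
        return "application/octet-stream";
    }

    private static boolean startsWith(byte[] data, byte[] prefix) {
        return data.length >= prefix.length
                && Arrays.equals(Arrays.copyOf(data, prefix.length), prefix);
    }
}
```

Because a direct-to-S3 upload never streams the file bytes through the Dataverse server, there is no input stream to hand to a check like this, which is why the feature is listed as disabled in the big-data-support docs above.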
From a4261014177e2306c7000b24d934dd140ebae2d1 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 09:55:33 -0500 Subject: [PATCH 281/402] revert v4.1.7 to v4 (latest) --- .github/workflows/deploy_beta_testing.yml | 3 +-- .github/workflows/maven_unit_test.yml | 6 ++---- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index 2c684aa8ad1..4cec08564a4 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -45,8 +45,7 @@ jobs: - uses: actions/checkout@v4 - name: Download war artifact - # using v4.1.7 due to a bug in v4 - uses: actions/download-artifact@v4.1.7 + uses: actions/download-artifact@v4 with: name: built-app path: ./ diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 2ce872cd55c..45180ea7aec 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -107,8 +107,7 @@ jobs: cache: maven # Get the build output from the unit test job - # using v4.1.7 due to a bug in v4 - - uses: actions/download-artifact@v4.1.7 + - uses: actions/download-artifact@v4 with: name: java-artifacts - run: | @@ -141,8 +140,7 @@ jobs: cache: maven # Get the build output from the integration test job - # using v4.1.7 due to a bug in v4 - - uses: actions/download-artifact@v4.1.7 + - uses: actions/download-artifact@v4 with: name: java-reportdir - run: tar -xvf java-reportdir.tar From 89b8a1ccb46d553d0b31af55e6fb3e03c58cbabb Mon Sep 17 00:00:00 2001 From: landreev Date: Tue, 19 Nov 2024 10:02:27 -0500 Subject: [PATCH 282/402] Update doc/sphinx-guides/source/api/native-api.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 4d6d6c28a49..929dac592e0 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3617,7 +3617,7 @@ The fully expanded example above (without environment variables) looks like this Currently the following methods are used to detect file types: - The file type detected by the browser (or sent via API). -- Custom code that reads the first few bytes. As explained at :ref:`s3-direct-upload-features-disabled`, this code is disabled during direct upload to S3. However, this code is active when the "redetect" API is used. +- Custom code that reads the first few bytes. As explained at :ref:`s3-direct-upload-features-disabled`, this method of file type detection is not utilized during direct upload to S3, since by nature of direct upload Dataverse never sees the contents of the file. However, this code is utilized when the "redetect" API is used. - JHOVE: https://jhove.openpreservation.org - The file extension (e.g. ".ipybn") is used, defined in a file called ``MimeTypeDetectionByFileExtension.properties``. - The file name (e.g. "Dockerfile") is used, defined in a file called ``MimeTypeDetectionByFileName.properties``. 
From be0979d8047d5e1aaa2191830b332233a42d91fd Mon Sep 17 00:00:00 2001 From: landreev Date: Tue, 19 Nov 2024 10:02:33 -0500 Subject: [PATCH 283/402] Update doc/sphinx-guides/source/api/native-api.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 929dac592e0..442e4862dab 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3618,7 +3618,7 @@ Currently the following methods are used to detect file types: - The file type detected by the browser (or sent via API). - Custom code that reads the first few bytes. As explained at :ref:`s3-direct-upload-features-disabled`, this method of file type detection is not utilized during direct upload to S3, since by nature of direct upload Dataverse never sees the contents of the file. However, this code is utilized when the "redetect" API is used. -- JHOVE: https://jhove.openpreservation.org +- JHOVE: https://jhove.openpreservation.org . Note that the same applies about direct upload to S3 and the "redirect" API. - The file extension (e.g. ".ipybn") is used, defined in a file called ``MimeTypeDetectionByFileExtension.properties``. - The file name (e.g. "Dockerfile") is used, defined in a file called ``MimeTypeDetectionByFileName.properties``. From 813063e5454623c0cc7b5a32d76b57346ca01d5d Mon Sep 17 00:00:00 2001 From: landreev Date: Tue, 19 Nov 2024 10:02:45 -0500 Subject: [PATCH 284/402] Update doc/sphinx-guides/source/developers/big-data-support.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/big-data-support.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst index dad0af5ae87..f3d98fae0bf 100644 --- a/doc/sphinx-guides/source/developers/big-data-support.rst +++ b/doc/sphinx-guides/source/developers/big-data-support.rst @@ -44,7 +44,7 @@ Features that are Disabled if S3 Direct Upload is Enabled The following features are disabled when S3 direct upload is enabled. - Unzipping of zip files. (See :ref:`compressed-files`.) -- Detection of file type based on custom code that reads the first few bytes. (See :ref:`redetect-file-type`.) +- Detection of file type based on JHOVE and custom code that reads the first few bytes. (See :ref:`redetect-file-type`.) - Extraction of metadata from FITS files. (See :ref:`fits`.) - Creation of NcML auxiliary files (See :ref:`netcdf-and-hdf5`.) - Extraction of a geospatial bounding box from NetCDF and HDF5 files (see :ref:`netcdf-and-hdf5`) unless :ref:`dataverse.netcdf.geo-extract-s3-direct-upload` is set to true. From e5f841597dcd4be4150c969da40d70777a3ed773 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 19 Nov 2024 10:04:48 -0500 Subject: [PATCH 285/402] Update doc/sphinx-guides/source/api/native-api.rst --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 442e4862dab..691aa94c834 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3618,7 +3618,7 @@ Currently the following methods are used to detect file types: - The file type detected by the browser (or sent via API). 
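The documentation edits above repeatedly point to the "redetect" API as the way to re-run these type checks after the fact, for example on files that arrived via direct S3 upload. Below is a hedged sketch of invoking it from Java; the /api/files/{id}/redetect path, the dryRun query parameter, and the X-Dataverse-key header are taken from the Dataverse guides' redetect-file-type section rather than from this patch series, so verify them against your installation's docs before relying on this.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class RedetectFileTypeSketch {

    // Asks the server to re-run file type detection for one file.
    // With dryRun=true the server reports the detected type without saving it.
    public static String redetect(String serverUrl, String apiToken, long fileId, boolean dryRun)
            throws Exception {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(serverUrl + "/api/files/" + fileId + "/redetect?dryRun=" + dryRun))
                .header("X-Dataverse-key", apiToken)
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        return response.body();
    }
}
```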
- Custom code that reads the first few bytes. As explained at :ref:`s3-direct-upload-features-disabled`, this method of file type detection is not utilized during direct upload to S3, since by nature of direct upload Dataverse never sees the contents of the file. However, this code is utilized when the "redetect" API is used. -- JHOVE: https://jhove.openpreservation.org . Note that the same applies about direct upload to S3 and the "redirect" API. +- JHOVE: https://jhove.openpreservation.org . Note that the same applies about direct upload to S3 and the "redetect" API. - The file extension (e.g. ".ipybn") is used, defined in a file called ``MimeTypeDetectionByFileExtension.properties``. - The file name (e.g. "Dockerfile") is used, defined in a file called ``MimeTypeDetectionByFileName.properties``. From dcb2f7daa2ee05cd88ce01fd7bd0a7de9daba234 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 19 Nov 2024 10:55:06 -0500 Subject: [PATCH 286/402] we no longer show the shibboleth.te file on the Shib page #10193 --- doc/sphinx-guides/source/developers/selinux.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/selinux.rst b/doc/sphinx-guides/source/developers/selinux.rst index ca41ab82d25..1d3d01610fe 100644 --- a/doc/sphinx-guides/source/developers/selinux.rst +++ b/doc/sphinx-guides/source/developers/selinux.rst @@ -8,7 +8,7 @@ SELinux Introduction ------------ -The ``shibboleth.te`` file below that is mentioned in the :doc:`/installation/shibboleth` section of the Installation Guide was created on CentOS 6 as part of https://github.com/IQSS/dataverse/issues/3406 but may need to be revised for future versions of RHEL/CentOS (pull requests welcome!). The file is versioned with the docs and can be found in the following location: +The ``shibboleth.te`` file below that was mentioned in the :doc:`/installation/shibboleth` section of the Installation Guide was created on CentOS 6 as part of https://github.com/IQSS/dataverse/issues/3406 but may need to be revised for future versions of RHEL/CentOS (pull requests welcome!). 
The file is versioned with the docs and can be found in the following location: ``doc/sphinx-guides/source/_static/installation/files/etc/selinux/targeted/src/policy/domains/misc/shibboleth.te`` From 804d284c99322c619db2ec36d3751db6b1ad5f0f Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 11:04:49 -0500 Subject: [PATCH 287/402] Update doc/release-notes/220-harvard-edu-audit-files.md Co-authored-by: Philip Durbin --- doc/release-notes/220-harvard-edu-audit-files.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/220-harvard-edu-audit-files.md b/doc/release-notes/220-harvard-edu-audit-files.md index 536554313cf..10c27af0dec 100644 --- a/doc/release-notes/220-harvard-edu-audit-files.md +++ b/doc/release-notes/220-harvard-edu-audit-files.md @@ -13,4 +13,4 @@ curl "http://localhost:8080/api/admin/datafiles/auditFiles curl "http://localhost:8080/api/admin/datafiles/auditFiles?firstId=0&lastId=1000" curl "http://localhost:8080/api/admin/datafiles/auditFiles?DatasetIdentifierList=doi:10.5072/FK2/RVNT9Q,doi:10.5072/FK2/RVNT9Q -For more information, see issue [#220](https://github.com/IQSS/dataverse.harvard.edu/issues/220) +For more information, see issue [the docs](https://dataverse-guide--11016.org.readthedocs.build/en/11016/api/native-api.html#datafile-audit), #11016, and [#220](https://github.com/IQSS/dataverse.harvard.edu/issues/220) From a62193cfece34408616bfc54f6547291f8c38b62 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 11:05:05 -0500 Subject: [PATCH 288/402] Update doc/sphinx-guides/source/api/native-api.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 6fc10bdfa08..455a0bc1ac2 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6203,7 +6203,7 @@ Note that if you are attempting to validate a very large number of datasets in y Datafile Audit ~~~~~~~~~~~~~~ -Produce an Audit report of missing files and FileMetadata for Datasets. +Produce an audit report of missing files and FileMetadata for Datasets. Scans the Datasets in the database and verifies that the stored files exist. If the files are missing or if the FileMetadata is missing this information is returned in a Json response:: curl "$SERVER_URL/api/admin/datafiles/auditFiles" From d0df4f07f126556760091f288baab46077bb9752 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 11:05:19 -0500 Subject: [PATCH 289/402] Update doc/sphinx-guides/source/api/native-api.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 455a0bc1ac2..c7230a9f48c 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6204,7 +6204,7 @@ Datafile Audit ~~~~~~~~~~~~~~ Produce an audit report of missing files and FileMetadata for Datasets. -Scans the Datasets in the database and verifies that the stored files exist. 
If the files are missing or if the FileMetadata is missing this information is returned in a Json response:: +Scans the Datasets in the database and verifies that the stored files exist. If the files are missing or if the FileMetadata is missing, this information is returned in a JSON response:: curl "$SERVER_URL/api/admin/datafiles/auditFiles" From a1d1030b845561161d0bc8b509d45b38673e9306 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 11:05:27 -0500 Subject: [PATCH 290/402] Update doc/sphinx-guides/source/api/native-api.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index c7230a9f48c..6c20641b72e 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6218,7 +6218,7 @@ Auditing specific Datasets (comma separated list):: curl "$SERVER_URL/api/admin/datafiles/auditFiles?DatasetIdentifierList=doi.org/10.5072/FK2/JXYBJS,doi.org/10.7910/DVN/MPU019 -Sample Json Audit Response:: +Sample JSON Audit Response:: { "status": "OK", From e433ee26925062a3f80c829199bfa7d86f061a59 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 11:07:36 -0500 Subject: [PATCH 291/402] Update doc/release-notes/220-harvard-edu-audit-files.md Co-authored-by: Philip Durbin --- doc/release-notes/220-harvard-edu-audit-files.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/220-harvard-edu-audit-files.md b/doc/release-notes/220-harvard-edu-audit-files.md index 10c27af0dec..b184c0e74dc 100644 --- a/doc/release-notes/220-harvard-edu-audit-files.md +++ b/doc/release-notes/220-harvard-edu-audit-files.md @@ -1,6 +1,6 @@ ### New API to Audit Datafiles across the database -This is a superuser only tool to audit Datasets with DataFiles where the physical files are missing or the file metadata is missing. +This is a superuser only API endpoint to audit Datasets with DataFiles where the physical files are missing or the file metadata is missing. The Datasets scanned can be limited by optional firstId and lastId query parameters, or a given CSV list of Dataset Identifiers. Once the audit report is generated, an Administrator can either delete the missing file(s) from the Dataset or contact the author to re-upload the missing file(s). From e4751c59d3ecc1a5ad4d3d3aeea22f140a1f585b Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 11:07:49 -0500 Subject: [PATCH 292/402] Update doc/release-notes/220-harvard-edu-audit-files.md Co-authored-by: Philip Durbin --- doc/release-notes/220-harvard-edu-audit-files.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/220-harvard-edu-audit-files.md b/doc/release-notes/220-harvard-edu-audit-files.md index b184c0e74dc..a0bc60842e5 100644 --- a/doc/release-notes/220-harvard-edu-audit-files.md +++ b/doc/release-notes/220-harvard-edu-audit-files.md @@ -2,7 +2,7 @@ This is a superuser only API endpoint to audit Datasets with DataFiles where the physical files are missing or the file metadata is missing. The Datasets scanned can be limited by optional firstId and lastId query parameters, or a given CSV list of Dataset Identifiers. 
-Once the audit report is generated, an Administrator can either delete the missing file(s) from the Dataset or contact the author to re-upload the missing file(s). +Once the audit report is generated, a superuser can either delete the missing file(s) from the Dataset or contact the author to re-upload the missing file(s). The Json response includes: - List of files in each DataFile where the file exists in the database but the physical file is not on the file store. From 456f9f6feaef342ea2db3c306b6fc691b2985bed Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 11:08:00 -0500 Subject: [PATCH 293/402] Update doc/release-notes/220-harvard-edu-audit-files.md Co-authored-by: Philip Durbin --- doc/release-notes/220-harvard-edu-audit-files.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/220-harvard-edu-audit-files.md b/doc/release-notes/220-harvard-edu-audit-files.md index a0bc60842e5..160aa2e4b2f 100644 --- a/doc/release-notes/220-harvard-edu-audit-files.md +++ b/doc/release-notes/220-harvard-edu-audit-files.md @@ -4,7 +4,7 @@ This is a superuser only API endpoint to audit Datasets with DataFiles where the The Datasets scanned can be limited by optional firstId and lastId query parameters, or a given CSV list of Dataset Identifiers. Once the audit report is generated, a superuser can either delete the missing file(s) from the Dataset or contact the author to re-upload the missing file(s). -The Json response includes: +The JSON response includes: - List of files in each DataFile where the file exists in the database but the physical file is not on the file store. - List of DataFiles where the FileMetadata is missing. - Other failures found when trying to process the Datasets From 9b156817cc376f121d86184433d2994dbf36cc47 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 11:08:31 -0500 Subject: [PATCH 294/402] Update src/main/java/edu/harvard/iq/dataverse/api/Admin.java Co-authored-by: Philip Durbin --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index ecd9b71cc8d..e8e8402bfbc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2526,7 +2526,7 @@ public Response getFeatureFlag(@PathParam("flag") String flagIn) { @Path("/datafiles/auditFiles") public Response getAuditFiles(@Context ContainerRequestContext crc, @QueryParam("firstId") Long firstId, @QueryParam("lastId") Long lastId, - @QueryParam("DatasetIdentifierList") String DatasetIdentifierList) throws WrappedResponse { + @QueryParam("datasetIdentifierList") String datasetIdentifierList) throws WrappedResponse { try { AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); if (!user.isSuperuser()) { From 2586c331498cfff72f32c057fb021f5724b27adb Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 11:11:09 -0500 Subject: [PATCH 295/402] fix camelcase for datasetIdentifierList --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index e8e8402bfbc..ac01d669ef0 
100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2542,12 +2542,12 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, long endId = (lastId == null ? Long.MAX_VALUE : lastId); List datasetIdentifiers; - if (DatasetIdentifierList == null || DatasetIdentifierList.isEmpty()) { + if (datasetIdentifierList == null || datasetIdentifierList.isEmpty()) { datasetIdentifiers = Collections.emptyList(); } else { startId = 0; endId = Long.MAX_VALUE; - datasetIdentifiers = List.of(DatasetIdentifierList.split(",")); + datasetIdentifiers = List.of(datasetIdentifierList.split(",")); } if (endId < startId) { return badRequest("Invalid Parameters: lastId must be equal to or greater than firstId"); From abfc7385a304284e5611b09c4c557c1e33d48d11 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 11:21:35 -0500 Subject: [PATCH 296/402] fix camelcase for datasetIdentifierList --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 6c20641b72e..2f7bc0a4880 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6216,7 +6216,7 @@ For auditing the Datasets in a paged manor (firstId and lastId):: Auditing specific Datasets (comma separated list):: - curl "$SERVER_URL/api/admin/datafiles/auditFiles?DatasetIdentifierList=doi.org/10.5072/FK2/JXYBJS,doi.org/10.7910/DVN/MPU019 + curl "$SERVER_URL/api/admin/datafiles/auditFiles?datasetIdentifierList=doi.org/10.5072/FK2/JXYBJS,doi.org/10.7910/DVN/MPU019 Sample JSON Audit Response:: From d77ff0d7c9377cb7b0aa25d0a02750753f537460 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 19 Nov 2024 12:11:34 -0500 Subject: [PATCH 297/402] better formatting for saved search docs #10893 --- doc/sphinx-guides/source/api/native-api.rst | 55 ++++++++++++++++----- 1 file changed, 44 insertions(+), 11 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 54f9eed2703..01e7173c84f 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6081,27 +6081,60 @@ Saved Search The Saved Search, Linked Dataverses, and Linked Datasets features are only accessible to superusers except for linking a dataset. The following API endpoints were added to help people with access to the "admin" API make use of these features in their current form. Keep in mind that they are partially experimental. The update of all saved search is run by a timer once a week (See :ref:`saved-search-timer`) so if you just created a saved search, you can run manually the ``makelinks`` endpoint that will find new dataverses and datasets that match the saved search and then link the search results to the dataverse in which the saved search is defined. -List all saved searches. :: +List All Saved Searches +^^^^^^^^^^^^^^^^^^^^^^^ - GET http://$SERVER/api/admin/savedsearches/list +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + + curl "$SERVER_URL/api/admin/savedsearches/list" + +List a Saved Search by Database ID +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. 
code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export ID=1 + + curl "$SERVER_URL/api/admin/savedsearches/$ID" + +Delete a Saved Search by Database ID +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The ``unlink=true`` query parameter unlinks all links (linked dataset or Dataverse collection) associated with the deleted saved search. Use of this parameter should be well considered as you cannot know if the links were created manually or by the saved search. After deleting a saved search with ``unlink=true``, we recommend running ``/makelinks/all`` just in case there was a dataset that was linked by another saved search. (Saved searches can link the same dataset.) Reindexing might be necessary as well. -List a saved search by database id. :: +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export ID=1 - GET http://$SERVER/api/admin/savedsearches/$id + curl -X DELETE "$SERVER_URL/api/admin/savedsearches/$ID?unlink=true" -Delete a saved search by database id. +Execute a Saved Search and Make Links +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The ``unlink=true`` query parameter unlinks all links (linked dataset or Dataverse collection) associated with the deleted saved search. Use of this parameter should be well considered as you cannot know if the links were created manually or by the saved search. After deleting a saved search with ``unlink=true``, we recommend running ``/makelinks/all`` just in case there was a dataset that was linked by another saved search. (Saved searches can link the same dataset.) Reindexing might be necessary as well. :: +Execute a saved search by database id and make links to Dataverse collections and datasets that are found. The JSON response indicates which Dataverse collections and datasets were newly linked versus already linked. The ``debug=true`` query parameter adds to the JSON response extra information about the saved search being executed (which you could also get by listing the saved search). - DELETE http://$SERVER/api/admin/savedsearches/$id?unlink=true +.. code-block:: bash -Execute a saved search by database id and make links to Dataverse collections and datasets that are found. The JSON response indicates which Dataverse collections and datasets were newly linked versus already linked. The ``debug=true`` query parameter adds to the JSON response extra information about the saved search being executed (which you could also get by listing the saved search). :: + export SERVER_URL=https://demo.dataverse.org + export ID=1 - PUT http://$SERVER/api/admin/savedsearches/makelinks/$id?debug=true + curl -X PUT "$SERVER_URL/api/admin/savedsearches/makelinks/$ID?debug=true" -Execute all saved searches and make links to Dataverse collections and datasets that are found. ``debug`` works as described above. This happens automatically with a timer. For details, see :ref:`saved-search-timer` in the Admin Guide. :: +Execute All Saved Searches and Make Links +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Execute all saved searches and make links to Dataverse collections and datasets that are found. ``debug`` works as described above. This happens automatically with a timer. For details, see :ref:`saved-search-timer` in the Admin Guide. + +.. 
code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export ID=1 - PUT http://$SERVER/api/admin/savedsearches/makelinks/all?debug=true + curl -X PUT "$SERVER_URL/api/admin/savedsearches/makelinks/all?debug=true" Dataset Integrity ~~~~~~~~~~~~~~~~~ From 18db323ab6ac8c11390d306536dcd723861da354 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 19 Nov 2024 13:12:43 -0500 Subject: [PATCH 298/402] #8184 fix typo --- src/main/java/propertyFiles/Bundle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 622a307ad22..bdf8634ef59 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1734,7 +1734,7 @@ dataset.privateurl.anonymous.button.label=Create Anonymous Preview URL dataset.privateurl.anonymous.description=Create a URL that others can use to access an anonymized view of this unpublished dataset version. Metadata that could identify the dataset author will not be displayed. Non-identifying metadata will be visible. dataset.privateurl.anonymous.description.paragraph.two=The dataset's files are not changed and will be accessible if they're not restricted. Users of the Anonymous Preview URL will not be able to see the name of the Dataverse that this dataset is in but will be able to see the name of the repository, which might expose the dataset authors' identities. dataset.privateurl.createPrivateUrl=Create Preview URL -dataset.privateurl.introduction=You can create a Preview URL to copy and share with others who will not need a repository account to review this unpublished dataset version. Once the dataset is published ot if the URL is disabled, the URL will no longer work and will point to a "Page not found" page. +dataset.privateurl.introduction=You can create a Preview URL to copy and share with others who will not need a repository account to review this unpublished dataset version. Once the dataset is published or if the URL is disabled, the URL will no longer work and will point to a "Page not found" page. 
dataset.privateurl.createPrivateUrl.anonymized=Create URL for Anonymized Access dataset.privateurl.createPrivateUrl.anonymized.unavailable=Anonymized Access is not available once a version of the dataset has been published dataset.privateurl.disablePrivateUrl=Disable Preview URL From 32b7d32361822e2a87fdc51f124596335c9d72c8 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 19 Nov 2024 14:00:02 -0500 Subject: [PATCH 299/402] #8184 update refresh delete popup --- src/main/webapp/dataset.xhtml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index ee69f76cca2..dc1517f2457 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1215,7 +1215,7 @@ - + @@ -1254,7 +1254,7 @@ - + From b64addc288607f9bfb03acef00274a862757f3d4 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 14:03:57 -0500 Subject: [PATCH 300/402] reformat json output --- doc/sphinx-guides/source/api/native-api.rst | 15 ++++-- .../edu/harvard/iq/dataverse/api/Admin.java | 53 +++++++++++++------ .../edu/harvard/iq/dataverse/api/AdminIT.java | 3 +- .../edu/harvard/iq/dataverse/api/UtilIT.java | 2 +- 4 files changed, 52 insertions(+), 21 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 2f7bc0a4880..c4eaa405efb 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6237,7 +6237,10 @@ Sample JSON Audit Response:: "identifier": "FK2/JXYBJS", "persistentURL": "https://doi.org/10.5072/FK2/JXYBJS", "missingFileMetadata": [ - "local://1930cce4f2d-855ccc51fcbb, DataFile Id:7" + { + "StorageIdentifier": "local://1930cce4f2d-855ccc51fcbb", + "DataFileId": "7" + } ] }, { @@ -6245,12 +6248,18 @@ Sample JSON Audit Response:: "identifier": "DVN/MPU019", "persistentURL": "https://doi.org/10.7910/DVN/MPU019", "missingFiles": [ - "s3://dvn-cloud:298910, jihad_metadata_edited.csv" + { + "StorageIdentifier": "s3://dvn-cloud:298910", + "label": "jihad_metadata_edited.csv" + } ] } ], "failures": [ - "DatasetIdentifier Not Found: doi.org/10.5072/FK2/XXXXXX" + { + "DatasetIdentifier": "doi.org/10.5072/FK2/XXXXXX", + "Reason": "Not Found" + } ] } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index ac01d669ef0..774ee675949 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2536,7 +2536,6 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, return wr.getResponse(); } - List failures = new ArrayList<>(); int datasetsChecked = 0; long startId = (firstId == null ? 0 : firstId); long endId = (lastId == null ? 
Long.MAX_VALUE : lastId); @@ -2554,6 +2553,9 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, } NullSafeJsonBuilder jsonObjectBuilder = NullSafeJsonBuilder.jsonObjectBuilder(); + JsonArrayBuilder jsonDatasetsArrayBuilder = Json.createArrayBuilder(); + JsonArrayBuilder jsonFailuresArrayBuilder = Json.createArrayBuilder(); + if (startId > 0) { jsonObjectBuilder.add("firstId", startId); } @@ -2575,13 +2577,15 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, if (d != null) { datasetIds.add(d.getId()); } else { - failures.add("DatasetIdentifier Not Found: " + dId); + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("DatasetIdentifier",dId); + job.add("Reason","Not Found"); + jsonFailuresArrayBuilder.add(job); } }); jsonObjectBuilder.add("DatasetIdentifierList", jab); } - JsonArrayBuilder jsonDatasetsArrayBuilder = Json.createArrayBuilder(); for (Long datasetId : datasetIds) { if (datasetId < startId) { continue; @@ -2592,8 +2596,11 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, try { dataset = findDatasetOrDie(String.valueOf(datasetId)); datasetsChecked++; - } catch (WrappedResponse ex) { - failures.add("DatasetId:" + datasetId + " Reason:" + ex.getMessage()); + } catch (WrappedResponse e) { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("DatasetId", datasetId); + job.add("Reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); continue; } @@ -2610,17 +2617,23 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, String storageId = df.getStorageIdentifier(); FileMetadata fm = df.getFileMetadata(); if (!datafileIO.exists()) { - missingFiles.add(storageId + ", " + (fm != null ? fm.getLabel() : df.getContentType())); + missingFiles.add(storageId + "," + (fm != null ? 
"label,"+fm.getLabel() : "type,"+df.getContentType())); } if (fm == null) { - missingFileMetadata.add(storageId + ", DataFile Id:" + df.getId()); + missingFileMetadata.add(storageId + ",DataFileId," + df.getId()); } } catch (IOException e) { - failures.add("DataFileId:" + df.getId() + ", " + e.getMessage()); + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("DataFileId", df.getId()); + job.add("Reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); } }); } catch (IOException e) { - failures.add("DatasetId:" + datasetId + ", " + e.getMessage()); + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("DatasetId", datasetId); + job.add("Reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); } JsonObjectBuilder job = Json.createObjectBuilder(); @@ -2630,12 +2643,24 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, job.add("persistentURL", dataset.getPersistentURL()); if (!missingFileMetadata.isEmpty()) { JsonArrayBuilder jabMissingFileMetadata = Json.createArrayBuilder(); - missingFileMetadata.forEach(jabMissingFileMetadata::add); + missingFileMetadata.forEach(mm -> { + String[] missingMetadata = mm.split(","); + NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder() + .add("StorageIdentifier", missingMetadata[0]) + .add(missingMetadata[1], missingMetadata[2]); + jabMissingFileMetadata.add(jobj); + }); job.add("missingFileMetadata", jabMissingFileMetadata); } if (!missingFiles.isEmpty()) { JsonArrayBuilder jabMissingFiles = Json.createArrayBuilder(); - missingFiles.forEach(jabMissingFiles::add); + missingFiles.forEach(mf -> { + String[] missingFile = mf.split(","); + NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder() + .add("StorageIdentifier", missingFile[0]) + .add(missingFile[1], missingFile[2]); + jabMissingFiles.add(jobj); + }); job.add("missingFiles", jabMissingFiles); } jsonDatasetsArrayBuilder.add(job); @@ -2644,11 +2669,7 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, jsonObjectBuilder.add("datasetsChecked", datasetsChecked); jsonObjectBuilder.add("datasets", jsonDatasetsArrayBuilder); - if (!failures.isEmpty()) { - JsonArrayBuilder jsonFailuresArrayBuilder = Json.createArrayBuilder(); - failures.forEach(jsonFailuresArrayBuilder::add); - jsonObjectBuilder.add("failures", jsonFailuresArrayBuilder); - } + jsonObjectBuilder.add("failures", jsonFailuresArrayBuilder); return ok(jsonObjectBuilder); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index e639a2f011d..84011d7ac73 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -941,7 +941,8 @@ public void testFindMissingFiles() { resp.prettyPrint(); resp.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.failures[0]", equalTo("DatasetIdentifier Not Found: bad/id")); + .body("data.failures[0].DatasetIdentifier", equalTo("bad/id")) + .body("data.failures[0].Reason", equalTo("Not Found")); } private String createTestNonSuperuserApiToken() { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 2fb205f1271..c450c587543 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -250,7 +250,7 @@ public static Response auditFiles(String apiToken, Long firstId, Long lastId, St params = 
params + (params.isEmpty() ? "?" : "&") + "lastId="+ lastId; } if (csvList != null) { - params = params + (params.isEmpty() ? "?" : "&") + "DatasetIdentifierList="+ csvList; + params = params + (params.isEmpty() ? "?" : "&") + "datasetIdentifierList="+ csvList; } return given() .header(API_TOKEN_HTTP_HEADER, apiToken) From e89f1ca91e939d99521b2eeddc3643dd4333b579 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 15:17:34 -0500 Subject: [PATCH 301/402] reformat json output --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 774ee675949..29f366d91b9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2640,6 +2640,8 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, if (!missingFiles.isEmpty() || !missingFileMetadata.isEmpty()) { job.add("id", dataset.getId()); job.add("identifier", dataset.getIdentifier()); + job.add("authority", dataset.getAuthority()); + job.add("protocol", dataset.getProtocol()); job.add("persistentURL", dataset.getPersistentURL()); if (!missingFileMetadata.isEmpty()) { JsonArrayBuilder jabMissingFileMetadata = Json.createArrayBuilder(); From 7e9aae98574e357b4b64b1feb024a89fe2c9dac2 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 15:19:28 -0500 Subject: [PATCH 302/402] reformat json output --- doc/sphinx-guides/source/api/native-api.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index c4eaa405efb..b9c30d71fa2 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6235,6 +6235,8 @@ Sample JSON Audit Response:: { "id": 6, "identifier": "FK2/JXYBJS", + "authority": "10.5072", + "protocol": "doi", "persistentURL": "https://doi.org/10.5072/FK2/JXYBJS", "missingFileMetadata": [ { @@ -6246,6 +6248,8 @@ Sample JSON Audit Response:: { "id": 47731, "identifier": "DVN/MPU019", + "authority": "10.7910", + "protocol": "doi", "persistentURL": "https://doi.org/10.7910/DVN/MPU019", "missingFiles": [ { From 11cbe8515e0c60443d76c78ce972ca4f1c83c16a Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 15:42:27 -0500 Subject: [PATCH 303/402] reformat json output --- doc/release-notes/220-harvard-edu-audit-files.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/220-harvard-edu-audit-files.md b/doc/release-notes/220-harvard-edu-audit-files.md index 160aa2e4b2f..c697bc225c0 100644 --- a/doc/release-notes/220-harvard-edu-audit-files.md +++ b/doc/release-notes/220-harvard-edu-audit-files.md @@ -11,6 +11,6 @@ The JSON response includes: curl "http://localhost:8080/api/admin/datafiles/auditFiles curl "http://localhost:8080/api/admin/datafiles/auditFiles?firstId=0&lastId=1000" -curl "http://localhost:8080/api/admin/datafiles/auditFiles?DatasetIdentifierList=doi:10.5072/FK2/RVNT9Q,doi:10.5072/FK2/RVNT9Q +curl "http://localhost:8080/api/admin/datafiles/auditFiles?datasetIdentifierList=doi:10.5072/FK2/RVNT9Q,doi:10.5072/FK2/RVNT9Q For more information, see issue [the 
docs](https://dataverse-guide--11016.org.readthedocs.build/en/11016/api/native-api.html#datafile-audit), #11016, and [#220](https://github.com/IQSS/dataverse.harvard.edu/issues/220) From 62937872c5cb7b408ce90cd79f26f6cf45921721 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 19 Nov 2024 16:37:05 -0500 Subject: [PATCH 304/402] a quick bug fix; changed verbose logging to .fine. #10977 --- .../datasetutility/OptionalFileParams.java | 6 ++++++ .../impl/CreateNewDataFilesCommand.java | 2 -- .../iq/dataverse/globus/GlobusServiceBean.java | 18 ++++++++++++------ 3 files changed, 18 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java index c1be6424a84..54844160163 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java @@ -39,6 +39,12 @@ * - Provenance related information * * @author rmp553 + * @todo (?) We may want to consider renaming this class to DataFileParams or + * DataFileInfo... it was originally created to encode some bits of info - + * the file "tags" specifically, that didn't fit in elsewhere in the normal + * workflow; but it's been expanded to cover pretty much everything else associated + * with DataFiles and it's not really "optional" anymore when, for example, used + * in the direct upload workflow. (?) */ public class OptionalFileParams { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 172c92dc1fd..e9a2025b112 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -640,7 +640,6 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException createIngestFailureReport(datafile, warningMessage); datafile.SetIngestProblem(); } - logger.info("datafile size: " + datafile.getFilesize()); if (datafile.getFilesize() < 0) { datafile.setFilesize(fileSize); } @@ -659,7 +658,6 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + fileSize); } - logger.info("datafile size (again): " + datafile.getFilesize()); return CreateDataFileResult.success(fileName, finalType, datafiles); } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 013fefd1e34..3d1c5a1044d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -285,12 +285,11 @@ private int makeDir(GlobusEndpoint endpoint, String dir) { return result.status; } - private Map lookupFileSizes(GlobusEndpoint endpoint, String dir) { - Map ret = new HashMap<>(); - + private Map lookupFileSizes(GlobusEndpoint endpoint, String dir) { MakeRequestResponse result; try { + logger.fine("Attempting to look up the contents of the Globus folder "+dir); URL url = new URL( "https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint.getId() + "/ls?path=" + dir); @@ -303,13 +302,16 @@ private Map lookupFileSizes(GlobusEndpoint endpoint, String dir) { default: 
logger.warning("Status " + result.status + " received when looking up dir " + dir); logger.fine("Response: " + result.jsonResponse); + return null; } } catch (MalformedURLException ex) { // Misconfiguration - logger.warning("Failed to create dir on " + endpoint.getId()); + logger.warning("Failed to list the contents of the directory "+ dir + " on endpoint " + endpoint.getId()); return null; } + Map ret = new HashMap<>(); + JsonObject listObject = JsonUtil.getJsonObject(result.jsonResponse); JsonArray dataArray = listObject.getJsonArray("DATA"); @@ -317,6 +319,8 @@ private Map lookupFileSizes(GlobusEndpoint endpoint, String dir) { for (int i = 0; i < dataArray.size(); i++) { String dataType = dataArray.getJsonObject(i).getString("DATA_TYPE", null); if (dataType != null && dataType.equals("file")) { + // is it safe to assume that any entry with a valid "DATA_TYPE": "file" + // will also have valid "name" and "size" entries? String fileName = dataArray.getJsonObject(i).getString("name"); long fileSize = dataArray.getJsonObject(i).getJsonNumber("size").longValueExact(); ret.put(fileName, fileSize); @@ -1020,17 +1024,19 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut patch = Json.createPatchBuilder() .add("/mimeType", newfileJsonObject.get(0).getString("mime")).build(); fileJsonObject = patch.apply(fileJsonObject); - addFilesJsonData.add(fileJsonObject); // If we already know the size of this file on the Globus end, // we'll pass it to /addFiles, to avoid looking up file sizes // one by one: if (fileSizeMap != null && fileSizeMap.get(fileId) != null) { Long uploadedFileSize = fileSizeMap.get(fileId); - myLogger.fine("Found size for file " + fileId + ": " + uploadedFileSize + " bytes"); + myLogger.info("Found size for file " + fileId + ": " + uploadedFileSize + " bytes"); patch = Json.createPatchBuilder() .add("/fileSize", Json.createValue(uploadedFileSize)).build(); fileJsonObject = patch.apply(fileJsonObject); + } else { + logger.warning("No file size entry found for file "+fileId); } + addFilesJsonData.add(fileJsonObject); countSuccess++; } else { myLogger.info(fileName From 3eec3663c96835b14f6a6444b8bc0055b4835241 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 19 Nov 2024 16:53:28 -0500 Subject: [PATCH 305/402] adding directory label to json and changing camelCase --- doc/sphinx-guides/source/api/native-api.rst | 17 +++++----- .../edu/harvard/iq/dataverse/api/Admin.java | 32 +++++++++++-------- .../edu/harvard/iq/dataverse/api/AdminIT.java | 4 +-- 3 files changed, 29 insertions(+), 24 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index b9c30d71fa2..84e8bf45d9d 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6216,7 +6216,7 @@ For auditing the Datasets in a paged manor (firstId and lastId):: Auditing specific Datasets (comma separated list):: - curl "$SERVER_URL/api/admin/datafiles/auditFiles?datasetIdentifierList=doi.org/10.5072/FK2/JXYBJS,doi.org/10.7910/DVN/MPU019 + curl "$SERVER_URL/api/admin/datafiles/auditFiles?datasetIdentifierList=doi:10.5072/FK2/JXYBJS,doi:10.7910/DVN/MPU019 Sample JSON Audit Response:: @@ -6225,7 +6225,7 @@ Sample JSON Audit Response:: "data": { "firstId": 0, "lastId": 100, - "DatasetIdentifierList": [ + "datasetIdentifierList": [ "doi.org/10.5072/FK2/XXXXXX", "doi.org/10.5072/FK2/JXYBJS", "doi.org/10.7910/DVN/MPU019" @@ -6240,8 +6240,8 @@ 
Sample JSON Audit Response:: "persistentURL": "https://doi.org/10.5072/FK2/JXYBJS", "missingFileMetadata": [ { - "StorageIdentifier": "local://1930cce4f2d-855ccc51fcbb", - "DataFileId": "7" + "storageIdentifier": "local://1930cce4f2d-855ccc51fcbb", + "dataFileId": "7" } ] }, @@ -6253,16 +6253,17 @@ Sample JSON Audit Response:: "persistentURL": "https://doi.org/10.7910/DVN/MPU019", "missingFiles": [ { - "StorageIdentifier": "s3://dvn-cloud:298910", - "label": "jihad_metadata_edited.csv" + "storageIdentifier": "s3://dvn-cloud:298910", + "directoryLabel": "trees", + "label": "trees.png" } ] } ], "failures": [ { - "DatasetIdentifier": "doi.org/10.5072/FK2/XXXXXX", - "Reason": "Not Found" + "datasetIdentifier": "doi.org/10.5072/FK2/XXXXXX", + "reason": "Not Found" } ] } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 29f366d91b9..793e472ddac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2578,12 +2578,12 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, datasetIds.add(d.getId()); } else { NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); - job.add("DatasetIdentifier",dId); - job.add("Reason","Not Found"); + job.add("datasetIdentifier",dId); + job.add("reason","Not Found"); jsonFailuresArrayBuilder.add(job); } }); - jsonObjectBuilder.add("DatasetIdentifierList", jab); + jsonObjectBuilder.add("datasetIdentifierList", jab); } for (Long datasetId : datasetIds) { @@ -2598,8 +2598,8 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, datasetsChecked++; } catch (WrappedResponse e) { NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); - job.add("DatasetId", datasetId); - job.add("Reason", e.getMessage()); + job.add("datasetId", datasetId); + job.add("reason", e.getMessage()); jsonFailuresArrayBuilder.add(job); continue; } @@ -2617,22 +2617,24 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, String storageId = df.getStorageIdentifier(); FileMetadata fm = df.getFileMetadata(); if (!datafileIO.exists()) { - missingFiles.add(storageId + "," + (fm != null ? "label,"+fm.getLabel() : "type,"+df.getContentType())); + missingFiles.add(storageId + "," + (fm != null ? + (fm.getDirectoryLabel() != null || !fm.getDirectoryLabel().isEmpty() ? 
"directoryLabel,"+fm.getDirectoryLabel()+"," : "") + +"label,"+fm.getLabel() : "type,"+df.getContentType())); } if (fm == null) { - missingFileMetadata.add(storageId + ",DataFileId," + df.getId()); + missingFileMetadata.add(storageId + ",dataFileId," + df.getId()); } } catch (IOException e) { NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); - job.add("DataFileId", df.getId()); - job.add("Reason", e.getMessage()); + job.add("dataFileId", df.getId()); + job.add("reason", e.getMessage()); jsonFailuresArrayBuilder.add(job); } }); } catch (IOException e) { NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); - job.add("DatasetId", datasetId); - job.add("Reason", e.getMessage()); + job.add("datasetId", datasetId); + job.add("reason", e.getMessage()); jsonFailuresArrayBuilder.add(job); } @@ -2648,7 +2650,7 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, missingFileMetadata.forEach(mm -> { String[] missingMetadata = mm.split(","); NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder() - .add("StorageIdentifier", missingMetadata[0]) + .add("storageIdentifier", missingMetadata[0]) .add(missingMetadata[1], missingMetadata[2]); jabMissingFileMetadata.add(jobj); }); @@ -2659,8 +2661,10 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, missingFiles.forEach(mf -> { String[] missingFile = mf.split(","); NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder() - .add("StorageIdentifier", missingFile[0]) - .add(missingFile[1], missingFile[2]); + .add("storageIdentifier", missingFile[0]); + for (int i = 2; i < missingFile.length; i+=2) { + jobj.add(missingFile[i-1], missingFile[i]); + } jabMissingFiles.add(jobj); }); job.add("missingFiles", jabMissingFiles); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 84011d7ac73..94aece95861 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -941,8 +941,8 @@ public void testFindMissingFiles() { resp.prettyPrint(); resp.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.failures[0].DatasetIdentifier", equalTo("bad/id")) - .body("data.failures[0].Reason", equalTo("Not Found")); + .body("data.failures[0].datasetIdentifier", equalTo("bad/id")) + .body("data.failures[0].reason", equalTo("Not Found")); } private String createTestNonSuperuserApiToken() { From 714b0f2ebd7f32131887fbfa6057701d10ced14e Mon Sep 17 00:00:00 2001 From: Florian Fritze Date: Wed, 20 Nov 2024 07:33:11 +0100 Subject: [PATCH 306/402] removed entry from changelog.rst as requested --- doc/sphinx-guides/source/api/changelog.rst | 4 ---- 1 file changed, 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index e76990f13c5..92cd4fc941b 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -7,10 +7,6 @@ This API changelog is experimental and we would love feedback on its usefulness. 
:local: :depth: 1 -v6.5 ---- -- duplicated entries are corrected on the metadata page - v6.4 ---- From 48d04e89de2f41de2b1941fb42a7c893ad22c701 Mon Sep 17 00:00:00 2001 From: julian-schneider <130765495+julian-schneider@users.noreply.github.com> Date: Wed, 20 Nov 2024 12:48:19 +0100 Subject: [PATCH 307/402] Add PR number to doc/release-notes/expose-export-formats.md Co-authored-by: Philip Durbin --- doc/release-notes/expose-export-formats.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/expose-export-formats.md b/doc/release-notes/expose-export-formats.md index 7e685b426fa..a21906d7bbb 100644 --- a/doc/release-notes/expose-export-formats.md +++ b/doc/release-notes/expose-export-formats.md @@ -1,2 +1,2 @@ # New API method for listing the available exporters -Found at `/api/info/exportFormats`, produces an object with available format names as keys, and as values an object with various info about the exporter. \ No newline at end of file +Found at `/api/info/exportFormats`, produces an object with available format names as keys, and as values an object with various info about the exporter. See also #10739. \ No newline at end of file From f3b72c66fc89f8ec5fa7eb1f48fe187eabe7c8bf Mon Sep 17 00:00:00 2001 From: Julian Schneider Date: Wed, 20 Nov 2024 15:56:19 +0100 Subject: [PATCH 308/402] Update expected JSON for api/info/exportFormats IT Test --- src/test/resources/json/export-formats.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/test/resources/json/export-formats.json b/src/test/resources/json/export-formats.json index 0bca2314a8e..b4dc0168629 100644 --- a/src/test/resources/json/export-formats.json +++ b/src/test/resources/json/export-formats.json @@ -12,9 +12,9 @@ "mediaType": "application/xml", "isHarvestable": true, "isVisibleInUserInterface": true, - "XMLNameSpace": "http://datacite.org/schema/kernel-3", - "XMLSchemaLocation": "http://datacite.org/schema/kernel-3 http://schema.datacite.org/meta/kernel-3/metadata.xsd", - "XMLSchemaVersion": "3.0" + "XMLNameSpace": "http://datacite.org/schema/kernel-4", + "XMLSchemaLocation": "http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4.5/metadata.xsd", + "XMLSchemaVersion": "4.5" }, "oai_dc": { "displayName": "Dublin Core", @@ -41,7 +41,7 @@ "isVisibleInUserInterface": true }, "ddi": { - "displayName": "DDI", + "displayName": "DDI Codebook v2", "mediaType": "application/xml", "isHarvestable": false, "isVisibleInUserInterface": true, @@ -71,7 +71,7 @@ "isVisibleInUserInterface": true }, "oai_ddi": { - "displayName": "DDI", + "displayName": "DDI Codebook v2", "mediaType": "application/xml", "isHarvestable": true, "isVisibleInUserInterface": false, From 26e85745f450a11f8e23e748fe0f0f05a647af76 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 20 Nov 2024 10:02:15 -0500 Subject: [PATCH 309/402] tabs to spaces --- .../edu/harvard/iq/dataverse/api/Admin.java | 2132 ++++++++--------- 1 file changed, 1066 insertions(+), 1066 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 793e472ddac..61f76c9928c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -119,7 +119,7 @@ @Path("admin") public class Admin extends AbstractApiBean { - private static final Logger logger = Logger.getLogger(Admin.class.getName()); + private static final 
Logger logger = Logger.getLogger(Admin.class.getName()); @EJB AuthenticationProvidersRegistrationServiceBean authProvidersRegistrationSvc; @@ -164,53 +164,53 @@ public class Admin extends AbstractApiBean { @Inject DataverseSession session; - public static final String listUsersPartialAPIPath = "list-users"; - public static final String listUsersFullAPIPath = "/api/admin/" + listUsersPartialAPIPath; - - @Path("settings") - @GET - public Response listAllSettings() { - JsonObjectBuilder bld = jsonObjectBuilder(); - settingsSvc.listAll().forEach(s -> bld.add(s.getName(), s.getContent())); - return ok(bld); - } - - @Path("settings/{name}") - @PUT - public Response putSetting(@PathParam("name") String name, String content) { - Setting s = settingsSvc.set(name, content); - return ok(jsonObjectBuilder().add(s.getName(), s.getContent())); - } - - @Path("settings/{name}/lang/{lang}") - @PUT - public Response putSettingLang(@PathParam("name") String name, @PathParam("lang") String lang, String content) { - Setting s = settingsSvc.set(name, lang, content); - return ok("Setting " + name + " - " + lang + " - added."); - } - - @Path("settings/{name}") - @GET - public Response getSetting(@PathParam("name") String name) { - String s = settingsSvc.get(name); - - return (s != null) ? ok(s) : notFound("Setting " + name + " not found"); - } - - @Path("settings/{name}") - @DELETE - public Response deleteSetting(@PathParam("name") String name) { - settingsSvc.delete(name); - - return ok("Setting " + name + " deleted."); - } - - @Path("settings/{name}/lang/{lang}") - @DELETE - public Response deleteSettingLang(@PathParam("name") String name, @PathParam("lang") String lang) { - settingsSvc.delete(name, lang); - return ok("Setting " + name + " - " + lang + " deleted."); - } + public static final String listUsersPartialAPIPath = "list-users"; + public static final String listUsersFullAPIPath = "/api/admin/" + listUsersPartialAPIPath; + + @Path("settings") + @GET + public Response listAllSettings() { + JsonObjectBuilder bld = jsonObjectBuilder(); + settingsSvc.listAll().forEach(s -> bld.add(s.getName(), s.getContent())); + return ok(bld); + } + + @Path("settings/{name}") + @PUT + public Response putSetting(@PathParam("name") String name, String content) { + Setting s = settingsSvc.set(name, content); + return ok(jsonObjectBuilder().add(s.getName(), s.getContent())); + } + + @Path("settings/{name}/lang/{lang}") + @PUT + public Response putSettingLang(@PathParam("name") String name, @PathParam("lang") String lang, String content) { + Setting s = settingsSvc.set(name, lang, content); + return ok("Setting " + name + " - " + lang + " - added."); + } + + @Path("settings/{name}") + @GET + public Response getSetting(@PathParam("name") String name) { + String s = settingsSvc.get(name); + + return (s != null) ? 
ok(s) : notFound("Setting " + name + " not found"); + } + + @Path("settings/{name}") + @DELETE + public Response deleteSetting(@PathParam("name") String name) { + settingsSvc.delete(name); + + return ok("Setting " + name + " deleted."); + } + + @Path("settings/{name}/lang/{lang}") + @DELETE + public Response deleteSettingLang(@PathParam("name") String name, @PathParam("lang") String lang) { + settingsSvc.delete(name, lang); + return ok("Setting " + name + " - " + lang + " deleted."); + } @Path("template/{id}") @DELETE @@ -281,130 +281,130 @@ public Response findTemplates(@PathParam("alias") String alias) { } - @Path("authenticationProviderFactories") - @GET - public Response listAuthProviderFactories() { - return ok(authSvc.listProviderFactories().stream() - .map(f -> jsonObjectBuilder().add("alias", f.getAlias()).add("info", f.getInfo())) - .collect(toJsonArray())); - } - - @Path("authenticationProviders") - @GET - public Response listAuthProviders() { - return ok(em.createNamedQuery("AuthenticationProviderRow.findAll", AuthenticationProviderRow.class) - .getResultList().stream().map(r -> json(r)).collect(toJsonArray())); - } - - @Path("authenticationProviders") - @POST - public Response addProvider(AuthenticationProviderRow row) { - try { - AuthenticationProviderRow managed = em.find(AuthenticationProviderRow.class, row.getId()); - if (managed != null) { - managed = em.merge(row); - } else { - em.persist(row); - managed = row; - } - if (managed.isEnabled()) { - AuthenticationProvider provider = authProvidersRegistrationSvc.loadProvider(managed); - authProvidersRegistrationSvc.deregisterProvider(provider.getId()); - authProvidersRegistrationSvc.registerProvider(provider); - } - return created("/api/admin/authenticationProviders/" + managed.getId(), json(managed)); - } catch (AuthorizationSetupException e) { - return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); - } - } - - @Path("authenticationProviders/{id}") - @GET - public Response showProvider(@PathParam("id") String id) { - AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); - return (row != null) ? ok(json(row)) - : error(Status.NOT_FOUND, "Can't find authetication provider with id '" + id + "'"); - } - - @POST - @Path("authenticationProviders/{id}/:enabled") - public Response enableAuthenticationProvider_deprecated(@PathParam("id") String id, String body) { - return enableAuthenticationProvider(id, body); - } - - @PUT - @Path("authenticationProviders/{id}/enabled") - @Produces("application/json") - public Response enableAuthenticationProvider(@PathParam("id") String id, String body) { - body = body.trim(); - if (!Util.isBoolean(body)) { - return error(Response.Status.BAD_REQUEST, "Illegal value '" + body + "'. 
Use 'true' or 'false'"); - } - boolean enable = Util.isTrue(body); - - AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); - if (row == null) { - return notFound("Can't find authentication provider with id '" + id + "'"); - } - - row.setEnabled(enable); - em.merge(row); - - if (enable) { - // enable a provider - if (authSvc.getAuthenticationProvider(id) != null) { - return ok(String.format("Authentication provider '%s' already enabled", id)); - } - try { - authProvidersRegistrationSvc.registerProvider(authProvidersRegistrationSvc.loadProvider(row)); - return ok(String.format("Authentication Provider %s enabled", row.getId())); - - } catch (AuthenticationProviderFactoryNotFoundException ex) { - return notFound(String.format("Can't instantiate provider, as there's no factory with alias %s", - row.getFactoryAlias())); - } catch (AuthorizationSetupException ex) { - logger.log(Level.WARNING, "Error instantiating authentication provider: " + ex.getMessage(), ex); - return error(Status.INTERNAL_SERVER_ERROR, - String.format("Can't instantiate provider: %s", ex.getMessage())); - } - - } else { - // disable a provider - authProvidersRegistrationSvc.deregisterProvider(id); - return ok("Authentication Provider '" + id + "' disabled. " - + (authSvc.getAuthenticationProviderIds().isEmpty() - ? "WARNING: no enabled authentication providers left." - : "")); - } - } - - @GET - @Path("authenticationProviders/{id}/enabled") - public Response checkAuthenticationProviderEnabled(@PathParam("id") String id) { - List prvs = em - .createNamedQuery("AuthenticationProviderRow.findById", AuthenticationProviderRow.class) - .setParameter("id", id).getResultList(); - if (prvs.isEmpty()) { - return notFound("Can't find a provider with id '" + id + "'."); - } else { - return ok(Boolean.toString(prvs.get(0).isEnabled())); - } - } - - @DELETE - @Path("authenticationProviders/{id}/") - public Response deleteAuthenticationProvider(@PathParam("id") String id) { - authProvidersRegistrationSvc.deregisterProvider(id); - AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); - if (row != null) { - em.remove(row); - } - - return ok("AuthenticationProvider " + id + " deleted. " - + (authSvc.getAuthenticationProviderIds().isEmpty() - ? "WARNING: no enabled authentication providers left." 
- : "")); - } + @Path("authenticationProviderFactories") + @GET + public Response listAuthProviderFactories() { + return ok(authSvc.listProviderFactories().stream() + .map(f -> jsonObjectBuilder().add("alias", f.getAlias()).add("info", f.getInfo())) + .collect(toJsonArray())); + } + + @Path("authenticationProviders") + @GET + public Response listAuthProviders() { + return ok(em.createNamedQuery("AuthenticationProviderRow.findAll", AuthenticationProviderRow.class) + .getResultList().stream().map(r -> json(r)).collect(toJsonArray())); + } + + @Path("authenticationProviders") + @POST + public Response addProvider(AuthenticationProviderRow row) { + try { + AuthenticationProviderRow managed = em.find(AuthenticationProviderRow.class, row.getId()); + if (managed != null) { + managed = em.merge(row); + } else { + em.persist(row); + managed = row; + } + if (managed.isEnabled()) { + AuthenticationProvider provider = authProvidersRegistrationSvc.loadProvider(managed); + authProvidersRegistrationSvc.deregisterProvider(provider.getId()); + authProvidersRegistrationSvc.registerProvider(provider); + } + return created("/api/admin/authenticationProviders/" + managed.getId(), json(managed)); + } catch (AuthorizationSetupException e) { + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } + } + + @Path("authenticationProviders/{id}") + @GET + public Response showProvider(@PathParam("id") String id) { + AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); + return (row != null) ? ok(json(row)) + : error(Status.NOT_FOUND, "Can't find authetication provider with id '" + id + "'"); + } + + @POST + @Path("authenticationProviders/{id}/:enabled") + public Response enableAuthenticationProvider_deprecated(@PathParam("id") String id, String body) { + return enableAuthenticationProvider(id, body); + } + + @PUT + @Path("authenticationProviders/{id}/enabled") + @Produces("application/json") + public Response enableAuthenticationProvider(@PathParam("id") String id, String body) { + body = body.trim(); + if (!Util.isBoolean(body)) { + return error(Response.Status.BAD_REQUEST, "Illegal value '" + body + "'. Use 'true' or 'false'"); + } + boolean enable = Util.isTrue(body); + + AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); + if (row == null) { + return notFound("Can't find authentication provider with id '" + id + "'"); + } + + row.setEnabled(enable); + em.merge(row); + + if (enable) { + // enable a provider + if (authSvc.getAuthenticationProvider(id) != null) { + return ok(String.format("Authentication provider '%s' already enabled", id)); + } + try { + authProvidersRegistrationSvc.registerProvider(authProvidersRegistrationSvc.loadProvider(row)); + return ok(String.format("Authentication Provider %s enabled", row.getId())); + + } catch (AuthenticationProviderFactoryNotFoundException ex) { + return notFound(String.format("Can't instantiate provider, as there's no factory with alias %s", + row.getFactoryAlias())); + } catch (AuthorizationSetupException ex) { + logger.log(Level.WARNING, "Error instantiating authentication provider: " + ex.getMessage(), ex); + return error(Status.INTERNAL_SERVER_ERROR, + String.format("Can't instantiate provider: %s", ex.getMessage())); + } + + } else { + // disable a provider + authProvidersRegistrationSvc.deregisterProvider(id); + return ok("Authentication Provider '" + id + "' disabled. " + + (authSvc.getAuthenticationProviderIds().isEmpty() + ? "WARNING: no enabled authentication providers left." 
+ : "")); + } + } + + @GET + @Path("authenticationProviders/{id}/enabled") + public Response checkAuthenticationProviderEnabled(@PathParam("id") String id) { + List prvs = em + .createNamedQuery("AuthenticationProviderRow.findById", AuthenticationProviderRow.class) + .setParameter("id", id).getResultList(); + if (prvs.isEmpty()) { + return notFound("Can't find a provider with id '" + id + "'."); + } else { + return ok(Boolean.toString(prvs.get(0).isEnabled())); + } + } + + @DELETE + @Path("authenticationProviders/{id}/") + public Response deleteAuthenticationProvider(@PathParam("id") String id) { + authProvidersRegistrationSvc.deregisterProvider(id); + AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); + if (row != null) { + em.remove(row); + } + + return ok("AuthenticationProvider " + id + " deleted. " + + (authSvc.getAuthenticationProviderIds().isEmpty() + ? "WARNING: no enabled authentication providers left." + : "")); + } @GET @Path("authenticatedUsers/{identifier}/") @@ -489,520 +489,520 @@ private Response deactivateAuthenticatedUser(AuthenticatedUser userToDisable) { } } - @POST - @Path("publishDataverseAsCreator/{id}") - public Response publishDataverseAsCreator(@PathParam("id") long id) { - try { - Dataverse dataverse = dataverseSvc.find(id); - if (dataverse != null) { - AuthenticatedUser authenticatedUser = dataverse.getCreator(); - return ok(json(execCommand( - new PublishDataverseCommand(createDataverseRequest(authenticatedUser), dataverse)))); - } else { - return error(Status.BAD_REQUEST, "Could not find dataverse with id " + id); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - } - - @Deprecated - @GET - @AuthRequired - @Path("authenticatedUsers") - public Response listAuthenticatedUsers(@Context ContainerRequestContext crc) { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - JsonArrayBuilder userArray = Json.createArrayBuilder(); - authSvc.findAllAuthenticatedUsers().stream().forEach((user) -> { - userArray.add(json(user)); - }); - return ok(userArray); - } - - @GET - @AuthRequired - @Path(listUsersPartialAPIPath) - @Produces({ "application/json" }) - public Response filterAuthenticatedUsers( - @Context ContainerRequestContext crc, - @QueryParam("searchTerm") String searchTerm, - @QueryParam("selectedPage") Integer selectedPage, - @QueryParam("itemsPerPage") Integer itemsPerPage, - @QueryParam("sortKey") String sortKey - ) { - - User authUser = getRequestUser(crc); - - if (!authUser.isSuperuser()) { - return error(Response.Status.FORBIDDEN, - BundleUtil.getStringFromBundle("dashboard.list_users.api.auth.not_superuser")); - } - - UserListMaker userListMaker = new UserListMaker(userService); - - // String sortKey = null; - UserListResult userListResult = userListMaker.runUserSearch(searchTerm, itemsPerPage, selectedPage, sortKey); - - return ok(userListResult.toJSON()); - } - - /** - * @todo Make this support creation of BuiltInUsers. - * - * @todo Add way more error checking. Only the happy path is tested by AdminIT. 
- */ - @POST - @Path("authenticatedUsers") - public Response createAuthenicatedUser(JsonObject jsonObject) { - logger.fine("JSON in: " + jsonObject); - String persistentUserId = jsonObject.getString("persistentUserId"); - String identifier = jsonObject.getString("identifier"); - String proposedAuthenticatedUserIdentifier = identifier.replaceFirst("@", ""); - String firstName = jsonObject.getString("firstName"); - String lastName = jsonObject.getString("lastName"); - String emailAddress = jsonObject.getString("email"); - String position = null; - String affiliation = null; - UserRecordIdentifier userRecordId = new UserRecordIdentifier(jsonObject.getString("authenticationProviderId"), - persistentUserId); - AuthenticatedUserDisplayInfo userDisplayInfo = new AuthenticatedUserDisplayInfo(firstName, lastName, - emailAddress, affiliation, position); - boolean generateUniqueIdentifier = true; - AuthenticatedUser authenticatedUser = authSvc.createAuthenticatedUser(userRecordId, - proposedAuthenticatedUserIdentifier, userDisplayInfo, true); - return ok(json(authenticatedUser)); - } + @POST + @Path("publishDataverseAsCreator/{id}") + public Response publishDataverseAsCreator(@PathParam("id") long id) { + try { + Dataverse dataverse = dataverseSvc.find(id); + if (dataverse != null) { + AuthenticatedUser authenticatedUser = dataverse.getCreator(); + return ok(json(execCommand( + new PublishDataverseCommand(createDataverseRequest(authenticatedUser), dataverse)))); + } else { + return error(Status.BAD_REQUEST, "Could not find dataverse with id " + id); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + + @Deprecated + @GET + @AuthRequired + @Path("authenticatedUsers") + public Response listAuthenticatedUsers(@Context ContainerRequestContext crc) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + JsonArrayBuilder userArray = Json.createArrayBuilder(); + authSvc.findAllAuthenticatedUsers().stream().forEach((user) -> { + userArray.add(json(user)); + }); + return ok(userArray); + } + + @GET + @AuthRequired + @Path(listUsersPartialAPIPath) + @Produces({ "application/json" }) + public Response filterAuthenticatedUsers( + @Context ContainerRequestContext crc, + @QueryParam("searchTerm") String searchTerm, + @QueryParam("selectedPage") Integer selectedPage, + @QueryParam("itemsPerPage") Integer itemsPerPage, + @QueryParam("sortKey") String sortKey + ) { + + User authUser = getRequestUser(crc); + + if (!authUser.isSuperuser()) { + return error(Response.Status.FORBIDDEN, + BundleUtil.getStringFromBundle("dashboard.list_users.api.auth.not_superuser")); + } + + UserListMaker userListMaker = new UserListMaker(userService); + + // String sortKey = null; + UserListResult userListResult = userListMaker.runUserSearch(searchTerm, itemsPerPage, selectedPage, sortKey); + + return ok(userListResult.toJSON()); + } + + /** + * @todo Make this support creation of BuiltInUsers. + * + * @todo Add way more error checking. Only the happy path is tested by AdminIT. 
+ */ + @POST + @Path("authenticatedUsers") + public Response createAuthenicatedUser(JsonObject jsonObject) { + logger.fine("JSON in: " + jsonObject); + String persistentUserId = jsonObject.getString("persistentUserId"); + String identifier = jsonObject.getString("identifier"); + String proposedAuthenticatedUserIdentifier = identifier.replaceFirst("@", ""); + String firstName = jsonObject.getString("firstName"); + String lastName = jsonObject.getString("lastName"); + String emailAddress = jsonObject.getString("email"); + String position = null; + String affiliation = null; + UserRecordIdentifier userRecordId = new UserRecordIdentifier(jsonObject.getString("authenticationProviderId"), + persistentUserId); + AuthenticatedUserDisplayInfo userDisplayInfo = new AuthenticatedUserDisplayInfo(firstName, lastName, + emailAddress, affiliation, position); + boolean generateUniqueIdentifier = true; + AuthenticatedUser authenticatedUser = authSvc.createAuthenticatedUser(userRecordId, + proposedAuthenticatedUserIdentifier, userDisplayInfo, true); + return ok(json(authenticatedUser)); + } //TODO: Delete this endpoint after 4.9.3. Was updated with change in docs. --MAD - /** - * curl -X PUT -d "shib@mailinator.com" - * http://localhost:8080/api/admin/authenticatedUsers/id/11/convertShibToBuiltIn - * - * @deprecated We have documented this API endpoint so we'll keep in around for - * a while but we should encourage everyone to switch to the - * "convertRemoteToBuiltIn" endpoint and then remove this - * Shib-specfic one. - */ - @PUT - @AuthRequired - @Path("authenticatedUsers/id/{id}/convertShibToBuiltIn") - @Deprecated - public Response convertShibUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - try { - BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); - if (builtinUser == null) { - return error(Response.Status.BAD_REQUEST, "User id " + id - + " could not be converted from Shibboleth to BuiltIn. An Exception was not thrown."); - } + /** + * curl -X PUT -d "shib@mailinator.com" + * http://localhost:8080/api/admin/authenticatedUsers/id/11/convertShibToBuiltIn + * + * @deprecated We have documented this API endpoint so we'll keep in around for + * a while but we should encourage everyone to switch to the + * "convertRemoteToBuiltIn" endpoint and then remove this + * Shib-specfic one. + */ + @PUT + @AuthRequired + @Path("authenticatedUsers/id/{id}/convertShibToBuiltIn") + @Deprecated + public Response convertShibUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + try { + BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); + if (builtinUser == null) { + return error(Response.Status.BAD_REQUEST, "User id " + id + + " could not be converted from Shibboleth to BuiltIn. 
An Exception was not thrown."); + } AuthenticatedUser authUser = authSvc.getAuthenticatedUser(builtinUser.getUserName()); - JsonObjectBuilder output = Json.createObjectBuilder(); - output.add("email", authUser.getEmail()); - output.add("username", builtinUser.getUserName()); - return ok(output); - } catch (Throwable ex) { - StringBuilder sb = new StringBuilder(); - sb.append(ex + " "); - while (ex.getCause() != null) { - ex = ex.getCause(); - sb.append(ex + " "); - } - String msg = "User id " + id - + " could not be converted from Shibboleth to BuiltIn. Details from Exception: " + sb; - logger.info(msg); - return error(Response.Status.BAD_REQUEST, msg); - } - } - - @PUT - @AuthRequired - @Path("authenticatedUsers/id/{id}/convertRemoteToBuiltIn") - public Response convertOAuthUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - try { - BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); + JsonObjectBuilder output = Json.createObjectBuilder(); + output.add("email", authUser.getEmail()); + output.add("username", builtinUser.getUserName()); + return ok(output); + } catch (Throwable ex) { + StringBuilder sb = new StringBuilder(); + sb.append(ex + " "); + while (ex.getCause() != null) { + ex = ex.getCause(); + sb.append(ex + " "); + } + String msg = "User id " + id + + " could not be converted from Shibboleth to BuiltIn. Details from Exception: " + sb; + logger.info(msg); + return error(Response.Status.BAD_REQUEST, msg); + } + } + + @PUT + @AuthRequired + @Path("authenticatedUsers/id/{id}/convertRemoteToBuiltIn") + public Response convertOAuthUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + try { + BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); //AuthenticatedUser authUser = authService.getAuthenticatedUser(aUser.getUserName()); - if (builtinUser == null) { - return error(Response.Status.BAD_REQUEST, "User id " + id - + " could not be converted from remote to BuiltIn. An Exception was not thrown."); - } + if (builtinUser == null) { + return error(Response.Status.BAD_REQUEST, "User id " + id + + " could not be converted from remote to BuiltIn. An Exception was not thrown."); + } AuthenticatedUser authUser = authSvc.getAuthenticatedUser(builtinUser.getUserName()); - JsonObjectBuilder output = Json.createObjectBuilder(); - output.add("email", authUser.getEmail()); - output.add("username", builtinUser.getUserName()); - return ok(output); - } catch (Throwable ex) { - StringBuilder sb = new StringBuilder(); - sb.append(ex + " "); - while (ex.getCause() != null) { - ex = ex.getCause(); - sb.append(ex + " "); - } - String msg = "User id " + id + " could not be converted from remote to BuiltIn. Details from Exception: " - + sb; - logger.info(msg); - return error(Response.Status.BAD_REQUEST, msg); - } - } - - /** - * This is used in testing via AdminIT.java but we don't expect sysadmins to use - * this. 
- */ - @PUT - @AuthRequired - @Path("authenticatedUsers/convert/builtin2shib") - public Response builtin2shib(@Context ContainerRequestContext crc, String content) { - logger.info("entering builtin2shib..."); - try { - AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); - if (!userToRunThisMethod.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - boolean disabled = false; - if (disabled) { - return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); - } - AuthenticatedUser builtInUserToConvert = null; - String emailToFind; - String password; - String authuserId = "0"; // could let people specify id on authuser table. probably better to let them - // tell us their - String newEmailAddressToUse; - try { - String[] args = content.split(":"); - emailToFind = args[0]; - password = args[1]; - newEmailAddressToUse = args[2]; - // authuserId = args[666]; - } catch (ArrayIndexOutOfBoundsException ex) { - return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); - } - AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); - String existing = "NOT FOUND"; - if (existingAuthUserFoundByEmail != null) { - builtInUserToConvert = existingAuthUserFoundByEmail; - existing = existingAuthUserFoundByEmail.getIdentifier(); - } else { - long longToLookup = Long.parseLong(authuserId); - AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); - if (specifiedUserToConvert != null) { - builtInUserToConvert = specifiedUserToConvert; - } else { - return error(Response.Status.BAD_REQUEST, - "No user to convert. We couldn't find a *single* existing user account based on " + emailToFind - + " and no user was found using specified id " + longToLookup); - } - } - String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; - Map randomUser = authTestDataService.getRandomUser(); - // String eppn = UUID.randomUUID().toString().substring(0, 8); - String eppn = randomUser.get("eppn"); - String idPEntityId = randomUser.get("idp"); - String notUsed = null; - String separator = "|"; - UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(idPEntityId + separator + eppn, notUsed); - String overwriteFirstName = randomUser.get("firstName"); - String overwriteLastName = randomUser.get("lastName"); - String overwriteEmail = randomUser.get("email"); - overwriteEmail = newEmailAddressToUse; - logger.info("overwriteEmail: " + overwriteEmail); - boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); - if (!validEmail) { - // See https://github.com/IQSS/dataverse/issues/2998 - return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); - } - /** - * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo - * constructor. - */ - /** - * Here we are exercising (via an API test) shibService.getAffiliation with the - * TestShib IdP and a non-production DevShibAccountType. 
- */ - idPEntityId = ShibUtil.testShibIdpEntityId; - String overwriteAffiliation = shibService.getAffiliation(idPEntityId, - ShibServiceBean.DevShibAccountType.RANDOM); - logger.info("overwriteAffiliation: " + overwriteAffiliation); - /** - * @todo Find a place to put "position" in the authenticateduser table: - * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 - */ - String overwritePosition = "staff;student"; - AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, - overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); - JsonObjectBuilder response = Json.createObjectBuilder(); - JsonArrayBuilder problems = Json.createArrayBuilder(); - if (password != null) { - response.add("password supplied", password); - boolean knowsExistingPassword = false; - BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); - if (oldBuiltInUser != null) { + JsonObjectBuilder output = Json.createObjectBuilder(); + output.add("email", authUser.getEmail()); + output.add("username", builtinUser.getUserName()); + return ok(output); + } catch (Throwable ex) { + StringBuilder sb = new StringBuilder(); + sb.append(ex + " "); + while (ex.getCause() != null) { + ex = ex.getCause(); + sb.append(ex + " "); + } + String msg = "User id " + id + " could not be converted from remote to BuiltIn. Details from Exception: " + + sb; + logger.info(msg); + return error(Response.Status.BAD_REQUEST, msg); + } + } + + /** + * This is used in testing via AdminIT.java but we don't expect sysadmins to use + * this. + */ + @PUT + @AuthRequired + @Path("authenticatedUsers/convert/builtin2shib") + public Response builtin2shib(@Context ContainerRequestContext crc, String content) { + logger.info("entering builtin2shib..."); + try { + AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); + if (!userToRunThisMethod.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + boolean disabled = false; + if (disabled) { + return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); + } + AuthenticatedUser builtInUserToConvert = null; + String emailToFind; + String password; + String authuserId = "0"; // could let people specify id on authuser table. probably better to let them + // tell us their + String newEmailAddressToUse; + try { + String[] args = content.split(":"); + emailToFind = args[0]; + password = args[1]; + newEmailAddressToUse = args[2]; + // authuserId = args[666]; + } catch (ArrayIndexOutOfBoundsException ex) { + return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); + } + AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); + String existing = "NOT FOUND"; + if (existingAuthUserFoundByEmail != null) { + builtInUserToConvert = existingAuthUserFoundByEmail; + existing = existingAuthUserFoundByEmail.getIdentifier(); + } else { + long longToLookup = Long.parseLong(authuserId); + AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); + if (specifiedUserToConvert != null) { + builtInUserToConvert = specifiedUserToConvert; + } else { + return error(Response.Status.BAD_REQUEST, + "No user to convert. 
We couldn't find a *single* existing user account based on " + emailToFind + + " and no user was found using specified id " + longToLookup); + } + } + String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; + Map randomUser = authTestDataService.getRandomUser(); + // String eppn = UUID.randomUUID().toString().substring(0, 8); + String eppn = randomUser.get("eppn"); + String idPEntityId = randomUser.get("idp"); + String notUsed = null; + String separator = "|"; + UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(idPEntityId + separator + eppn, notUsed); + String overwriteFirstName = randomUser.get("firstName"); + String overwriteLastName = randomUser.get("lastName"); + String overwriteEmail = randomUser.get("email"); + overwriteEmail = newEmailAddressToUse; + logger.info("overwriteEmail: " + overwriteEmail); + boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); + if (!validEmail) { + // See https://github.com/IQSS/dataverse/issues/2998 + return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); + } + /** + * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo + * constructor. + */ + /** + * Here we are exercising (via an API test) shibService.getAffiliation with the + * TestShib IdP and a non-production DevShibAccountType. + */ + idPEntityId = ShibUtil.testShibIdpEntityId; + String overwriteAffiliation = shibService.getAffiliation(idPEntityId, + ShibServiceBean.DevShibAccountType.RANDOM); + logger.info("overwriteAffiliation: " + overwriteAffiliation); + /** + * @todo Find a place to put "position" in the authenticateduser table: + * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 + */ + String overwritePosition = "staff;student"; + AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, + overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); + JsonObjectBuilder response = Json.createObjectBuilder(); + JsonArrayBuilder problems = Json.createArrayBuilder(); + if (password != null) { + response.add("password supplied", password); + boolean knowsExistingPassword = false; + BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); + if (oldBuiltInUser != null) { if (builtInUserToConvert.isDeactivated()) { problems.add("builtin account has been deactivated"); return error(Status.BAD_REQUEST, problems.build().toString()); } - String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); - response.add("old username", usernameOfBuiltinAccountToConvert); - AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, - password); - if (authenticatedUser != null) { - knowsExistingPassword = true; - AuthenticatedUser convertedUser = authSvc.convertBuiltInToShib(builtInUserToConvert, shibProviderId, - newUserIdentifierInLookupTable); - if (convertedUser != null) { - /** - * @todo Display name is not being overwritten. 
Logic must be in Shib backing - * bean - */ - AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); - if (updatedInfoUser != null) { - response.add("display name overwritten with", updatedInfoUser.getName()); - } else { - problems.add("couldn't update display info"); - } - } else { - problems.add("unable to convert user"); - } - } - } else { - problems.add("couldn't find old username"); - } - if (!knowsExistingPassword) { - String message = "User doesn't know password."; - problems.add(message); - /** - * @todo Someday we should make a errorResponse method that takes JSON arrays - * and objects. - */ - return error(Status.BAD_REQUEST, problems.build().toString()); - } - // response.add("knows existing password", knowsExistingPassword); - } - - response.add("user to convert", builtInUserToConvert.getIdentifier()); - response.add("existing user found by email (prompt to convert)", existing); - response.add("changing to this provider", shibProviderId); - response.add("value to overwrite old first name", overwriteFirstName); - response.add("value to overwrite old last name", overwriteLastName); - response.add("value to overwrite old email address", overwriteEmail); - if (overwriteAffiliation != null) { - response.add("affiliation", overwriteAffiliation); - } - response.add("problems", problems); - return ok(response); - } - - /** - * This is used in testing via AdminIT.java but we don't expect sysadmins to use - * this. - */ - @PUT - @AuthRequired - @Path("authenticatedUsers/convert/builtin2oauth") - public Response builtin2oauth(@Context ContainerRequestContext crc, String content) { - logger.info("entering builtin2oauth..."); - try { - AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); - if (!userToRunThisMethod.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - boolean disabled = false; - if (disabled) { - return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); - } - AuthenticatedUser builtInUserToConvert = null; - String emailToFind; - String password; - String authuserId = "0"; // could let people specify id on authuser table. probably better to let them - // tell us their - String newEmailAddressToUse; - String newProviderId; - String newPersistentUserIdInLookupTable; - logger.info("content: " + content); - try { - String[] args = content.split(":"); - emailToFind = args[0]; - password = args[1]; - newEmailAddressToUse = args[2]; - newProviderId = args[3]; - newPersistentUserIdInLookupTable = args[4]; - // authuserId = args[666]; - } catch (ArrayIndexOutOfBoundsException ex) { - return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); - } - AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); - String existing = "NOT FOUND"; - if (existingAuthUserFoundByEmail != null) { - builtInUserToConvert = existingAuthUserFoundByEmail; - existing = existingAuthUserFoundByEmail.getIdentifier(); - } else { - long longToLookup = Long.parseLong(authuserId); - AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); - if (specifiedUserToConvert != null) { - builtInUserToConvert = specifiedUserToConvert; - } else { - return error(Response.Status.BAD_REQUEST, - "No user to convert. 
We couldn't find a *single* existing user account based on " + emailToFind - + " and no user was found using specified id " + longToLookup); - } - } - // String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; - Map randomUser = authTestDataService.getRandomUser(); - // String eppn = UUID.randomUUID().toString().substring(0, 8); - String eppn = randomUser.get("eppn"); - String idPEntityId = randomUser.get("idp"); - String notUsed = null; - String separator = "|"; - // UserIdentifier newUserIdentifierInLookupTable = new - // UserIdentifier(idPEntityId + separator + eppn, notUsed); - UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(newPersistentUserIdInLookupTable, notUsed); - String overwriteFirstName = randomUser.get("firstName"); - String overwriteLastName = randomUser.get("lastName"); - String overwriteEmail = randomUser.get("email"); - overwriteEmail = newEmailAddressToUse; - logger.info("overwriteEmail: " + overwriteEmail); - boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); - if (!validEmail) { - // See https://github.com/IQSS/dataverse/issues/2998 - return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); - } - /** - * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo - * constructor. - */ - /** - * Here we are exercising (via an API test) shibService.getAffiliation with the - * TestShib IdP and a non-production DevShibAccountType. - */ - // idPEntityId = ShibUtil.testShibIdpEntityId; - // String overwriteAffiliation = shibService.getAffiliation(idPEntityId, - // ShibServiceBean.DevShibAccountType.RANDOM); - String overwriteAffiliation = null; - logger.info("overwriteAffiliation: " + overwriteAffiliation); - /** - * @todo Find a place to put "position" in the authenticateduser table: - * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 - */ - String overwritePosition = "staff;student"; - AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, - overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); - JsonObjectBuilder response = Json.createObjectBuilder(); - JsonArrayBuilder problems = Json.createArrayBuilder(); - if (password != null) { - response.add("password supplied", password); - boolean knowsExistingPassword = false; - BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); - if (oldBuiltInUser != null) { - String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); - response.add("old username", usernameOfBuiltinAccountToConvert); - AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, - password); - if (authenticatedUser != null) { - knowsExistingPassword = true; - AuthenticatedUser convertedUser = authSvc.convertBuiltInUserToRemoteUser(builtInUserToConvert, - newProviderId, newUserIdentifierInLookupTable); - if (convertedUser != null) { - /** - * @todo Display name is not being overwritten. 
Logic must be in Shib backing - * bean - */ - AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); - if (updatedInfoUser != null) { - response.add("display name overwritten with", updatedInfoUser.getName()); - } else { - problems.add("couldn't update display info"); - } - } else { - problems.add("unable to convert user"); - } - } - } else { - problems.add("couldn't find old username"); - } - if (!knowsExistingPassword) { - String message = "User doesn't know password."; - problems.add(message); - /** - * @todo Someday we should make a errorResponse method that takes JSON arrays - * and objects. - */ - return error(Status.BAD_REQUEST, problems.build().toString()); - } - // response.add("knows existing password", knowsExistingPassword); - } - - response.add("user to convert", builtInUserToConvert.getIdentifier()); - response.add("existing user found by email (prompt to convert)", existing); - response.add("changing to this provider", newProviderId); - response.add("value to overwrite old first name", overwriteFirstName); - response.add("value to overwrite old last name", overwriteLastName); - response.add("value to overwrite old email address", overwriteEmail); - if (overwriteAffiliation != null) { - response.add("affiliation", overwriteAffiliation); - } - response.add("problems", problems); - return ok(response); - } - - - - - @Path("roles") - @POST - public Response createNewBuiltinRole(RoleDTO roleDto) { - ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "createBuiltInRole") - .setInfo(roleDto.getAlias() + ":" + roleDto.getDescription()); - try { - return ok(json(rolesSvc.save(roleDto.asRole()))); - } catch (Exception e) { - alr.setActionResult(ActionLogRecord.Result.InternalError); - alr.setInfo(alr.getInfo() + "// " + e.getMessage()); - return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); - } finally { - actionLogSvc.log(alr); - } - } - - @Path("roles") - @GET - public Response listBuiltinRoles() { - try { - return ok(rolesToJson(rolesSvc.findBuiltinRoles())); - } catch (Exception e) { - return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); - } - } + String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); + response.add("old username", usernameOfBuiltinAccountToConvert); + AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, + password); + if (authenticatedUser != null) { + knowsExistingPassword = true; + AuthenticatedUser convertedUser = authSvc.convertBuiltInToShib(builtInUserToConvert, shibProviderId, + newUserIdentifierInLookupTable); + if (convertedUser != null) { + /** + * @todo Display name is not being overwritten. Logic must be in Shib backing + * bean + */ + AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); + if (updatedInfoUser != null) { + response.add("display name overwritten with", updatedInfoUser.getName()); + } else { + problems.add("couldn't update display info"); + } + } else { + problems.add("unable to convert user"); + } + } + } else { + problems.add("couldn't find old username"); + } + if (!knowsExistingPassword) { + String message = "User doesn't know password."; + problems.add(message); + /** + * @todo Someday we should make a errorResponse method that takes JSON arrays + * and objects. 
+ */ + return error(Status.BAD_REQUEST, problems.build().toString()); + } + // response.add("knows existing password", knowsExistingPassword); + } + + response.add("user to convert", builtInUserToConvert.getIdentifier()); + response.add("existing user found by email (prompt to convert)", existing); + response.add("changing to this provider", shibProviderId); + response.add("value to overwrite old first name", overwriteFirstName); + response.add("value to overwrite old last name", overwriteLastName); + response.add("value to overwrite old email address", overwriteEmail); + if (overwriteAffiliation != null) { + response.add("affiliation", overwriteAffiliation); + } + response.add("problems", problems); + return ok(response); + } + + /** + * This is used in testing via AdminIT.java but we don't expect sysadmins to use + * this. + */ + @PUT + @AuthRequired + @Path("authenticatedUsers/convert/builtin2oauth") + public Response builtin2oauth(@Context ContainerRequestContext crc, String content) { + logger.info("entering builtin2oauth..."); + try { + AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); + if (!userToRunThisMethod.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + boolean disabled = false; + if (disabled) { + return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); + } + AuthenticatedUser builtInUserToConvert = null; + String emailToFind; + String password; + String authuserId = "0"; // could let people specify id on authuser table. probably better to let them + // tell us their + String newEmailAddressToUse; + String newProviderId; + String newPersistentUserIdInLookupTable; + logger.info("content: " + content); + try { + String[] args = content.split(":"); + emailToFind = args[0]; + password = args[1]; + newEmailAddressToUse = args[2]; + newProviderId = args[3]; + newPersistentUserIdInLookupTable = args[4]; + // authuserId = args[666]; + } catch (ArrayIndexOutOfBoundsException ex) { + return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); + } + AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); + String existing = "NOT FOUND"; + if (existingAuthUserFoundByEmail != null) { + builtInUserToConvert = existingAuthUserFoundByEmail; + existing = existingAuthUserFoundByEmail.getIdentifier(); + } else { + long longToLookup = Long.parseLong(authuserId); + AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); + if (specifiedUserToConvert != null) { + builtInUserToConvert = specifiedUserToConvert; + } else { + return error(Response.Status.BAD_REQUEST, + "No user to convert. 
We couldn't find a *single* existing user account based on " + emailToFind + + " and no user was found using specified id " + longToLookup); + } + } + // String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; + Map randomUser = authTestDataService.getRandomUser(); + // String eppn = UUID.randomUUID().toString().substring(0, 8); + String eppn = randomUser.get("eppn"); + String idPEntityId = randomUser.get("idp"); + String notUsed = null; + String separator = "|"; + // UserIdentifier newUserIdentifierInLookupTable = new + // UserIdentifier(idPEntityId + separator + eppn, notUsed); + UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(newPersistentUserIdInLookupTable, notUsed); + String overwriteFirstName = randomUser.get("firstName"); + String overwriteLastName = randomUser.get("lastName"); + String overwriteEmail = randomUser.get("email"); + overwriteEmail = newEmailAddressToUse; + logger.info("overwriteEmail: " + overwriteEmail); + boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); + if (!validEmail) { + // See https://github.com/IQSS/dataverse/issues/2998 + return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); + } + /** + * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo + * constructor. + */ + /** + * Here we are exercising (via an API test) shibService.getAffiliation with the + * TestShib IdP and a non-production DevShibAccountType. + */ + // idPEntityId = ShibUtil.testShibIdpEntityId; + // String overwriteAffiliation = shibService.getAffiliation(idPEntityId, + // ShibServiceBean.DevShibAccountType.RANDOM); + String overwriteAffiliation = null; + logger.info("overwriteAffiliation: " + overwriteAffiliation); + /** + * @todo Find a place to put "position" in the authenticateduser table: + * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 + */ + String overwritePosition = "staff;student"; + AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, + overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); + JsonObjectBuilder response = Json.createObjectBuilder(); + JsonArrayBuilder problems = Json.createArrayBuilder(); + if (password != null) { + response.add("password supplied", password); + boolean knowsExistingPassword = false; + BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); + if (oldBuiltInUser != null) { + String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); + response.add("old username", usernameOfBuiltinAccountToConvert); + AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, + password); + if (authenticatedUser != null) { + knowsExistingPassword = true; + AuthenticatedUser convertedUser = authSvc.convertBuiltInUserToRemoteUser(builtInUserToConvert, + newProviderId, newUserIdentifierInLookupTable); + if (convertedUser != null) { + /** + * @todo Display name is not being overwritten. 
Logic must be in Shib backing + * bean + */ + AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); + if (updatedInfoUser != null) { + response.add("display name overwritten with", updatedInfoUser.getName()); + } else { + problems.add("couldn't update display info"); + } + } else { + problems.add("unable to convert user"); + } + } + } else { + problems.add("couldn't find old username"); + } + if (!knowsExistingPassword) { + String message = "User doesn't know password."; + problems.add(message); + /** + * @todo Someday we should make a errorResponse method that takes JSON arrays + * and objects. + */ + return error(Status.BAD_REQUEST, problems.build().toString()); + } + // response.add("knows existing password", knowsExistingPassword); + } + + response.add("user to convert", builtInUserToConvert.getIdentifier()); + response.add("existing user found by email (prompt to convert)", existing); + response.add("changing to this provider", newProviderId); + response.add("value to overwrite old first name", overwriteFirstName); + response.add("value to overwrite old last name", overwriteLastName); + response.add("value to overwrite old email address", overwriteEmail); + if (overwriteAffiliation != null) { + response.add("affiliation", overwriteAffiliation); + } + response.add("problems", problems); + return ok(response); + } + + + + + @Path("roles") + @POST + public Response createNewBuiltinRole(RoleDTO roleDto) { + ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "createBuiltInRole") + .setInfo(roleDto.getAlias() + ":" + roleDto.getDescription()); + try { + return ok(json(rolesSvc.save(roleDto.asRole()))); + } catch (Exception e) { + alr.setActionResult(ActionLogRecord.Result.InternalError); + alr.setInfo(alr.getInfo() + "// " + e.getMessage()); + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } finally { + actionLogSvc.log(alr); + } + } + + @Path("roles") + @GET + public Response listBuiltinRoles() { + try { + return ok(rolesToJson(rolesSvc.findBuiltinRoles())); + } catch (Exception e) { + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } + } @DELETE - @AuthRequired + @AuthRequired @Path("roles/{id}") public Response deleteRole(@Context ContainerRequestContext crc, @PathParam("id") String id) { @@ -1264,77 +1264,77 @@ public void write(OutputStream os) throws IOException, return Response.ok(stream).build(); } - @Path("assignments/assignees/{raIdtf: .*}") - @GET - public Response getAssignmentsFor(@PathParam("raIdtf") String raIdtf) { - - JsonArrayBuilder arr = Json.createArrayBuilder(); - roleAssigneeSvc.getAssignmentsFor(raIdtf).forEach(a -> arr.add(json(a))); - - return ok(arr); - } - - /** - * This method is used in integration tests. - * - * @param userId - * The database id of an AuthenticatedUser. - * @return The confirm email token. - */ - @Path("confirmEmail/{userId}") - @GET - public Response getConfirmEmailToken(@PathParam("userId") long userId) { - AuthenticatedUser user = authSvc.findByID(userId); - if (user != null) { - ConfirmEmailData confirmEmailData = confirmEmailSvc.findSingleConfirmEmailDataByUser(user); - if (confirmEmailData != null) { - return ok(Json.createObjectBuilder().add("token", confirmEmailData.getToken())); - } - } - return error(Status.BAD_REQUEST, "Could not find confirm email token for user " + userId); - } - - /** - * This method is used in integration tests. - * - * @param userId - * The database id of an AuthenticatedUser. 
- */ - @Path("confirmEmail/{userId}") - @POST - public Response startConfirmEmailProcess(@PathParam("userId") long userId) { - AuthenticatedUser user = authSvc.findByID(userId); - if (user != null) { - try { - ConfirmEmailInitResponse confirmEmailInitResponse = confirmEmailSvc.beginConfirm(user); - ConfirmEmailData confirmEmailData = confirmEmailInitResponse.getConfirmEmailData(); - return ok(Json.createObjectBuilder().add("tokenCreated", confirmEmailData.getCreated().toString()) - .add("identifier", user.getUserIdentifier())); - } catch (ConfirmEmailException ex) { - return error(Status.BAD_REQUEST, - "Could not start confirm email process for user " + userId + ": " + ex.getLocalizedMessage()); - } - } - return error(Status.BAD_REQUEST, "Could not find user based on " + userId); - } - - /** - * This method is used by an integration test in UsersIT.java to exercise bug - * https://github.com/IQSS/dataverse/issues/3287 . Not for use by users! - */ - @Path("convertUserFromBcryptToSha1") - @POST - public Response convertUserFromBcryptToSha1(String json) { - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject object = jsonReader.readObject(); - jsonReader.close(); - BuiltinUser builtinUser = builtinUserService.find(new Long(object.getInt("builtinUserId"))); - builtinUser.updateEncryptedPassword("4G7xxL9z11/JKN4jHPn4g9iIQck=", 0); // password is "sha-1Pass", 0 means - // SHA-1 - BuiltinUser savedUser = builtinUserService.save(builtinUser); - return ok("foo: " + savedUser); - - } + @Path("assignments/assignees/{raIdtf: .*}") + @GET + public Response getAssignmentsFor(@PathParam("raIdtf") String raIdtf) { + + JsonArrayBuilder arr = Json.createArrayBuilder(); + roleAssigneeSvc.getAssignmentsFor(raIdtf).forEach(a -> arr.add(json(a))); + + return ok(arr); + } + + /** + * This method is used in integration tests. + * + * @param userId + * The database id of an AuthenticatedUser. + * @return The confirm email token. + */ + @Path("confirmEmail/{userId}") + @GET + public Response getConfirmEmailToken(@PathParam("userId") long userId) { + AuthenticatedUser user = authSvc.findByID(userId); + if (user != null) { + ConfirmEmailData confirmEmailData = confirmEmailSvc.findSingleConfirmEmailDataByUser(user); + if (confirmEmailData != null) { + return ok(Json.createObjectBuilder().add("token", confirmEmailData.getToken())); + } + } + return error(Status.BAD_REQUEST, "Could not find confirm email token for user " + userId); + } + + /** + * This method is used in integration tests. + * + * @param userId + * The database id of an AuthenticatedUser. + */ + @Path("confirmEmail/{userId}") + @POST + public Response startConfirmEmailProcess(@PathParam("userId") long userId) { + AuthenticatedUser user = authSvc.findByID(userId); + if (user != null) { + try { + ConfirmEmailInitResponse confirmEmailInitResponse = confirmEmailSvc.beginConfirm(user); + ConfirmEmailData confirmEmailData = confirmEmailInitResponse.getConfirmEmailData(); + return ok(Json.createObjectBuilder().add("tokenCreated", confirmEmailData.getCreated().toString()) + .add("identifier", user.getUserIdentifier())); + } catch (ConfirmEmailException ex) { + return error(Status.BAD_REQUEST, + "Could not start confirm email process for user " + userId + ": " + ex.getLocalizedMessage()); + } + } + return error(Status.BAD_REQUEST, "Could not find user based on " + userId); + } + + /** + * This method is used by an integration test in UsersIT.java to exercise bug + * https://github.com/IQSS/dataverse/issues/3287 . Not for use by users! 
+ */ + @Path("convertUserFromBcryptToSha1") + @POST + public Response convertUserFromBcryptToSha1(String json) { + JsonReader jsonReader = Json.createReader(new StringReader(json)); + JsonObject object = jsonReader.readObject(); + jsonReader.close(); + BuiltinUser builtinUser = builtinUserService.find(new Long(object.getInt("builtinUserId"))); + builtinUser.updateEncryptedPassword("4G7xxL9z11/JKN4jHPn4g9iIQck=", 0); // password is "sha-1Pass", 0 means + // SHA-1 + BuiltinUser savedUser = builtinUserService.save(builtinUser); + return ok("foo: " + savedUser); + + } @Path("permissions/{dvo}") @AuthRequired @@ -1355,43 +1355,43 @@ public Response findPermissonsOn(@Context final ContainerRequestContext crc, @Pa } } - @Path("assignee/{idtf}") - @GET - public Response findRoleAssignee(@PathParam("idtf") String idtf) { - RoleAssignee ra = roleAssigneeSvc.getRoleAssignee(idtf); - return (ra == null) ? notFound("Role Assignee '" + idtf + "' not found.") : ok(json(ra.getDisplayInfo())); - } - - @Path("datasets/integrity/{datasetVersionId}/fixmissingunf") - @POST - public Response fixUnf(@PathParam("datasetVersionId") String datasetVersionId, - @QueryParam("forceRecalculate") boolean forceRecalculate) { - JsonObjectBuilder info = datasetVersionSvc.fixMissingUnf(datasetVersionId, forceRecalculate); - return ok(info); - } - - @Path("datafiles/integrity/fixmissingoriginaltypes") - @GET - public Response fixMissingOriginalTypes() { - JsonObjectBuilder info = Json.createObjectBuilder(); - - List affectedFileIds = fileService.selectFilesWithMissingOriginalTypes(); - - if (affectedFileIds.isEmpty()) { - info.add("message", - "All the tabular files in the database already have the original types set correctly; exiting."); - } else { - for (Long fileid : affectedFileIds) { - logger.fine("found file id: " + fileid); - } - info.add("message", "Found " + affectedFileIds.size() - + " tabular files with missing original types. Kicking off an async job that will repair the files in the background."); - } - - ingestService.fixMissingOriginalTypes(affectedFileIds); - - return ok(info); - } + @Path("assignee/{idtf}") + @GET + public Response findRoleAssignee(@PathParam("idtf") String idtf) { + RoleAssignee ra = roleAssigneeSvc.getRoleAssignee(idtf); + return (ra == null) ? notFound("Role Assignee '" + idtf + "' not found.") : ok(json(ra.getDisplayInfo())); + } + + @Path("datasets/integrity/{datasetVersionId}/fixmissingunf") + @POST + public Response fixUnf(@PathParam("datasetVersionId") String datasetVersionId, + @QueryParam("forceRecalculate") boolean forceRecalculate) { + JsonObjectBuilder info = datasetVersionSvc.fixMissingUnf(datasetVersionId, forceRecalculate); + return ok(info); + } + + @Path("datafiles/integrity/fixmissingoriginaltypes") + @GET + public Response fixMissingOriginalTypes() { + JsonObjectBuilder info = Json.createObjectBuilder(); + + List affectedFileIds = fileService.selectFilesWithMissingOriginalTypes(); + + if (affectedFileIds.isEmpty()) { + info.add("message", + "All the tabular files in the database already have the original types set correctly; exiting."); + } else { + for (Long fileid : affectedFileIds) { + logger.fine("found file id: " + fileid); + } + info.add("message", "Found " + affectedFileIds.size() + + " tabular files with missing original types. 
Kicking off an async job that will repair the files in the background."); + } + + ingestService.fixMissingOriginalTypes(affectedFileIds); + + return ok(info); + } @Path("datafiles/integrity/fixmissingoriginalsizes") @GET @@ -1421,60 +1421,60 @@ public Response fixMissingOriginalSizes(@QueryParam("limit") Integer limit) { return ok(info); } - /** - * This method is used in API tests, called from UtilIt.java. - */ - @GET - @Path("datasets/thumbnailMetadata/{id}") - public Response getDatasetThumbnailMetadata(@PathParam("id") Long idSupplied) { - Dataset dataset = datasetSvc.find(idSupplied); - if (dataset == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + "."); - } - JsonObjectBuilder data = Json.createObjectBuilder(); - DatasetThumbnail datasetThumbnail = dataset.getDatasetThumbnail(ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); - data.add("isUseGenericThumbnail", dataset.isUseGenericThumbnail()); - data.add("datasetLogoPresent", DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)); - if (datasetThumbnail != null) { - data.add("datasetThumbnailBase64image", datasetThumbnail.getBase64image()); - DataFile dataFile = datasetThumbnail.getDataFile(); - if (dataFile != null) { - /** - * @todo Change this from a String to a long. - */ - data.add("dataFileId", dataFile.getId().toString()); - } - } - return ok(data); - } - - /** - * validatePassword - *
    - * Validate a password with an API call - * - * @param password - * The password - * @return A response with the validation result. - */ - @Path("validatePassword") - @POST - public Response validatePassword(String password) { - - final List errors = passwordValidatorService.validate(password, new Date(), false); - final JsonArrayBuilder errorArray = Json.createArrayBuilder(); - errors.forEach(errorArray::add); - return ok(Json.createObjectBuilder().add("password", password).add("errors", errorArray)); - } - - @GET - @Path("/isOrcid") - public Response isOrcidEnabled() { - return authSvc.isOrcidEnabled() ? ok("Orcid is enabled") : ok("no orcid for you."); - } + /** + * This method is used in API tests, called from UtilIt.java. + */ + @GET + @Path("datasets/thumbnailMetadata/{id}") + public Response getDatasetThumbnailMetadata(@PathParam("id") Long idSupplied) { + Dataset dataset = datasetSvc.find(idSupplied); + if (dataset == null) { + return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + "."); + } + JsonObjectBuilder data = Json.createObjectBuilder(); + DatasetThumbnail datasetThumbnail = dataset.getDatasetThumbnail(ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); + data.add("isUseGenericThumbnail", dataset.isUseGenericThumbnail()); + data.add("datasetLogoPresent", DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)); + if (datasetThumbnail != null) { + data.add("datasetThumbnailBase64image", datasetThumbnail.getBase64image()); + DataFile dataFile = datasetThumbnail.getDataFile(); + if (dataFile != null) { + /** + * @todo Change this from a String to a long. + */ + data.add("dataFileId", dataFile.getId().toString()); + } + } + return ok(data); + } + + /** + * validatePassword + *
    + * Validate a password with an API call + * + * @param password + * The password + * @return A response with the validation result. + */ + @Path("validatePassword") + @POST + public Response validatePassword(String password) { + + final List errors = passwordValidatorService.validate(password, new Date(), false); + final JsonArrayBuilder errorArray = Json.createArrayBuilder(); + errors.forEach(errorArray::add); + return ok(Json.createObjectBuilder().add("password", password).add("errors", errorArray)); + } + + @GET + @Path("/isOrcid") + public Response isOrcidEnabled() { + return authSvc.isOrcidEnabled() ? ok("Orcid is enabled") : ok("no orcid for you."); + } @POST - @AuthRequired + @AuthRequired @Path("{id}/reregisterHDLToPID") public Response reregisterHdlToPID(@Context ContainerRequestContext crc, @PathParam("id") String id) { logger.info("Starting to reregister " + id + " Dataset Id. (from hdl to doi)" + new Date()); @@ -1805,7 +1805,7 @@ public Response updateHashValues(@Context ContainerRequestContext crc, @PathPara } @POST - @AuthRequired + @AuthRequired @Path("/computeDataFileHashValue/{fileId}/algorithm/{alg}") public Response computeDataFileHashValue(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId, @PathParam("alg") String alg) { @@ -1867,7 +1867,7 @@ public Response computeDataFileHashValue(@Context ContainerRequestContext crc, @ } @POST - @AuthRequired + @AuthRequired @Path("/validateDataFileHashValue/{fileId}") public Response validateDataFileHashValue(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId) { @@ -1934,7 +1934,7 @@ public Response validateDataFileHashValue(@Context ContainerRequestContext crc, } @POST - @AuthRequired + @AuthRequired @Path("/submitDatasetVersionToArchive/{id}/{version}") public Response submitDatasetVersionToArchive(@Context ContainerRequestContext crc, @PathParam("id") String dsid, @PathParam("version") String versionNumber) { @@ -2007,7 +2007,7 @@ public void run() { * @return */ @POST - @AuthRequired + @AuthRequired @Path("/archiveAllUnarchivedDatasetVersions") public Response archiveAllUnarchivedDatasetVersions(@Context ContainerRequestContext crc, @QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit, @QueryParam("latestonly") boolean latestonly) { @@ -2106,7 +2106,7 @@ public Response clearMetricsCacheByName(@PathParam("name") String name) { } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/addRoleAssignmentsToChildren") public Response addRoleAssignementsToChildren(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { Dataverse owner = dataverseSvc.findByAlias(alias); @@ -2137,90 +2137,90 @@ public Response addRoleAssignementsToChildren(@Context ContainerRequestContext c } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/storageDriver") public Response getStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { - Dataverse dataverse = dataverseSvc.findByAlias(alias); - if (dataverse == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); - } - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - //Note that this returns what's set directly on this dataverse. 
If null/DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER, the user would have to recurse the chain of parents to find the effective storageDriver - return ok(dataverse.getStorageDriverId()); + Dataverse dataverse = dataverseSvc.findByAlias(alias); + if (dataverse == null) { + return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); + } + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + //Note that this returns what's set directly on this dataverse. If null/DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER, the user would have to recurse the chain of parents to find the effective storageDriver + return ok(dataverse.getStorageDriverId()); } @PUT - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/storageDriver") public Response setStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias, String label) throws WrappedResponse { - Dataverse dataverse = dataverseSvc.findByAlias(alias); - if (dataverse == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); - } - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - for (Entry store: DataAccess.getStorageDriverLabels().entrySet()) { - if(store.getKey().equals(label)) { - dataverse.setStorageDriverId(store.getValue()); - return ok("Storage set to: " + store.getKey() + "/" + store.getValue()); - } - } - return error(Response.Status.BAD_REQUEST, - "No Storage Driver found for : " + label); + Dataverse dataverse = dataverseSvc.findByAlias(alias); + if (dataverse == null) { + return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); + } + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + for (Entry store: DataAccess.getStorageDriverLabels().entrySet()) { + if(store.getKey().equals(label)) { + dataverse.setStorageDriverId(store.getValue()); + return ok("Storage set to: " + store.getKey() + "/" + store.getValue()); + } + } + return error(Response.Status.BAD_REQUEST, + "No Storage Driver found for : " + label); } @DELETE - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/storageDriver") public Response resetStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { - Dataverse dataverse = dataverseSvc.findByAlias(alias); - if (dataverse == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); - } - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - dataverse.setStorageDriverId(""); - return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + Dataverse dataverse = dataverseSvc.findByAlias(alias); + if (dataverse == null) { + return error(Response.Status.NOT_FOUND, "Could not find dataverse 
based on alias supplied: " + alias + "."); + } + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + dataverse.setStorageDriverId(""); + return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/storageDrivers") public Response listStorageDrivers(@Context ContainerRequestContext crc) throws WrappedResponse { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - JsonObjectBuilder bld = jsonObjectBuilder(); - DataAccess.getStorageDriverLabels().entrySet().forEach(s -> bld.add(s.getKey(), s.getValue())); - return ok(bld); + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + JsonObjectBuilder bld = jsonObjectBuilder(); + DataAccess.getStorageDriverLabels().entrySet().forEach(s -> bld.add(s.getKey(), s.getValue())); + return ok(bld); } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/curationLabelSet") public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { Dataverse dataverse = dataverseSvc.findByAlias(alias); @@ -2242,7 +2242,7 @@ public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathP } @PUT - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/curationLabelSet") public Response setCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias, @QueryParam("name") String name) throws WrappedResponse { Dataverse dataverse = dataverseSvc.findByAlias(alias); @@ -2273,7 +2273,7 @@ public Response setCurationLabelSet(@Context ContainerRequestContext crc, @PathP } @DELETE - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/curationLabelSet") public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { Dataverse dataverse = dataverseSvc.findByAlias(alias); @@ -2293,7 +2293,7 @@ public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @Pat } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/curationLabelSets") public Response listCurationLabelSets(@Context ContainerRequestContext crc) throws WrappedResponse { try { @@ -2403,7 +2403,7 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon } @POST - @AuthRequired + @AuthRequired @Consumes("application/json") @Path("/requestSignedUrl") public Response getSignedUrl(@Context ContainerRequestContext crc, JsonObject urlInfo) { @@ -2521,162 +2521,162 @@ public Response getFeatureFlag(@PathParam("flag") String flagIn) { } } - @GET - @AuthRequired - @Path("/datafiles/auditFiles") - public Response getAuditFiles(@Context ContainerRequestContext crc, - @QueryParam("firstId") Long firstId, @QueryParam("lastId") Long lastId, - @QueryParam("datasetIdentifierList") String datasetIdentifierList) throws WrappedResponse { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, 
"Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - - int datasetsChecked = 0; - long startId = (firstId == null ? 0 : firstId); - long endId = (lastId == null ? Long.MAX_VALUE : lastId); - - List datasetIdentifiers; - if (datasetIdentifierList == null || datasetIdentifierList.isEmpty()) { - datasetIdentifiers = Collections.emptyList(); - } else { - startId = 0; - endId = Long.MAX_VALUE; - datasetIdentifiers = List.of(datasetIdentifierList.split(",")); - } - if (endId < startId) { - return badRequest("Invalid Parameters: lastId must be equal to or greater than firstId"); - } - - NullSafeJsonBuilder jsonObjectBuilder = NullSafeJsonBuilder.jsonObjectBuilder(); - JsonArrayBuilder jsonDatasetsArrayBuilder = Json.createArrayBuilder(); - JsonArrayBuilder jsonFailuresArrayBuilder = Json.createArrayBuilder(); - - if (startId > 0) { - jsonObjectBuilder.add("firstId", startId); - } - if (endId < Long.MAX_VALUE) { - jsonObjectBuilder.add("lastId", endId); - } - - // compile the list of ids to process - List datasetIds; - if (datasetIdentifiers.isEmpty()) { - datasetIds = datasetService.findAllLocalDatasetIds(); - } else { - datasetIds = new ArrayList<>(datasetIdentifiers.size()); - JsonArrayBuilder jab = Json.createArrayBuilder(); - datasetIdentifiers.forEach(id -> { - String dId = id.trim(); - jab.add(dId); - Dataset d = datasetService.findByGlobalId(dId); - if (d != null) { - datasetIds.add(d.getId()); - } else { - NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); - job.add("datasetIdentifier",dId); - job.add("reason","Not Found"); - jsonFailuresArrayBuilder.add(job); - } - }); - jsonObjectBuilder.add("datasetIdentifierList", jab); - } - - for (Long datasetId : datasetIds) { - if (datasetId < startId) { - continue; - } else if (datasetId > endId) { - break; - } - Dataset dataset; - try { - dataset = findDatasetOrDie(String.valueOf(datasetId)); - datasetsChecked++; - } catch (WrappedResponse e) { - NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); - job.add("datasetId", datasetId); - job.add("reason", e.getMessage()); - jsonFailuresArrayBuilder.add(job); - continue; - } - - List missingFiles = new ArrayList<>(); - List missingFileMetadata = new ArrayList<>(); - try { - Predicate filter = s -> true; - StorageIO datasetIO = DataAccess.getStorageIO(dataset); - final List result = datasetIO.cleanUp(filter, true); - // add files that are in dataset files but not in cleanup result or DataFiles with missing FileMetadata - dataset.getFiles().forEach(df -> { - try { - StorageIO datafileIO = df.getStorageIO(); - String storageId = df.getStorageIdentifier(); - FileMetadata fm = df.getFileMetadata(); - if (!datafileIO.exists()) { - missingFiles.add(storageId + "," + (fm != null ? - (fm.getDirectoryLabel() != null || !fm.getDirectoryLabel().isEmpty() ? 
"directoryLabel,"+fm.getDirectoryLabel()+"," : "") - +"label,"+fm.getLabel() : "type,"+df.getContentType())); - } - if (fm == null) { - missingFileMetadata.add(storageId + ",dataFileId," + df.getId()); - } - } catch (IOException e) { - NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); - job.add("dataFileId", df.getId()); - job.add("reason", e.getMessage()); - jsonFailuresArrayBuilder.add(job); - } - }); - } catch (IOException e) { - NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); - job.add("datasetId", datasetId); - job.add("reason", e.getMessage()); - jsonFailuresArrayBuilder.add(job); - } - - JsonObjectBuilder job = Json.createObjectBuilder(); - if (!missingFiles.isEmpty() || !missingFileMetadata.isEmpty()) { - job.add("id", dataset.getId()); - job.add("identifier", dataset.getIdentifier()); - job.add("authority", dataset.getAuthority()); - job.add("protocol", dataset.getProtocol()); - job.add("persistentURL", dataset.getPersistentURL()); - if (!missingFileMetadata.isEmpty()) { - JsonArrayBuilder jabMissingFileMetadata = Json.createArrayBuilder(); - missingFileMetadata.forEach(mm -> { - String[] missingMetadata = mm.split(","); - NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder() - .add("storageIdentifier", missingMetadata[0]) - .add(missingMetadata[1], missingMetadata[2]); - jabMissingFileMetadata.add(jobj); - }); - job.add("missingFileMetadata", jabMissingFileMetadata); - } - if (!missingFiles.isEmpty()) { - JsonArrayBuilder jabMissingFiles = Json.createArrayBuilder(); - missingFiles.forEach(mf -> { - String[] missingFile = mf.split(","); - NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder() - .add("storageIdentifier", missingFile[0]); - for (int i = 2; i < missingFile.length; i+=2) { - jobj.add(missingFile[i-1], missingFile[i]); - } - jabMissingFiles.add(jobj); - }); - job.add("missingFiles", jabMissingFiles); - } - jsonDatasetsArrayBuilder.add(job); - } - } - - jsonObjectBuilder.add("datasetsChecked", datasetsChecked); - jsonObjectBuilder.add("datasets", jsonDatasetsArrayBuilder); - jsonObjectBuilder.add("failures", jsonFailuresArrayBuilder); - - return ok(jsonObjectBuilder); - } + @GET + @AuthRequired + @Path("/datafiles/auditFiles") + public Response getAuditFiles(@Context ContainerRequestContext crc, + @QueryParam("firstId") Long firstId, @QueryParam("lastId") Long lastId, + @QueryParam("datasetIdentifierList") String datasetIdentifierList) throws WrappedResponse { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + int datasetsChecked = 0; + long startId = (firstId == null ? 0 : firstId); + long endId = (lastId == null ? 
Long.MAX_VALUE : lastId); + + List datasetIdentifiers; + if (datasetIdentifierList == null || datasetIdentifierList.isEmpty()) { + datasetIdentifiers = Collections.emptyList(); + } else { + startId = 0; + endId = Long.MAX_VALUE; + datasetIdentifiers = List.of(datasetIdentifierList.split(",")); + } + if (endId < startId) { + return badRequest("Invalid Parameters: lastId must be equal to or greater than firstId"); + } + + NullSafeJsonBuilder jsonObjectBuilder = NullSafeJsonBuilder.jsonObjectBuilder(); + JsonArrayBuilder jsonDatasetsArrayBuilder = Json.createArrayBuilder(); + JsonArrayBuilder jsonFailuresArrayBuilder = Json.createArrayBuilder(); + + if (startId > 0) { + jsonObjectBuilder.add("firstId", startId); + } + if (endId < Long.MAX_VALUE) { + jsonObjectBuilder.add("lastId", endId); + } + + // compile the list of ids to process + List datasetIds; + if (datasetIdentifiers.isEmpty()) { + datasetIds = datasetService.findAllLocalDatasetIds(); + } else { + datasetIds = new ArrayList<>(datasetIdentifiers.size()); + JsonArrayBuilder jab = Json.createArrayBuilder(); + datasetIdentifiers.forEach(id -> { + String dId = id.trim(); + jab.add(dId); + Dataset d = datasetService.findByGlobalId(dId); + if (d != null) { + datasetIds.add(d.getId()); + } else { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("datasetIdentifier",dId); + job.add("reason","Not Found"); + jsonFailuresArrayBuilder.add(job); + } + }); + jsonObjectBuilder.add("datasetIdentifierList", jab); + } + + for (Long datasetId : datasetIds) { + if (datasetId < startId) { + continue; + } else if (datasetId > endId) { + break; + } + Dataset dataset; + try { + dataset = findDatasetOrDie(String.valueOf(datasetId)); + datasetsChecked++; + } catch (WrappedResponse e) { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("datasetId", datasetId); + job.add("reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); + continue; + } + + List missingFiles = new ArrayList<>(); + List missingFileMetadata = new ArrayList<>(); + try { + Predicate filter = s -> true; + StorageIO datasetIO = DataAccess.getStorageIO(dataset); + final List result = datasetIO.cleanUp(filter, true); + // add files that are in dataset files but not in cleanup result or DataFiles with missing FileMetadata + dataset.getFiles().forEach(df -> { + try { + StorageIO datafileIO = df.getStorageIO(); + String storageId = df.getStorageIdentifier(); + FileMetadata fm = df.getFileMetadata(); + if (!datafileIO.exists()) { + missingFiles.add(storageId + "," + (fm != null ? + (fm.getDirectoryLabel() != null || !fm.getDirectoryLabel().isEmpty() ? 
"directoryLabel,"+fm.getDirectoryLabel()+"," : "") + +"label,"+fm.getLabel() : "type,"+df.getContentType())); + } + if (fm == null) { + missingFileMetadata.add(storageId + ",dataFileId," + df.getId()); + } + } catch (IOException e) { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("dataFileId", df.getId()); + job.add("reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); + } + }); + } catch (IOException e) { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("datasetId", datasetId); + job.add("reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); + } + + JsonObjectBuilder job = Json.createObjectBuilder(); + if (!missingFiles.isEmpty() || !missingFileMetadata.isEmpty()) { + job.add("id", dataset.getId()); + job.add("identifier", dataset.getIdentifier()); + job.add("authority", dataset.getAuthority()); + job.add("protocol", dataset.getProtocol()); + job.add("persistentURL", dataset.getPersistentURL()); + if (!missingFileMetadata.isEmpty()) { + JsonArrayBuilder jabMissingFileMetadata = Json.createArrayBuilder(); + missingFileMetadata.forEach(mm -> { + String[] missingMetadata = mm.split(","); + NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder() + .add("storageIdentifier", missingMetadata[0]) + .add(missingMetadata[1], missingMetadata[2]); + jabMissingFileMetadata.add(jobj); + }); + job.add("missingFileMetadata", jabMissingFileMetadata); + } + if (!missingFiles.isEmpty()) { + JsonArrayBuilder jabMissingFiles = Json.createArrayBuilder(); + missingFiles.forEach(mf -> { + String[] missingFile = mf.split(","); + NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder() + .add("storageIdentifier", missingFile[0]); + for (int i = 2; i < missingFile.length; i+=2) { + jobj.add(missingFile[i-1], missingFile[i]); + } + jabMissingFiles.add(jobj); + }); + job.add("missingFiles", jabMissingFiles); + } + jsonDatasetsArrayBuilder.add(job); + } + } + + jsonObjectBuilder.add("datasetsChecked", datasetsChecked); + jsonObjectBuilder.add("datasets", jsonDatasetsArrayBuilder); + jsonObjectBuilder.add("failures", jsonFailuresArrayBuilder); + + return ok(jsonObjectBuilder); + } } From a79015f6785fbd572e01033584dfafe25fb153bc Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Wed, 20 Nov 2024 10:09:41 -0500 Subject: [PATCH 310/402] #11037 limit beta testing deployments to one concurrent action at a time --- .github/workflows/deploy_beta_testing.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index 4cec08564a4..eca8416732a 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -5,6 +5,10 @@ on: branches: - develop +concurrency: + group: deploy-beta-testing + cancel-in-progress: false + jobs: build: runs-on: ubuntu-latest From 2db26b20f3d01895cfa4d0c5093ca1ce4b539be2 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 20 Nov 2024 10:26:56 -0500 Subject: [PATCH 311/402] add pid --- doc/sphinx-guides/source/api/native-api.rst | 16 ++++++---------- .../java/edu/harvard/iq/dataverse/api/Admin.java | 4 +--- 2 files changed, 7 insertions(+), 13 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 84e8bf45d9d..9a5f469a4d0 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6226,17 +6226,15 @@ Sample 
JSON Audit Response:: "firstId": 0, "lastId": 100, "datasetIdentifierList": [ - "doi.org/10.5072/FK2/XXXXXX", - "doi.org/10.5072/FK2/JXYBJS", - "doi.org/10.7910/DVN/MPU019" + "doi:10.5072/FK2/XXXXXX", + "doi:10.5072/FK2/JXYBJS", + "doi:10.7910/DVN/MPU019" ], "datasetsChecked": 100, "datasets": [ { "id": 6, - "identifier": "FK2/JXYBJS", - "authority": "10.5072", - "protocol": "doi", + "pid": "doi:10.5072/FK2/JXYBJS", "persistentURL": "https://doi.org/10.5072/FK2/JXYBJS", "missingFileMetadata": [ { @@ -6247,9 +6245,7 @@ Sample JSON Audit Response:: }, { "id": 47731, - "identifier": "DVN/MPU019", - "authority": "10.7910", - "protocol": "doi", + "pid": "doi:10.5072/FK2/MPU019", "persistentURL": "https://doi.org/10.7910/DVN/MPU019", "missingFiles": [ { @@ -6262,7 +6258,7 @@ Sample JSON Audit Response:: ], "failures": [ { - "datasetIdentifier": "doi.org/10.5072/FK2/XXXXXX", + "datasetIdentifier": "doi:10.5072/FK2/XXXXXX", "reason": "Not Found" } ] diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 61f76c9928c..152bcf5066e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2641,9 +2641,7 @@ public Response getAuditFiles(@Context ContainerRequestContext crc, JsonObjectBuilder job = Json.createObjectBuilder(); if (!missingFiles.isEmpty() || !missingFileMetadata.isEmpty()) { job.add("id", dataset.getId()); - job.add("identifier", dataset.getIdentifier()); - job.add("authority", dataset.getAuthority()); - job.add("protocol", dataset.getProtocol()); + job.add("pid", dataset.getProtocol() + ":" + dataset.getAuthority() + "/" + dataset.getIdentifier()); job.add("persistentURL", dataset.getPersistentURL()); if (!missingFileMetadata.isEmpty()) { JsonArrayBuilder jabMissingFileMetadata = Json.createArrayBuilder(); From 2c5aca8e952b39fd1e1f7cfa99298303ca535799 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 20 Nov 2024 10:30:50 -0500 Subject: [PATCH 312/402] fix typos --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 9a5f469a4d0..9d6871041fd 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6210,7 +6210,7 @@ Scans the Datasets in the database and verifies that the stored files exist. If Optional Parameters are available for filtering the Datasets scanned. 
-For auditing the Datasets in a paged manor (firstId and lastId):: +For auditing the Datasets in a paged manner (firstId and lastId):: curl "$SERVER_URL/api/admin/datafiles/auditFiles?firstId=0&lastId=1000" From 3c67a7977fdaa862460e25208bb1bcb7a9c4a3c4 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 20 Nov 2024 10:31:15 -0500 Subject: [PATCH 313/402] Update doc/release-notes/220-harvard-edu-audit-files.md Co-authored-by: Philip Durbin --- doc/release-notes/220-harvard-edu-audit-files.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/220-harvard-edu-audit-files.md b/doc/release-notes/220-harvard-edu-audit-files.md index c697bc225c0..79391703041 100644 --- a/doc/release-notes/220-harvard-edu-audit-files.md +++ b/doc/release-notes/220-harvard-edu-audit-files.md @@ -5,7 +5,7 @@ The Datasets scanned can be limited by optional firstId and lastId query paramet Once the audit report is generated, a superuser can either delete the missing file(s) from the Dataset or contact the author to re-upload the missing file(s). The JSON response includes: -- List of files in each DataFile where the file exists in the database but the physical file is not on the file store. +- List of files in each DataFile where the file exists in the database but the physical file is not in the file store. - List of DataFiles where the FileMetadata is missing. - Other failures found when trying to process the Datasets From 58d32357978ddbae7abe587e5645fbc52192e471 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 20 Nov 2024 10:31:24 -0500 Subject: [PATCH 314/402] Update doc/release-notes/220-harvard-edu-audit-files.md Co-authored-by: Philip Durbin --- doc/release-notes/220-harvard-edu-audit-files.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/220-harvard-edu-audit-files.md b/doc/release-notes/220-harvard-edu-audit-files.md index 79391703041..002c8e85063 100644 --- a/doc/release-notes/220-harvard-edu-audit-files.md +++ b/doc/release-notes/220-harvard-edu-audit-files.md @@ -13,4 +13,4 @@ curl "http://localhost:8080/api/admin/datafiles/auditFiles curl "http://localhost:8080/api/admin/datafiles/auditFiles?firstId=0&lastId=1000" curl "http://localhost:8080/api/admin/datafiles/auditFiles?datasetIdentifierList=doi:10.5072/FK2/RVNT9Q,doi:10.5072/FK2/RVNT9Q -For more information, see issue [the docs](https://dataverse-guide--11016.org.readthedocs.build/en/11016/api/native-api.html#datafile-audit), #11016, and [#220](https://github.com/IQSS/dataverse.harvard.edu/issues/220) +For more information, see [the docs](https://dataverse-guide--11016.org.readthedocs.build/en/11016/api/native-api.html#datafile-audit), #11016, and [#220](https://github.com/IQSS/dataverse.harvard.edu/issues/220) From a1f057287ee1297e8ddf5ab9c0f8ec94dbcf640f Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 20 Nov 2024 10:42:30 -0500 Subject: [PATCH 315/402] added a configurable batch size limit for when to apply the single file size lookup method for the entire batch. 
#10977 --- .../iq/dataverse/globus/GlobusServiceBean.java | 16 ++++++++++------ .../dataverse/settings/SettingsServiceBean.java | 6 ++++++ .../harvard/iq/dataverse/util/SystemConfig.java | 6 ++++++ 3 files changed, 22 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 3d1c5a1044d..5c9a2f1d946 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -986,11 +986,15 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut inputList.add(fileId + "IDsplit" + fullPath + "IDsplit" + fileName); } - // Look up the sizes of all the files in the dataset folder, to avoid - // looking them up one by one later: - // @todo: we should only be doing this if this is a managed store, probably? - GlobusEndpoint endpoint = getGlobusEndpoint(dataset); - Map fileSizeMap = lookupFileSizes(endpoint, endpoint.getBasePath()); + Map fileSizeMap = null; + + if (filesJsonArray.size() >= systemConfig.getGlobusBatchLookupSize()) { + // Look up the sizes of all the files in the dataset folder, to avoid + // looking them up one by one later: + // @todo: we should only be doing this if this is a managed store, probably (?) + GlobusEndpoint endpoint = getGlobusEndpoint(dataset); + fileSizeMap = lookupFileSizes(endpoint, endpoint.getBasePath()); + } // calculateMissingMetadataFields: checksum, mimetype JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList, myLogger); @@ -1034,7 +1038,7 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut .add("/fileSize", Json.createValue(uploadedFileSize)).build(); fileJsonObject = patch.apply(fileJsonObject); } else { - logger.warning("No file size entry found for file "+fileId); + logger.fine("No file size entry found for file "+fileId); } addFilesJsonData.add(fileJsonObject); countSuccess++; diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 8ed96690e84..b5eb483c2c8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -539,6 +539,12 @@ Whether Harvesting (OAI) service is enabled * */ GlobusSingleFileTransfer, + /** Lower limit of the number of files in a Globus upload task where + * the batch mode should be utilized in looking up the file information + * on the remote end node (file sizes, primarily), instead of individual + * lookups. 
+ */ + GlobusBatchLookupSize, /** * Optional external executables to run on the metadata for dataverses * and datasets being published; as an extra validation step, to diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 434b3bd8f8f..e769cacfdb1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -78,6 +78,7 @@ public class SystemConfig { public static final long defaultZipDownloadLimit = 104857600L; // 100MB private static final int defaultMultipleUploadFilesLimit = 1000; private static final int defaultLoginSessionTimeout = 480; // = 8 hours + private static final int defaultGlobusBatchLookupSize = 50; private String buildNumber = null; @@ -954,6 +955,11 @@ public boolean isGlobusFileDownload() { return (isGlobusDownload() && settingsService.isTrueForKey(SettingsServiceBean.Key.GlobusSingleFileTransfer, false)); } + public int getGlobusBatchLookupSize() { + String batchSizeOption = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusBatchLookupSize); + return getIntLimitFromStringOrDefault(batchSizeOption, defaultGlobusBatchLookupSize); + } + private Boolean getMethodAvailable(String method, boolean upload) { String methods = settingsService.getValueForKey( upload ? SettingsServiceBean.Key.UploadMethods : SettingsServiceBean.Key.DownloadMethods); From 50b752a0116d060de95b790a23a63840c90feb6c Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 20 Nov 2024 10:45:48 -0500 Subject: [PATCH 316/402] fix typos --- doc/sphinx-guides/source/api/native-api.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 9d6871041fd..bfcbbb96f93 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6204,7 +6204,8 @@ Datafile Audit ~~~~~~~~~~~~~~ Produce an audit report of missing files and FileMetadata for Datasets. -Scans the Datasets in the database and verifies that the stored files exist. If the files are missing or if the FileMetadata is missing, this information is returned in a JSON response:: +Scans the Datasets in the database and verifies that the stored files exist. If the files are missing or if the FileMetadata is missing, this information is returned in a JSON response. +The call will return a status code of 200 if the report was generated successfully. 
Issues found will be documented in the report and will not return a failure status code unless the report could not be generated:: curl "$SERVER_URL/api/admin/datafiles/auditFiles" From 536c1bfe9466242797ce5a037936560d5cd0e197 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 20 Nov 2024 12:08:21 -0500 Subject: [PATCH 317/402] release note #10977 --- doc/release-notes/10977-globus-filesize-lookup.md | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 doc/release-notes/10977-globus-filesize-lookup.md diff --git a/doc/release-notes/10977-globus-filesize-lookup.md b/doc/release-notes/10977-globus-filesize-lookup.md new file mode 100644 index 00000000000..49fd10d9ffe --- /dev/null +++ b/doc/release-notes/10977-globus-filesize-lookup.md @@ -0,0 +1,6 @@ +## A new Globus optimization setting + +An optimization has been added for the Globus upload workflow, with a corresponding new database setting: `:GlobusBatchLookupSize` + + +See the [Database Settings](https://guides.dataverse.org/en/6.5/installation/config.html#GlobusBatchLookupSize) section of the Guides for more information. \ No newline at end of file From 617b13ae40fbed89a14816aa0c07cb10b228db6d Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 20 Nov 2024 12:11:25 -0500 Subject: [PATCH 318/402] configuration guide entry #10977 --- doc/sphinx-guides/source/installation/config.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index e3965e3cd7c..30a36da9499 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -4849,6 +4849,13 @@ The URL where the `dataverse-globus Date: Thu, 21 Nov 2024 10:57:37 +0000 Subject: [PATCH 319/402] Changed: docs and release note tweak for input levels --- doc/release-notes/11018-update-dataverse-endpoint-update.md | 2 +- doc/sphinx-guides/source/api/native-api.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/11018-update-dataverse-endpoint-update.md b/doc/release-notes/11018-update-dataverse-endpoint-update.md index dcd8eb0c90d..c2d9cf64af3 100644 --- a/doc/release-notes/11018-update-dataverse-endpoint-update.md +++ b/doc/release-notes/11018-update-dataverse-endpoint-update.md @@ -3,6 +3,6 @@ The updateDataverse API endpoint has been updated to support an "inherit from pa When it comes to omitting any of these fields in the request JSON: - Omitting ``facetIds`` or ``metadataBlockNames`` causes the Dataverse collection to inherit the corresponding configuration from its parent. -- Omitting ``inputLevels`` removes any existing input levels in the Dataverse collection. +- Omitting ``inputLevels`` removes any existing custom input levels in the Dataverse collection. Previously, not setting these fields meant keeping the existing ones in the Dataverse. diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 9ac6fe196ff..cb3c7750961 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -129,7 +129,7 @@ Note that setting any of these fields overwrites the previous configuration. When it comes to omitting these fields in the JSON: - Omitting ``facetIds`` or ``metadataBlockNames`` causes the Dataverse collection to inherit the corresponding configuration from its parent. -- Omitting ``inputLevels`` removes any existing input levels in the Dataverse collection. 
+- Omitting ``inputLevels`` removes any existing custom input levels in the Dataverse collection. To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs. From c7da932cabfc3b395002bcbf373da4542b808a48 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 21 Nov 2024 11:05:44 +0000 Subject: [PATCH 320/402] Added: doc tweak related to excluding metadataBlocks in updateDataverse --- doc/sphinx-guides/source/api/native-api.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 351688e2731..4689a46b40b 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -130,6 +130,7 @@ When it comes to omitting these fields in the JSON: - Omitting ``facetIds`` or ``metadataBlockNames`` causes the Dataverse collection to inherit the corresponding configuration from its parent. - Omitting ``inputLevels`` removes any existing custom input levels in the Dataverse collection. +- Omitting the entire ``metadataBlocks`` object in the request JSON would exclude the three sub-objects, resulting in the application of the two changes described above. To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs. From b63b1ffe9bb6c511d7445ccd2cc451bb44a39815 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 21 Nov 2024 11:09:02 +0000 Subject: [PATCH 321/402] Added: doc tweak explaining metadataBlocks is optional in updateDataverse endpoint --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 4689a46b40b..e542ad8bafd 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -118,7 +118,7 @@ The fully expanded example above (without environment variables) looks like this You should expect an HTTP 200 response and JSON beginning with "status":"OK" followed by a representation of the updated Dataverse collection. -Same as in :ref:`create-dataverse-api`, the request JSON supports a ``metadataBlocks`` object, with the following supported sub-objects: +Same as in :ref:`create-dataverse-api`, the request JSON supports an optional ``metadataBlocks`` object, with the following supported sub-objects: - ``metadataBlockNames``: The names of the metadata blocks to be assigned to the Dataverse collection. - ``inputLevels``: The names of the fields in each metadata block for which you want to add a custom configuration regarding their inclusion or requirement when creating and editing datasets in the Dataverse collection. Note that if the corresponding metadata blocks names are not specified in the ``metadataBlockNames``` field, they will be added automatically to the Dataverse collection. From 4449679d2af3614de54a077aaaae4ea2551ed22b Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 21 Oct 2024 17:14:19 -0400 Subject: [PATCH 322/402] quick draft implementation of addressing issue 1. from #10909. 
--- .../api/imports/ImportGenericServiceBean.java | 41 +++++++++++++++---- .../api/imports/ImportServiceBean.java | 13 +++++- .../harvest/client/HarvestingClient.java | 23 ++++++++++- src/main/resources/db/migration/V6.4.0.1.sql | 2 +- 4 files changed, 65 insertions(+), 14 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java index 41a57665010..bf8d068a69c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java @@ -150,12 +150,16 @@ public DatasetDTO processXML( XMLStreamReader xmlr, ForeignMetadataFormatMapping } - // Helper method for importing harvested Dublin Core xml. + // Helper methods for importing harvested Dublin Core xml. // Dublin Core is considered a mandatory, built in metadata format mapping. // It is distributed as required content, in reference_data.sql. // Note that arbitrary formatting tags are supported for the outer xml // wrapper. -- L.A. 4.5 public DatasetDTO processOAIDCxml(String DcXmlToParse) throws XMLStreamException { + return processOAIDCxml(DcXmlToParse, null); + } + + public DatasetDTO processOAIDCxml(String DcXmlToParse, String oaiIdentifier) throws XMLStreamException { // look up DC metadata mapping: ForeignMetadataFormatMapping dublinCoreMapping = findFormatMappingByName(DCTERMS); @@ -185,18 +189,37 @@ public DatasetDTO processOAIDCxml(String DcXmlToParse) throws XMLStreamException datasetDTO.getDatasetVersion().setVersionState(DatasetVersion.VersionState.RELEASED); - // Our DC import handles the contents of the dc:identifier field - // as an "other id". In the context of OAI harvesting, we expect - // the identifier to be a global id, so we need to rearrange that: + // In some cases, the identifier that we want to use for the dataset is + // already supplied to the method explicitly. For example, in some + // harvesting cases we'll want to use the OAI identifier (the identifier + // from the
<header>
    section of the OAI record) for that purpose, without + // expecting to find a valid persistent id in the body of the DC record: - String identifier = getOtherIdFromDTO(datasetDTO.getDatasetVersion()); - logger.fine("Imported identifier: "+identifier); + String globalIdentifier; - String globalIdentifier = reassignIdentifierAsGlobalId(identifier, datasetDTO); - logger.fine("Detected global identifier: "+globalIdentifier); + if (oaiIdentifier != null) { + logger.fine("Attempting to use " + oaiIdentifier + " as the persistentId of the imported dataset"); + + globalIdentifier = reassignIdentifierAsGlobalId(oaiIdentifier, datasetDTO); + } else { + // Our DC import handles the contents of the dc:identifier field + // as an "other id". Unless we are using an externally supplied + // global id, we will be using the first such "other id" that we + // can parse and recognize as the global id for the imported dataset + // (note that this is the default behavior during harvesting), + // so we need to reaassign it accordingly: + String identifier = getOtherIdFromDTO(datasetDTO.getDatasetVersion()); + logger.fine("Imported identifier: " + identifier); + + globalIdentifier = reassignIdentifierAsGlobalId(identifier, datasetDTO); + logger.fine("Detected global identifier: " + globalIdentifier); + } if (globalIdentifier == null) { - throw new EJBException("Failed to find a global identifier in the OAI_DC XML record."); + String exceptionMsg = oaiIdentifier == null ? + "Failed to find a global identifier in the OAI_DC XML record." : + "Failed to parse the supplied identifier as a valid Persistent Id"; + throw new EJBException(exceptionMsg); } return datasetDTO; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index ee4609a7c56..d0a0629e1ae 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -208,7 +208,13 @@ public JsonObjectBuilder handleFile(DataverseRequest dataverseRequest, Dataverse } @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) - public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, HarvestingClient harvestingClient, String harvestIdentifier, String metadataFormat, File metadataFile, Date oaiDateStamp, PrintWriter cleanupLog) throws ImportException, IOException { + public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, + HarvestingClient harvestingClient, + String harvestIdentifier, + String metadataFormat, + File metadataFile, + Date oaiDateStamp, + PrintWriter cleanupLog) throws ImportException, IOException { if (harvestingClient == null || harvestingClient.getDataverse() == null) { throw new ImportException("importHarvestedDataset called with a null harvestingClient, or an invalid harvestingClient."); } @@ -245,7 +251,10 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve logger.fine("importing DC "+metadataFile.getAbsolutePath()); try { String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath())); - dsDTO = importGenericService.processOAIDCxml(xmlToParse); + String suggestedIdentifier = harvestingClient.isUseOaiIdentifiersAsPids() + ? 
harvestIdentifier + : null; + dsDTO = importGenericService.processOAIDCxml(xmlToParse, suggestedIdentifier); } catch (IOException | XMLStreamException e) { throw new ImportException("Failed to process Dublin Core XML record: "+ e.getClass() + " (" + e.getMessage() + ")"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java index 0667f5594ce..ec26729b685 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java @@ -214,6 +214,7 @@ public void setArchiveDescription(String archiveDescription) { this.archiveDescription = archiveDescription; } + @Column(columnDefinition="TEXT") private String harvestingSet; public String getHarvestingSet() { @@ -252,8 +253,26 @@ public void setAllowHarvestingMissingCVV(boolean allowHarvestingMissingCVV) { this.allowHarvestingMissingCVV = allowHarvestingMissingCVV; } - // TODO: do we need "orphanRemoval=true"? -- L.A. 4.4 - // TODO: should it be @OrderBy("startTime")? -- L.A. 4.4 + private Boolean useListRecords; + + public Boolean isUseListRecords() { + return useListRecords; + } + + public void setUseListrecords(boolean useListRecords) { + this.useListRecords = useListRecords; + } + + private Boolean useOaiIdAsPid; + + public Boolean isUseOaiIdentifiersAsPids() { + return useOaiIdAsPid; + } + + public void setUseOaiIdentifiersAsPids(boolean useOaiIdAsPid) { + this.useOaiIdAsPid = useOaiIdAsPid; + } + @OneToMany(mappedBy="harvestingClient", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) @OrderBy("id") private List harvestHistory; diff --git a/src/main/resources/db/migration/V6.4.0.1.sql b/src/main/resources/db/migration/V6.4.0.1.sql index 0bcd87dd736..438c52a192e 100644 --- a/src/main/resources/db/migration/V6.4.0.1.sql +++ b/src/main/resources/db/migration/V6.4.0.1.sql @@ -1,4 +1,4 @@ -- Adding a case-insensitive index related to #11003 -- -CREATE UNIQUE INDEX IF NOT EXISTS INDEX_DVOBJECT_authority_protocol_upper_identifier ON dvobject (authority, protocol, UPPER(identifier)); \ No newline at end of file +CREATE UNIQUE INDEX IF NOT EXISTS INDEX_DVOBJECT_authority_protocol_upper_identifier ON dvobject (authority, protocol, UPPER(identifier)); From 2656ccdcc0c7f59eebfcf52e82c011c829b7dda7 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 29 Oct 2024 10:27:09 -0400 Subject: [PATCH 323/402] Adding the new client options to the json printer and parser #10909 --- .../java/edu/harvard/iq/dataverse/util/json/JsonParser.java | 2 ++ .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 2 ++ 2 files changed, 4 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 3f60317655a..8bb8fd93dd1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -1052,6 +1052,8 @@ public String parseHarvestingClient(JsonObject obj, HarvestingClient harvestingC harvestingClient.setHarvestingSet(obj.getString("set",null)); harvestingClient.setCustomHttpHeaders(obj.getString("customHeaders", null)); harvestingClient.setAllowHarvestingMissingCVV(obj.getBoolean("allowHarvestingMissingCVV", false)); + harvestingClient.setUseListrecords(obj.getBoolean("useListRecords", false)); + 
harvestingClient.setUseOaiIdentifiersAsPids(obj.getBoolean("useOaiIdentifiersAsPids", false)); return dataverseAlias; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index f884d313d64..6666a7f0e7d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -1013,6 +1013,8 @@ public static JsonObjectBuilder json(HarvestingClient harvestingClient) { add("status", harvestingClient.isHarvestingNow() ? "inProgress" : "inActive"). add("customHeaders", harvestingClient.getCustomHttpHeaders()). add("allowHarvestingMissingCVV", harvestingClient.getAllowHarvestingMissingCVV()). + add("useListRecords", harvestingClient.isUseListRecords()). + add("useOaiIdentifiersAsPids", harvestingClient.isUseOaiIdentifiersAsPids()). add("lastHarvest", harvestingClient.getLastHarvestTime() == null ? null : harvestingClient.getLastHarvestTime().toString()). add("lastResult", harvestingClient.getLastResult()). add("lastSuccessful", harvestingClient.getLastSuccessfulHarvestTime() == null ? null : harvestingClient.getLastSuccessfulHarvestTime().toString()). From 5c043cdb642bc9fa17cc0620292b11004813ed56 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 1 Nov 2024 10:26:39 -0400 Subject: [PATCH 324/402] we DO want to include the persistent id in the search cards for all harvested datasets. #10909. (that whole block of extra checks on the harvest "style" may be redundant by now - I'll think about it) --- src/main/java/edu/harvard/iq/dataverse/DataCitation.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java index 3977023fc4b..02fb59751fb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java @@ -792,6 +792,7 @@ private GlobalId getPIDFrom(DatasetVersion dsv, DvObject dv) { if (!dsv.getDataset().isHarvested() || HarvestingClient.HARVEST_STYLE_VDC.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle()) || HarvestingClient.HARVEST_STYLE_ICPSR.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle()) + || HarvestingClient.HARVEST_STYLE_DEFAULT.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle()) || HarvestingClient.HARVEST_STYLE_DATAVERSE .equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle())) { if(!isDirect()) { From b7efee0cfbd45104c5432b69a2732def5889b868 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 22 Nov 2024 19:50:21 -0500 Subject: [PATCH 325/402] a flyway script for the "use the oai id as the pid" harvesting client flag. 
--- src/main/resources/db/migration/V6.4.0.3.sql | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 src/main/resources/db/migration/V6.4.0.3.sql diff --git a/src/main/resources/db/migration/V6.4.0.3.sql b/src/main/resources/db/migration/V6.4.0.3.sql new file mode 100644 index 00000000000..307d8ed206c --- /dev/null +++ b/src/main/resources/db/migration/V6.4.0.3.sql @@ -0,0 +1,2 @@ +-- Add this boolean flag to accommodate a new harvesting client feature +ALTER TABLE harvestingclient ADD COLUMN IF NOT EXISTS useOaiIdAsPid BOOLEAN DEFAULT FALSE; From eca03896f3d0a1e36505ea958e7dc77c118d62c5 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 22 Nov 2024 19:53:25 -0500 Subject: [PATCH 326/402] removed the part of the cherry-picked commit that I'm not going to need in this branch. --- .../dataverse/harvest/client/HarvestingClient.java | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java index ec26729b685..de9cc7c0db6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java @@ -253,19 +253,9 @@ public void setAllowHarvestingMissingCVV(boolean allowHarvestingMissingCVV) { this.allowHarvestingMissingCVV = allowHarvestingMissingCVV; } - private Boolean useListRecords; + private boolean useOaiIdAsPid; - public Boolean isUseListRecords() { - return useListRecords; - } - - public void setUseListrecords(boolean useListRecords) { - this.useListRecords = useListRecords; - } - - private Boolean useOaiIdAsPid; - - public Boolean isUseOaiIdentifiersAsPids() { + public boolean isUseOaiIdentifiersAsPids() { return useOaiIdAsPid; } From 29114175c316dda01e0e240d63951cb89a4f9fdd Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 22 Nov 2024 19:55:40 -0500 Subject: [PATCH 327/402] removed pieces of another cherry-picked commit not needed in this branch. 
--- src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java | 1 - .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 1 - 2 files changed, 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 8bb8fd93dd1..232b7431a24 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -1052,7 +1052,6 @@ public String parseHarvestingClient(JsonObject obj, HarvestingClient harvestingC harvestingClient.setHarvestingSet(obj.getString("set",null)); harvestingClient.setCustomHttpHeaders(obj.getString("customHeaders", null)); harvestingClient.setAllowHarvestingMissingCVV(obj.getBoolean("allowHarvestingMissingCVV", false)); - harvestingClient.setUseListrecords(obj.getBoolean("useListRecords", false)); harvestingClient.setUseOaiIdentifiersAsPids(obj.getBoolean("useOaiIdentifiersAsPids", false)); return dataverseAlias; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 6666a7f0e7d..91af13c79a3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -1013,7 +1013,6 @@ public static JsonObjectBuilder json(HarvestingClient harvestingClient) { add("status", harvestingClient.isHarvestingNow() ? "inProgress" : "inActive"). add("customHeaders", harvestingClient.getCustomHttpHeaders()). add("allowHarvestingMissingCVV", harvestingClient.getAllowHarvestingMissingCVV()). - add("useListRecords", harvestingClient.isUseListRecords()). add("useOaiIdentifiersAsPids", harvestingClient.isUseOaiIdentifiersAsPids()). add("lastHarvest", harvestingClient.getLastHarvestTime() == null ? null : harvestingClient.getLastHarvestTime().toString()). add("lastResult", harvestingClient.getLastResult()). From 0967b7a8363381d8e6f5c52a230215f87cf4f671 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 22 Nov 2024 20:20:35 -0500 Subject: [PATCH 328/402] A "hybrid" implementation of the support for using OAI identifiers for the pid of the imported datasets - merging 2 different approaches implemented in the PRs --- .../api/imports/ImportGenericServiceBean.java | 26 ++++++++++++++++--- .../api/imports/ImportServiceBean.java | 7 ++--- 2 files changed, 24 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java index bf8d068a69c..7bce0947a0e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java @@ -156,10 +156,10 @@ public DatasetDTO processXML( XMLStreamReader xmlr, ForeignMetadataFormatMapping // Note that arbitrary formatting tags are supported for the outer xml // wrapper. -- L.A. 
4.5 public DatasetDTO processOAIDCxml(String DcXmlToParse) throws XMLStreamException { - return processOAIDCxml(DcXmlToParse, null); + return processOAIDCxml(DcXmlToParse, null, false); } - public DatasetDTO processOAIDCxml(String DcXmlToParse, String oaiIdentifier) throws XMLStreamException { + public DatasetDTO processOAIDCxml(String DcXmlToParse, String oaiIdentifier, boolean preferSuppliedIdentifier) throws XMLStreamException { // look up DC metadata mapping: ForeignMetadataFormatMapping dublinCoreMapping = findFormatMappingByName(DCTERMS); @@ -208,7 +208,7 @@ public DatasetDTO processOAIDCxml(String DcXmlToParse, String oaiIdentifier) thr // can parse and recognize as the global id for the imported dataset // (note that this is the default behavior during harvesting), // so we need to reaassign it accordingly: - String identifier = getOtherIdFromDTO(datasetDTO.getDatasetVersion()); + String identifier = selectIdentifier(datasetDTO.getDatasetVersion(), oaiIdentifier, preferSuppliedIdentifier); logger.fine("Imported identifier: " + identifier); globalIdentifier = reassignIdentifierAsGlobalId(identifier, datasetDTO); @@ -367,8 +367,16 @@ private FieldDTO makeDTO(DatasetFieldType dataverseFieldType, FieldDTO value, St return value; } - private String getOtherIdFromDTO(DatasetVersionDTO datasetVersionDTO) { + private String selectIdentifier(DatasetVersionDTO datasetVersionDTO, String suppliedIdentifier, boolean preferSuppliedIdentifier) { List otherIds = new ArrayList<>(); + + if (suppliedIdentifier != null && preferSuppliedIdentifier) { + // This supplied identifier (in practice, his is likely the OAI-PMH + // identifier from the
<header>
    section) will be our first + // choice candidate for the pid of the imported dataset: + otherIds.add(suppliedIdentifier); + } + for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) { String key = entry.getKey(); MetadataBlockDTO value = entry.getValue(); @@ -386,6 +394,16 @@ private String getOtherIdFromDTO(DatasetVersionDTO datasetVersionDTO) { } } } + + if (suppliedIdentifier != null && !preferSuppliedIdentifier) { + // Unless specifically instructed to prefer this extra identifier + // (in practice, this is likely the OAI-PMH identifier from the + //
<header>
    section), we will try to use it as the *last* + // possible candidate for the pid, so, adding it to the end of the + // list: + otherIds.add(suppliedIdentifier); + } + if (!otherIds.isEmpty()) { // We prefer doi or hdl identifiers like "doi:10.7910/DVN/1HE30F" for (String otherId : otherIds) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index d0a0629e1ae..7dc2aed799e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -250,11 +250,8 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, } else if ("dc".equalsIgnoreCase(metadataFormat) || "oai_dc".equals(metadataFormat)) { logger.fine("importing DC "+metadataFile.getAbsolutePath()); try { - String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath())); - String suggestedIdentifier = harvestingClient.isUseOaiIdentifiersAsPids() - ? harvestIdentifier - : null; - dsDTO = importGenericService.processOAIDCxml(xmlToParse, suggestedIdentifier); + String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath())); + dsDTO = importGenericService.processOAIDCxml(xmlToParse, harvestIdentifier, harvestingClient.isUseOaiIdentifiersAsPids()); } catch (IOException | XMLStreamException e) { throw new ImportException("Failed to process Dublin Core XML record: "+ e.getClass() + " (" + e.getMessage() + ")"); } From 00943e1e2a6ce9825dbc29d898431664cad07498 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 24 Nov 2024 20:35:27 -0500 Subject: [PATCH 329/402] guide entry --- doc/sphinx-guides/source/api/native-api.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 99a8a9f7cf4..1f36691be0d 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -5246,6 +5246,7 @@ Shows a Harvesting Client with a defined nickname:: "dataverseAlias": "fooData", "nickName": "myClient", "set": "fooSet", + "useOaiIdentifiersAsPids": false "schedule": "none", "status": "inActive", "lastHarvest": "Thu Oct 13 14:48:57 EDT 2022", @@ -5280,6 +5281,7 @@ The following optional fields are supported: - style: Defaults to "default" - a generic OAI archive. (Make sure to use "dataverse" when configuring harvesting from another Dataverse installation). - customHeaders: This can be used to configure this client with a specific HTTP header that will be added to every OAI request. This is to accommodate a use case where the remote server requires this header to supply some form of a token in order to offer some content not available to other clients. See the example below. Multiple headers can be supplied separated by `\\n` - actual "backslash" and "n" characters, not a single "new line" character. - allowHarvestingMissingCVV: Flag to allow datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. (Default is false). Currently only settable using API. +- useOaiIdentifiersAsPids: Defaults to false; if set to true, Harvester will attempt to use the identifier from the OAI-PMH record header as the **first choice** for the persistent id of the harvested dataset. 
When set to false, Dataverse will still attempt to use this identifier, but only if none of the `` entries in the OAI_DC record contain a valid persistent id (this is new as of v6.5). Generally, the API will accept the output of the GET version of the API for an existing client as valid input, but some fields will be ignored. For example, as of writing this there is no way to configure a harvesting schedule via this API. From cc7fb45d43f8b842d079d0a688fc2487bd0f56f8 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 24 Nov 2024 20:54:32 -0500 Subject: [PATCH 330/402] release note. --- doc/release-notes/11049-oai-identifiers-as-pids.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 doc/release-notes/11049-oai-identifiers-as-pids.md diff --git a/doc/release-notes/11049-oai-identifiers-as-pids.md b/doc/release-notes/11049-oai-identifiers-as-pids.md new file mode 100644 index 00000000000..2f857bf2198 --- /dev/null +++ b/doc/release-notes/11049-oai-identifiers-as-pids.md @@ -0,0 +1,5 @@ +## When harvesting, Dataverse can now use the identifier from the OAI-PMH record header as the persistent id for the harvested dataset. + +This will allow harvesting from sources that do not include a persistent id in their oai_dc metadata records, but use valid dois or handles as the OAI-PMH record header identifiers. + +It is also possible to optionally configure a harvesting client to use this OAI-PMH identifier as the **preferred** choice for the persistent id. See the [Harvesting Clients API](https://guides.dataverse.org/en/6.5/api/native-api.html#create-a-harvesting-client) section of the Guides for more information. \ No newline at end of file From d6fc24022d4cd0cd6759064b7dd01e6885ad7c12 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 24 Nov 2024 21:12:58 -0500 Subject: [PATCH 331/402] json files for the new tests (from PR #11010 by @stevenferey) --- .../json/importGenericWithOtherId.json | 307 ++++++++++++++++++ .../json/importGenericWithoutOtherId.json | 258 +++++++++++++++ 2 files changed, 565 insertions(+) create mode 100644 src/test/resources/json/importGenericWithOtherId.json create mode 100644 src/test/resources/json/importGenericWithoutOtherId.json diff --git a/src/test/resources/json/importGenericWithOtherId.json b/src/test/resources/json/importGenericWithOtherId.json new file mode 100644 index 00000000000..af9241393e9 --- /dev/null +++ b/src/test/resources/json/importGenericWithOtherId.json @@ -0,0 +1,307 @@ +{ + "UNF": "UNF", + "createTime": "2014-11-12 12:17:55 -05", + "distributionDate": "Distribution Date", + "id": 2, + "lastUpdateTime": "2014-11-12 12:20:32 -05", + "metadataBlocks": { + "astrophysics": { + "displayName": "Astronomy and Astrophysics Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "astroType", + "value": [ + "Image", + "Mosaic", + "EventList" + ] + } + ] + }, + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "multiple": false, + "typeClass": "primitive", + "typeName": "title", + "value": "My Dataset" + }, + { + "multiple": true, + "typeClass": "compound", + "typeName": "author", + "value": [ + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + "value": "Top" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "ellenid" + }, + "authorIdentifierScheme": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": 
"authorIdentifierScheme", + "value": "ORCID" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Privileged, Pete" + } + }, + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + "value": "Bottom" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "audreyId" + }, + "authorIdentifierScheme": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "DAISY" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Awesome, Audrey" + } + } + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "datasetContact", + "value": [ + "pete@malinator.com" + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dsDescription", + "value": "Here is my description" + }, + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "subject", + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics", + "Business and Management" + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "keyword", + "value": [ + "keyword1", + "keyword2" + ] + }, + { + "multiple": true, + "typeClass": "compound", + "typeName": "otherId", + "value": [ + { + "otherIdAgency": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdAgency", + "value": "my agency" + }, + "otherIdValue": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdValue", + "value": "otherId" + } + }, + { + "otherIdAgency": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdAgency", + "value": "another agency" + }, + "otherIdValue": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdValue", + "value": "otherId2" + } + }, + { + "otherIdAgency": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdAgency", + "value": "another agency" + }, + "otherIdValue": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdValue", + "value": "doi:10.7910/DVN/TJCLKP" + } + } + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "depositor", + "value": "Ellen K" + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dateOfDeposit", + "value": "2014-11-12" + } + ] + }, + "geospatial": { + "displayName": "Geospatial Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "geographicCoverage", + "value": [ + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "Arlington" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "United States" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "MA" + } + }, + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "beachcity" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "Aruba" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "beach" + } + } + ] + }, + { + "multiple": false, + "typeClass": "compound", + "typeName": "geographicBoundingBox", + "value": + { + "eastLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": 
"eastLongitude", + "value": "23" + }, + "northLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "northLatitude", + "value": "786" + }, + "southLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "southLatitude", + "value": "34" + }, + "westLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "westLongitude", + "value": "45" + } + } + + } + ] + }, + "socialscience": { + "displayName": "Social Science and Humanities Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "software", + "value": [ + { + "softwareName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareName", + "value": "softwareName" + }, + "softwareVersion": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareVersion", + "value": "software version" + } + } + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "unitOfAnalysis", + "value": "unit of analysis" + } + ] + } + }, + "productionDate": "Production Date", + "versionState": "DRAFT" + } diff --git a/src/test/resources/json/importGenericWithoutOtherId.json b/src/test/resources/json/importGenericWithoutOtherId.json new file mode 100644 index 00000000000..ceb2263c2cf --- /dev/null +++ b/src/test/resources/json/importGenericWithoutOtherId.json @@ -0,0 +1,258 @@ +{ + "UNF": "UNF", + "createTime": "2014-11-12 12:17:55 -05", + "distributionDate": "Distribution Date", + "id": 2, + "lastUpdateTime": "2014-11-12 12:20:32 -05", + "metadataBlocks": { + "astrophysics": { + "displayName": "Astronomy and Astrophysics Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "astroType", + "value": [ + "Image", + "Mosaic", + "EventList" + ] + } + ] + }, + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "multiple": false, + "typeClass": "primitive", + "typeName": "title", + "value": "My Dataset" + }, + { + "multiple": true, + "typeClass": "compound", + "typeName": "author", + "value": [ + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + "value": "Top" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "ellenid" + }, + "authorIdentifierScheme": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "ORCID" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Privileged, Pete" + } + }, + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + "value": "Bottom" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "audreyId" + }, + "authorIdentifierScheme": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "DAISY" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Awesome, Audrey" + } + } + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "datasetContact", + "value": [ + "pete@malinator.com" + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dsDescription", + "value": "Here is my description" + }, + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "subject", + "value": [ + "Arts and 
Humanities", + "Astronomy and Astrophysics", + "Business and Management" + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "keyword", + "value": [ + "keyword1", + "keyword2" + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "depositor", + "value": "Ellen K" + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dateOfDeposit", + "value": "2014-11-12" + } + ] + }, + "geospatial": { + "displayName": "Geospatial Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "geographicCoverage", + "value": [ + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "Arlington" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "United States" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "MA" + } + }, + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "beachcity" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "Aruba" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "beach" + } + } + ] + }, + { + "multiple": false, + "typeClass": "compound", + "typeName": "geographicBoundingBox", + "value": + { + "eastLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "eastLongitude", + "value": "23" + }, + "northLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "northLatitude", + "value": "786" + }, + "southLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "southLatitude", + "value": "34" + }, + "westLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "westLongitude", + "value": "45" + } + } + + } + ] + }, + "socialscience": { + "displayName": "Social Science and Humanities Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "software", + "value": [ + { + "softwareName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareName", + "value": "softwareName" + }, + "softwareVersion": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareVersion", + "value": "software version" + } + } + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "unitOfAnalysis", + "value": "unit of analysis" + } + ] + } + }, + "productionDate": "Production Date", + "versionState": "DRAFT" + } From 86b226008e3ba53b0a0aefb0fd0fe9b15087c3f3 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 24 Nov 2024 21:38:03 -0500 Subject: [PATCH 332/402] tests for selecting persistent ids in the GenericImportService (from PR #11010 by @stevenferey) --- .../api/imports/ImportGenericServiceBean.java | 4 ++ .../imports/ImportGenericServiceBeanTest.java | 53 ++++++++++++++++++- 2 files changed, 56 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java index 7bce0947a0e..aa5b25e3967 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java @@ -367,6 +367,10 @@ private FieldDTO makeDTO(DatasetFieldType dataverseFieldType, FieldDTO value, St return value; } + public 
String selectIdentifier(DatasetVersionDTO datasetVersionDTO, String suppliedIdentifier) { + return selectIdentifier(datasetVersionDTO, suppliedIdentifier, false); + } + private String selectIdentifier(DatasetVersionDTO datasetVersionDTO, String suppliedIdentifier, boolean preferSuppliedIdentifier) { List otherIds = new ArrayList<>(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java index 44739f3f62a..acf5d970358 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java @@ -1,6 +1,13 @@ package edu.harvard.iq.dataverse.api.imports; import edu.harvard.iq.dataverse.api.dto.DatasetDTO; +import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO; + +import org.apache.commons.io.FileUtils; +import com.google.gson.Gson; +import java.io.File; +import java.io.IOException; + import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InjectMocks; @@ -8,6 +15,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; +import java.nio.charset.StandardCharsets; + @ExtendWith(MockitoExtension.class) public class ImportGenericServiceBeanTest { @@ -15,7 +24,47 @@ public class ImportGenericServiceBeanTest { private ImportGenericServiceBean importGenericService; @Test - public void testReassignIdentifierAsGlobalId() { + void testIdentifierHarvestableWithOtherID() throws IOException { + // "otherIdValue" containing the value : doi:10.7910/DVN/TJCLKP + File file = new File("src/test/resources/json/importGenericWithOtherId.json"); + String text = FileUtils.readFileToString(file, StandardCharsets.UTF_8); + DatasetVersionDTO dto = new Gson().fromJson(text, DatasetVersionDTO.class); + + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://doi.org/10.7910/DVN/TJCLKP")); + // junk or null + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "junk")); + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, null)); + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://www.example.com")); + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://dataverse.org")); + } + + @Test + void testIdentifierHarvestableWithoutOtherID() throws IOException { + // Does not contain data of type "otherIdValue" + File file = new File("src/test/resources/json/importGenericWithoutOtherId.json"); + String text = FileUtils.readFileToString(file, StandardCharsets.UTF_8); + DatasetVersionDTO dto = new Gson().fromJson(text, DatasetVersionDTO.class); + + // non-URL + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "doi:10.7910/DVN/TJCLKP")); + assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "hdl:10.7910/DVN/TJCLKP")); + // HTTPS + assertEquals("https://doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://doi.org/10.7910/DVN/TJCLKP")); + assertEquals("https://dx.doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://dx.doi.org/10.7910/DVN/TJCLKP")); + assertEquals("https://hdl.handle.net/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, 
"https://hdl.handle.net/10.7910/DVN/TJCLKP")); + // HTTP (no S) + assertEquals("http://doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://doi.org/10.7910/DVN/TJCLKP")); + assertEquals("http://dx.doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://dx.doi.org/10.7910/DVN/TJCLKP")); + assertEquals("http://hdl.handle.net/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://hdl.handle.net/10.7910/DVN/TJCLKP")); + // junk or null + assertNull(importGenericService.selectIdentifier(dto, "junk")); + assertNull(importGenericService.selectIdentifier(dto, null)); + assertNull(importGenericService.selectIdentifier(dto, "http://www.example.com")); + assertNull(importGenericService.selectIdentifier(dto, "https://dataverse.org")); + } + + @Test + void testReassignIdentifierAsGlobalId() { // non-URL assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("doi:10.7910/DVN/TJCLKP", new DatasetDTO())); assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("hdl:10.7910/DVN/TJCLKP", new DatasetDTO())); @@ -29,6 +78,8 @@ public void testReassignIdentifierAsGlobalId() { assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://hdl.handle.net/10.7910/DVN/TJCLKP", new DatasetDTO())); // junk assertNull(importGenericService.reassignIdentifierAsGlobalId("junk", new DatasetDTO())); + assertNull(importGenericService.reassignIdentifierAsGlobalId("http://www.example.com", new DatasetDTO())); + assertNull(importGenericService.reassignIdentifierAsGlobalId("https://dataverse.org", new DatasetDTO())); } } From 115c88eb37d6288d9af1d9a8a2abacd230407700 Mon Sep 17 00:00:00 2001 From: landreev Date: Mon, 25 Nov 2024 11:07:47 -0500 Subject: [PATCH 333/402] Update doc/release-notes/11049-oai-identifiers-as-pids.md Co-authored-by: Philip Durbin --- doc/release-notes/11049-oai-identifiers-as-pids.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/11049-oai-identifiers-as-pids.md b/doc/release-notes/11049-oai-identifiers-as-pids.md index 2f857bf2198..8b53a461a70 100644 --- a/doc/release-notes/11049-oai-identifiers-as-pids.md +++ b/doc/release-notes/11049-oai-identifiers-as-pids.md @@ -2,4 +2,4 @@ This will allow harvesting from sources that do not include a persistent id in their oai_dc metadata records, but use valid dois or handles as the OAI-PMH record header identifiers. -It is also possible to optionally configure a harvesting client to use this OAI-PMH identifier as the **preferred** choice for the persistent id. See the [Harvesting Clients API](https://guides.dataverse.org/en/6.5/api/native-api.html#create-a-harvesting-client) section of the Guides for more information. \ No newline at end of file +It is also possible to optionally configure a harvesting client to use this OAI-PMH identifier as the **preferred** choice for the persistent id. See the [Harvesting Clients API](https://guides.dataverse.org/en/6.5/api/native-api.html#create-a-harvesting-client) section of the Guides, #11049 and #10982 for more information. 
\ No newline at end of file From a295cc4a34b4836b087fd662e932838ddb278a87 Mon Sep 17 00:00:00 2001 From: landreev Date: Mon, 25 Nov 2024 11:08:07 -0500 Subject: [PATCH 334/402] Update doc/sphinx-guides/source/api/native-api.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 1f36691be0d..641e443e54a 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -5281,7 +5281,7 @@ The following optional fields are supported: - style: Defaults to "default" - a generic OAI archive. (Make sure to use "dataverse" when configuring harvesting from another Dataverse installation). - customHeaders: This can be used to configure this client with a specific HTTP header that will be added to every OAI request. This is to accommodate a use case where the remote server requires this header to supply some form of a token in order to offer some content not available to other clients. See the example below. Multiple headers can be supplied separated by `\\n` - actual "backslash" and "n" characters, not a single "new line" character. - allowHarvestingMissingCVV: Flag to allow datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. (Default is false). Currently only settable using API. -- useOaiIdentifiersAsPids: Defaults to false; if set to true, Harvester will attempt to use the identifier from the OAI-PMH record header as the **first choice** for the persistent id of the harvested dataset. When set to false, Dataverse will still attempt to use this identifier, but only if none of the `` entries in the OAI_DC record contain a valid persistent id (this is new as of v6.5). +- useOaiIdentifiersAsPids: Defaults to false; if set to true, the harvester will attempt to use the identifier from the OAI-PMH record header as the **first choice** for the persistent id of the harvested dataset. When set to false, Dataverse will still attempt to use this identifier, but only if none of the `` entries in the OAI_DC record contain a valid persistent id (this is new as of v6.5). Generally, the API will accept the output of the GET version of the API for an existing client as valid input, but some fields will be ignored. For example, as of writing this there is no way to configure a harvesting schedule via this API. 
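A minimal sketch of how the option documented above might be set when creating a harvesting client via the API. The nickname, server URL, and remote OAI endpoint below are placeholders, and the JSON fields are assumed to mirror the GET output shown earlier; this is an illustration under those assumptions, not part of the patches themselves:

```shell
# Hypothetical example only: all values (and the exact set of optional fields) are assumptions.
export SERVER_URL=https://demo.dataverse.org
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx

# Create a harvesting client that prefers the OAI-PMH record header identifier as the PID.
curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-Type: application/json" \
  "$SERVER_URL/api/harvest/clients/oaiClient" \
  -d '{
    "dataverseAlias": "fooData",
    "harvestUrl": "https://example.org/oai",
    "archiveUrl": "https://example.org",
    "metadataFormat": "oai_dc",
    "set": "fooSet",
    "style": "default",
    "useOaiIdentifiersAsPids": true
  }'
```

With `useOaiIdentifiersAsPids` set to true, the harvester would try the OAI-PMH header identifier first and otherwise fall back to the identifiers found in the harvested record, per the behavior described in the patches above.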
From 3c4628786f18c235707afc6a282969da11ed4c9d Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 25 Nov 2024 11:22:01 -0500 Subject: [PATCH 335/402] reverted the flyway script back to its original state (a newline was added when resolving a conflict with a cherry-picked commit, which of course changes the checksum) --- src/main/resources/db/migration/V6.4.0.1.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/resources/db/migration/V6.4.0.1.sql b/src/main/resources/db/migration/V6.4.0.1.sql index 438c52a192e..0bcd87dd736 100644 --- a/src/main/resources/db/migration/V6.4.0.1.sql +++ b/src/main/resources/db/migration/V6.4.0.1.sql @@ -1,4 +1,4 @@ -- Adding a case-insensitive index related to #11003 -- -CREATE UNIQUE INDEX IF NOT EXISTS INDEX_DVOBJECT_authority_protocol_upper_identifier ON dvobject (authority, protocol, UPPER(identifier)); +CREATE UNIQUE INDEX IF NOT EXISTS INDEX_DVOBJECT_authority_protocol_upper_identifier ON dvobject (authority, protocol, UPPER(identifier)); \ No newline at end of file From 8a361be347e639aeedc68b6f9bc93c18e2f6eaaf Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 25 Nov 2024 11:26:37 -0500 Subject: [PATCH 336/402] another cherry-picked commit not needed in this branch. --- .../harvard/iq/dataverse/harvest/client/HarvestingClient.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java index de9cc7c0db6..7280b6af129 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java @@ -214,7 +214,6 @@ public void setArchiveDescription(String archiveDescription) { this.archiveDescription = archiveDescription; } - @Column(columnDefinition="TEXT") private String harvestingSet; public String getHarvestingSet() { From f0e19168ebedf708e3e0e0df1876b57ca378af88 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 25 Nov 2024 15:33:28 -0500 Subject: [PATCH 337/402] #11044 refresh facet array --- .../iq/dataverse/DataverseFacetServiceBean.java | 14 ++++++++++---- .../impl/AbstractWriteDataverseCommand.java | 3 ++- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java index 5c77989f6d6..67dc183ba66 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java @@ -4,6 +4,8 @@ import java.util.List; import jakarta.ejb.EJB; import jakarta.ejb.Stateless; +import jakarta.ejb.TransactionAttribute; +import jakarta.ejb.TransactionAttributeType; import jakarta.inject.Named; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; @@ -42,23 +44,27 @@ public void delete(DataverseFacet dataverseFacet) { cache.invalidate(); } + @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) public void deleteFacetsFor( Dataverse d ) { em.createNamedQuery("DataverseFacet.removeByOwnerId") .setParameter("ownerId", d.getId()) .executeUpdate(); cache.invalidate(d.getId()); - + } - public DataverseFacet create(int displayOrder, DatasetFieldType fieldType, Dataverse ownerDv) { + @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) + public DataverseFacet create(int displayOrder, DatasetFieldType fieldType, 
Dataverse ownerDv) { DataverseFacet dataverseFacet = new DataverseFacet(); dataverseFacet.setDisplayOrder(displayOrder); dataverseFacet.setDatasetFieldType(fieldType); dataverseFacet.setDataverse(ownerDv); - - ownerDv.getDataverseFacets().add(dataverseFacet); + em.persist(dataverseFacet); + ownerDv.getDataverseFacets().add(dataverseFacet); + em.merge(ownerDv); + cache.invalidate(ownerDv.getId()); return dataverseFacet; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java index 40c2abf5d21..ede07ba5ab7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -56,7 +56,8 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { if (facets != null) { ctxt.facets().deleteFacetsFor(dataverse); - + dataverse.setDataverseFacets(new ArrayList<>()); + if (!facets.isEmpty()) { dataverse.setFacetRoot(true); } From e8093c62089460f8cadd6c8b9fb9e6da1530c60f Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 25 Nov 2024 17:30:30 -0500 Subject: [PATCH 338/402] Per review feedback, made it impossible to supply the file sizes via the /addFiles API (i.e., we don't want to trust the users of the direct s3 upload api when it comes to file sizes). #10977 --- .../datasetutility/AddReplaceFileHelper.java | 48 ++++++++++++------- .../dataverse/globus/GlobusServiceBean.java | 2 +- 2 files changed, 33 insertions(+), 17 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 3943e3ad7d8..6b98848021c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -136,10 +136,7 @@ public class AddReplaceFileHelper{ private String newFileName; // step 30 private String newFileContentType; // step 30 private String newStorageIdentifier; // step 30 - private String newCheckSum; // step 30 - private ChecksumType newCheckSumType; //step 30 - private Long suppliedFileSize = null; - + // -- Optional private DataFile fileToReplace; // step 25 @@ -147,6 +144,7 @@ public class AddReplaceFileHelper{ private DatasetVersion clone; List initialFileList; List finalFileList; + private boolean trustSuppliedFileSizes; // ----------------------------------- // Ingested files @@ -611,18 +609,9 @@ private boolean runAddReplacePhase1(Dataset owner, return false; } - if (optionalFileParams != null) { - if (optionalFileParams.hasCheckSum()) { - newCheckSum = optionalFileParams.getCheckSum(); - newCheckSumType = optionalFileParams.getCheckSumType(); - } - if (optionalFileParams.hasFileSize()) { - suppliedFileSize = optionalFileParams.getFileSize(); - } - } msgt("step_030_createNewFilesViaIngest"); - if (!this.step_030_createNewFilesViaIngest()){ + if (!this.step_030_createNewFilesViaIngest(optionalFileParams)){ return false; } @@ -1195,7 +1184,7 @@ private boolean step_007_auto_isReplacementInLatestVersion(DataFile existingFile } - private boolean step_030_createNewFilesViaIngest(){ + private boolean step_030_createNewFilesViaIngest(OptionalFileParams optionalFileParams){ if (this.hasError()){ return false; @@ -1207,6 +1196,22 @@ private boolean 
step_030_createNewFilesViaIngest(){ //Don't repeatedly update the clone (losing changes) in multifile case clone = workingVersion.cloneDatasetVersion(); } + + Long suppliedFileSize = null; + String newCheckSum = null; + ChecksumType newCheckSumType = null; + + + if (optionalFileParams != null) { + if (optionalFileParams.hasCheckSum()) { + newCheckSum = optionalFileParams.getCheckSum(); + newCheckSumType = optionalFileParams.getCheckSumType(); + } + if (trustSuppliedFileSizes && optionalFileParams.hasFileSize()) { + suppliedFileSize = optionalFileParams.getFileSize(); + } + } + try { UploadSessionQuotaLimit quota = null; if (systemConfig.isStorageQuotasEnforced()) { @@ -2028,9 +2033,15 @@ public void setDuplicateFileWarning(String duplicateFileWarning) { * @param jsonData - an array of jsonData entries (one per file) using the single add file jsonData format * @param dataset * @param authUser + * @param trustSuppliedSizes - whether to accept the fileSize values passed + * in jsonData (we don't want to trust the users of the S3 direct + * upload API with that information - we will verify the status of + * the files in the S3 bucket and confirm the sizes in the process. + * we do want GlobusService to be able to pass the file sizes, since + * they are obtained and verified via a Globus API lookup). * @return */ - public Response addFiles(String jsonData, Dataset dataset, User authUser) { + public Response addFiles(String jsonData, Dataset dataset, User authUser, boolean trustSuppliedFileSizes) { msgt("(addFilesToDataset) jsonData: " + jsonData.toString()); JsonArrayBuilder jarr = Json.createArrayBuilder(); @@ -2039,6 +2050,7 @@ public Response addFiles(String jsonData, Dataset dataset, User authUser) { int totalNumberofFiles = 0; int successNumberofFiles = 0; + this.trustSuppliedFileSizes = trustSuppliedFileSizes; // ----------------------------------------------------------- // Read jsonData and Parse files information from jsondata : // ----------------------------------------------------------- @@ -2171,6 +2183,10 @@ public Response addFiles(String jsonData, Dataset dataset, User authUser) { .add("data", Json.createObjectBuilder().add("Files", jarr).add("Result", result)).build() ).build(); } + public Response addFiles(String jsonData, Dataset dataset, User authUser) { + return addFiles(jsonData, dataset, authUser, false); + } + /** * Replace multiple files with prepositioned replacements as listed in the * jsonData. Works with direct upload, Globus, and other out-of-band methods. diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 5c9a2f1d946..58992805dc8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -1093,7 +1093,7 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut // The old code had 2 sec. of sleep, so ... Thread.sleep(2000); - Response addFilesResponse = addFileHelper.addFiles(newjsonData, dataset, authUser); + Response addFilesResponse = addFileHelper.addFiles(newjsonData, dataset, authUser, true); if (addFilesResponse == null) { logger.info("null response from addFiles call"); From 3cd9a82d381f543cd3cb9b3a5560cb44bee1bbee Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 25 Nov 2024 17:39:46 -0500 Subject: [PATCH 339/402] We do support 0-size files! 
#10977 --- .../java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index fad02c76c78..52b7d4f1861 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -350,7 +350,7 @@ public List saveAndAddFilesToDataset(DatasetVersion version, // to S3 that go through the jsf dataset page. Or the Globus // uploads, where the file sizes are looked up in bulk on // the completion of the remote upload task. - if (dataFile.getFilesize() > 0) { + if (dataFile.getFilesize() >= 0) { confirmedFileSize = dataFile.getFilesize(); } else { dataAccess.open(DataAccessOption.READ_ACCESS); From 644a52491665d4e0f43a655f6b9094a2c41848b0 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 25 Nov 2024 18:33:37 -0500 Subject: [PATCH 340/402] added a bunch of globus-related entries that were missing from the bundle, per #11030 (these are used by the notification page, apparently??) --- src/main/java/propertyFiles/Bundle.properties | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 012b389ce32..2b74e24ea29 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -307,7 +307,13 @@ notification.typeDescription.WORKFLOW_FAILURE=External workflow run has failed notification.typeDescription.STATUSUPDATED=Status of dataset has been updated notification.typeDescription.DATASETCREATED=Dataset was created by user notification.typeDescription.DATASETMENTIONED=Dataset was referenced in remote system - +notification.typeDescription.GLOBUSUPLOADCOMPLETED=Globus upload is completed +notification.typeDescription.GLOBUSUPLOADCOMPLETEDWITHERRORS=Globus upload completed with errors +notification.typeDescription.GLOBUSDOWNLOADCOMPLETED=Globus download is completed +notification.typeDescription.GLOBUSDOWNLOADCOMPLETEDWITHERRORS=Globus download completed with errors +notification.typeDescription.GLOBUSUPLOADLOCALFAILURE=Globus upload failed, internal error +notification.typeDescription.GLOBUSUPLOADREMOTEFAILURE=Globus upload failed, remote transfer error +notification.typeDescription.REQUESTEDFILEACCESS=File access requested groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned. user.message.signup.label=Create Account user.message.signup.tip=Why have a Dataverse account? To create your own dataverse and customize it, add datasets, or request access to restricted files. 
From 28e25eae93de311a2eb4e5e4971d5463fafb9193 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 25 Nov 2024 18:52:35 -0500 Subject: [PATCH 341/402] one more missing notification entry in the bundle #10977 --- src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java | 2 +- src/main/java/propertyFiles/Bundle.properties | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index 2995c0c5f47..c67a0293847 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -283,7 +283,7 @@ public Boolean sendNotificationEmail(UserNotification notification, String comme if (objectOfNotification != null){ String messageText = getMessageTextBasedOnNotification(notification, objectOfNotification, comment, requestor); String subjectText = MailUtil.getSubjectTextBasedOnNotification(notification, objectOfNotification); - if (!(messageText.isEmpty() || subjectText.isEmpty())){ + if (!(StringUtils.isEmpty(messageText) || StringUtils.isEmpty(subjectText))){ retval = sendSystemEmail(emailAddress, subjectText, messageText, isHtmlContent); } else { logger.warning("Skipping " + notification.getType() + " notification, because couldn't get valid message"); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 2b74e24ea29..02bc19f86cf 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -843,7 +843,8 @@ notification.email.datasetWasMentioned=Hello {0},
    The {1} has just been notification.email.datasetWasMentioned.subject={0}: A Dataset Relationship has been reported! notification.email.globus.uploadCompleted.subject={0}: Files uploaded successfully via Globus and verified notification.email.globus.downloadCompleted.subject={0}: Files downloaded successfully via Globus -notification.email.globus.uploadCompletedWithErrors.subject={0}: Uploaded files via Globus with errors +notification.email.globus.downloadCompletedWithErrors.subject={0}: Globus download task completed, errors encountered +notification.email.globus.uploadCompletedWithErrors.subject={0}: Globus upload task completed with errors notification.email.globus.uploadFailedRemotely.subject={0}: Failed to upload files via Globus notification.email.globus.uploadFailedLocally.subject={0}: Failed to add files uploaded via Globus to dataset # dataverse.xhtml From 1325cee6cc7aa707481db68590fa16b65a7ad2ef Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 26 Nov 2024 09:47:26 -0500 Subject: [PATCH 342/402] This should make the filesize setting logic less confusing potentially #10977 --- .../java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 52b7d4f1861..71c498a4d0b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -372,7 +372,7 @@ public List saveAndAddFilesToDataset(DatasetVersion version, if (fileSizeLimit == null || confirmedFileSize < fileSizeLimit) { //set file size - if (dataFile.getFilesize() < 1) { + if (dataFile.getFilesize() < 0) { logger.fine("Setting file size: " + confirmedFileSize); dataFile.setFilesize(confirmedFileSize); } From 321de7c46b34a53bc9df64b9947fab47f9f126a6 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 26 Nov 2024 10:35:59 -0500 Subject: [PATCH 343/402] there's no need to slap the "incomplete metadata" label on harvested dataset cards #10909 --- src/main/webapp/search-include-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/search-include-fragment.xhtml b/src/main/webapp/search-include-fragment.xhtml index 505fe681363..fcc5aff6f92 100644 --- a/src/main/webapp/search-include-fragment.xhtml +++ b/src/main/webapp/search-include-fragment.xhtml @@ -582,7 +582,7 @@ - +
    From 51e1ad7b2c0bd79cf84d58147285831755b01ff1 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 26 Nov 2024 13:09:49 -0500 Subject: [PATCH 344/402] #11044 reset input levels prior to update --- .../engine/command/impl/AbstractWriteDataverseCommand.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java index ede07ba5ab7..0b3d1da0f6d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -73,6 +73,7 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels); } ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); + dataverse.setDataverseFieldTypeInputLevels(new ArrayList<>()); for (DataverseFieldTypeInputLevel inputLevel : inputLevels) { inputLevel.setDataverse(dataverse); ctxt.fieldTypeInputLevels().create(inputLevel); From 8fd500dc9328f5e21472e0140f4b4a11befc91b4 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 26 Nov 2024 13:35:44 -0500 Subject: [PATCH 345/402] #11044 reset after merge conflict --- .../engine/command/impl/AbstractWriteDataverseCommand.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java index 91f3a5b823c..2a8bb18a942 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -93,6 +93,7 @@ private void processInputLevels(CommandContext ctxt) { dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels); } ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); + dataverse.setDataverseFieldTypeInputLevels(new ArrayList<>()); inputLevels.forEach(inputLevel -> { inputLevel.setDataverse(dataverse); ctxt.fieldTypeInputLevels().create(inputLevel); From 232804619656e2637888c9ac4602bb1003eda69b Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 26 Nov 2024 14:42:19 -0500 Subject: [PATCH 346/402] #11044 code cleanup --- .../dataverse/DataverseFacetServiceBean.java | 23 ++++++++----------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java index 67dc183ba66..aa750e96bc9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java @@ -4,8 +4,6 @@ import java.util.List; import jakarta.ejb.EJB; import jakarta.ejb.Stateless; -import jakarta.ejb.TransactionAttribute; -import jakarta.ejb.TransactionAttributeType; import jakarta.inject.Named; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; @@ -44,26 +42,23 @@ public void delete(DataverseFacet dataverseFacet) { cache.invalidate(); } - @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) - public void deleteFacetsFor( Dataverse d ) { - em.createNamedQuery("DataverseFacet.removeByOwnerId") - .setParameter("ownerId", d.getId()) - .executeUpdate(); + 
public void deleteFacetsFor(Dataverse d) { + em.createNamedQuery("DataverseFacet.removeByOwnerId") + .setParameter("ownerId", d.getId()) + .executeUpdate(); cache.invalidate(d.getId()); - } - - @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) - public DataverseFacet create(int displayOrder, DatasetFieldType fieldType, Dataverse ownerDv) { + } + + public DataverseFacet create(int displayOrder, DatasetFieldType fieldType, Dataverse ownerDv) { DataverseFacet dataverseFacet = new DataverseFacet(); - + dataverseFacet.setDisplayOrder(displayOrder); dataverseFacet.setDatasetFieldType(fieldType); dataverseFacet.setDataverse(ownerDv); - + em.persist(dataverseFacet); ownerDv.getDataverseFacets().add(dataverseFacet); - em.merge(ownerDv); cache.invalidate(ownerDv.getId()); return dataverseFacet; } From ea97785bc0a49ff762b2a37d97ef3980a4c6ab94 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 26 Nov 2024 14:48:04 -0500 Subject: [PATCH 347/402] #11044 more cleanup --- .../edu/harvard/iq/dataverse/DataverseFacetServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java index aa750e96bc9..804f1fe2943 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java @@ -49,7 +49,7 @@ public void deleteFacetsFor(Dataverse d) { cache.invalidate(d.getId()); } - + public DataverseFacet create(int displayOrder, DatasetFieldType fieldType, Dataverse ownerDv) { DataverseFacet dataverseFacet = new DataverseFacet(); From 40fe6656749ebfd8f1d87c551b2099f75ee3190b Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 26 Nov 2024 16:22:43 -0500 Subject: [PATCH 348/402] a typo in search include fragment --- src/main/webapp/search-include-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/search-include-fragment.xhtml b/src/main/webapp/search-include-fragment.xhtml index fcc5aff6f92..fc224443a8e 100644 --- a/src/main/webapp/search-include-fragment.xhtml +++ b/src/main/webapp/search-include-fragment.xhtml @@ -582,7 +582,7 @@ - +
    From e914b625f8a96e2e3faa8804865e16d2bbdcd878 Mon Sep 17 00:00:00 2001 From: Florian Fritze Date: Wed, 27 Nov 2024 07:50:57 +0100 Subject: [PATCH 349/402] check the bugfix --- src/main/webapp/metadataFragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/metadataFragment.xhtml b/src/main/webapp/metadataFragment.xhtml index 0a3ad249061..f8367ce01f8 100755 --- a/src/main/webapp/metadataFragment.xhtml +++ b/src/main/webapp/metadataFragment.xhtml @@ -130,7 +130,7 @@ - + Date: Wed, 27 Nov 2024 11:22:33 -0500 Subject: [PATCH 350/402] Update appendix.rst I removed the broken links behind the names of the metadata blocks that are listed in the Supported Metadata section. And I remove "version" from the "(see .tsv version)" text that follows the name of each of the metadata blocks. --- doc/sphinx-guides/source/user/appendix.rst | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/doc/sphinx-guides/source/user/appendix.rst b/doc/sphinx-guides/source/user/appendix.rst index f7843b8bf40..601274a50bd 100755 --- a/doc/sphinx-guides/source/user/appendix.rst +++ b/doc/sphinx-guides/source/user/appendix.rst @@ -22,14 +22,14 @@ Supported Metadata Detailed below are what metadata schemas we support for Citation and Domain Specific Metadata in the Dataverse Project: -- `Citation Metadata `__ (`see .tsv version `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 4.0 `__, and Dublin Core's `DCMI Metadata Terms `__ . Language field uses `ISO 639-1 `__ controlled vocabulary. -- `Geospatial Metadata `__ (`see .tsv version `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 4.0 `__, and Dublin Core. Country / Nation field uses `ISO 3166-1 `_ controlled vocabulary. -- `Social Science & Humanities Metadata `__ (`see .tsv version `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, and Dublin Core. -- `Astronomy and Astrophysics Metadata `__ (`see .tsv version `__): These metadata elements can be mapped/exported to the International Virtual Observatory Alliance’s (IVOA) +- Citation Metadata (`see .tsv `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 4.0 `__, and Dublin Core's `DCMI Metadata Terms `__ . Language field uses `ISO 639-1 `__ controlled vocabulary. +- Geospatial Metadata (`see .tsv `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 4.0 `__, and Dublin Core. Country / Nation field uses `ISO 3166-1 `_ controlled vocabulary. +- Social Science & Humanities Metadata__ (`see .tsv `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, and Dublin Core. +- Astronomy and Astrophysics Metadata (`see .tsv `__): These metadata elements can be mapped/exported to the International Virtual Observatory Alliance’s (IVOA) `VOResource Schema format `__ and is based on `Virtual Observatory (VO) Discovery and Provenance Metadata `__. -- `Life Sciences Metadata `__ (`see .tsv version `__): based on `ISA-Tab Specification `__, along with controlled vocabulary from subsets of the `OBI Ontology `__ and the `NCBI Taxonomy for Organisms `__. -- `Journal Metadata `__ (`see .tsv version `__): based on the `Journal Archiving and Interchange Tag Set, version 1.2 `__. +- Life Sciences Metadata (`see .tsv `__): based on `ISA-Tab Specification `__, along with controlled vocabulary from subsets of the `OBI Ontology `__ and the `NCBI Taxonomy for Organisms `__. +- Journal Metadata (`see .tsv `__): based on the `Journal Archiving and Interchange Tag Set, version 1.2 `__. 
Experimental Metadata ~~~~~~~~~~~~~~~~~~~~~ @@ -37,7 +37,7 @@ Experimental Metadata Unlike supported metadata, experimental metadata is not enabled by default in a new Dataverse installation. Feedback via any `channel `_ is welcome! - `CodeMeta Software Metadata `__: based on the `CodeMeta Software Metadata Schema, version 2.0 `__ (`see .tsv version `__) -- `Computational Workflow Metadata `__ (`see .tsv version `__): adapted from `Bioschemas Computational Workflow Profile, version 1.0 `__ and `Codemeta `__. +- Computational Workflow Metadata (`see .tsv `__): adapted from `Bioschemas Computational Workflow Profile, version 1.0 `__ and `Codemeta `__. Please note: these custom metadata schemas are not included in the Solr schema for indexing by default, you will need to add them as necessary for your custom metadata blocks. See "Update the Solr Schema" in :doc:`../admin/metadatacustomization`. From 3d50ba86d7db1c46ff1f384960af327e94011e4a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 27 Nov 2024 17:56:02 -0500 Subject: [PATCH 351/402] typo --- doc/sphinx-guides/source/user/appendix.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/user/appendix.rst b/doc/sphinx-guides/source/user/appendix.rst index 601274a50bd..df9b6704209 100755 --- a/doc/sphinx-guides/source/user/appendix.rst +++ b/doc/sphinx-guides/source/user/appendix.rst @@ -24,7 +24,7 @@ Detailed below are what metadata schemas we support for Citation and Domain Spec - Citation Metadata (`see .tsv `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 4.0 `__, and Dublin Core's `DCMI Metadata Terms `__ . Language field uses `ISO 639-1 `__ controlled vocabulary. - Geospatial Metadata (`see .tsv `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 4.0 `__, and Dublin Core. Country / Nation field uses `ISO 3166-1 `_ controlled vocabulary. -- Social Science & Humanities Metadata__ (`see .tsv `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, and Dublin Core. +- Social Science & Humanities Metadata (`see .tsv `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, and Dublin Core. - Astronomy and Astrophysics Metadata (`see .tsv `__): These metadata elements can be mapped/exported to the International Virtual Observatory Alliance’s (IVOA) `VOResource Schema format `__ and is based on `Virtual Observatory (VO) Discovery and Provenance Metadata `__. From a192c177da18469c411bf42cfa064fd719a38d4c Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 2 Dec 2024 15:35:36 -0500 Subject: [PATCH 352/402] fix release note --- doc/release-notes/220-harvard-edu-audit-files.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/release-notes/220-harvard-edu-audit-files.md b/doc/release-notes/220-harvard-edu-audit-files.md index 002c8e85063..fc857e3a02b 100644 --- a/doc/release-notes/220-harvard-edu-audit-files.md +++ b/doc/release-notes/220-harvard-edu-audit-files.md @@ -9,8 +9,8 @@ The JSON response includes: - List of DataFiles where the FileMetadata is missing. 
- Other failures found when trying to process the Datasets -curl "http://localhost:8080/api/admin/datafiles/auditFiles -curl "http://localhost:8080/api/admin/datafiles/auditFiles?firstId=0&lastId=1000" -curl "http://localhost:8080/api/admin/datafiles/auditFiles?datasetIdentifierList=doi:10.5072/FK2/RVNT9Q,doi:10.5072/FK2/RVNT9Q +curl -H "X-Dataverse-key:$API_TOKEN" "http://localhost:8080/api/admin/datafiles/auditFiles" +curl -H "X-Dataverse-key:$API_TOKEN" "http://localhost:8080/api/admin/datafiles/auditFiles?firstId=0&lastId=1000" +curl -H "X-Dataverse-key:$API_TOKEN" "http://localhost:8080/api/admin/datafiles/auditFiles?datasetIdentifierList=doi:10.5072/FK2/RVNT9Q,doi:10.5072/FK2/RVNT9Q" For more information, see [the docs](https://dataverse-guide--11016.org.readthedocs.build/en/11016/api/native-api.html#datafile-audit), #11016, and [#220](https://github.com/IQSS/dataverse.harvard.edu/issues/220) From e06e1d244f89df7ffa392712a2cbca3f5040dcdc Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 2 Dec 2024 15:38:27 -0500 Subject: [PATCH 353/402] fix api doc --- doc/sphinx-guides/source/api/native-api.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index bfcbbb96f93..cedc71c27dc 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6207,17 +6207,17 @@ Produce an audit report of missing files and FileMetadata for Datasets. Scans the Datasets in the database and verifies that the stored files exist. If the files are missing or if the FileMetadata is missing, this information is returned in a JSON response. The call will return a status code of 200 if the report was generated successfully. Issues found will be documented in the report and will not return a failure status code unless the report could not be generated:: - curl "$SERVER_URL/api/admin/datafiles/auditFiles" + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/admin/datafiles/auditFiles" Optional Parameters are available for filtering the Datasets scanned. 
For auditing the Datasets in a paged manner (firstId and lastId):: - curl "$SERVER_URL/api/admin/datafiles/auditFiles?firstId=0&lastId=1000" + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/admin/datafiles/auditFiles?firstId=0&lastId=1000" Auditing specific Datasets (comma separated list):: - curl "$SERVER_URL/api/admin/datafiles/auditFiles?datasetIdentifierList=doi:10.5072/FK2/JXYBJS,doi:10.7910/DVN/MPU019 + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/admin/datafiles/auditFiles?datasetIdentifierList=doi:10.5072/FK2/JXYBJS,doi:10.7910/DVN/MPU019 Sample JSON Audit Response:: From 8c79f6710a7f20f2fcc5e8e8c12a10263c44fafc Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 2 Dec 2024 15:38:59 -0500 Subject: [PATCH 354/402] fix api doc --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index cedc71c27dc..a24716f715b 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -6217,7 +6217,7 @@ For auditing the Datasets in a paged manner (firstId and lastId):: Auditing specific Datasets (comma separated list):: - curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/admin/datafiles/auditFiles?datasetIdentifierList=doi:10.5072/FK2/JXYBJS,doi:10.7910/DVN/MPU019 + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/admin/datafiles/auditFiles?datasetIdentifierList=doi:10.5072/FK2/JXYBJS,doi:10.7910/DVN/MPU019" Sample JSON Audit Response:: From ca95ad8d0b260060c7e7ed435767bd134cbf3907 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 3 Dec 2024 15:24:04 -0500 Subject: [PATCH 355/402] #11044 fix failing tests --- .../edu/harvard/iq/dataverse/DataverseFacetServiceBean.java | 4 ++-- .../engine/command/impl/AbstractWriteDataverseCommand.java | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java index 804f1fe2943..56f522fa816 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java @@ -56,9 +56,9 @@ public DataverseFacet create(int displayOrder, DatasetFieldType fieldType, Datav dataverseFacet.setDisplayOrder(displayOrder); dataverseFacet.setDatasetFieldType(fieldType); dataverseFacet.setDataverse(ownerDv); - - em.persist(dataverseFacet); ownerDv.getDataverseFacets().add(dataverseFacet); + em.persist(dataverseFacet); + cache.invalidate(ownerDv.getId()); return dataverseFacet; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java index 2a8bb18a942..91f3a5b823c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -93,7 +93,6 @@ private void processInputLevels(CommandContext ctxt) { dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels); } ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); - dataverse.setDataverseFieldTypeInputLevels(new ArrayList<>()); inputLevels.forEach(inputLevel -> { inputLevel.setDataverse(dataverse); 
ctxt.fieldTypeInputLevels().create(inputLevel); From 04e9ebaeee6aed9b91606ec2b358f2466f133eb0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 4 Dec 2024 11:56:22 -0500 Subject: [PATCH 356/402] comment out /dev/lang to prevent verbose logging #11043 --- .../source/container/running/demo.rst | 18 ++++++++++++------ docker-compose-dev.yml | 2 +- docker/compose/demo/compose.yml | 2 +- 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst index 2e404e7a09a..fe094bb3449 100644 --- a/doc/sphinx-guides/source/container/running/demo.rst +++ b/doc/sphinx-guides/source/container/running/demo.rst @@ -140,19 +140,25 @@ One you make this change it should be visible in the copyright in the bottom lef Multiple Languages ++++++++++++++++++ -Generally speaking, you'll want to follow :ref:`i18n` in the Installation Guide to set up multiple languages such as English and French. +Generally speaking, you'll want to follow :ref:`i18n` in the Installation Guide to set up multiple languages. (You need to create your own "languages.zip" file, for example.) Here will give you guidance specific to this demo tutorial. We'll be setting up a toggle between English and French. -To set up the toggle between English and French, we'll use a slight variation on the command in the instructions above, adding the unblock key we created above: +First, edit the ``compose.yml`` file and uncomment the following line: -``curl "http://localhost:8080/api/admin/settings/:Languages?unblock-key=unblockme" -X PUT -d '[{"locale":"en","title":"English"},{"locale":"fr","title":"Français"}]'`` +.. code-block:: text + + #-Ddataverse.lang.directory=/dv/lang -Similarly, when loading the "languages.zip" file, we'll add the unblock key: +Next, upload "languages.zip" to the "loadpropertyfiles" API endpoint as shown below. This should place files ending in ".properties" into the ``/dv/lang`` directory configured above. + +Please note that we are using a slight variation on the command in the instructions above, adding the unblock key we created above: ``curl "http://localhost:8080/api/admin/datasetfield/loadpropertyfiles?unblock-key=unblockme" -X POST --upload-file /tmp/languages/languages.zip -H "Content-Type: application/zip"`` -Stop and start the Dataverse container in order for the language toggle to work. +Next, set up the UI toggle between English and French, again using the unblock key: -Note that ``dataverse.lang.directory=/dv/lang`` has already been configured for you in the ``compose.yml`` file. The step where you loaded "languages.zip" should have populated the ``/dv/lang`` directory with files ending in ".properties". +``curl "http://localhost:8080/api/admin/settings/:Languages?unblock-key=unblockme" -X PUT -d '[{"locale":"en","title":"English"},{"locale":"fr","title":"Français"}]'`` + +Stop and start the Dataverse container in order for the language toggle to work. 
Next Steps ---------- diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 384b70b7a7b..c8515f43136 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -57,7 +57,7 @@ services: -Ddataverse.pid.fake.label=FakeDOIProvider -Ddataverse.pid.fake.authority=10.5072 -Ddataverse.pid.fake.shoulder=FK2/ - -Ddataverse.lang.directory=/dv/lang + #-Ddataverse.lang.directory=/dv/lang ports: - "8080:8080" # HTTP (Dataverse Application) - "4949:4848" # HTTPS (Payara Admin Console) diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index f03d81f5957..60ed130612e 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -26,7 +26,7 @@ services: -Ddataverse.pid.fake.label=FakeDOIProvider -Ddataverse.pid.fake.authority=10.5072 -Ddataverse.pid.fake.shoulder=FK2/ - -Ddataverse.lang.directory=/dv/lang + #-Ddataverse.lang.directory=/dv/lang ports: - "8080:8080" # HTTP (Dataverse Application) - "4848:4848" # HTTP (Payara Admin Console) From d9214a32396e490d918e7e18c9eb7cf696e2a1f1 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 4 Dec 2024 14:27:28 -0500 Subject: [PATCH 357/402] reword #11043 --- doc/sphinx-guides/source/container/running/demo.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst index fe094bb3449..b1945070714 100644 --- a/doc/sphinx-guides/source/container/running/demo.rst +++ b/doc/sphinx-guides/source/container/running/demo.rst @@ -148,7 +148,7 @@ First, edit the ``compose.yml`` file and uncomment the following line: #-Ddataverse.lang.directory=/dv/lang -Next, upload "languages.zip" to the "loadpropertyfiles" API endpoint as shown below. This should place files ending in ".properties" into the ``/dv/lang`` directory configured above. +Next, upload "languages.zip" to the "loadpropertyfiles" API endpoint as shown below. This will place files ending in ".properties" into the ``/dv/lang`` directory configured above. Please note that we are using a slight variation on the command in the instructions above, adding the unblock key we created above: From b87fdf732e4837ea137804eea4fe6af7da707515 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 2 Dec 2024 13:44:39 -0500 Subject: [PATCH 358/402] #10952 create doc/outline --- doc/release-notes/6.5-release-notes.md | 192 +++++++++++++++++++++++++ 1 file changed, 192 insertions(+) create mode 100644 doc/release-notes/6.5-release-notes.md diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md new file mode 100644 index 00000000000..ccc8a161ae5 --- /dev/null +++ b/doc/release-notes/6.5-release-notes.md @@ -0,0 +1,192 @@ +# Dataverse 6.5 + +Please note: To read these instructions in full, please go to https://github.com/IQSS/dataverse/releases/tag/v6.5 rather than the list of releases, which will cut them off. + +This release brings new features, enhancements, and bug fixes to Dataverse. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. + +## Release Highlights + +New features in Dataverse 6.5: + +- and more! Please see below. 
+ + +## Features Added + + + + +## Bugs Fixed + + +## API Updates + + + +## Settings Added + + + +## Backward Incompatible Changes + + + +## Complete List of Changes + +For the complete list of code changes in this release, see the [6.5 milestone](https://github.com/IQSS/dataverse/issues?q=milestone%3A6.5+is%3Aclosed) in GitHub. + +## Getting Help + +For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/g/dataverse-community) or email support@dataverse.org. + + +## Installation + +If this is a new installation, please follow our [Installation Guide](https://guides.dataverse.org/en/latest/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/latest/installation/intro.html#getting-help) if you need it! + +Once you are in production, we would be delighted to update our [map of Dataverse installations](https://dataverse.org/installations) around the world to include yours! Please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org to join the club! + +You are also very welcome to join the [Global Dataverse Community Consortium](https://www.gdcc.io/) (GDCC). + + +## Upgrade Instructions + +Upgrading requires a maintenance window and downtime. Please plan accordingly, create backups of your database, etc. + +These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.4. + +0\. These instructions assume that you are upgrading from the immediate previous version. If you are running an earlier version, the only supported way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to this version. + +If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user. + +In the following commands, we assume that Payara 6 is installed in `/usr/local/payara6`. If not, adjust as needed. + +```shell +export PAYARA=/usr/local/payara6` +``` + +(or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell) + +1\. Undeploy the previous version + +```shell +$PAYARA/bin/asadmin undeploy dataverse-6.4 +``` + +2\. Stop and start Payara + +```shell +service payara stop +sudo service payara start +``` + +3\. Deploy this version + +```shell +$PAYARA/bin/asadmin deploy dataverse-6.5.war +``` + +Note: if you have any trouble deploying, stop Payara, remove the following directories, start Payara, and try to deploy again. + +```shell +service payara stop +rm -rf $PAYARA/glassfish/domains/domain1/generated +rm -rf $PAYARA/glassfish/domains/domain1/osgi-cache +rm -rf $PAYARA/glassfish/domains/domain1/lib/databases +``` + +4\. For installations with internationalization: + +Please remember to update translations via [Dataverse language packs](https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs). + +5\. Restart Payara + +```shell +service payara stop +service payara start +``` + +6\. Update metadata blocks + +These changes reflect incremental improvements made to the handling of core metadata fields. 
+ +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/scripts/api/data/metadatablocks/citation.tsv + +curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file citation.tsv +``` + +7\. Update Solr schema.xml file. Start with the standard v6.4 schema.xml, then, if your installation uses any custom or experimental metadata blocks, update it to include the extra fields (step 7a). + +Stop Solr (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.4/installation/prerequisites.html#solr-init-script)). + +```shell +service solr stop +``` + +Replace schema.xml + +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/conf/solr/schema.xml +cp schema.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf +``` + +Start Solr (but if you use any custom metadata blocks, perform the next step, 7a first). + +```shell +service solr start +``` + +7a\. For installations with custom or experimental metadata blocks: + +Before starting Solr, update the schema to include all the extra metadata fields that your installation uses. We do this by collecting the output of the Dataverse schema API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed to reflect the names of the directories, if different): + +```shell + wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/conf/solr/update-fields.sh + chmod +x update-fields.sh + curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-9.4.1/server/solr/collection1/conf/schema.xml +``` + +Now start Solr. + +8\. Reindex Solr + +Below is the simplest way to reindex Solr: + +```shell +curl http://localhost:8080/api/admin/index +``` + +The API above rebuilds the existing index "in place". If you want to be absolutely sure that your index is up-to-date and consistent, you may consider wiping it clean and reindexing everything from scratch (see [the guides](https://guides.dataverse.org/en/latest/admin/solr-search-index.html)). Just note that, depending on the size of your database, a full reindex may take a while and the users will be seeing incomplete search results during that window. + +9\. Run reExportAll to update dataset metadata exports + +This step is necessary because of changes described above for the `Datacite` and `oai_dc` export formats. + +Below is the simple way to reexport all dataset metadata. For more advanced usage, please see [the guides](http://guides.dataverse.org/en/6.4/admin/metadataexport.html#batch-exports-through-the-api). + +```shell +curl http://localhost:8080/api/admin/metadata/reExportAll +``` + +10\. Pushing updated metadata to DataCite + +(If you don't use DataCite, you can skip this.) + +Above you updated the citation metadata block and Solr with the new "relationType" field. With these two changes, the "Relation Type" fields will be available and creation/publication of datasets will result in the expanded XML being sent to DataCite. You've also already run "reExportAll" to update the `Datacite` metadata export format. 
+ +Entries at DataCite for published datasets can be updated by a superuser using an API call (newly [documented](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-metadata-for-all-published-datasets-at-the-pid-provider)): + +`curl -X POST -H 'X-Dataverse-key:' http://localhost:8080/api/datasets/modifyRegistrationPIDMetadataAll` + +This will loop through all published datasets (and released files with PIDs). As long as the loop completes, the call will return a 200/OK response. Any PIDs for which the update fails can be found using the following command: + +`grep 'Failure for id' server.log` + +Failures may occur if PIDs were never registered, or if they were never made findable. Any such cases can be fixed manually in DataCite Fabrica or using the [Reserve a PID](https://guides.dataverse.org/en/6.4/api/native-api.html#reserve-a-pid) API call and the newly documented `/api/datasets//modifyRegistration` call respectively. See https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#send-dataset-metadata-to-pid-provider. Please reach out with any questions. + +PIDs can also be updated by a superuser on a per-dataset basis using + +`curl -X POST -H 'X-Dataverse-key:' http://localhost:8080/api/datasets//modifyRegistrationMetadata` + +### Additional Upgrade Steps From eae7478e6d0e8cdf55d16e9a9abfe5e23645eb78 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 3 Dec 2024 15:34:01 -0500 Subject: [PATCH 359/402] start adding features, bug fixes, etc to notes #10952 --- .../11018-update-dataverse-endpoint-update.md | 8 ---- .../11049-oai-identifiers-as-pids.md | 5 -- .../220-harvard-edu-audit-files.md | 16 ------- doc/release-notes/6.5-release-notes.md | 47 +++++++++++++++++++ .../7239-mydata-results-by-username.md | 3 -- doc/release-notes/8184-rename-private-url.md | 11 ----- .../8941-adding-fileCount-in-solr.md | 15 ------ ...s-labels-not-translated-in-result-block.md | 7 --- ...50-5-improve-list-linked-dataverses-API.md | 5 -- doc/release-notes/expose-export-formats.md | 2 - 10 files changed, 47 insertions(+), 72 deletions(-) delete mode 100644 doc/release-notes/11018-update-dataverse-endpoint-update.md delete mode 100644 doc/release-notes/11049-oai-identifiers-as-pids.md delete mode 100644 doc/release-notes/220-harvard-edu-audit-files.md delete mode 100644 doc/release-notes/7239-mydata-results-by-username.md delete mode 100644 doc/release-notes/8184-rename-private-url.md delete mode 100644 doc/release-notes/8941-adding-fileCount-in-solr.md delete mode 100644 doc/release-notes/9408-fix-facets-labels-not-translated-in-result-block.md delete mode 100644 doc/release-notes/9650-5-improve-list-linked-dataverses-API.md delete mode 100644 doc/release-notes/expose-export-formats.md diff --git a/doc/release-notes/11018-update-dataverse-endpoint-update.md b/doc/release-notes/11018-update-dataverse-endpoint-update.md deleted file mode 100644 index c2d9cf64af3..00000000000 --- a/doc/release-notes/11018-update-dataverse-endpoint-update.md +++ /dev/null @@ -1,8 +0,0 @@ -The updateDataverse API endpoint has been updated to support an "inherit from parent" configuration for metadata blocks, facets, and input levels. - -When it comes to omitting any of these fields in the request JSON: - -- Omitting ``facetIds`` or ``metadataBlockNames`` causes the Dataverse collection to inherit the corresponding configuration from its parent. -- Omitting ``inputLevels`` removes any existing custom input levels in the Dataverse collection. 
- -Previously, not setting these fields meant keeping the existing ones in the Dataverse. diff --git a/doc/release-notes/11049-oai-identifiers-as-pids.md b/doc/release-notes/11049-oai-identifiers-as-pids.md deleted file mode 100644 index 8b53a461a70..00000000000 --- a/doc/release-notes/11049-oai-identifiers-as-pids.md +++ /dev/null @@ -1,5 +0,0 @@ -## When harvesting, Dataverse can now use the identifier from the OAI-PMH record header as the persistent id for the harvested dataset. - -This will allow harvesting from sources that do not include a persistent id in their oai_dc metadata records, but use valid dois or handles as the OAI-PMH record header identifiers. - -It is also possible to optionally configure a harvesting client to use this OAI-PMH identifier as the **preferred** choice for the persistent id. See the [Harvesting Clients API](https://guides.dataverse.org/en/6.5/api/native-api.html#create-a-harvesting-client) section of the Guides, #11049 and #10982 for more information. \ No newline at end of file diff --git a/doc/release-notes/220-harvard-edu-audit-files.md b/doc/release-notes/220-harvard-edu-audit-files.md deleted file mode 100644 index fc857e3a02b..00000000000 --- a/doc/release-notes/220-harvard-edu-audit-files.md +++ /dev/null @@ -1,16 +0,0 @@ -### New API to Audit Datafiles across the database - -This is a superuser only API endpoint to audit Datasets with DataFiles where the physical files are missing or the file metadata is missing. -The Datasets scanned can be limited by optional firstId and lastId query parameters, or a given CSV list of Dataset Identifiers. -Once the audit report is generated, a superuser can either delete the missing file(s) from the Dataset or contact the author to re-upload the missing file(s). - -The JSON response includes: -- List of files in each DataFile where the file exists in the database but the physical file is not in the file store. -- List of DataFiles where the FileMetadata is missing. -- Other failures found when trying to process the Datasets - -curl -H "X-Dataverse-key:$API_TOKEN" "http://localhost:8080/api/admin/datafiles/auditFiles" -curl -H "X-Dataverse-key:$API_TOKEN" "http://localhost:8080/api/admin/datafiles/auditFiles?firstId=0&lastId=1000" -curl -H "X-Dataverse-key:$API_TOKEN" "http://localhost:8080/api/admin/datafiles/auditFiles?datasetIdentifierList=doi:10.5072/FK2/RVNT9Q,doi:10.5072/FK2/RVNT9Q" - -For more information, see [the docs](https://dataverse-guide--11016.org.readthedocs.build/en/11016/api/native-api.html#datafile-audit), #11016, and [#220](https://github.com/IQSS/dataverse.harvard.edu/issues/220) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index ccc8a161ae5..7a0d4f81da8 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -13,15 +13,57 @@ New features in Dataverse 6.5: ## Features Added +### Private URL Renamed to Preview URL +With this release the name of the URL that may be used by dataset administrators to share a draft version of a dataset has been changed from Private URL to Preview URL. +Also, additional information about the creation of Preview URLs has been added to the popup accessed via edit menu of the Dataset Page. + +Any Private URLs created in previous versions of Dataverse will continue to work. + +The old "privateUrl" API endpoints for the creation and deletion of Preview (formerly Private) URLs have been deprecated. 
They will continue to work but please switch to the "previewUrl" equivalents that have been [documented](https://dataverse-guide--10961.org.readthedocs.build/en/10961/api/native-api.html#create-a-preview-url-for-a-dataset) in the API Guide. + +See also #8184, #8185, #10950, and #10961. + +### Harvested Dataset PID from Record Header + +When harvesting, Dataverse can now use the identifier from the OAI-PMH record header as the persistent id for the harvested dataset. + +This will allow harvesting from sources that do not include a persistent id in their oai_dc metadata records, but use valid DOIs or handles as the OAI-PMH record header identifiers. + +It is also possible to optionally configure a harvesting client to use this OAI-PMH identifier as the **preferred** choice for the persistent id. See the [Harvesting Clients API](https://guides.dataverse.org/en/6.5/api/native-api.html#create-a-harvesting-client) section of the Guides, #11049 and #10982 for more information. ## Bugs Fixed +### My Data Filter by Username Feature Fixed + +The superuser-only feature of filtering by a username on the My Data page was not working. The "Results for Username" field now returns data for the desired user. See also #7239 and #10980. + +### Facets Filter Labels Now Translated Above Search Results + +On the main page, it's possible to filter results using search facets. If internationalization (i18n) has been activated in the Dataverse installation, allowing pages to be displayed in several languages, the facets are translated in the filter column. However, they weren't being translated above the search results, remaining in the default language, English. + +This version of Dataverse fix this, and includes internationalization in the facets visible in the search results section. For more information, see #9408 and #10158. ## API Updates +### fileCount Added to Search API + +A new search field called `fileCount` can be searched to discover the number of files per dataset. See also #8941 and #10598. + +### List Dataset Metadata Exporters + +A list of available dataset metadata exporters can now be retrieved programmatically via API. See [the docs](https://dataverse-guide--10739.org.readthedocs.build/en/10739/api/native-api.html#get-export-formats) and #10739. + +### Audit Data Files +A superuser-only API endpoint has been added to audit datasets with data files where the physical files are missing or the file metadata is missing. See [the docs](https://dataverse-guide--11016.org.readthedocs.build/en/11016/api/native-api.html#datafile-audit), #11016, and [#220](https://github.com/IQSS/dataverse.harvard.edu/issues/220). + +### Update Collection API Inheritance + +The update collection (dataverse) API endpoint has been updated to support an "inherit from parent" configuration for metadata blocks, facets, and input levels. + +Previously, not setting these fields meant using a copy of the settings from the parent collection, which could get out of sync.. See also [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#update-a-dataverse-collection), #11018, and #11026. ## Settings Added @@ -29,6 +71,11 @@ New features in Dataverse 6.5: ## Backward Incompatible Changes +Generally speaking, see the [API Changelog](https://preview.guides.gdcc.io/en/develop/api/changelog.html) for a list of backward-incompatible changes. 
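Relating to the `fileCount` search field described above, a rough sketch of how it might be queried through the Search API follows; the Solr-style `fq` range filter is an assumption based on the Search API's usual filter-query pass-through and is not spelled out in these notes:

```shell
# -G with --data-urlencode keeps the Solr range query properly URL-encoded
curl -G "$SERVER_URL/api/search" \
  --data-urlencode "q=*" \
  --data-urlencode "type=dataset" \
  --data-urlencode "fq=fileCount:[10 TO *]"
```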
+ +### List Collections Linked to a Dataset + +The API endpoint that returns a list of collections that a dataset has been linked to has been improved to provide a more structured JSON response. See [the docs](https://dataverse-guide--9665.org.readthedocs.build/en/9665/admin/dataverses-datasets.html#list-collections-that-are-linked-from-a-dataset), #9650, and #9665. ## Complete List of Changes diff --git a/doc/release-notes/7239-mydata-results-by-username.md b/doc/release-notes/7239-mydata-results-by-username.md deleted file mode 100644 index fa1ce56d89e..00000000000 --- a/doc/release-notes/7239-mydata-results-by-username.md +++ /dev/null @@ -1,3 +0,0 @@ -## Fix My Data filter results by username for Administrators - -The filtering for the username on the MyData page was not working. This is only available for superusers. This fixes the "Results for Username" field to return the data for the desired user. See also #7239 and #10980. diff --git a/doc/release-notes/8184-rename-private-url.md b/doc/release-notes/8184-rename-private-url.md deleted file mode 100644 index 7acb03fd735..00000000000 --- a/doc/release-notes/8184-rename-private-url.md +++ /dev/null @@ -1,11 +0,0 @@ -###Private URL renamed Preview URL - -With this release the name of the URL that may be used by dataset administrators to share a draft version of a dataset has been changed from Private URL to Preview URL. - -Also, additional information about the creation of Preview URLs has been added to the popup accessed via edit menu of the Dataset Page. - -Any Private URLs created in previous versions of Dataverse will continue to work. - -The old "privateUrl" API endpoints for the creation and deletion of Preview (formerly Private) URLs have been deprecated. They will continue to work but please switch to the "previewUrl" equivalents that have been [documented](https://dataverse-guide--10961.org.readthedocs.build/en/10961/api/native-api.html#create-a-preview-url-for-a-dataset) in the API Guide. - -See also #8184, #8185, #10950, and #10961. diff --git a/doc/release-notes/8941-adding-fileCount-in-solr.md b/doc/release-notes/8941-adding-fileCount-in-solr.md deleted file mode 100644 index 164b91e6123..00000000000 --- a/doc/release-notes/8941-adding-fileCount-in-solr.md +++ /dev/null @@ -1,15 +0,0 @@ -## Release Highlights - -### Adding fileCount as SOLR field - -A new search field called `fileCount` can be searched to discover the number of files per dataset. (#10598) - -## Upgrade Instructions - -1. Update your Solr `schema.xml` to include the new field. -For details, please see https://guides.dataverse.org/en/latest/admin/metadatacustomization.html#updating-the-solr-schema - -2. Reindex Solr. -Once the schema.xml is updated, Solr must be restarted and a reindex initiated. -For details, see https://guides.dataverse.org/en/latest/admin/solr-search-index.html but here is the reindex command: -`curl http://localhost:8080/api/admin/index` diff --git a/doc/release-notes/9408-fix-facets-labels-not-translated-in-result-block.md b/doc/release-notes/9408-fix-facets-labels-not-translated-in-result-block.md deleted file mode 100644 index 344859e2dbd..00000000000 --- a/doc/release-notes/9408-fix-facets-labels-not-translated-in-result-block.md +++ /dev/null @@ -1,7 +0,0 @@ -## Fix facets filter labels not translated in result block - -On the main page, it's possible to filter results using search facets. 
If internationalization (i18n) has been activated in the Dataverse installation, allowing pages to be displayed in several languages, the facets are translated in the filter column. However, they aren't translated in the search results and remain in the default language, English. - -This version of Dataverse fix this, and includes internationalization in the facets visible in the search results section. - -For more information, see issue [#9408](https://github.com/IQSS/dataverse/issues/9408) and pull request [#10158](https://github.com/IQSS/dataverse/pull/10158) diff --git a/doc/release-notes/9650-5-improve-list-linked-dataverses-API.md b/doc/release-notes/9650-5-improve-list-linked-dataverses-API.md deleted file mode 100644 index 8c79955891b..00000000000 --- a/doc/release-notes/9650-5-improve-list-linked-dataverses-API.md +++ /dev/null @@ -1,5 +0,0 @@ -The following API have been added: - -/api/datasets/{datasetId}/links - -It lists the linked dataverses to a dataset. It can be executed only by administrators. \ No newline at end of file diff --git a/doc/release-notes/expose-export-formats.md b/doc/release-notes/expose-export-formats.md deleted file mode 100644 index a21906d7bbb..00000000000 --- a/doc/release-notes/expose-export-formats.md +++ /dev/null @@ -1,2 +0,0 @@ -# New API method for listing the available exporters -Found at `/api/info/exportFormats`, produces an object with available format names as keys, and as values an object with various info about the exporter. See also #10739. \ No newline at end of file From e520a9a746aacd01e0bc3d9f633cdf98cfbe14ac Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 3 Dec 2024 15:54:18 -0500 Subject: [PATCH 360/402] add more to notes #10952 --- ...pearing-in-search-results-for-anon-user.md | 11 --------- ...0969-order-subfields-version-difference.md | 2 -- .../10977-globus-filesize-lookup.md | 6 ----- .../11012-get-dataverse-api-ext.md | 1 - doc/release-notes/6.5-release-notes.md | 24 ++++++++++++++++++- 5 files changed, 23 insertions(+), 21 deletions(-) delete mode 100644 doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md delete mode 100644 doc/release-notes/10969-order-subfields-version-difference.md delete mode 100644 doc/release-notes/10977-globus-filesize-lookup.md delete mode 100644 doc/release-notes/11012-get-dataverse-api-ext.md diff --git a/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md b/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md deleted file mode 100644 index 66ea04b124f..00000000000 --- a/doc/release-notes/10947-unpublished-files-appearing-in-search-results-for-anon-user.md +++ /dev/null @@ -1,11 +0,0 @@ -## Unpublished file bug fix - -A bug fix was made that gets the major version of a Dataset when all major versions were deaccessioned. This fixes the incorrect showing of the files as "Unpublished" in the search list even when they are published. -This fix affects the indexing, meaning these datasets must be re-indexed once Dataverse is updated. This can be manually done by calling the index API for each affected Dataset. - -Example: -```shell -curl http://localhost:8080/api/admin/index/dataset?persistentId=doi:10.7910/DVN/6X4ZZL -``` - -See also #10947 and #10974. 
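As a quick illustration of the exporter-listing endpoint mentioned in the removed snippet above, a call might look like the following (the response is an object keyed by format name; whether authentication is required is not stated here, so none is shown):

```shell
# lists the available dataset metadata export formats
curl "$SERVER_URL/api/info/exportFormats"
```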
diff --git a/doc/release-notes/10969-order-subfields-version-difference.md b/doc/release-notes/10969-order-subfields-version-difference.md deleted file mode 100644 index 3f245ebe069..00000000000 --- a/doc/release-notes/10969-order-subfields-version-difference.md +++ /dev/null @@ -1,2 +0,0 @@ -Bug Fix: -In order to facilitate the comparison between the draft version and the published version of a dataset, a sort on subfields has been added (#10969) \ No newline at end of file diff --git a/doc/release-notes/10977-globus-filesize-lookup.md b/doc/release-notes/10977-globus-filesize-lookup.md deleted file mode 100644 index 49fd10d9ffe..00000000000 --- a/doc/release-notes/10977-globus-filesize-lookup.md +++ /dev/null @@ -1,6 +0,0 @@ -## A new Globus optimization setting - -An optimization has been added for the Globus upload workflow, with a corresponding new database setting: `:GlobusBatchLookupSize` - - -See the [Database Settings](https://guides.dataverse.org/en/6.5/installation/config.html#GlobusBatchLookupSize) section of the Guides for more information. \ No newline at end of file diff --git a/doc/release-notes/11012-get-dataverse-api-ext.md b/doc/release-notes/11012-get-dataverse-api-ext.md deleted file mode 100644 index 641aa373174..00000000000 --- a/doc/release-notes/11012-get-dataverse-api-ext.md +++ /dev/null @@ -1 +0,0 @@ -The JSON payload of the getDataverse endpoint has been extended to include properties isMetadataBlockRoot and isFacetRoot. diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 7a0d4f81da8..30bd0f68732 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -25,6 +25,12 @@ The old "privateUrl" API endpoints for the creation and deletion of Preview (for See also #8184, #8185, #10950, and #10961. +### GlobusBatchLookupSize + +An optimization has been added for the Globus upload workflow, with a corresponding new database setting: `:GlobusBatchLookupSize` + +See the [Database Settings](https://guides.dataverse.org/en/6.5/installation/config.html#GlobusBatchLookupSize) section of the Guides, #10977, and #11040 for more information. + ### Harvested Dataset PID from Record Header When harvesting, Dataverse can now use the identifier from the OAI-PMH record header as the persistent id for the harvested dataset. @@ -39,12 +45,24 @@ It is also possible to optionally configure a harvesting client to use this OAI- The superuser-only feature of filtering by a username on the My Data page was not working. The "Results for Username" field now returns data for the desired user. See also #7239 and #10980. +### Version Differences Details Sorting Added + +In order to facilitate the comparison between the draft version and the published version of a dataset, a sort on subfields has been added. See #10969. + ### Facets Filter Labels Now Translated Above Search Results On the main page, it's possible to filter results using search facets. If internationalization (i18n) has been activated in the Dataverse installation, allowing pages to be displayed in several languages, the facets are translated in the filter column. However, they weren't being translated above the search results, remaining in the default language, English. This version of Dataverse fix this, and includes internationalization in the facets visible in the search results section. For more information, see #9408 and #10158. 
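Relating to the `:GlobusBatchLookupSize` database setting introduced above, a sketch of how such a setting is typically applied through the admin settings API; the value 50 is purely an illustrative assumption, not a recommended default:

```shell
# the numeric value is an arbitrary example; see the Database Settings section of the Guides for guidance
curl -X PUT -d 50 "$SERVER_URL/api/admin/settings/:GlobusBatchLookupSize"
```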
+### Unpublished File Bug Fix Related to Deaccessioning + +A bug fix was made that gets the major version of a Dataset when all major versions were deaccessioned. This fixes the incorrect showing of the files as "Unpublished" in the search list even when they are published. This fix affects the indexing, meaning these datasets must be re-indexed once Dataverse is updated. See also #10947 and #10974. + +### Globus "missing properties" Logging Fixed + +In previous releases, logging would show Globus-related strings were missing from properties files. This has been fixed. See #11030 for details. + ## API Updates ### fileCount Added to Search API @@ -65,9 +83,13 @@ The update collection (dataverse) API endpoint has been updated to support an "i Previously, not setting these fields meant using a copy of the settings from the parent collection, which could get out of sync.. See also [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#update-a-dataverse-collection), #11018, and #11026. -## Settings Added +### isMetadataBlockRoot and isFacetRoot + +The JSON payload of the "get collection" endpoint has been extended to include properties isMetadataBlockRoot and isFacetRoot. See also [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#view-a-dataverse-collection), #11012, and #11013. +## Settings Added +- :GlobusBatchLookupSize ## Backward Incompatible Changes From 3e69dd1ccfb4f97e0937cf86cb4acfea89429d82 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 4 Dec 2024 14:48:11 -0500 Subject: [PATCH 361/402] more features, etc #10952 --- ...17-guestbook-question-size-limit-raised.md | 1 - .../10914-users-token-api-credentials.md | 3 --- .../10919-minor-DataCiteXML-bugfix.md | 1 - doc/release-notes/10939-i18n-docker.md | 5 ---- doc/release-notes/6.5-release-notes.md | 24 +++++++++++++++++++ 5 files changed, 24 insertions(+), 10 deletions(-) delete mode 100644 doc/release-notes/10117-guestbook-question-size-limit-raised.md delete mode 100644 doc/release-notes/10914-users-token-api-credentials.md delete mode 100644 doc/release-notes/10919-minor-DataCiteXML-bugfix.md delete mode 100644 doc/release-notes/10939-i18n-docker.md diff --git a/doc/release-notes/10117-guestbook-question-size-limit-raised.md b/doc/release-notes/10117-guestbook-question-size-limit-raised.md deleted file mode 100644 index ab5e84d78fe..00000000000 --- a/doc/release-notes/10117-guestbook-question-size-limit-raised.md +++ /dev/null @@ -1 +0,0 @@ -Custom questions in Guestbooks can now be more than 255 characters and the bug causing a silent failure when questions were longer than this limit has been fixed. \ No newline at end of file diff --git a/doc/release-notes/10914-users-token-api-credentials.md b/doc/release-notes/10914-users-token-api-credentials.md deleted file mode 100644 index 888214481f6..00000000000 --- a/doc/release-notes/10914-users-token-api-credentials.md +++ /dev/null @@ -1,3 +0,0 @@ -Extended the users/token GET endpoint to support any auth mechanism for retrieving the token information. - -Previously, this endpoint only accepted an API token to retrieve its information. Now, it accepts any authentication mechanism and returns the associated API token information. 
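To make the removed snippet above concrete, a minimal sketch of calling the token-information endpoint with an API token (the header and placeholder variables follow the conventions used elsewhere in these guides):

```shell
# returns information about the API token associated with the authenticated user
curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/users/token"
```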
diff --git a/doc/release-notes/10919-minor-DataCiteXML-bugfix.md b/doc/release-notes/10919-minor-DataCiteXML-bugfix.md deleted file mode 100644 index 4fa0c1142b1..00000000000 --- a/doc/release-notes/10919-minor-DataCiteXML-bugfix.md +++ /dev/null @@ -1 +0,0 @@ -A minor bug fix was made to avoid sending a useless ", null" in the DataCiteXML sent to DataCite and in the DataCite export when a dataset has a metadata entry for "Software Name" and no entry for "Software Version". The bug fix will update datasets upon publication. Anyone with existing published datasets with this problem can be fixed by [pushing updated metadata to DataCite for affected datasets](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-metadata-for-a-published-dataset-at-the-pid-provider) and [re-exporting the dataset metadata](https://guides.dataverse.org/en/6.4/admin/metadataexport.html#batch-exports-through-the-api) or by following steps 9 and 10 in the v6.4 release notes to update and re-export all datasets. diff --git a/doc/release-notes/10939-i18n-docker.md b/doc/release-notes/10939-i18n-docker.md deleted file mode 100644 index d9887b684db..00000000000 --- a/doc/release-notes/10939-i18n-docker.md +++ /dev/null @@ -1,5 +0,0 @@ -## Multiple Language in Docker - -Configuration and documentation has been added to explain how to set up multiple languages (e.g. English and French) in the tutorial for setting up Dataverse in Docker. - -See also #10939 diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 30bd0f68732..2d6345941bb 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -13,6 +13,10 @@ New features in Dataverse 6.5: ## Features Added +### Longer Custom Questions in Guestbooks + +Custom questions in Guestbooks can now be more than 255 characters and the bug causing a silent failure when questions were longer than this limit has been fixed. See also #9492, #10117, #10118. + ### Private URL Renamed to Preview URL With this release the name of the URL that may be used by dataset administrators to share a draft version of a dataset has been changed from Private URL to Preview URL. @@ -25,6 +29,12 @@ The old "privateUrl" API endpoints for the creation and deletion of Preview (for See also #8184, #8185, #10950, and #10961. +## Multiple Language in Docker + +Configuration and documentation has been added to explain how to set up multiple languages (e.g. English and French) in the tutorial for setting up Dataverse in Docker. + +See also [the docs](https://dataverse-guide--10940.org.readthedocs.build/en/10940/container/running/demo.html#multiple-languages), #10939, and #10940. + ### GlobusBatchLookupSize An optimization has been added for the Globus upload workflow, with a corresponding new database setting: `:GlobusBatchLookupSize` @@ -59,12 +69,20 @@ This version of Dataverse fix this, and includes internationalization in the fac A bug fix was made that gets the major version of a Dataset when all major versions were deaccessioned. This fixes the incorrect showing of the files as "Unpublished" in the search list even when they are published. This fix affects the indexing, meaning these datasets must be re-indexed once Dataverse is updated. See also #10947 and #10974. 
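The per-dataset reindex call from the removed snippet bears repeating here as a sketch (the DOI is only a placeholder):

```shell
# reindexes a single affected dataset by its persistent identifier
curl "$SERVER_URL/api/admin/index/dataset?persistentId=doi:10.5072/FK2/EXAMPLE"
```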
+### Minor DataCiteXML Fix + +A minor bug fix was made to avoid sending a useless ", null" in the DataCiteXML sent to DataCite and in the DataCite export when a dataset has a metadata entry for "Software Name" and no entry for "Software Version". The bug fix will update datasets upon publication. Anyone with existing published datasets with this problem can be fixed by [pushing updated metadata to DataCite for affected datasets](https://guides.dataverse.org/en/6.5/admin/dataverses-datasets.html#update-metadata-for-a-published-dataset-at-the-pid-provider) and [re-exporting the dataset metadata](https://guides.dataverse.org/en/6.5/admin/metadataexport.html#batch-exports-through-the-api) or by steps 9 and 10 in the [v6.4 release notes](https://github.com/IQSS/dataverse/releases/tag/v6.4) to update and re-export all datasets. See also #10919. + ### Globus "missing properties" Logging Fixed In previous releases, logging would show Globus-related strings were missing from properties files. This has been fixed. See #11030 for details. ## API Updates +### Editing Collections + +A new endpoint (`PUT /api/dataverses/`) for updating an existing collection (dataverse) has been added. It uses the same JSON structure as the one used for collection creation. See also [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#update-a-dataverse-collection), #10904, and #10925. + ### fileCount Added to Search API A new search field called `fileCount` can be searched to discover the number of files per dataset. See also #8941 and #10598. @@ -87,6 +105,12 @@ Previously, not setting these fields meant using a copy of the settings from the The JSON payload of the "get collection" endpoint has been extended to include properties isMetadataBlockRoot and isFacetRoot. See also [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#view-a-dataverse-collection), #11012, and #11013. +### Get API Token Supports Any Auth + +The `/api/users/token` endpoint has been extended to support any auth mechanism for retrieving the token information. + +Previously, this endpoint only accepted an API token to retrieve its information. Now, it accepts any authentication mechanism and returns the associated API token information. See #10914 and #10924. + ## Settings Added - :GlobusBatchLookupSize From 65fbd27290be8c563f39fd63c727f6c43717e97d Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 4 Dec 2024 14:55:20 -0500 Subject: [PATCH 362/402] more content #10952 --- doc/release-notes/10901deaccessioned file edit fix.md | 1 - doc/release-notes/10904-edit-dataverse-collection-endpoint.md | 1 - doc/release-notes/6.5-release-notes.md | 4 ++++ 3 files changed, 4 insertions(+), 2 deletions(-) delete mode 100644 doc/release-notes/10901deaccessioned file edit fix.md delete mode 100644 doc/release-notes/10904-edit-dataverse-collection-endpoint.md diff --git a/doc/release-notes/10901deaccessioned file edit fix.md b/doc/release-notes/10901deaccessioned file edit fix.md deleted file mode 100644 index db12b1fc978..00000000000 --- a/doc/release-notes/10901deaccessioned file edit fix.md +++ /dev/null @@ -1 +0,0 @@ -When a dataset was deaccessioned and was the only previous version it will cause an error when trying to update the files. 
\ No newline at end of file diff --git a/doc/release-notes/10904-edit-dataverse-collection-endpoint.md b/doc/release-notes/10904-edit-dataverse-collection-endpoint.md deleted file mode 100644 index b9256941eea..00000000000 --- a/doc/release-notes/10904-edit-dataverse-collection-endpoint.md +++ /dev/null @@ -1 +0,0 @@ -Adds a new endpoint (`PUT /api/dataverses/`) for updating an existing Dataverse collection using a JSON file following the same structure as the one used in the API for the creation. diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 2d6345941bb..9500c90f53a 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -51,6 +51,10 @@ It is also possible to optionally configure a harvesting client to use this OAI- ## Bugs Fixed +### Updating Files Now Possible When Latest and Only Dataset Version is Deaccessioned + +When a dataset was deaccessioned and was the only previous version it would cause an error when trying to update the files. This has been fixed. See #9351 and #10901. + ### My Data Filter by Username Feature Fixed The superuser-only feature of filtering by a username on the My Data page was not working. The "Results for Username" field now returns data for the desired user. See also #7239 and #10980. From aa56e627fcd72edb4e0f93f416ed4aa7a032b64a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 4 Dec 2024 15:55:04 -0500 Subject: [PATCH 363/402] more content #10952 --- ...-add-api-for-comparing-dataset-versions.md | 21 ------------------- doc/release-notes/10889_bump_PG17_FlyWay10.md | 7 ------- doc/release-notes/6.5-release-notes.md | 20 +++++++++++++++++- 3 files changed, 19 insertions(+), 29 deletions(-) delete mode 100644 doc/release-notes/10888-add-api-for-comparing-dataset-versions.md delete mode 100644 doc/release-notes/10889_bump_PG17_FlyWay10.md diff --git a/doc/release-notes/10888-add-api-for-comparing-dataset-versions.md b/doc/release-notes/10888-add-api-for-comparing-dataset-versions.md deleted file mode 100644 index b82441ee11a..00000000000 --- a/doc/release-notes/10888-add-api-for-comparing-dataset-versions.md +++ /dev/null @@ -1,21 +0,0 @@ -The following API have been added: - -/api/datasets/{persistentId}/versions/{versionId0}/compare/{versionId1} - -This API lists the changes between 2 dataset versions. The Json response shows the changes per field within the Metadata block and the Terms Of Access. Also listed are the files that have been added or removed. Files that have been modified will also display the new file data plus the fields that have been modified. -When compare includes an unpublished/draft version the api token must be associated with a user having view unpublished privileges -An error will be returned if VERSION0 was not created before VERSION1 - -Example of Metadata Block field change: -```json -{ - "blockName": "Life Sciences Metadata", - "changed": [ - { - "fieldName": "Design Type", - "oldValue": "", - "newValue": "Parallel Group Design; Nested Case Control Design" - } - ] -} -``` diff --git a/doc/release-notes/10889_bump_PG17_FlyWay10.md b/doc/release-notes/10889_bump_PG17_FlyWay10.md deleted file mode 100644 index 932c06fbc3d..00000000000 --- a/doc/release-notes/10889_bump_PG17_FlyWay10.md +++ /dev/null @@ -1,7 +0,0 @@ -This release bumps both the Postgres JDBC driver and Flyway versions. 
This should better support Postgres version 17, and as of version 10 Flyway no longer requires a paid subscription to support older versions of Postgres. - -While we don't encourage the use of older Postgres versions, this flexibility may benefit some of our long-standing installations in their upgrade paths. Postgres 13 remains the version used with automated testing. - -As part of this update, the containerized development environment now uses Postgres 17 instead of 16. Developers must delete their data (`rm -rf docker-dev-volumes`) and start with an empty database. They can rerun the quickstart in the dev guide. - -The Docker compose file used for [evaluations or demos](https://dataverse-guide--10912.org.readthedocs.build/en/10912/container/running/demo.html) has been upgraded from Postgres 13 to 17. diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 9500c90f53a..16bd925eac8 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -29,7 +29,21 @@ The old "privateUrl" API endpoints for the creation and deletion of Preview (for See also #8184, #8185, #10950, and #10961. -## Multiple Language in Docker +### PostgreSQL and Flyway Updates + +This release bumps the version of PostgreSQL and Flyway used in containers as well as the PostgreSQL JDBC driver used all installations, including classic (non-Docker) installations. PostgreSQL and its driver have been bumped to version 17. Flyway has been bumped to version 10. + +PostgreSQL 13 remains the version used with automated testing, leading us to continue to [recommend](https://guides.dataverse.org/en/6.5/installation/prerequisites.html#postgresql) that version for classic installations. + +As of Flyway 10, supporting older versions of PostgreSQL no longer requires a paid subscription. While we don't encourage the use of older PostgreSQL versions, this flexibility may benefit some of our long-standing installations in their upgrade paths. + +As part of this update, the containerized development environment now uses Postgres 17 instead of 16. Developers must delete their data (`rm -rf docker-dev-volumes`) and start with an empty database, as [explained](https://groups.google.com/g/dataverse-dev/c/ffoNj5UXyzU/m/nE5oGY_sAQAJ) on the dev mailing list. They can rerun the quickstart in the dev guide. + +The Docker compose file used for [evaluations or demos](https://guides.dataverse.org/en/6.4/container/running/demo.html) has been upgraded from Postgres 13 to 17. + +See also #10889 and #10912. + +## Multiple Languages in Docker Configuration and documentation has been added to explain how to set up multiple languages (e.g. English and French) in the tutorial for setting up Dataverse in Docker. @@ -95,6 +109,10 @@ A new search field called `fileCount` can be searched to discover the number of A list of available dataset metadata exporters can now be retrieved programmatically via API. See [the docs](https://dataverse-guide--10739.org.readthedocs.build/en/10739/api/native-api.html#get-export-formats) and #10739. +### Comparing Dataset Versions + +An API has been added to compare dataset versions. See [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#compare-versions-of-a-dataset), #10888, and #10945. + ### Audit Data Files A superuser-only API endpoint has been added to audit datasets with data files where the physical files are missing or the file metadata is missing. 
See [the docs](https://dataverse-guide--11016.org.readthedocs.build/en/11016/api/native-api.html#datafile-audit), #11016, and [#220](https://github.com/IQSS/dataverse.harvard.edu/issues/220). From 8f8bce84ec9b9705d4250897739f96e8537a64c5 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 5 Dec 2024 13:04:48 -0500 Subject: [PATCH 364/402] more content #10952 --- .../10772-fix-importDDI-otherId.md | 2 -- .../10793-optimisticlockexception handling.md | 2 -- .../10814-Differencing improvement.md | 3 -- ...837-exclude-others-ns-harvesting-oai-dc.md | 3 -- ...d-expiration-date-to-recreate-token-api.md | 1 - ...date-to-conditions-to-display-image_url.md | 8 ----- doc/release-notes/6.5-release-notes.md | 36 +++++++++++++++++-- 7 files changed, 33 insertions(+), 22 deletions(-) delete mode 100644 doc/release-notes/10772-fix-importDDI-otherId.md delete mode 100644 doc/release-notes/10793-optimisticlockexception handling.md delete mode 100644 doc/release-notes/10814-Differencing improvement.md delete mode 100644 doc/release-notes/10837-exclude-others-ns-harvesting-oai-dc.md delete mode 100644 doc/release-notes/10857-add-expiration-date-to-recreate-token-api.md delete mode 100644 doc/release-notes/10886-update-to-conditions-to-display-image_url.md diff --git a/doc/release-notes/10772-fix-importDDI-otherId.md b/doc/release-notes/10772-fix-importDDI-otherId.md deleted file mode 100644 index d5a9018b2b2..00000000000 --- a/doc/release-notes/10772-fix-importDDI-otherId.md +++ /dev/null @@ -1,2 +0,0 @@ -Bug Fix : -This PR fixes the `edu.harvard.iq.dataverse.util.json.JsonParseException: incorrect multiple for field otherId` error when DDI harvested data contains multiple ortherId. \ No newline at end of file diff --git a/doc/release-notes/10793-optimisticlockexception handling.md b/doc/release-notes/10793-optimisticlockexception handling.md deleted file mode 100644 index 3312063be8f..00000000000 --- a/doc/release-notes/10793-optimisticlockexception handling.md +++ /dev/null @@ -1,2 +0,0 @@ -Improvements have been made in handling the errors when a dataset has been edited in one window and an attempt is made to -edit/publish it in another. diff --git a/doc/release-notes/10814-Differencing improvement.md b/doc/release-notes/10814-Differencing improvement.md deleted file mode 100644 index 49bbdae3e1b..00000000000 --- a/doc/release-notes/10814-Differencing improvement.md +++ /dev/null @@ -1,3 +0,0 @@ -### More Scalable Dataset Version Differencing - -Differencing between dataset versions, which is done during dataset edit operations and to populate the dataset page versions table has been made signficantly more scalable. diff --git a/doc/release-notes/10837-exclude-others-ns-harvesting-oai-dc.md b/doc/release-notes/10837-exclude-others-ns-harvesting-oai-dc.md deleted file mode 100644 index c1826bfaed5..00000000000 --- a/doc/release-notes/10837-exclude-others-ns-harvesting-oai-dc.md +++ /dev/null @@ -1,3 +0,0 @@ -Some repository extend the "oai_dc" metadata prefix with specific namespaces. In this case, harvesting of these datasets is not possible, as an XML parsing error is raised. - -The PR [#10837](https://github.com/IQSS/dataverse/pull/10837) allows the harvesting of these datasets by excluding tags with namespaces that are not "dc:", and harvest only metadata with the "dc" namespace. 
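Relating to the dataset version comparison endpoint mentioned earlier in these notes (`/api/datasets/{persistentId}/versions/{versionId0}/compare/{versionId1}`), a hedged sketch of a call follows; the `:persistentId` placeholder style and the `1.0` and `:draft` version identifiers are assumptions carried over from other dataset version endpoints, and the DOI is a placeholder:

```shell
# comparing against a draft version requires a token with view-unpublished permission
curl -H "X-Dataverse-key:$API_TOKEN" \
  "$SERVER_URL/api/datasets/:persistentId/versions/1.0/compare/:draft?persistentId=doi:10.5072/FK2/EXAMPLE"
```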
diff --git a/doc/release-notes/10857-add-expiration-date-to-recreate-token-api.md b/doc/release-notes/10857-add-expiration-date-to-recreate-token-api.md deleted file mode 100644 index b450867c630..00000000000 --- a/doc/release-notes/10857-add-expiration-date-to-recreate-token-api.md +++ /dev/null @@ -1 +0,0 @@ -An optional query parameter called 'returnExpiration' has been added to the 'users/token/recreate' endpoint, which, if set to true, returns the expiration time in the response message. diff --git a/doc/release-notes/10886-update-to-conditions-to-display-image_url.md b/doc/release-notes/10886-update-to-conditions-to-display-image_url.md deleted file mode 100644 index 6dfe8eb9f2d..00000000000 --- a/doc/release-notes/10886-update-to-conditions-to-display-image_url.md +++ /dev/null @@ -1,8 +0,0 @@ -Search API (/api/search) responses for Datafiles include image_url for the thumbnail if each of the following are true: -1. The DataFile is not Harvested -2. A Thumbnail is available for the Datafile -3. If the Datafile is Restricted then the caller must have Download File Permission for the Datafile -4. The Datafile is NOT actively embargoed -5. The Datafile's retention period has NOT expired - -See also #10875 and #10886. diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 16bd925eac8..b66ae2f1b21 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -29,6 +29,10 @@ The old "privateUrl" API endpoints for the creation and deletion of Preview (for See also #8184, #8185, #10950, and #10961. +### More Scalable Dataset Version Differencing + +Differencing between dataset versions, which is done during dataset edit operations and to populate the dataset page versions table has been made signficantly more scalable. See #10814 and #10818. + ### PostgreSQL and Flyway Updates This release bumps the version of PostgreSQL and Flyway used in containers as well as the PostgreSQL JDBC driver used all installations, including classic (non-Docker) installations. PostgreSQL and its driver have been bumped to version 17. Flyway has been bumped to version 10. @@ -55,6 +59,12 @@ An optimization has been added for the Globus upload workflow, with a correspond See the [Database Settings](https://guides.dataverse.org/en/6.5/installation/config.html#GlobusBatchLookupSize) section of the Guides, #10977, and #11040 for more information. +### Harvesting "oai_dc" Metadata Prefix When Extended With Specific Namespaces + +Some repository extend the "oai_dc" metadata prefix with specific namespaces. In this case, harvesting of these datasets was not possible because an XML parsing error was raised. + +Harvesting of these datasets has been fixed by excluding tags with namespaces that are not "dc:" and harvest only metadata with the "dc" namespace. See #10837. + ### Harvested Dataset PID from Record Header When harvesting, Dataverse can now use the identifier from the OAI-PMH record header as the persistent id for the harvested dataset. @@ -63,6 +73,10 @@ This will allow harvesting from sources that do not include a persistent id in t It is also possible to optionally configure a harvesting client to use this OAI-PMH identifier as the **preferred** choice for the persistent id. See the [Harvesting Clients API](https://guides.dataverse.org/en/6.5/api/native-api.html#create-a-harvesting-client) section of the Guides, #11049 and #10982 for more information. 
+### Harvested Datasets Can Have Multiple "otherId" Values + +When harvesting using the DDI format, datasets can now have multiple "otherId" values. See #10772. + ## Bugs Fixed ### Updating Files Now Possible When Latest and Only Dataset Version is Deaccessioned @@ -73,6 +87,10 @@ When a dataset was deaccessioned and was the only previous version it would caus The superuser-only feature of filtering by a username on the My Data page was not working. The "Results for Username" field now returns data for the desired user. See also #7239 and #10980. +### Better Handling of Parallel Edit/Publish Errors + +Improvements have been made in handling the errors when a dataset has been edited in one browser window and an attempt is made to edit/publish it in another. (This practice is discouraged.) See #10793 and #10794. + ### Version Differences Details Sorting Added In order to facilitate the comparison between the draft version and the published version of a dataset, a sort on subfields has been added. See #10969. @@ -127,11 +145,23 @@ Previously, not setting these fields meant using a copy of the settings from the The JSON payload of the "get collection" endpoint has been extended to include properties isMetadataBlockRoot and isFacetRoot. See also [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#view-a-dataverse-collection), #11012, and #11013. -### Get API Token Supports Any Auth +### Image URLs from the Search API + +As of 6.4 (thanks to #10855) `image_url` is being returned from the Search API. The logic has been updated to only show the image if each of the following are true: + +1. The DataFile is not Harvested +2. A Thumbnail is available for the Datafile +3. If the Datafile is Restricted then the caller must have Download File Permission for the Datafile +4. The Datafile is NOT actively embargoed +5. The Datafile's retention period has NOT expired + +See also #10875 and #10886. + +## API Tokens -The `/api/users/token` endpoint has been extended to support any auth mechanism for retrieving the token information. +An optional query parameter called "returnExpiration" has been added to the "/api/users/token/recreate" endpoint, which, if set to true, returns the expiration time in the response message. See [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#recreate-a-token), #10857 and #10858. -Previously, this endpoint only accepted an API token to retrieve its information. Now, it accepts any authentication mechanism and returns the associated API token information. See #10914 and #10924. +The `/api/users/token` endpoint has been extended to support any auth mechanism for retrieving the token information. Previously, this endpoint only accepted an API token to retrieve its information. Now, it accepts any authentication mechanism and returns the associated API token information. See #10914 and #10924. 
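A brief sketch of the recreate call with the new parameter (the header and placeholder variables follow the conventions used elsewhere in these notes):

```shell
# recreates the calling user's API token; returnExpiration=true reports the new expiration time
curl -X POST -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/users/token/recreate?returnExpiration=true"
```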
## Settings Added From c331d895573af5edcbafb94afeeef8dd9c265456 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 5 Dec 2024 13:19:44 -0500 Subject: [PATCH 365/402] added last of the snippets (for now) #10952 --- doc/release-notes/10379-MetricsBugsFixes.md | 10 ------ .../10661-guestbook-email-bug-fix.md | 4 --- .../10688_whitespace_trimming.md | 6 ---- .../10697-improve-permission-indexing.md | 7 ---- ...C Citation and DOI parsing improvements.md | 3 -- ...0742-newest-oldest-sort-order-backwards.md | 3 -- doc/release-notes/6.5-release-notes.md | 35 +++++++++++++++++++ 7 files changed, 35 insertions(+), 33 deletions(-) delete mode 100644 doc/release-notes/10379-MetricsBugsFixes.md delete mode 100644 doc/release-notes/10661-guestbook-email-bug-fix.md delete mode 100644 doc/release-notes/10688_whitespace_trimming.md delete mode 100644 doc/release-notes/10697-improve-permission-indexing.md delete mode 100644 doc/release-notes/10708 - MDC Citation and DOI parsing improvements.md delete mode 100644 doc/release-notes/10742-newest-oldest-sort-order-backwards.md diff --git a/doc/release-notes/10379-MetricsBugsFixes.md b/doc/release-notes/10379-MetricsBugsFixes.md deleted file mode 100644 index 0ebc6d99f0b..00000000000 --- a/doc/release-notes/10379-MetricsBugsFixes.md +++ /dev/null @@ -1,10 +0,0 @@ - -### Metrics API Bug fixes - -Two bugs in the Metrics API have been fixed: - -- The /datasets and /datasets/byMonth endpoints could report incorrect values if/when they have been called using the dataLocation parameter (which allows getting metrics for local, remote (harvested), or all datasets) as the metrics cache was not storing different values for these cases. - -- Metrics endpoints who's calculation relied on finding the latest published datasetversion were incorrect if/when the minor version number was > 9. - -When deploying the new release, the [/api/admin/clearMetricsCache](https://guides.dataverse.org/en/latest/api/native-api.html#metrics) API should be called to remove old cached values that may be incorrect. \ No newline at end of file diff --git a/doc/release-notes/10661-guestbook-email-bug-fix.md b/doc/release-notes/10661-guestbook-email-bug-fix.md deleted file mode 100644 index 05e70c9762a..00000000000 --- a/doc/release-notes/10661-guestbook-email-bug-fix.md +++ /dev/null @@ -1,4 +0,0 @@ - -### Guestbook Email Validation Bug fix - -Guestbook UI Form: Email address is now checked for valid email format diff --git a/doc/release-notes/10688_whitespace_trimming.md b/doc/release-notes/10688_whitespace_trimming.md deleted file mode 100644 index 52904c00fbf..00000000000 --- a/doc/release-notes/10688_whitespace_trimming.md +++ /dev/null @@ -1,6 +0,0 @@ -### Added whitespace trimming to uploaded custom metadata TSV files - -When loading custom metadata blocks using the `api/admin/datasetfield/load` API, whitespace can be introduced into field names. -This change trims whitespace at the beginning and end of all values read into the API before persisting them. - -For more information, see #10688. 
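For the metrics cache clearing mentioned in the removed snippet above, a minimal sketch (the DELETE method is the usual convention for this admin endpoint; verify against the Metrics API documentation):

```shell
# clears cached metrics so corrected values are recomputed on the next request
curl -X DELETE "$SERVER_URL/api/admin/clearMetricsCache"
```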
diff --git a/doc/release-notes/10697-improve-permission-indexing.md b/doc/release-notes/10697-improve-permission-indexing.md deleted file mode 100644 index b232b1c4d3c..00000000000 --- a/doc/release-notes/10697-improve-permission-indexing.md +++ /dev/null @@ -1,7 +0,0 @@ -### Reindexing after a role assignment is less memory intensive - -Adding/removing a user from a role on a collection, particularly the root collection, could lead to a significant increase in memory use resulting in Dataverse itself failing with an out-of-memory condition. Such changes now consume much less memory. - -If you have experienced out-of-memory failures in Dataverse in the past that could have been caused by this problem, you may wish to run a [reindex in place](https://guides.dataverse.org/en/latest/admin/solr-search-index.html#reindex-in-place) to update any out-of-date information. - -For more information, see #10697 and #10698. diff --git a/doc/release-notes/10708 - MDC Citation and DOI parsing improvements.md b/doc/release-notes/10708 - MDC Citation and DOI parsing improvements.md deleted file mode 100644 index 86c1bb14d32..00000000000 --- a/doc/release-notes/10708 - MDC Citation and DOI parsing improvements.md +++ /dev/null @@ -1,3 +0,0 @@ -MDC Citation retrieval with the PID settings has been fixed. -PID parsing in Dataverse is now case insensitive, improving interaction with services that may change the case of PIDs. -Warnings related to managed/excluded PID lists for PID providers have been reduced diff --git a/doc/release-notes/10742-newest-oldest-sort-order-backwards.md b/doc/release-notes/10742-newest-oldest-sort-order-backwards.md deleted file mode 100644 index 0afaf45449d..00000000000 --- a/doc/release-notes/10742-newest-oldest-sort-order-backwards.md +++ /dev/null @@ -1,3 +0,0 @@ -## Minor bug fix to UI to fix the order of the files on the Dataset Files page when ordering by Date - -A fix was made to the ui to fix the ordering 'Newest' and 'Oldest' which were reversed diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index b66ae2f1b21..47034806d90 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -33,6 +33,14 @@ See also #8184, #8185, #10950, and #10961. Differencing between dataset versions, which is done during dataset edit operations and to populate the dataset page versions table has been made signficantly more scalable. See #10814 and #10818. +### Reindexing After a Role Assignment is Less Memory Intensive + +Adding or removing a user from a role on a collection, particularly the root collection, could lead to a significant increase in memory use, resulting in Dataverse itself failing with an out-of-memory condition. Such changes now consume much less memory. + +If you have experienced out-of-memory failures in Dataverse in the past that could have been caused by this problem, you may wish to run a [reindex in place](https://guides.dataverse.org/en/latest/admin/solr-search-index.html#reindex-in-place) to update any out-of-date information. + +For more information, see #10697 and #10698. + ### PostgreSQL and Flyway Updates This release bumps the version of PostgreSQL and Flyway used in containers as well as the PostgreSQL JDBC driver used all installations, including classic (non-Docker) installations. PostgreSQL and its driver have been bumped to version 17. Flyway has been bumped to version 10. 
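As a reference sketch (not part of the patch above), the "reindex in place" mentioned in the role-assignment reindexing note is simply a call to the admin index API, the same command used in the upgrade instructions later in these notes:

```shell
# Rebuild the existing Solr index "in place" (no clearing step required)
curl http://localhost:8080/api/admin/index
```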
@@ -79,6 +87,14 @@ When harvesting using the DDI format, datasets can now have multiple "otherId" v ## Bugs Fixed +### Sort Order for Files + +"Newest" and "Oldest" were reversed when sorting files on the dataset landing page. This has been fixed. See #10742 and #11000. + +### Guestbook Email Validation + +In the Guestbook UI form, the email address is now checked for validity. See #10661 and #11022. + ### Updating Files Now Possible When Latest and Only Dataset Version is Deaccessioned When a dataset was deaccessioned and was the only previous version it would cause an error when trying to update the files. This has been fixed. See #9351 and #10901. @@ -109,6 +125,10 @@ A bug fix was made that gets the major version of a Dataset when all major versi A minor bug fix was made to avoid sending a useless ", null" in the DataCiteXML sent to DataCite and in the DataCite export when a dataset has a metadata entry for "Software Name" and no entry for "Software Version". The bug fix will update datasets upon publication. Anyone with existing published datasets with this problem can be fixed by [pushing updated metadata to DataCite for affected datasets](https://guides.dataverse.org/en/6.5/admin/dataverses-datasets.html#update-metadata-for-a-published-dataset-at-the-pid-provider) and [re-exporting the dataset metadata](https://guides.dataverse.org/en/6.5/admin/metadataexport.html#batch-exports-through-the-api) or by steps 9 and 10 in the [v6.4 release notes](https://github.com/IQSS/dataverse/releases/tag/v6.4) to update and re-export all datasets. See also #10919. +### PIDs and Make Data Count Citation Retrieval + +Make Data Count (MDC) citation retrieval with the PID settings has been fixed. PID parsing in Dataverse is now case insensitive, improving interaction with services that may change the case of PIDs. Warnings related to managed/excluded PID lists for PID providers have been reduced. See #10708. + ### Globus "missing properties" Logging Fixed In previous releases, logging would show Globus-related strings were missing from properties files. This has been fixed. See #11030 for details. @@ -145,6 +165,10 @@ Previously, not setting these fields meant using a copy of the settings from the The JSON payload of the "get collection" endpoint has been extended to include properties isMetadataBlockRoot and isFacetRoot. See also [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#view-a-dataverse-collection), #11012, and #11013. +### Whitespace Trimming When Loading Metadata Block TSV Files + +When loading custom metadata blocks using the `api/admin/datasetfield/load` API, whitespace can be introduced into field names. Whitespace is now trimmed from the beginning and end of all values read into the API before persisting them. See #10688 and #10696. + ### Image URLs from the Search API As of 6.4 (thanks to #10855) `image_url` is being returned from the Search API. The logic has been updated to only show the image if each of the following are true: @@ -157,6 +181,17 @@ As of 6.4 (thanks to #10855) `image_url` is being returned from the Search API. See also #10875 and #10886. +### Metrics API Bug Fixes + +Two bugs in the Metrics API have been fixed: + +- The /datasets and /datasets/byMonth endpoints could report incorrect values if/when they have been called using the dataLocation parameter (which allows getting metrics for local, remote (harvested), or all datasets) as the metrics cache was not storing different values for these cases. 
+ +- Metrics endpoints who's calculation relied on finding the latest published datasetversion were incorrect if/when the minor version number was > 9. + +When deploying the new release, the [/api/admin/clearMetricsCache](https://guides.dataverse.org/en/latest/api/native-api.html#metrics) API should be called to remove old cached values that may be incorrect. +See #10379 and #10865. + ## API Tokens An optional query parameter called "returnExpiration" has been added to the "/api/users/token/recreate" endpoint, which, if set to true, returns the expiration time in the response message. See [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#recreate-a-token), #10857 and #10858. From cabf738f0d94495c7156dcc3a48feefd51f72a17 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 5 Dec 2024 15:23:10 -0500 Subject: [PATCH 366/402] fix URLs #10952 --- doc/release-notes/6.5-release-notes.md | 30 +++++++++++++------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 47034806d90..c12da62009a 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -25,7 +25,7 @@ Also, additional information about the creation of Preview URLs has been added t Any Private URLs created in previous versions of Dataverse will continue to work. -The old "privateUrl" API endpoints for the creation and deletion of Preview (formerly Private) URLs have been deprecated. They will continue to work but please switch to the "previewUrl" equivalents that have been [documented](https://dataverse-guide--10961.org.readthedocs.build/en/10961/api/native-api.html#create-a-preview-url-for-a-dataset) in the API Guide. +The old "privateUrl" API endpoints for the creation and deletion of Preview (formerly Private) URLs have been deprecated. They will continue to work but please switch to the "previewUrl" equivalents that have been [documented](https://guides.dataverse.org/en/6.5/api/native-api.html#create-a-preview-url-for-a-dataset) in the API Guide. See also #8184, #8185, #10950, and #10961. @@ -59,7 +59,7 @@ See also #10889 and #10912. Configuration and documentation has been added to explain how to set up multiple languages (e.g. English and French) in the tutorial for setting up Dataverse in Docker. -See also [the docs](https://dataverse-guide--10940.org.readthedocs.build/en/10940/container/running/demo.html#multiple-languages), #10939, and #10940. +See also [the docs](https://guides.dataverse.org/en/6.5/container/running/demo.html#multiple-languages), #10939, and #10940. ### GlobusBatchLookupSize @@ -73,11 +73,11 @@ Some repository extend the "oai_dc" metadata prefix with specific namespaces. In Harvesting of these datasets has been fixed by excluding tags with namespaces that are not "dc:" and harvest only metadata with the "dc" namespace. See #10837. -### Harvested Dataset PID from Record Header +### Harvested Dataset PID from Record Header When harvesting, Dataverse can now use the identifier from the OAI-PMH record header as the persistent id for the harvested dataset. -This will allow harvesting from sources that do not include a persistent id in their oai_dc metadata records, but use valid DOIs or handles as the OAI-PMH record header identifiers. +This will allow harvesting from sources that do not include a persistent id in their oai_dc metadata records, but use valid DOIs or handles as the OAI-PMH record header identifiers. 
It is also possible to optionally configure a harvesting client to use this OAI-PMH identifier as the **preferred** choice for the persistent id. See the [Harvesting Clients API](https://guides.dataverse.org/en/6.5/api/native-api.html#create-a-harvesting-client) section of the Guides, #11049 and #10982 for more information. @@ -137,7 +137,7 @@ In previous releases, logging would show Globus-related strings were missing fro ### Editing Collections -A new endpoint (`PUT /api/dataverses/`) for updating an existing collection (dataverse) has been added. It uses the same JSON structure as the one used for collection creation. See also [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#update-a-dataverse-collection), #10904, and #10925. +A new endpoint (`PUT /api/dataverses/`) for updating an existing collection (dataverse) has been added. It uses the same JSON structure as the one used for collection creation. See also [the docs](https://guides.dataverse.org/en/6.5/api/native-api.html#update-a-dataverse-collection), #10904, and #10925. ### fileCount Added to Search API @@ -145,25 +145,25 @@ A new search field called `fileCount` can be searched to discover the number of ### List Dataset Metadata Exporters -A list of available dataset metadata exporters can now be retrieved programmatically via API. See [the docs](https://dataverse-guide--10739.org.readthedocs.build/en/10739/api/native-api.html#get-export-formats) and #10739. +A list of available dataset metadata exporters can now be retrieved programmatically via API. See [the docs](https://guides.dataverse.org/en/6.5/api/native-api.html#get-export-formats) and #10739. ### Comparing Dataset Versions -An API has been added to compare dataset versions. See [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#compare-versions-of-a-dataset), #10888, and #10945. +An API has been added to compare dataset versions. See [the docs](https://guides.dataverse.org/en/6.5/api/native-api.html#compare-versions-of-a-dataset), #10888, and #10945. ### Audit Data Files -A superuser-only API endpoint has been added to audit datasets with data files where the physical files are missing or the file metadata is missing. See [the docs](https://dataverse-guide--11016.org.readthedocs.build/en/11016/api/native-api.html#datafile-audit), #11016, and [#220](https://github.com/IQSS/dataverse.harvard.edu/issues/220). +A superuser-only API endpoint has been added to audit datasets with data files where the physical files are missing or the file metadata is missing. See [the docs](https://guides.dataverse.org/en/6.5/api/native-api.html#datafile-audit), #11016, and [#220](https://github.com/IQSS/dataverse.harvard.edu/issues/220). ### Update Collection API Inheritance The update collection (dataverse) API endpoint has been updated to support an "inherit from parent" configuration for metadata blocks, facets, and input levels. -Previously, not setting these fields meant using a copy of the settings from the parent collection, which could get out of sync.. See also [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#update-a-dataverse-collection), #11018, and #11026. +Previously, not setting these fields meant using a copy of the settings from the parent collection, which could get out of sync.. See also [the docs](https://guides.dataverse.org/en/6.5/api/native-api.html#update-a-dataverse-collection), #11018, and #11026. 
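A minimal sketch (not from the original patch) of the update call described in the "Editing Collections" and "Update Collection API Inheritance" notes above; the collection alias and JSON filename are placeholders, and the JSON body uses the same structure as collection creation. Per the inheritance change, leaving the metadata blocks, facets, and input levels out of the JSON now means they are inherited from the parent collection:

```shell
# "mycollection" and dataverse-update.json are placeholders
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx

curl -H "X-Dataverse-key:$API_TOKEN" -X PUT \
  "http://localhost:8080/api/dataverses/mycollection" \
  --upload-file dataverse-update.json
```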
### isMetadataBlockRoot and isFacetRoot -The JSON payload of the "get collection" endpoint has been extended to include properties isMetadataBlockRoot and isFacetRoot. See also [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#view-a-dataverse-collection), #11012, and #11013. +The JSON payload of the "get collection" endpoint has been extended to include properties isMetadataBlockRoot and isFacetRoot. See also [the docs](https://guides.dataverse.org/en/6.5/api/native-api.html#view-a-dataverse-collection), #11012, and #11013. ### Whitespace Trimming When Loading Metadata Block TSV Files @@ -185,16 +185,16 @@ See also #10875 and #10886. Two bugs in the Metrics API have been fixed: -- The /datasets and /datasets/byMonth endpoints could report incorrect values if/when they have been called using the dataLocation parameter (which allows getting metrics for local, remote (harvested), or all datasets) as the metrics cache was not storing different values for these cases. +- The /datasets and /datasets/byMonth endpoints could report incorrect values if/when they have been called using the dataLocation parameter (which allows getting metrics for local, remote (harvested), or all datasets) as the metrics cache was not storing different values for these cases. -- Metrics endpoints who's calculation relied on finding the latest published datasetversion were incorrect if/when the minor version number was > 9. +- Metrics endpoints who's calculation relied on finding the latest published datasetversion were incorrect if/when the minor version number was > 9. When deploying the new release, the [/api/admin/clearMetricsCache](https://guides.dataverse.org/en/latest/api/native-api.html#metrics) API should be called to remove old cached values that may be incorrect. See #10379 and #10865. ## API Tokens -An optional query parameter called "returnExpiration" has been added to the "/api/users/token/recreate" endpoint, which, if set to true, returns the expiration time in the response message. See [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#recreate-a-token), #10857 and #10858. +An optional query parameter called "returnExpiration" has been added to the "/api/users/token/recreate" endpoint, which, if set to true, returns the expiration time in the response message. See [the docs](https://guides.dataverse.org/en/6.5/api/native-api.html#recreate-a-token), #10857 and #10858. The `/api/users/token` endpoint has been extended to support any auth mechanism for retrieving the token information. Previously, this endpoint only accepted an API token to retrieve its information. Now, it accepts any authentication mechanism and returns the associated API token information. See #10914 and #10924. @@ -204,11 +204,11 @@ The `/api/users/token` endpoint has been extended to support any auth mechanism ## Backward Incompatible Changes -Generally speaking, see the [API Changelog](https://preview.guides.gdcc.io/en/develop/api/changelog.html) for a list of backward-incompatible changes. +Generally speaking, see the [API Changelog](https://guides.dataverse.org/en/6.5/api/changelog.html) for a list of backward-incompatible changes. ### List Collections Linked to a Dataset -The API endpoint that returns a list of collections that a dataset has been linked to has been improved to provide a more structured JSON response. 
See [the docs](https://dataverse-guide--9665.org.readthedocs.build/en/9665/admin/dataverses-datasets.html#list-collections-that-are-linked-from-a-dataset), #9650, and #9665. +The API endpoint that returns a list of collections that a dataset has been linked to has been improved to provide a more structured JSON response. See [the docs](https://guides.dataverse.org/en/6.5/admin/dataverses-datasets.html#list-collections-that-are-linked-from-a-dataset), #9650, and #9665. ## Complete List of Changes From 8d1b4a18dc2a40b29d48b797387cf8d4f1d5d07e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 5 Dec 2024 15:40:06 -0500 Subject: [PATCH 367/402] add highlights and rework content a bit #10952 --- doc/release-notes/6.5-release-notes.md | 37 +++++++++++++++----------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index c12da62009a..c2d21062914 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -6,18 +6,19 @@ This release brings new features, enhancements, and bug fixes to Dataverse. Than ## Release Highlights -New features in Dataverse 6.5: - +Highlights in Dataverse 6.5 include: + +- new API endpoints, including editing of collections, Search API file counts, listing of exporters, comparing dataset versions, and auditing data files +- UX improvements, especially Preview URLs +- increased harvesting flexibility +- performance gains +- a security vulnerability addressed +- many bugs fixes - and more! Please see below. - ## Features Added -### Longer Custom Questions in Guestbooks - -Custom questions in Guestbooks can now be more than 255 characters and the bug causing a silent failure when questions were longer than this limit has been fixed. See also #9492, #10117, #10118. - -### Private URL Renamed to Preview URL +### Private URL Renamed to Preview URL and Improved With this release the name of the URL that may be used by dataset administrators to share a draft version of a dataset has been changed from Private URL to Preview URL. @@ -41,6 +42,10 @@ If you have experienced out-of-memory failures in Dataverse in the past that cou For more information, see #10697 and #10698. +### Longer Custom Questions in Guestbooks + +Custom questions in Guestbooks can now be more than 255 characters and the bug causing a silent failure when questions were longer than this limit has been fixed. See also #9492, #10117, #10118. + ### PostgreSQL and Flyway Updates This release bumps the version of PostgreSQL and Flyway used in containers as well as the PostgreSQL JDBC driver used all installations, including classic (non-Docker) installations. PostgreSQL and its driver have been bumped to version 17. Flyway has been bumped to version 10. @@ -61,12 +66,6 @@ Configuration and documentation has been added to explain how to set up multiple See also [the docs](https://guides.dataverse.org/en/6.5/container/running/demo.html#multiple-languages), #10939, and #10940. -### GlobusBatchLookupSize - -An optimization has been added for the Globus upload workflow, with a corresponding new database setting: `:GlobusBatchLookupSize` - -See the [Database Settings](https://guides.dataverse.org/en/6.5/installation/config.html#GlobusBatchLookupSize) section of the Guides, #10977, and #11040 for more information. - ### Harvesting "oai_dc" Metadata Prefix When Extended With Specific Namespaces Some repository extend the "oai_dc" metadata prefix with specific namespaces. 
In this case, harvesting of these datasets was not possible because an XML parsing error was raised. @@ -85,6 +84,12 @@ It is also possible to optionally configure a harvesting client to use this OAI- When harvesting using the DDI format, datasets can now have multiple "otherId" values. See #10772. +### GlobusBatchLookupSize + +An optimization has been added for the Globus upload workflow, with a corresponding new database setting: `:GlobusBatchLookupSize` + +See the [Database Settings](https://guides.dataverse.org/en/6.5/installation/config.html#GlobusBatchLookupSize) section of the Guides, #10977, and #11040 for more information. + ## Bugs Fixed ### Sort Order for Files @@ -99,7 +104,7 @@ In the Guestbook UI form, the email address is now checked for validity. See #10 When a dataset was deaccessioned and was the only previous version it would cause an error when trying to update the files. This has been fixed. See #9351 and #10901. -### My Data Filter by Username Feature Fixed +### My Data Filter by Username Feature Restored The superuser-only feature of filtering by a username on the My Data page was not working. The "Results for Username" field now returns data for the desired user. See also #7239 and #10980. @@ -121,7 +126,7 @@ This version of Dataverse fix this, and includes internationalization in the fac A bug fix was made that gets the major version of a Dataset when all major versions were deaccessioned. This fixes the incorrect showing of the files as "Unpublished" in the search list even when they are published. This fix affects the indexing, meaning these datasets must be re-indexed once Dataverse is updated. See also #10947 and #10974. -### Minor DataCiteXML Fix +### Minor DataCiteXML Fix (Useless Null) A minor bug fix was made to avoid sending a useless ", null" in the DataCiteXML sent to DataCite and in the DataCite export when a dataset has a metadata entry for "Software Name" and no entry for "Software Version". The bug fix will update datasets upon publication. Anyone with existing published datasets with this problem can be fixed by [pushing updated metadata to DataCite for affected datasets](https://guides.dataverse.org/en/6.5/admin/dataverses-datasets.html#update-metadata-for-a-published-dataset-at-the-pid-provider) and [re-exporting the dataset metadata](https://guides.dataverse.org/en/6.5/admin/metadataexport.html#batch-exports-through-the-api) or by steps 9 and 10 in the [v6.4 release notes](https://github.com/IQSS/dataverse/releases/tag/v6.4) to update and re-export all datasets. See also #10919. From 7503ea36833834890ba9e8edd99f037610e775ba Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 5 Dec 2024 15:48:50 -0500 Subject: [PATCH 368/402] add Relation Type bug (no release note snippet) #10952 #10926 --- doc/release-notes/6.5-release-notes.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index c2d21062914..3d18d1ae197 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -92,6 +92,10 @@ See the [Database Settings](https://guides.dataverse.org/en/6.5/installation/con ## Bugs Fixed +### Relation Type (Related Publication) and DataCite + +The subfield "Relation Type" was added to the field "Related Publication" in Dataverse 6.4 (#10632) but couldn't be used without workarounds described in an [announcement](https://groups.google.com/g/dataverse-community/c/zlRGJtu3x4g/m/GtVZ26uaBQAJ) about the problem. 
The bug has been fixed and workarounds and are longer required. See also #10926. + ### Sort Order for Files "Newest" and "Oldest" were reversed when sorting files on the dataset landing page. This has been fixed. See #10742 and #11000. From 77834a24a64ad8e29c377cb836a0d3128a9af846 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 5 Dec 2024 15:50:57 -0500 Subject: [PATCH 369/402] demo docker feature, fix heading #10952 --- doc/release-notes/6.5-release-notes.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 3d18d1ae197..30a2fad5504 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -60,12 +60,6 @@ The Docker compose file used for [evaluations or demos](https://guides.dataverse See also #10889 and #10912. -## Multiple Languages in Docker - -Configuration and documentation has been added to explain how to set up multiple languages (e.g. English and French) in the tutorial for setting up Dataverse in Docker. - -See also [the docs](https://guides.dataverse.org/en/6.5/container/running/demo.html#multiple-languages), #10939, and #10940. - ### Harvesting "oai_dc" Metadata Prefix When Extended With Specific Namespaces Some repository extend the "oai_dc" metadata prefix with specific namespaces. In this case, harvesting of these datasets was not possible because an XML parsing error was raised. @@ -84,6 +78,12 @@ It is also possible to optionally configure a harvesting client to use this OAI- When harvesting using the DDI format, datasets can now have multiple "otherId" values. See #10772. +### Multiple Languages in Docker + +Configuration and documentation has been added to explain how to set up multiple languages (e.g. English and French) in the tutorial for setting up Dataverse in Docker. + +See also [the docs](https://guides.dataverse.org/en/6.5/container/running/demo.html#multiple-languages), #10939, and #10940. + ### GlobusBatchLookupSize An optimization has been added for the Globus upload workflow, with a corresponding new database setting: `:GlobusBatchLookupSize` From 8ce290902a1d4a3ea2bb54f7f2474b5102f0724a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 5 Dec 2024 16:02:06 -0500 Subject: [PATCH 370/402] tweaks #10952 --- doc/release-notes/6.5-release-notes.md | 24 +++++------------------- 1 file changed, 5 insertions(+), 19 deletions(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 30a2fad5504..45b53c673b7 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -132,7 +132,7 @@ A bug fix was made that gets the major version of a Dataset when all major versi ### Minor DataCiteXML Fix (Useless Null) -A minor bug fix was made to avoid sending a useless ", null" in the DataCiteXML sent to DataCite and in the DataCite export when a dataset has a metadata entry for "Software Name" and no entry for "Software Version". The bug fix will update datasets upon publication. 
Anyone with existing published datasets with this problem can be fixed by [pushing updated metadata to DataCite for affected datasets](https://guides.dataverse.org/en/6.5/admin/dataverses-datasets.html#update-metadata-for-a-published-dataset-at-the-pid-provider) and [re-exporting the dataset metadata](https://guides.dataverse.org/en/6.5/admin/metadataexport.html#batch-exports-through-the-api) or by steps 9 and 10 in the [v6.4 release notes](https://github.com/IQSS/dataverse/releases/tag/v6.4) to update and re-export all datasets. See also #10919. +A minor bug fix was made to avoid sending a useless ", null" in the DataCiteXML sent to DataCite and in the DataCite export when a dataset has a metadata entry for "Software Name" and no entry for "Software Version". The bug fix will update datasets upon publication. Anyone with existing published datasets with this problem can be fixed by [pushing updated metadata to DataCite for affected datasets](https://guides.dataverse.org/en/6.5/admin/dataverses-datasets.html#update-metadata-for-a-published-dataset-at-the-pid-provider) and [re-exporting the dataset metadata](https://guides.dataverse.org/en/6.5/admin/metadataexport.html#batch-exports-through-the-api). See "Pushing updated metadata to DataCite" in the upgrade instructions below. See also #10919. ### PIDs and Make Data Count Citation Retrieval @@ -201,7 +201,7 @@ Two bugs in the Metrics API have been fixed: When deploying the new release, the [/api/admin/clearMetricsCache](https://guides.dataverse.org/en/latest/api/native-api.html#metrics) API should be called to remove old cached values that may be incorrect. See #10379 and #10865. -## API Tokens +### API Tokens An optional query parameter called "returnExpiration" has been added to the "/api/users/token/recreate" endpoint, which, if set to true, returns the expiration time in the response message. See [the docs](https://guides.dataverse.org/en/6.5/api/native-api.html#recreate-a-token), #10857 and #10858. @@ -219,7 +219,6 @@ Generally speaking, see the [API Changelog](https://guides.dataverse.org/en/6.5/ The API endpoint that returns a list of collections that a dataset has been linked to has been improved to provide a more structured JSON response. See [the docs](https://guides.dataverse.org/en/6.5/admin/dataverses-datasets.html#list-collections-that-are-linked-from-a-dataset), #9650, and #9665. - ## Complete List of Changes For the complete list of code changes in this release, see the [6.5 milestone](https://github.com/IQSS/dataverse/issues?q=milestone%3A6.5+is%3Aclosed) in GitHub. @@ -228,7 +227,6 @@ For the complete list of code changes in this release, see the [6.5 milestone](h For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/g/dataverse-community) or email support@dataverse.org. - ## Installation If this is a new installation, please follow our [Installation Guide](https://guides.dataverse.org/en/latest/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/latest/installation/intro.html#getting-help) if you need it! @@ -297,13 +295,7 @@ service payara start 6\. Update metadata blocks -These changes reflect incremental improvements made to the handling of core metadata fields. 
- -```shell -wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/scripts/api/data/metadatablocks/citation.tsv - -curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file citation.tsv -``` +No changes required for this release. 7\. Update Solr schema.xml file. Start with the standard v6.4 schema.xml, then, if your installation uses any custom or experimental metadata blocks, update it to include the extra fields (step 7a). @@ -350,8 +342,6 @@ The API above rebuilds the existing index "in place". If you want to be absolute 9\. Run reExportAll to update dataset metadata exports -This step is necessary because of changes described above for the `Datacite` and `oai_dc` export formats. - Below is the simple way to reexport all dataset metadata. For more advanced usage, please see [the guides](http://guides.dataverse.org/en/6.4/admin/metadataexport.html#batch-exports-through-the-api). ```shell @@ -360,11 +350,9 @@ curl http://localhost:8080/api/admin/metadata/reExportAll 10\. Pushing updated metadata to DataCite -(If you don't use DataCite, you can skip this.) +(If you don't use DataCite, you can skip this. Also, if you aren't affected by the "useless null" bug described above, you can skip this.) -Above you updated the citation metadata block and Solr with the new "relationType" field. With these two changes, the "Relation Type" fields will be available and creation/publication of datasets will result in the expanded XML being sent to DataCite. You've also already run "reExportAll" to update the `Datacite` metadata export format. - -Entries at DataCite for published datasets can be updated by a superuser using an API call (newly [documented](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-metadata-for-all-published-datasets-at-the-pid-provider)): +Entries at DataCite for published datasets can be updated by a superuser using an API call (newly [documented](https://guides.dataverse.org/en/6.5/admin/dataverses-datasets.html#update-metadata-for-all-published-datasets-at-the-pid-provider)): `curl -X POST -H 'X-Dataverse-key:' http://localhost:8080/api/datasets/modifyRegistrationPIDMetadataAll` @@ -377,5 +365,3 @@ Failures may occur if PIDs were never registered, or if they were never made fin PIDs can also be updated by a superuser on a per-dataset basis using `curl -X POST -H 'X-Dataverse-key:' http://localhost:8080/api/datasets//modifyRegistrationMetadata` - -### Additional Upgrade Steps From 7885fa26db09a06d3c4e49e59a266d2b8ed3ec9b Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 5 Dec 2024 16:13:42 -0500 Subject: [PATCH 371/402] typo Co-authored-by: Omer Fahim --- doc/release-notes/6.5-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 45b53c673b7..60d26b58c70 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -13,7 +13,7 @@ Highlights in Dataverse 6.5 include: - increased harvesting flexibility - performance gains - a security vulnerability addressed -- many bugs fixes +- many bug fixes - and more! Please see below. 
## Features Added From 48d77d33ab6dada93495e445d7ac1d197759b06f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 5 Dec 2024 16:14:01 -0500 Subject: [PATCH 372/402] spelling Co-authored-by: Omer Fahim --- doc/release-notes/6.5-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 60d26b58c70..4d900b12485 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -32,7 +32,7 @@ See also #8184, #8185, #10950, and #10961. ### More Scalable Dataset Version Differencing -Differencing between dataset versions, which is done during dataset edit operations and to populate the dataset page versions table has been made signficantly more scalable. See #10814 and #10818. +Differencing between dataset versions, which is done during dataset edit operations and to populate the dataset page versions table has been made significantly more scalable. See #10814 and #10818. ### Reindexing After a Role Assignment is Less Memory Intensive From 5e21b80dcc9c140da8afed4de8ac50ac8f878fab Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 6 Dec 2024 12:35:53 -0500 Subject: [PATCH 373/402] remove trailing ` --- doc/release-notes/6.5-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 4d900b12485..7488013b1dd 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -249,7 +249,7 @@ If you are running Payara as a non-root user (and you should be!), **remember no In the following commands, we assume that Payara 6 is installed in `/usr/local/payara6`. If not, adjust as needed. ```shell -export PAYARA=/usr/local/payara6` +export PAYARA=/usr/local/payara6 ``` (or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell) From 6cf718e24819040c030391c99dcec12597393494 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 6 Dec 2024 12:55:25 -0500 Subject: [PATCH 374/402] tweak deploy step, remove mdb update and schema.xml steps (not needed) #10952 --- doc/release-notes/6.5-release-notes.md | 61 +++++++------------------- 1 file changed, 15 insertions(+), 46 deletions(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 7488013b1dd..3d05f9e9edb 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -254,20 +254,26 @@ export PAYARA=/usr/local/payara6 (or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell) -1\. Undeploy the previous version +1\. List deployed applications + +```shell +$PAYARA/bin/asadmin list-applications +``` + +2\. Undeploy the previous version (should match "list-applications" above) ```shell $PAYARA/bin/asadmin undeploy dataverse-6.4 ``` -2\. Stop and start Payara +3\. Stop and start Payara ```shell -service payara stop +sudo service payara stop sudo service payara start ``` -3\. Deploy this version +4\. Deploy this version ```shell $PAYARA/bin/asadmin deploy dataverse-6.5.war @@ -282,55 +288,18 @@ rm -rf $PAYARA/glassfish/domains/domain1/osgi-cache rm -rf $PAYARA/glassfish/domains/domain1/lib/databases ``` -4\. For installations with internationalization: +5\. For installations with internationalization: Please remember to update translations via [Dataverse language packs](https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs). -5\. 
Restart Payara +6\. Restart Payara ```shell service payara stop service payara start ``` -6\. Update metadata blocks - -No changes required for this release. - -7\. Update Solr schema.xml file. Start with the standard v6.4 schema.xml, then, if your installation uses any custom or experimental metadata blocks, update it to include the extra fields (step 7a). - -Stop Solr (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.4/installation/prerequisites.html#solr-init-script)). - -```shell -service solr stop -``` - -Replace schema.xml - -```shell -wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/conf/solr/schema.xml -cp schema.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf -``` - -Start Solr (but if you use any custom metadata blocks, perform the next step, 7a first). - -```shell -service solr start -``` - -7a\. For installations with custom or experimental metadata blocks: - -Before starting Solr, update the schema to include all the extra metadata fields that your installation uses. We do this by collecting the output of the Dataverse schema API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed to reflect the names of the directories, if different): - -```shell - wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/conf/solr/update-fields.sh - chmod +x update-fields.sh - curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-9.4.1/server/solr/collection1/conf/schema.xml -``` - -Now start Solr. - -8\. Reindex Solr +7\. Reindex Solr Below is the simplest way to reindex Solr: @@ -340,7 +309,7 @@ curl http://localhost:8080/api/admin/index The API above rebuilds the existing index "in place". If you want to be absolutely sure that your index is up-to-date and consistent, you may consider wiping it clean and reindexing everything from scratch (see [the guides](https://guides.dataverse.org/en/latest/admin/solr-search-index.html)). Just note that, depending on the size of your database, a full reindex may take a while and the users will be seeing incomplete search results during that window. -9\. Run reExportAll to update dataset metadata exports +8\. Run reExportAll to update dataset metadata exports Below is the simple way to reexport all dataset metadata. For more advanced usage, please see [the guides](http://guides.dataverse.org/en/6.4/admin/metadataexport.html#batch-exports-through-the-api). @@ -348,7 +317,7 @@ Below is the simple way to reexport all dataset metadata. For more advanced usag curl http://localhost:8080/api/admin/metadata/reExportAll ``` -10\. Pushing updated metadata to DataCite +9\. Pushing updated metadata to DataCite (If you don't use DataCite, you can skip this. Also, if you aren't affected by the "useless null" bug described above, you can skip this.) From a66c0857e4b1f016666faa71811406600e348d20 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 6 Dec 2024 12:56:42 -0500 Subject: [PATCH 375/402] more sudo #10952 --- doc/release-notes/6.5-release-notes.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 3d05f9e9edb..e21fd0ce0fd 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -295,8 +295,8 @@ Please remember to update translations via [Dataverse language packs](https://gi 6\. 
Restart Payara ```shell -service payara stop -service payara start +sudo service payara stop +sudo service payara start ``` 7\. Reindex Solr From 0a82885f1825824122883b2bcd25d397a938d3da Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 6 Dec 2024 13:13:24 -0500 Subject: [PATCH 376/402] whoops, we do need to update schema.xml #10952 --- doc/release-notes/6.5-release-notes.md | 51 +++++++++++++++++++++++--- 1 file changed, 46 insertions(+), 5 deletions(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index e21fd0ce0fd..e58a6fcce4b 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -150,7 +150,7 @@ A new endpoint (`PUT /api/dataverses/`) for updating an existing col ### fileCount Added to Search API -A new search field called `fileCount` can be searched to discover the number of files per dataset. See also #8941 and #10598. +A new search field called `fileCount` can be searched to discover the number of files per dataset. The upgrade instructions below explain how to update your Solr `schema.xml` file to add the new field. See also #8941 and #10598. ### List Dataset Metadata Exporters @@ -282,7 +282,7 @@ $PAYARA/bin/asadmin deploy dataverse-6.5.war Note: if you have any trouble deploying, stop Payara, remove the following directories, start Payara, and try to deploy again. ```shell -service payara stop +sudo service payara stop rm -rf $PAYARA/glassfish/domains/domain1/generated rm -rf $PAYARA/glassfish/domains/domain1/osgi-cache rm -rf $PAYARA/glassfish/domains/domain1/lib/databases @@ -298,8 +298,49 @@ Please remember to update translations via [Dataverse language packs](https://gi sudo service payara stop sudo service payara start ``` +7\. Update Solr schema.xml file. Start with the standard v6.5 schema.xml, then, if your installation uses any custom or experimental metadata blocks, update it to include the extra fields (step 7a). + +Stop Solr (usually `sudo service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.5/installation/prerequisites.html#solr-init-script)). + +```shell +sudo service solr stop +``` + +Replace schema.xml + +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.5/conf/solr/schema.xml +cp schema.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf +``` + +Start Solr (but if you use any custom metadata blocks, perform the next step, 7a first). + +```shell +sudo service solr start +``` + +7a\. For installations with custom or experimental metadata blocks: + +Before starting Solr, update the `schema.xml` file to include all the extra metadata fields that your installation uses. + +We do this by collecting the output of Dataverse's Solr schema API endpoint (`/api/admin/index/solr/schema`) and piping it to the `update-fields.sh` script which updates the `schema.xml` file supplied as an argument. + +The example below assumes the default installation location of Solr, but you can modify the commands as needed. + +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.5/conf/solr/update-fields.sh +chmod +x update-fields.sh +curl "http://localhost:8080/api/admin/index/solr/schema" | sudo ./update-fields.sh /usr/local/solr/solr-9.4.1/server/solr/collection1/conf/schema.xml +``` + +Now start Solr. + + +```shell +sudo service solr start +``` -7\. Reindex Solr +8\. 
Reindex Solr Below is the simplest way to reindex Solr: @@ -309,7 +350,7 @@ curl http://localhost:8080/api/admin/index The API above rebuilds the existing index "in place". If you want to be absolutely sure that your index is up-to-date and consistent, you may consider wiping it clean and reindexing everything from scratch (see [the guides](https://guides.dataverse.org/en/latest/admin/solr-search-index.html)). Just note that, depending on the size of your database, a full reindex may take a while and the users will be seeing incomplete search results during that window. -8\. Run reExportAll to update dataset metadata exports +9\. Run reExportAll to update dataset metadata exports Below is the simple way to reexport all dataset metadata. For more advanced usage, please see [the guides](http://guides.dataverse.org/en/6.4/admin/metadataexport.html#batch-exports-through-the-api). @@ -317,7 +358,7 @@ Below is the simple way to reexport all dataset metadata. For more advanced usag curl http://localhost:8080/api/admin/metadata/reExportAll ``` -9\. Pushing updated metadata to DataCite +10\. Pushing updated metadata to DataCite (If you don't use DataCite, you can skip this. Also, if you aren't affected by the "useless null" bug described above, you can skip this.) From 83c22b212543e37e675c25c782b242518bb0a584 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 6 Dec 2024 13:17:04 -0500 Subject: [PATCH 377/402] yet more sudo and call out non-root user #10952 --- doc/release-notes/6.5-release-notes.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index e58a6fcce4b..b288794b9e6 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -300,6 +300,8 @@ sudo service payara start ``` 7\. Update Solr schema.xml file. Start with the standard v6.5 schema.xml, then, if your installation uses any custom or experimental metadata blocks, update it to include the extra fields (step 7a). +Run the commands below as a non-root user. + Stop Solr (usually `sudo service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.5/installation/prerequisites.html#solr-init-script)). ```shell @@ -310,7 +312,7 @@ Replace schema.xml ```shell wget https://raw.githubusercontent.com/IQSS/dataverse/v6.5/conf/solr/schema.xml -cp schema.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf +sudo cp schema.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf ``` Start Solr (but if you use any custom metadata blocks, perform the next step, 7a first). From c1df8c5108dc73dc28b03b8e1fafd7e3eefaa39e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 6 Dec 2024 14:10:49 -0500 Subject: [PATCH 378/402] tweaks #10952 --- doc/release-notes/6.5-release-notes.md | 44 ++++++++++++-------------- 1 file changed, 20 insertions(+), 24 deletions(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index b288794b9e6..62ff9ba6e93 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -1,18 +1,18 @@ # Dataverse 6.5 -Please note: To read these instructions in full, please go to https://github.com/IQSS/dataverse/releases/tag/v6.5 rather than the list of releases, which will cut them off. 
+Please note: To read these instructions in full, please go to https://github.com/IQSS/dataverse/releases/tag/v6.5 rather than the [list of releases](https://github.com/IQSS/dataverse/releases), which will cut them off. -This release brings new features, enhancements, and bug fixes to Dataverse. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. +This release brings new features, enhancements, and bug fixes to Dataverse. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project! ## Release Highlights -Highlights in Dataverse 6.5 include: +Highlights for Dataverse 6.5 include: - new API endpoints, including editing of collections, Search API file counts, listing of exporters, comparing dataset versions, and auditing data files - UX improvements, especially Preview URLs - increased harvesting flexibility - performance gains -- a security vulnerability addressed +- a [security vulnerability](https://github.com/IQSS/dataverse-security/issues/98) addressed - many bug fixes - and more! Please see below. @@ -20,7 +20,7 @@ Highlights in Dataverse 6.5 include: ### Private URL Renamed to Preview URL and Improved -With this release the name of the URL that may be used by dataset administrators to share a draft version of a dataset has been changed from Private URL to Preview URL. +The name of the URL that may be used by dataset administrators to share a draft version of a dataset has been changed from Private URL to Preview URL. Also, additional information about the creation of Preview URLs has been added to the popup accessed via edit menu of the Dataset Page. @@ -30,17 +30,17 @@ The old "privateUrl" API endpoints for the creation and deletion of Preview (for See also #8184, #8185, #10950, and #10961. -### More Scalable Dataset Version Differencing +### Showing Differences Between Dataset Versions is More Scalable -Differencing between dataset versions, which is done during dataset edit operations and to populate the dataset page versions table has been made significantly more scalable. See #10814 and #10818. +Showing differences between dataset versions, which is done during dataset edit operations and to populate the dataset page versions table, has been made significantly more scalable. See #10814 and #10818. -### Reindexing After a Role Assignment is Less Memory Intensive +### Version Differences Details Sorting Added -Adding or removing a user from a role on a collection, particularly the root collection, could lead to a significant increase in memory use, resulting in Dataverse itself failing with an out-of-memory condition. Such changes now consume much less memory. +In order to facilitate the comparison between the draft version and the published version of a dataset, a sort on subfields has been added. See #10969. -If you have experienced out-of-memory failures in Dataverse in the past that could have been caused by this problem, you may wish to run a [reindex in place](https://guides.dataverse.org/en/latest/admin/solr-search-index.html#reindex-in-place) to update any out-of-date information. +### Reindexing After a Role Assignment is Less Memory Intensive -For more information, see #10697 and #10698. +Adding or removing a user from a role on a collection, particularly the root collection, could lead to a significant increase in memory use, resulting in Dataverse itself failing with an out-of-memory condition. 
Such changes now consume much less memory. A Solr reindexing step is included in the upgrade instructions below. See also #10697 and #10698. ### Longer Custom Questions in Guestbooks @@ -54,7 +54,7 @@ PostgreSQL 13 remains the version used with automated testing, leading us to con As of Flyway 10, supporting older versions of PostgreSQL no longer requires a paid subscription. While we don't encourage the use of older PostgreSQL versions, this flexibility may benefit some of our long-standing installations in their upgrade paths. -As part of this update, the containerized development environment now uses Postgres 17 instead of 16. Developers must delete their data (`rm -rf docker-dev-volumes`) and start with an empty database, as [explained](https://groups.google.com/g/dataverse-dev/c/ffoNj5UXyzU/m/nE5oGY_sAQAJ) on the dev mailing list. They can rerun the quickstart in the dev guide. +As part of this update, the containerized development environment now uses Postgres 17 instead of 16. Developers must delete their data (`rm -rf docker-dev-volumes`) and start with an empty database (rerun the [quickstart](https://guides.dataverse.org/en/6.5/developers/dev-environment.html#quickstart) in the dev guide), as [explained](https://groups.google.com/g/dataverse-dev/c/ffoNj5UXyzU/m/nE5oGY_sAQAJ) on the dev mailing list. The Docker compose file used for [evaluations or demos](https://guides.dataverse.org/en/6.4/container/running/demo.html) has been upgraded from Postgres 13 to 17. @@ -62,9 +62,9 @@ See also #10889 and #10912. ### Harvesting "oai_dc" Metadata Prefix When Extended With Specific Namespaces -Some repository extend the "oai_dc" metadata prefix with specific namespaces. In this case, harvesting of these datasets was not possible because an XML parsing error was raised. +Some data repositories extend the "oai_dc" metadata prefix with specific namespaces. In this case, harvesting of these datasets into Dataverse was not possible because an XML parsing error was raised. -Harvesting of these datasets has been fixed by excluding tags with namespaces that are not "dc:" and harvest only metadata with the "dc" namespace. See #10837. +Harvesting of these datasets has been fixed by excluding tags with namespaces that are not "dc:". That is, only harvesting metadata with the "dc" namespace. See #10837. ### Harvested Dataset PID from Record Header @@ -80,21 +80,21 @@ When harvesting using the DDI format, datasets can now have multiple "otherId" v ### Multiple Languages in Docker -Configuration and documentation has been added to explain how to set up multiple languages (e.g. English and French) in the tutorial for setting up Dataverse in Docker. +Documentation has been added to explain how to set up multiple languages (e.g. English and French) in the tutorial for setting up Dataverse in Docker. -See also [the docs](https://guides.dataverse.org/en/6.5/container/running/demo.html#multiple-languages), #10939, and #10940. +See [the tutorial](https://guides.dataverse.org/en/6.5/container/running/demo.html#multiple-languages), #10939, and #10940. ### GlobusBatchLookupSize An optimization has been added for the Globus upload workflow, with a corresponding new database setting: `:GlobusBatchLookupSize` -See the [Database Settings](https://guides.dataverse.org/en/6.5/installation/config.html#GlobusBatchLookupSize) section of the Guides, #10977, and #11040 for more information. 
+See the [Database Settings](https://guides.dataverse.org/en/6.5/installation/config.html#GlobusBatchLookupSize) section of the guides, #10977, and #11040 for more information. ## Bugs Fixed ### Relation Type (Related Publication) and DataCite -The subfield "Relation Type" was added to the field "Related Publication" in Dataverse 6.4 (#10632) but couldn't be used without workarounds described in an [announcement](https://groups.google.com/g/dataverse-community/c/zlRGJtu3x4g/m/GtVZ26uaBQAJ) about the problem. The bug has been fixed and workarounds and are longer required. See also #10926. +The subfield "Relation Type" was added to the field "Related Publication" in Dataverse 6.4 (#10632) but couldn't be used without workarounds described in an [announcement](https://groups.google.com/g/dataverse-community/c/zlRGJtu3x4g/m/GtVZ26uaBQAJ) about the problem. The bug has been fixed and workarounds and are longer required. See #10926 and the announcement above. ### Sort Order for Files @@ -110,15 +110,11 @@ When a dataset was deaccessioned and was the only previous version it would caus ### My Data Filter by Username Feature Restored -The superuser-only feature of filtering by a username on the My Data page was not working. The "Results for Username" field now returns data for the desired user. See also #7239 and #10980. +The superuser-only feature of filtering by a username on the My Data page was not working. This "Results for Username" field now returns data for the desired user. See also #7239 and #10980. ### Better Handling of Parallel Edit/Publish Errors -Improvements have been made in handling the errors when a dataset has been edited in one browser window and an attempt is made to edit/publish it in another. (This practice is discouraged.) See #10793 and #10794. - -### Version Differences Details Sorting Added - -In order to facilitate the comparison between the draft version and the published version of a dataset, a sort on subfields has been added. See #10969. +Improvements have been made in handling the errors when a dataset has been edited in one browser window and an attempt is made to edit or publish it in another. (This practice is discouraged, by the way.) See #10793 and #10794. ### Facets Filter Labels Now Translated Above Search Results From f19794b6e194f9168a49f5dfeca7e8c3187f7222 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 6 Dec 2024 14:53:11 -0500 Subject: [PATCH 379/402] changes made in #9665 belong in 6.5 changelog, not 6.4 --- doc/sphinx-guides/source/api/changelog.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 92cd4fc941b..65700cf19d4 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -7,12 +7,16 @@ This API changelog is experimental and we would love feedback on its usefulness. :local: :depth: 1 +v6.5 +---- + +- **/api/datasets/{identifier}/links**: The GET endpoint returns a list of Dataverses linked to the given Dataset. The format of the response has changes for v6.4 making it backward incompatible. + v6.4 ---- - **/api/datasets/$dataset-id/modifyRegistration**: Changed from GET to POST - **/api/datasets/modifyRegistrationPIDMetadataAll**: Changed from GET to POST -- **/api/datasets/{identifier}/links**: The GET endpoint returns a list of Dataverses linked to the given Dataset. The format of the response has changes for v6.4 making it backward incompatible. 
v6.3 ---- From 38cff71317af86e60cb489b7cb5eb57b0103fcf1 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 6 Dec 2024 15:11:38 -0500 Subject: [PATCH 380/402] reword and link to endpoint docs #9665 --- doc/sphinx-guides/source/admin/dataverses-datasets.rst | 2 ++ doc/sphinx-guides/source/api/changelog.rst | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/admin/dataverses-datasets.rst b/doc/sphinx-guides/source/admin/dataverses-datasets.rst index 7c03a6f80c0..c6d325a9651 100644 --- a/doc/sphinx-guides/source/admin/dataverses-datasets.rst +++ b/doc/sphinx-guides/source/admin/dataverses-datasets.rst @@ -122,6 +122,8 @@ Creates a link between a dataset and a Dataverse collection (see the :ref:`datas curl -H "X-Dataverse-key: $API_TOKEN" -X PUT http://$SERVER/api/datasets/$linked-dataset-id/link/$linking-dataverse-alias +.. _list-collections-linked-from-dataset: + List Collections that are Linked from a Dataset ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 65700cf19d4..8df2b5b67e1 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -10,7 +10,7 @@ This API changelog is experimental and we would love feedback on its usefulness. v6.5 ---- -- **/api/datasets/{identifier}/links**: The GET endpoint returns a list of Dataverses linked to the given Dataset. The format of the response has changes for v6.4 making it backward incompatible. +- **/api/datasets/{identifier}/links**: The response from :ref:`list-collections-linked-from-dataset` has been improved to provide a more structured (but backward-incompatible) JSON response. v6.4 ---- From bafb91d85c7a9bacdf86202f8d081180cee5686e Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 6 Dec 2024 15:11:52 -0500 Subject: [PATCH 381/402] update query as native --- .../java/edu/harvard/iq/dataverse/DataFileServiceBean.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 98ac8ff387f..937f5693511 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1407,8 +1407,7 @@ public UploadSessionQuotaLimit getUploadSessionQuotaLimit(DvObjectContainer pare } public boolean isInReleasedVersion(Long id) { - Query query = em.createQuery("SELECT fm.id FROM FileMetadata fm, DvObject dvo WHERE fm.datasetVersion.id=(SELECT dv.id FROM DatasetVersion dv WHERE dv.dataset.id=dvo.owner.id and dv.versionState=edu.harvard.iq.dataverse.DatasetVersion.VersionState.RELEASED ORDER BY dv.versionNumber DESC, dv.minorVersionNumber DESC LIMIT 1) AND dvo.id=fm.dataFile.id AND fm.dataFile.id=:fid"); - query.setParameter("fid", id); + Query query = em.createNativeQuery("SELECT fm.id FROM filemetadata fm WHERE fm.datasetversion_id=(SELECT dv.id FROM datasetversion dv, dvobject dvo WHERE dv.dataset_id=dvo.owner_id AND dv.versionState='RELEASED' and dvo.id=" + id + " ORDER BY dv.versionNumber DESC, dv.minorVersionNumber DESC LIMIT 1) AND fm.datafile_id=" + id); try { query.getSingleResult(); From deb2d8821bfba71e9836f03a6f7c127e2024f28c Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 6 Dec 2024 15:15:18 -0500 Subject: [PATCH 382/402] many more tweaks #10952 --- doc/release-notes/6.5-release-notes.md | 55 +++++++++++++++----------- 1 
file changed, 31 insertions(+), 24 deletions(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 62ff9ba6e93..afaa3a6021e 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -110,7 +110,7 @@ When a dataset was deaccessioned and was the only previous version it would caus ### My Data Filter by Username Feature Restored -The superuser-only feature of filtering by a username on the My Data page was not working. This "Results for Username" field now returns data for the desired user. See also #7239 and #10980. +The superuser-only feature of filtering by a username on the My Data page was not working. Entering a username in the "Results for Username" field now returns data for the desired user. See also #7239 and #10980. ### Better Handling of Parallel Edit/Publish Errors @@ -118,13 +118,11 @@ Improvements have been made in handling the errors when a dataset has been edite ### Facets Filter Labels Now Translated Above Search Results -On the main page, it's possible to filter results using search facets. If internationalization (i18n) has been activated in the Dataverse installation, allowing pages to be displayed in several languages, the facets are translated in the filter column. However, they weren't being translated above the search results, remaining in the default language, English. - -This version of Dataverse fix this, and includes internationalization in the facets visible in the search results section. For more information, see #9408 and #10158. +On the main page, it's possible to filter results using search facets. If internationalization (i18n) has been enabled in the Dataverse installation, allowing pages to be displayed in several languages, the facets were correctly translated in the filter column at the left. However, they were not being translated above the search results, remaining in the default language, English. This has been fixed. See #9408 and #10158. ### Unpublished File Bug Fix Related to Deaccessioning -A bug fix was made that gets the major version of a Dataset when all major versions were deaccessioned. This fixes the incorrect showing of the files as "Unpublished" in the search list even when they are published. This fix affects the indexing, meaning these datasets must be re-indexed once Dataverse is updated. See also #10947 and #10974. +A bug fix was made related to retrieval of the major version of a Dataset when all major versions were deaccessioned. This fixes the incorrect showing of the files as "Unpublished" in the search list even when they are published. In the upgrade instructions below, there is a step to reindex Solr. See also #10947 and #10974. ### Minor DataCiteXML Fix (Useless Null) @@ -136,7 +134,7 @@ Make Data Count (MDC) citation retrieval with the PID settings has been fixed. P ### Globus "missing properties" Logging Fixed -In previous releases, logging would show Globus-related strings were missing from properties files. This has been fixed. See #11030 for details. +In previous releases, logging would show Globus-related strings were missing from properties files. This has been fixed. See #11030. ## API Updates @@ -146,7 +144,7 @@ A new endpoint (`PUT /api/dataverses/`) for updating an existing col ### fileCount Added to Search API -A new search field called `fileCount` can be searched to discover the number of files per dataset. The upgrade instructions below explain how to update your Solr `schema.xml` file to add the new field. 
See also #8941 and #10598. +A new search field called `fileCount` can be searched to discover the number of files per dataset. The upgrade instructions below explain how to update your Solr `schema.xml` file to add the new field and reindex Solr. See also #8941 and #10598. ### List Dataset Metadata Exporters @@ -164,7 +162,7 @@ A superuser-only API endpoint has been added to audit datasets with data files w The update collection (dataverse) API endpoint has been updated to support an "inherit from parent" configuration for metadata blocks, facets, and input levels. -Previously, not setting these fields meant using a copy of the settings from the parent collection, which could get out of sync.. See also [the docs](https://guides.dataverse.org/en/6.5/api/native-api.html#update-a-dataverse-collection), #11018, and #11026. +Previously, not setting these fields meant using a copy of the settings from the parent collection, which could get out of sync. See also [the docs](https://guides.dataverse.org/en/6.5/api/native-api.html#update-a-dataverse-collection), #11018, and #11026. ### isMetadataBlockRoot and isFacetRoot @@ -172,17 +170,17 @@ The JSON payload of the "get collection" endpoint has been extended to include p ### Whitespace Trimming When Loading Metadata Block TSV Files -When loading custom metadata blocks using the `api/admin/datasetfield/load` API, whitespace can be introduced into field names. Whitespace is now trimmed from the beginning and end of all values read into the API before persisting them. See #10688 and #10696. +When loading custom metadata blocks using the `api/admin/datasetfield/load` API endpoint, whitespace can be introduced into field names. Whitespace is now trimmed from the beginning and end of all values read into the API before persisting them. See #10688 and #10696. ### Image URLs from the Search API -As of 6.4 (thanks to #10855) `image_url` is being returned from the Search API. The logic has been updated to only show the image if each of the following are true: +As of 6.4 (#10855) `image_url` is being returned from the Search API. The logic has been updated to only show the image if each of the following are true: -1. The DataFile is not Harvested -2. A Thumbnail is available for the Datafile -3. If the Datafile is Restricted then the caller must have Download File Permission for the Datafile -4. The Datafile is NOT actively embargoed -5. The Datafile's retention period has NOT expired +1. The data file is not harvested +2. A thumbnail is available for the data file +3. If the data file is restricted, then the caller must have DownloadFile permission for the data file +4. The data file is NOT actively embargoed +5. The data file's retention period has NOT expired See also #10875 and #10886. @@ -190,26 +188,27 @@ See also #10875 and #10886. Two bugs in the Metrics API have been fixed: -- The /datasets and /datasets/byMonth endpoints could report incorrect values if/when they have been called using the dataLocation parameter (which allows getting metrics for local, remote (harvested), or all datasets) as the metrics cache was not storing different values for these cases. +- The /datasets and /datasets/byMonth endpoints could report incorrect values if or when they have been called using the "dataLocation" parameter (which allows getting metrics for local, remote (harvested), or all datasets) as the metrics cache was not storing different values for these cases. 
+ +- Metrics endpoints whose calculation relied on finding the latest published dataset version were incorrect if/when the minor version number was > 9. -- Metrics endpoints who's calculation relied on finding the latest published datasetversion were incorrect if/when the minor version number was > 9. +The upgrade instructions below include a step for clearing the metrics cache. -When deploying the new release, the [/api/admin/clearMetricsCache](https://guides.dataverse.org/en/latest/api/native-api.html#metrics) API should be called to remove old cached values that may be incorrect. -See #10379 and #10865. +See also #10379 and #10865. ### API Tokens -An optional query parameter called "returnExpiration" has been added to the "/api/users/token/recreate" endpoint, which, if set to true, returns the expiration time in the response message. See [the docs](https://guides.dataverse.org/en/6.5/api/native-api.html#recreate-a-token), #10857 and #10858. +An optional query parameter called "returnExpiration" has been added to the `/api/users/token/recreate` endpoint, which, if set to true, returns the expiration time in the response. See [the docs](https://guides.dataverse.org/en/6.5/api/native-api.html#recreate-a-token), #10857 and #10858. -The `/api/users/token` endpoint has been extended to support any auth mechanism for retrieving the token information. Previously, this endpoint only accepted an API token to retrieve its information. Now, it accepts any authentication mechanism and returns the associated API token information. See #10914 and #10924. +The `/api/users/token` endpoint has been extended to support any auth mechanism for retrieving the token information. Previously this endpoint only accepted an API token to retrieve its information. Now it accepts any authentication mechanism and returns the associated API token information. See #10914 and #10924. ## Settings Added -- :GlobusBatchLookupSize +- `:GlobusBatchLookupSize` ## Backward Incompatible Changes -Generally speaking, see the [API Changelog](https://guides.dataverse.org/en/6.5/api/changelog.html) for a list of backward-incompatible changes. +Generally speaking, see the [API Changelog](https://guides.dataverse.org/en/latest/api/changelog.html) for a list of backward-incompatible API changes. ### List Collections Linked to a Dataset @@ -356,7 +355,15 @@ Below is the simple way to reexport all dataset metadata. For more advanced usag curl http://localhost:8080/api/admin/metadata/reExportAll ``` -10\. Pushing updated metadata to DataCite +10\. Clear metrics cache + +Run the [clearMetricsCache](https://guides.dataverse.org/en/6.5/api/native-api.html#metrics) API endpoint to remove old cached values that may be incorrect. + +```shell +curl -X DELETE http://localhost:8080/api/admin/clearMetricsCache +``` + +11\. Pushing updated metadata to DataCite (If you don't use DataCite, you can skip this. Also, if you aren't affected by the "useless null" bug described above, you can skip this.) 
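As a minimal sketch of the `returnExpiration` query parameter described in the API Tokens section above, a call along the following lines should return the recreated token together with its expiration time. The server URL and token value here are placeholders, not taken from any real installation.

```shell
# Recreate the calling user's API token and include the expiration time in the response
# (assumes a reachable Dataverse installation and a valid existing API token)
export SERVER_URL=https://demo.dataverse.org
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx

curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/users/token/recreate?returnExpiration=true"
```

Omitting the parameter, or setting it to false, should leave the expiration time out of the response, matching the pre-6.5 behavior.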
From 7611a212e25e82b68176a422c1f2a5925bde3dad Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 6 Dec 2024 15:21:34 -0500 Subject: [PATCH 383/402] update how we use sudo #10952 --- doc/release-notes/6.5-release-notes.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index afaa3a6021e..fbe37f89baa 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -239,9 +239,9 @@ These instructions assume that you've already upgraded through all the 5.x relea 0\. These instructions assume that you are upgrading from the immediate previous version. If you are running an earlier version, the only supported way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to this version. -If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user. +If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. By default, Payara runs as the `dataverse` user. In the commands below, we use sudo to run the commands as a non-root user. -In the following commands, we assume that Payara 6 is installed in `/usr/local/payara6`. If not, adjust as needed. +Also, we assume that Payara 6 is installed in `/usr/local/payara6`. If not, adjust as needed. ```shell export PAYARA=/usr/local/payara6 From 2d0c584729860a422a80c19808187799c87588e8 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 6 Dec 2024 15:25:17 -0500 Subject: [PATCH 384/402] typo --- doc/sphinx-guides/source/api/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 8df2b5b67e1..14958095658 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -1,7 +1,7 @@ API Changelog (Breaking Changes) ================================ -This API changelog is experimental and we would love feedback on its usefulness. Its primary purpose is to inform API developers of any breaking changes. (We try not ship any backward incompatible changes, but it happens.) To see a list of new APIs and backward-compatible changes to existing API, please see each version's release notes at https://github.com/IQSS/dataverse/releases +This API changelog is experimental and we would love feedback on its usefulness. Its primary purpose is to inform API developers of any breaking changes. (We try not to ship any backward incompatible changes, but it happens.) To see a list of new APIs and backward-compatible changes to existing API, please see each version's release notes at https://github.com/IQSS/dataverse/releases .. 
contents:: |toctitle| :local: From d3be336185929c3414fe0f114e5bb2a26efab887 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 6 Dec 2024 18:17:37 -0500 Subject: [PATCH 385/402] cvoc fix, status fix, lower per-file logging --- .../iq/dataverse/search/IndexServiceBean.java | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index f72973076ec..4efd339ee46 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -1151,9 +1151,7 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, SetFeature Request/Idea: Harvest metadata values that aren't from a list of controlled values #9992 @@ -1301,7 +1299,6 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set findPermissionsInSolrOnly() throws SearchException { String dtype = dvObjectService.getDtype(id); if (dtype == null) { permissionInSolrOnly.add(docId); - } - if (dtype.equals(DType.Dataset.getDType())) { + }else if (dtype.equals(DType.Dataset.getDType())) { List states = datasetService.getVersionStates(id); if (states != null) { String latestState = states.get(states.size() - 1); @@ -2257,7 +2253,7 @@ public List findPermissionsInSolrOnly() throws SearchException { } else if (dtype.equals(DType.DataFile.getDType())) { List states = dataFileService.findVersionStates(id); Set strings = states.stream().map(VersionState::toString).collect(Collectors.toSet()); - logger.fine("States for " + docId + ": " + String.join(", ", strings)); + logger.finest("States for " + docId + ": " + String.join(", ", strings)); if (docId.endsWith("draft_permission")) { if (!states.contains(VersionState.DRAFT)) { permissionInSolrOnly.add(docId); @@ -2271,7 +2267,7 @@ public List findPermissionsInSolrOnly() throws SearchException { permissionInSolrOnly.add(docId); } else { if (!dataFileService.isInReleasedVersion(id)) { - logger.fine("Adding doc " + docId + " to list of permissions in Solr only"); + logger.finest("Adding doc " + docId + " to list of permissions in Solr only"); permissionInSolrOnly.add(docId); } } From dd64ebbc55cac820b2bc7258982f9f8bee981c9b Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 9 Dec 2024 09:33:03 -0500 Subject: [PATCH 386/402] ever more sudo #10952 --- doc/release-notes/6.5-release-notes.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index fbe37f89baa..25caa64b145 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -278,9 +278,9 @@ Note: if you have any trouble deploying, stop Payara, remove the following direc ```shell sudo service payara stop -rm -rf $PAYARA/glassfish/domains/domain1/generated -rm -rf $PAYARA/glassfish/domains/domain1/osgi-cache -rm -rf $PAYARA/glassfish/domains/domain1/lib/databases +sudo rm -rf $PAYARA/glassfish/domains/domain1/generated +sudo rm -rf $PAYARA/glassfish/domains/domain1/osgi-cache +sudo rm -rf $PAYARA/glassfish/domains/domain1/lib/databases ``` 5\. 
For installations with internationalization: From 98d68c95fa0fd303f2343bb45932682d587fd384 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 9 Dec 2024 10:19:13 -0500 Subject: [PATCH 387/402] explain how to download the war file before deploying #10952 --- doc/release-notes/6.5-release-notes.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 25caa64b145..ad5d3575dd5 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -268,9 +268,10 @@ sudo service payara stop sudo service payara start ``` -4\. Deploy this version +4\. Download and deploy this version ```shell +wget https://github.com/IQSS/dataverse/releases/download/v6.5/dataverse-6.5.war $PAYARA/bin/asadmin deploy dataverse-6.5.war ``` From 48246a635753914e9bf1c3fcdb1560f8f15b7b26 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 9 Dec 2024 10:20:52 -0500 Subject: [PATCH 388/402] note that Solr dir can differ #10952 --- doc/release-notes/6.5-release-notes.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index ad5d3575dd5..019d437ed06 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -306,6 +306,8 @@ sudo service solr stop Replace schema.xml +Please note that the path to Solr may differ than the example below. + ```shell wget https://raw.githubusercontent.com/IQSS/dataverse/v6.5/conf/solr/schema.xml sudo cp schema.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf From 9a4252b22052201d355a4173fbb16e57a3704e7c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 10 Dec 2024 10:55:32 -0500 Subject: [PATCH 389/402] #11076 test session user perms --- .../dataverse/privateurl/PrivateUrlPage.java | 21 ++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java index 9af4bb6af9e..17c622be9e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java @@ -1,6 +1,10 @@ package edu.harvard.iq.dataverse.privateurl; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetServiceBean; +import edu.harvard.iq.dataverse.DataverseRequestServiceBean; import edu.harvard.iq.dataverse.DataverseSession; +import edu.harvard.iq.dataverse.PermissionsWrapper; import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import java.io.Serializable; import java.util.logging.Logger; @@ -20,8 +24,14 @@ public class PrivateUrlPage implements Serializable { @EJB PrivateUrlServiceBean privateUrlService; + @EJB + DatasetServiceBean datasetServiceBean; @Inject DataverseSession session; + @Inject + PermissionsWrapper permissionsWrapper; + @Inject + DataverseRequestServiceBean dvRequestService; /** * The unique string used to look up a PrivateUrlUser and the associated @@ -34,7 +44,16 @@ public String init() { PrivateUrlRedirectData privateUrlRedirectData = privateUrlService.getPrivateUrlRedirectDataFromToken(token); String draftDatasetPageToBeRedirectedTo = privateUrlRedirectData.getDraftDatasetPageToBeRedirectedTo() + "&faces-redirect=true"; PrivateUrlUser privateUrlUser = privateUrlRedirectData.getPrivateUrlUser(); - session.setUser(privateUrlUser); + boolean 
sessionUserCanViewUnpublishedDataset = false; + if (session.getUser().isAuthenticated()){ + Long datasetId = privateUrlUser.getDatasetId(); + Dataset dataset = datasetServiceBean.find(datasetId); + sessionUserCanViewUnpublishedDataset = permissionsWrapper.canViewUnpublishedDataset(dvRequestService.getDataverseRequest(), dataset); + } + if(!sessionUserCanViewUnpublishedDataset){ + //Only Reset if user cannot view this Draft Version + session.setUser(privateUrlUser); + } logger.info("Redirecting PrivateUrlUser '" + privateUrlUser.getIdentifier() + "' to " + draftDatasetPageToBeRedirectedTo); return draftDatasetPageToBeRedirectedTo; } catch (Exception ex) { From ab7c90e607b072af6e962f8fc0ec652a76d44b53 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 10 Dec 2024 10:59:01 -0500 Subject: [PATCH 390/402] typo Co-authored-by: Omer Fahim --- doc/release-notes/6.5-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 019d437ed06..56521c45d71 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -94,7 +94,7 @@ See the [Database Settings](https://guides.dataverse.org/en/6.5/installation/con ### Relation Type (Related Publication) and DataCite -The subfield "Relation Type" was added to the field "Related Publication" in Dataverse 6.4 (#10632) but couldn't be used without workarounds described in an [announcement](https://groups.google.com/g/dataverse-community/c/zlRGJtu3x4g/m/GtVZ26uaBQAJ) about the problem. The bug has been fixed and workarounds and are longer required. See #10926 and the announcement above. +The subfield "Relation Type" was added to the field "Related Publication" in Dataverse 6.4 (#10632) but couldn't be used without workarounds described in an [announcement](https://groups.google.com/g/dataverse-community/c/zlRGJtu3x4g/m/GtVZ26uaBQAJ) about the problem. The bug has been fixed and workarounds are no longer required. See #10926 and the announcement above. ### Sort Order for Files From 63ff790d4b3c319bb674fba8201eb8e3c8686fc9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 10 Dec 2024 11:01:51 -0500 Subject: [PATCH 391/402] typo Co-authored-by: Omer Fahim --- doc/release-notes/6.5-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 56521c45d71..7bb9cbb1e73 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -106,7 +106,7 @@ In the Guestbook UI form, the email address is now checked for validity. See #10 ### Updating Files Now Possible When Latest and Only Dataset Version is Deaccessioned -When a dataset was deaccessioned and was the only previous version it would cause an error when trying to update the files. This has been fixed. See #9351 and #10901. +When a dataset was deaccessioned, and was the only previous version, it would cause an error when trying to update the files. This has been fixed. See #9351 and #10901. 
### My Data Filter by Username Feature Restored From e733a2b7c54921f2380af4ec14df5b0672184825 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 10 Dec 2024 13:35:35 -0500 Subject: [PATCH 392/402] rely on the path based checks rather than looking at [0] in the array --- src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java | 6 ------ 1 file changed, 6 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index e3c26284d55..ffb7aa4cc3b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2274,9 +2274,6 @@ public void testDeleteFile() { // Check file 2 still in v1.0 Response v1 = UtilIT.getDatasetVersion(datasetPid, "1.0", apiToken); v1.prettyPrint(); - v1.then().assertThat() - .body("data.files[0].dataFile.filename", equalTo("cc0.png")) - .statusCode(OK.getStatusCode()); Map v1files1 = with(v1.body().asString()).param("fileToFind", "cc0.png") .getJsonObject("data.files.find { files -> files.label == fileToFind }"); @@ -2289,9 +2286,6 @@ public void testDeleteFile() { // Check file 3 still in post v1.0 draft Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); postv1draft2.prettyPrint(); - postv1draft2.then().assertThat() - .body("data.files[0].dataFile.filename", equalTo("orcid_16x16.png")) - .statusCode(OK.getStatusCode()); Map v1files2 = with(postv1draft2.body().asString()).param("fileToFind", "orcid_16x16.png") .getJsonObject("data.files.find { files -> files.label == fileToFind }"); From 853ced6340c163e4d8deb7ecab53a82092fd7642 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 10 Dec 2024 14:37:04 -0500 Subject: [PATCH 393/402] #11076 refresh delete popup --- src/main/webapp/dataset.xhtml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 051dc03ab34..9426884d349 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1193,7 +1193,7 @@

    #{bundle['dataset.privateurl.general.description']}

@@ -1213,7 +1213,7 @@
    @@ -1252,7 +1252,7 @@ - + From 452cca4f15805063b769003620762a44cdc8260b Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 10 Dec 2024 14:49:00 -0500 Subject: [PATCH 394/402] word choice Co-authored-by: Omer Fahim --- doc/release-notes/6.5-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 7bb9cbb1e73..a2cac8ed800 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -306,7 +306,7 @@ sudo service solr stop Replace schema.xml -Please note that the path to Solr may differ than the example below. +Please note that the path to Solr may differ from the example below. ```shell wget https://raw.githubusercontent.com/IQSS/dataverse/v6.5/conf/solr/schema.xml From 4c803d2cd14236bc98f925cce4ed2a348eb4e3ee Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 11 Dec 2024 11:13:54 -0500 Subject: [PATCH 395/402] keep status checks --- src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index ffb7aa4cc3b..98107eca33a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2274,6 +2274,8 @@ public void testDeleteFile() { // Check file 2 still in v1.0 Response v1 = UtilIT.getDatasetVersion(datasetPid, "1.0", apiToken); v1.prettyPrint(); + v1.then().assertThat() + .statusCode(OK.getStatusCode()); Map v1files1 = with(v1.body().asString()).param("fileToFind", "cc0.png") .getJsonObject("data.files.find { files -> files.label == fileToFind }"); @@ -2286,6 +2288,8 @@ public void testDeleteFile() { // Check file 3 still in post v1.0 draft Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); postv1draft2.prettyPrint(); + postv1draft2.then().assertThat() + .statusCode(OK.getStatusCode()); Map v1files2 = with(postv1draft2.body().asString()).param("fileToFind", "orcid_16x16.png") .getJsonObject("data.files.find { files -> files.label == fileToFind }"); From ce6a99a49f54caaf54bce633dd08869365c87e8c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 11 Dec 2024 13:38:31 -0500 Subject: [PATCH 396/402] #10952 add not about breadcrumbs in Anon Preview URL --- doc/release-notes/6.5-release-notes.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index a2cac8ed800..c866d52e42c 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -24,6 +24,8 @@ The name of the URL that may be used by dataset administrators to share a draft Also, additional information about the creation of Preview URLs has been added to the popup accessed via edit menu of the Dataset Page. +Users of the Anonymous Preview URL will no longer be able to see the name of the Dataverse that this dataset is in but will be able to see the name of the repository. + Any Private URLs created in previous versions of Dataverse will continue to work. The old "privateUrl" API endpoints for the creation and deletion of Preview (formerly Private) URLs have been deprecated. They will continue to work but please switch to the "previewUrl" equivalents that have been [documented](https://guides.dataverse.org/en/6.5/api/native-api.html#create-a-preview-url-for-a-dataset) in the API Guide. 
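As a rough sketch of the "previewUrl" equivalents mentioned above, the calls below assume the endpoint names documented in the 6.5 API Guide; the server URL, token, and dataset ID are placeholders.

```shell
# Create a Preview URL for a draft dataset
curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/$DATASET_ID/previewUrl"

# Create an Anonymous Preview URL instead (anonymizedAccess is assumed to behave as documented)
curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/$DATASET_ID/previewUrl?anonymizedAccess=true"

# Delete the Preview URL once it is no longer needed
curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$DATASET_ID/previewUrl"
```

The deprecated "privateUrl" spellings of these endpoints are expected to keep working for now, but new integrations should use the names shown here.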
From 929128bb344df97678c49e3efa31d22c5074e646 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 11 Dec 2024 13:40:19 -0500 Subject: [PATCH 397/402] #10952 reword --- doc/release-notes/6.5-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index c866d52e42c..17abec2907a 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -24,7 +24,7 @@ The name of the URL that may be used by dataset administrators to share a draft Also, additional information about the creation of Preview URLs has been added to the popup accessed via edit menu of the Dataset Page. -Users of the Anonymous Preview URL will no longer be able to see the name of the Dataverse that this dataset is in but will be able to see the name of the repository. +Users of the Anonymous Preview URL will no longer be able to see the name of the Dataverse that the dataset is in but will be able to see the name of the repository. Any Private URLs created in previous versions of Dataverse will continue to work. From a0508d1989c94e87e7fd7ddfaf7853cc69936224 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 11 Dec 2024 13:59:56 -0500 Subject: [PATCH 398/402] add link to #11085 --- doc/release-notes/6.5-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 17abec2907a..a45efb59556 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -30,7 +30,7 @@ Any Private URLs created in previous versions of Dataverse will continue to work The old "privateUrl" API endpoints for the creation and deletion of Preview (formerly Private) URLs have been deprecated. They will continue to work but please switch to the "previewUrl" equivalents that have been [documented](https://guides.dataverse.org/en/6.5/api/native-api.html#create-a-preview-url-for-a-dataset) in the API Guide. -See also #8184, #8185, #10950, and #10961. +See also #8184, #8185, #10950, #10961, and #11085. ### Showing Differences Between Dataset Versions is More Scalable From fe13a437c9c990086945c48a21dd32ce7e80ee6f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 11 Dec 2024 15:13:43 -0500 Subject: [PATCH 399/402] add cvoc overview fix #10952 --- doc/release-notes/6.5-release-notes.md | 4 ++++ doc/release-notes/display_overview_fix.md | 1 - 2 files changed, 4 insertions(+), 1 deletion(-) delete mode 100644 doc/release-notes/display_overview_fix.md diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index a45efb59556..8cccc7114cf 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -134,6 +134,10 @@ A minor bug fix was made to avoid sending a useless ", null" in the DataCiteXML Make Data Count (MDC) citation retrieval with the PID settings has been fixed. PID parsing in Dataverse is now case insensitive, improving interaction with services that may change the case of PIDs. Warnings related to managed/excluded PID lists for PID providers have been reduced. See #10708. +### Quirk in Overview Display When Using External Controlled Variables + +This bugfix corrects an issue when there are duplicated entries on the metadata page. It is fixed by correcting an IF-clause in metadataFragment.xhtml. See #11005 and #11034. 
+ ### Globus "missing properties" Logging Fixed In previous releases, logging would show Globus-related strings were missing from properties files. This has been fixed. See #11030. diff --git a/doc/release-notes/display_overview_fix.md b/doc/release-notes/display_overview_fix.md deleted file mode 100644 index 73a01435caf..00000000000 --- a/doc/release-notes/display_overview_fix.md +++ /dev/null @@ -1 +0,0 @@ -This bugfix corrects an issue when there are duplicated entries on the metadata page. It is fixed by correcting an IF-clause in metadataFragment.xhtml. \ No newline at end of file From 2de57e13135a8512218a651552e924cce512382a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 11 Dec 2024 15:16:26 -0500 Subject: [PATCH 400/402] remove "in place" from Solr reindex step #10952 --- doc/release-notes/6.5-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/6.5-release-notes.md b/doc/release-notes/6.5-release-notes.md index 8cccc7114cf..2e27f4419bd 100644 --- a/doc/release-notes/6.5-release-notes.md +++ b/doc/release-notes/6.5-release-notes.md @@ -354,7 +354,7 @@ Below is the simplest way to reindex Solr: curl http://localhost:8080/api/admin/index ``` -The API above rebuilds the existing index "in place". If you want to be absolutely sure that your index is up-to-date and consistent, you may consider wiping it clean and reindexing everything from scratch (see [the guides](https://guides.dataverse.org/en/latest/admin/solr-search-index.html)). Just note that, depending on the size of your database, a full reindex may take a while and the users will be seeing incomplete search results during that window. +The API above rebuilds the existing index. If you want to be absolutely sure that your index is up-to-date and consistent, you may consider wiping it clean and reindexing everything from scratch (see [the guides](https://guides.dataverse.org/en/latest/admin/solr-search-index.html)). Just note that, depending on the size of your database, a full reindex may take a while and the users will be seeing incomplete search results during that window. 9\. Run reExportAll to update dataset metadata exports From 2c08be4a3550fdd7487969e12dbbf88cbe1712ea Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 11 Dec 2024 16:12:00 -0500 Subject: [PATCH 401/402] bump version to 6.5 #10954 --- doc/sphinx-guides/source/conf.py | 4 ++-- doc/sphinx-guides/source/versions.rst | 3 ++- modules/dataverse-parent/pom.xml | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index 7ee355302d8..fc88de1fcd7 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -68,9 +68,9 @@ # built documents. # # The short X.Y version. -version = '6.4' +version = '6.5' # The full version, including alpha/beta/rc tags. -release = '6.4' +release = '6.5' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst index 800bdc6e0f9..9d640bd22bd 100755 --- a/doc/sphinx-guides/source/versions.rst +++ b/doc/sphinx-guides/source/versions.rst @@ -7,7 +7,8 @@ Dataverse Software Documentation Versions This list provides a way to refer to the documentation for previous and future versions of the Dataverse Software. 
In order to learn more about the updates delivered from one version to another, visit the `Releases `__ page in our GitHub repo. - pre-release `HTML (not final!) `__ and `PDF (experimental!) `__ built from the :doc:`develop ` branch :doc:`(how to contribute!) ` -- 6.4 +- 6.5 +- `6.4 `__ - `6.3 `__ - `6.2 `__ - `6.1 `__ diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 9442b55d622..9612988b3e7 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -131,7 +131,7 @@ - 6.4 + 6.5 17 UTF-8 From d9c03e0cf93b4cb6a8b1fadd838a3df5737d4bb4 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 11 Dec 2024 16:14:13 -0500 Subject: [PATCH 402/402] make change for proper tagging of images #10954 --- modules/dataverse-parent/pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 9612988b3e7..d8105535248 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -446,8 +446,8 @@ Once the release has been made (tag created), change this back to "${parsedVersion.majorVersion}.${parsedVersion.nextMinorVersion}" (These properties are provided by the build-helper plugin below.) --> - ${parsedVersion.majorVersion}.${parsedVersion.nextMinorVersion} - + + ${revision}
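As a quick way to confirm that the version bump above is what a running installation actually reports after deployment, the standard info API can be queried; the localhost URL is a placeholder for wherever the application is running.

```shell
# Ask the running Dataverse installation for its version and build number
curl -s "http://localhost:8080/api/info/version"
# A successful 6.5 deployment is expected to include "version":"6.5" in the JSON response
```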