diff --git a/doc/release-notes/10797-update-current-version-bug-fix.md b/doc/release-notes/10797-update-current-version-bug-fix.md new file mode 100644 index 00000000000..2cfaf69cad3 --- /dev/null +++ b/doc/release-notes/10797-update-current-version-bug-fix.md @@ -0,0 +1,11 @@ +A significant bug in the superuser-only "Update-Current-Version" publication was found and fixed in this release. If the Update-Current-Version option was used when changes were made to the dataset Terms (rather than to dataset metadata), or if the PID provider service was down/returned an error, the update would fail, rendering the dataset unusable and requiring restoration from a backup. The fix in this release allows the update to succeed in both of these cases and redesigns the functionality such that any unknown issues should not make the dataset unusable (i.e. the error would be reported and the dataset would remain in its current state, with the last-published version as it was and changes still in the draft version). + +Users of earlier Dataverse releases are encouraged to alert their superusers to this issue. Those who wish to disable this functionality have two options: +* Change the dataset.updateRelease entry in the Bundle.properties file (or local language version) to "Do Not Use" or similar (doesn't disable the feature, but alerts superusers to the issue), or +* Edit the dataset.xhtml file to remove the lines + + + + + +, delete the contents of the generated and osgi-cache directories in the Dataverse Payara domain, and restart the Payara server. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index f83041d87bd..e6e8279a314 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -1,7 +1,6 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.datavariable.VarGroup; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; @@ -13,14 +12,17 @@ import edu.harvard.iq.dataverse.util.DatasetFieldUtil; import edu.harvard.iq.dataverse.workflows.WorkflowComment; import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.DataFileCategory; import edu.harvard.iq.dataverse.DatasetVersionDifference; -import java.util.Collection; +import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; @@ -50,6 +52,9 @@ public Dataset execute(CommandContext ctxt) throws CommandException { if (!getUser().isSuperuser()) { throw new IllegalCommandException("Only superusers can curate published dataset versions", this); } + Dataset savedDataset = null; + // Merge the dataset into our JPA context + setDataset(ctxt.em().merge(getDataset())); ctxt.permissions().checkEditDatasetLock(getDataset(), getRequest(), 
this); // Invariant: Dataset has no locks preventing the update @@ -58,23 +63,23 @@ public Dataset execute(CommandContext ctxt) throws CommandException { DatasetVersion newVersion = getDataset().getOrCreateEditVersion(); // Copy metadata from draft version to latest published version updateVersion.setDatasetFields(newVersion.initDatasetFields()); - - + newVersion.setDatasetFields(new ArrayList()); // final DatasetVersion editVersion = getDataset().getEditVersion(); DatasetFieldUtil.tidyUpFields(updateVersion.getDatasetFields(), true); - // Merge the new version into our JPA context - ctxt.em().merge(updateVersion); - TermsOfUseAndAccess oldTerms = updateVersion.getTermsOfUseAndAccess(); TermsOfUseAndAccess newTerms = newVersion.getTermsOfUseAndAccess(); newTerms.setDatasetVersion(updateVersion); updateVersion.setTermsOfUseAndAccess(newTerms); - //Put old terms on version that will be deleted.... - newVersion.setTermsOfUseAndAccess(oldTerms); - - //Validate metadata and TofA conditions + // Clear unnecessary terms relationships .... + newVersion.setTermsOfUseAndAccess(null); + oldTerms.setDatasetVersion(null); + // Without this there's a db exception related to the oldTerms being referenced + // by the datasetversion table at the flush around line 212 + ctxt.em().flush(); + + // Validate metadata and TofA conditions validateOrDie(updateVersion, isValidateLenient()); //Also set the fileaccessrequest boolean on the dataset to match the new terms @@ -87,19 +92,20 @@ public Dataset execute(CommandContext ctxt) throws CommandException { updateVersion.getWorkflowComments().addAll(newComments); } - // we have to merge to update the database but not flush because // we don't want to create two draft versions! 
- Dataset tempDataset = ctxt.em().merge(getDataset()); - + Dataset tempDataset = getDataset(); updateVersion = tempDataset.getLatestVersionForCopy(); // Look for file metadata changes and update published metadata if needed List pubFmds = updateVersion.getFileMetadatas(); int pubFileCount = pubFmds.size(); int newFileCount = tempDataset.getOrCreateEditVersion().getFileMetadatas().size(); - /* The policy for this command is that it should only be used when the change is a 'minor update' with no file changes. - * Nominally we could call .isMinorUpdate() for that but we're making the same checks as we go through the update here. + /* + * The policy for this command is that it should only be used when the change is + * a 'minor update' with no file changes. Nominally we could call + * .isMinorUpdate() for that but we're making the same checks as we go through + * the update here. */ if (pubFileCount != newFileCount) { logger.severe("Draft version of dataset: " + tempDataset.getId() + " has: " + newFileCount + " while last published version has " + pubFileCount); @@ -108,7 +114,10 @@ public Dataset execute(CommandContext ctxt) throws CommandException { Long thumbId = null; if(tempDataset.getThumbnailFile()!=null) { thumbId = tempDataset.getThumbnailFile().getId(); - }; + } + + // Note - Curate allows file metadata changes but not adding/deleting files. If + // that ever changes, this command needs to be updated. 
for (FileMetadata publishedFmd : pubFmds) { DataFile dataFile = publishedFmd.getDataFile(); FileMetadata draftFmd = dataFile.getLatestFileMetadata(); @@ -155,45 +164,73 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // Update modification time on the published version and the dataset updateVersion.setLastUpdateTime(getTimestamp()); tempDataset.setModificationTime(getTimestamp()); - ctxt.em().merge(updateVersion); - Dataset savedDataset = ctxt.em().merge(tempDataset); - - // Flush before calling DeleteDatasetVersion which calls - // PrivateUrlServiceBean.getPrivateUrlFromDatasetId() that will query the DB and - // fail if our changes aren't there - ctxt.em().flush(); + newVersion = ctxt.em().merge(newVersion); + savedDataset = ctxt.em().merge(tempDataset); // Now delete draft version - DeleteDatasetVersionCommand cmd; - cmd = new DeleteDatasetVersionCommand(getRequest(), savedDataset); - ctxt.engine().submit(cmd); - // Running the command above reindexes the dataset, so we don't need to do it - // again in here. + ctxt.em().remove(newVersion); + + Iterator dvIt = savedDataset.getVersions().iterator(); + while (dvIt.hasNext()) { + DatasetVersion dv = dvIt.next(); + if (dv.isDraft()) { + dvIt.remove(); + break; // We've removed the draft version, no need to continue iterating + } + } + + savedDataset = ctxt.em().merge(savedDataset); + ctxt.em().flush(); + + RoleAssignment ra = ctxt.privateUrl().getPrivateUrlRoleAssignmentFromDataset(savedDataset); + if (ra != null) { + ctxt.roles().revoke(ra); + } // And update metadata at PID provider - ctxt.engine().submit( - new UpdateDvObjectPIDMetadataCommand(savedDataset, getRequest())); - - //And the exported metadata files try { - ExportService instance = ExportService.getInstance(); - instance.exportAllFormats(getDataset()); - } catch (ExportException ex) { - // Just like with indexing, a failure to export is not a fatal condition. 
- logger.log(Level.WARNING, "Curate Published DatasetVersion: exception while exporting metadata files:{0}", ex.getMessage()); + ctxt.engine().submit( + new UpdateDvObjectPIDMetadataCommand(savedDataset, getRequest())); + } catch (CommandException ex) { + // The try/catch makes this non-fatal. Should it be non-fatal - it's different from what we do in publish? + // This can be corrected by running the update PID API later, but who will look in the log? + // With the change to not use the DeleteDatasetVersionCommand above and other + // fixes, this error may now cleanly restore the initial state + // with the draft and last published versions unchanged, but this has not yet been tested. + // (Alternatively, this could move to onSuccess if we intend it to stay non-fatal.) + logger.log(Level.WARNING, "Curate Published DatasetVersion: exception while updating PID metadata:{0}", ex.getMessage()); + } - - - // Update so that getDataset() in updateDatasetUser will get the up-to-date copy - // (with no draft version) + // Update so that getDataset() in updateDatasetUser() will get the up-to-date + // copy (with no draft version) setDataset(savedDataset); + updateDatasetUser(ctxt); - - + // ToDo - see if there are other DatasetVersionUser entries unique to the draft + // version that should be moved to the last published version + // As this command is intended for minor fixes, often done by the person pushing + // the update-current-version button, this is probably a minor issue. return savedDataset; } + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + boolean retVal = true; + Dataset d = (Dataset) r; + + ctxt.index().asyncIndexDataset(d, true); + + // And the exported metadata files + try { + ExportService instance = ExportService.getInstance(); + instance.exportAllFormats(d); + } catch (ExportException ex) { + // Just like with indexing, a failure to export is not a fatal condition. 
+ retVal = false; + logger.log(Level.WARNING, "Curate Published DatasetVersion: exception while exporting metadata files:{0}", ex.getMessage()); + } + return retVal; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java index 9e5879106e4..01710e06f8f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java @@ -96,7 +96,7 @@ private RoleAssignment getRoleAssignmentFromPrivateUrlToken(String privateUrlTok * * @todo This might be a good place for Optional. */ - private RoleAssignment getPrivateUrlRoleAssignmentFromDataset(Dataset dataset) { + public RoleAssignment getPrivateUrlRoleAssignmentFromDataset(Dataset dataset) { if (dataset == null) { return null; } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 1a9221cdca2..3ff580268a9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3660,17 +3660,47 @@ public void testCuratePublishedDatasetVersionCommand() throws IOException { UtilIT.publishDatasetViaNativeApi(datasetId, "updatecurrent", apiToken).then().assertThat().statusCode(FORBIDDEN.getStatusCode()); - Response makeSuperUser = UtilIT.makeSuperUser(username); + Response makeSuperUser = UtilIT.setSuperuserStatus(username, true); //should work after making super user UtilIT.publishDatasetViaNativeApi(datasetId, "updatecurrent", apiToken).then().assertThat().statusCode(OK.getStatusCode()); + //Check that the dataset contains the updated metadata (which includes the name Spruce) Response getDatasetJsonAfterUpdate = UtilIT.nativeGet(datasetId, apiToken); - getDatasetJsonAfterUpdate.prettyPrint(); + 
assertTrue(getDatasetJsonAfterUpdate.prettyPrint().contains("Spruce")); getDatasetJsonAfterUpdate.then().assertThat() .statusCode(OK.getStatusCode()); + //Check that the draft version is gone + Response getDraft1 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); + getDraft1.then().assertThat() + .statusCode(NOT_FOUND.getStatusCode()); + + + //Also test a terms change + String jsonLDTerms = "{\"https://dataverse.org/schema/core#fileTermsOfAccess\":{\"https://dataverse.org/schema/core#dataAccessPlace\":\"Somewhere\"}}"; + Response updateTerms = UtilIT.updateDatasetJsonLDMetadata(datasetId, apiToken, jsonLDTerms, true); + updateTerms.then().assertThat() + .statusCode(OK.getStatusCode()); + + //Run Update-Current Version again + + UtilIT.publishDatasetViaNativeApi(datasetId, "updatecurrent", apiToken).then().assertThat().statusCode(OK.getStatusCode()); + + + //Verify the new term is there + Response jsonLDResponse = UtilIT.getDatasetJsonLDMetadata(datasetId, apiToken); + assertTrue(jsonLDResponse.prettyPrint().contains("Somewhere")); + jsonLDResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + //And that the draft is gone + Response getDraft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); + getDraft2.then().assertThat() + .statusCode(NOT_FOUND.getStatusCode()); + + } /** diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java index 2c0e0a5c6b7..c57fa71a340 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java @@ -41,7 +41,7 @@ public class RemoteOverlayAccessIOTest { public void setUp() { System.setProperty("dataverse.files.test.type", "remote"); System.setProperty("dataverse.files.test.label", "testOverlay"); - System.setProperty("dataverse.files.test.base-url", 
"https://demo.dataverse.org/resources"); + System.setProperty("dataverse.files.test.base-url", "https://data.qdr.syr.edu/resources"); System.setProperty("dataverse.files.test.base-store", "file"); System.setProperty("dataverse.files.test.download-redirect", "true"); System.setProperty("dataverse.files.test.remote-store-name", "DemoDataCorp");