From c5e8e2dc878838d848daf581a7b3ca7fec9e9fee Mon Sep 17 00:00:00 2001 From: lubitchv Date: Mon, 14 Sep 2020 10:30:11 -0400 Subject: [PATCH 0001/1551] Update develop with globus phase1 --- .../edu/harvard/iq/dataverse/DataFile.java | 4 + .../iq/dataverse/DataFileServiceBean.java | 12 + .../edu/harvard/iq/dataverse/DatasetLock.java | 3 + .../edu/harvard/iq/dataverse/DatasetPage.java | 51 +- .../iq/dataverse/EditDatafilesPage.java | 40 +- .../iq/dataverse/FileDownloadHelper.java | 62 +- .../iq/dataverse/PermissionServiceBean.java | 7 + .../harvard/iq/dataverse/SettingsWrapper.java | 2 + .../harvard/iq/dataverse/api/GlobusApi.java | 346 +++++++ .../iq/dataverse/dataaccess/FileAccessIO.java | 8 +- .../dataverse/dataaccess/InputStreamIO.java | 7 + .../iq/dataverse/dataaccess/S3AccessIO.java | 49 +- .../iq/dataverse/dataaccess/StorageIO.java | 3 + .../dataverse/dataaccess/SwiftAccessIO.java | 6 + .../iq/dataverse/globus/AccessList.java | 33 + .../iq/dataverse/globus/AccessToken.java | 71 ++ .../harvard/iq/dataverse/globus/FileG.java | 67 ++ .../iq/dataverse/globus/FilesList.java | 60 ++ .../dataverse/globus/GlobusServiceBean.java | 880 ++++++++++++++++++ .../iq/dataverse/globus/Identities.java | 16 + .../harvard/iq/dataverse/globus/Identity.java | 67 ++ .../harvard/iq/dataverse/globus/MkDir.java | 22 + .../iq/dataverse/globus/MkDirResponse.java | 50 + .../iq/dataverse/globus/Permissions.java | 58 ++ .../dataverse/globus/PermissionsResponse.java | 58 ++ .../dataverse/globus/SuccessfulTransfer.java | 35 + .../edu/harvard/iq/dataverse/globus/Task.java | 69 ++ .../harvard/iq/dataverse/globus/Tasklist.java | 17 + .../iq/dataverse/globus/Transferlist.java | 18 + .../harvard/iq/dataverse/globus/UserInfo.java | 68 ++ .../settings/SettingsServiceBean.java | 15 +- .../harvard/iq/dataverse/util/FileUtil.java | 13 +- .../iq/dataverse/util/SystemConfig.java | 24 +- src/main/java/propertyFiles/Bundle.properties | 7 + src/main/webapp/editFilesFragment.xhtml | 62 +- .../file-download-button-fragment.xhtml | 24 +- src/main/webapp/globus.xhtml | 30 + 37 files changed, 2345 insertions(+), 19 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/AccessList.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/FileG.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/FilesList.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Identities.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Identity.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/MkDir.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/MkDirResponse.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Permissions.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/PermissionsResponse.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/SuccessfulTransfer.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Task.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Tasklist.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Transferlist.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/UserInfo.java create mode 100644 src/main/webapp/globus.xhtml diff 
--git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 560048db9ca..98b7b624d8c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -671,6 +671,10 @@ public boolean isFilePackage() { return DataFileServiceBean.MIME_TYPE_PACKAGE_FILE.equalsIgnoreCase(contentType); } + public boolean isFileGlobus() { + return DataFileServiceBean.MIME_TYPE_GLOBUS_FILE.equalsIgnoreCase(contentType); + } + public void setIngestStatus(char ingestStatus) { this.ingestStatus = ingestStatus; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 65d26d2eb63..4d04ee1889d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -137,6 +137,8 @@ public class DataFileServiceBean implements java.io.Serializable { * the page URL above. */ public static final String MIME_TYPE_PACKAGE_FILE = "application/vnd.dataverse.file-package"; + + public static final String MIME_TYPE_GLOBUS_FILE = "application/vnd.dataverse.file-globus"; public DataFile find(Object pk) { return em.find(DataFile.class, pk); @@ -1355,6 +1357,16 @@ public boolean isFileClassPackage (DataFile file) { return MIME_TYPE_PACKAGE_FILE.equalsIgnoreCase(contentType); } + + public boolean isFileClassGlobus (DataFile file) { + if (file == null) { + return false; + } + + String contentType = file.getContentType(); + + return MIME_TYPE_GLOBUS_FILE.equalsIgnoreCase(contentType); + } public void populateFileSearchCard(SolrSearchResult solrSearchResult) { solrSearchResult.setEntity(this.findCheapAndEasy(solrSearchResult.getEntityId())); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java b/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java index 93f4aca13d1..82997deef8c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java @@ -71,6 +71,9 @@ public enum Reason { /** DCM (rsync) upload in progress */ DcmUpload, + + /** Globus upload in progress */ + GlobusUpload, /** Tasks handled by FinalizeDatasetPublicationCommand: Registering PIDs for DS and DFs and/or file validation */ diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 458fcf56ab0..d1cfb184462 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -1,5 +1,11 @@ package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.globus.AccessToken; +import edu.harvard.iq.dataverse.globus.GlobusServiceBean; +import edu.harvard.iq.dataverse.globus.UserInfo; + + import edu.harvard.iq.dataverse.provenance.ProvPopupFragmentBean; import edu.harvard.iq.dataverse.api.AbstractApiBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; @@ -55,10 +61,9 @@ import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; + +import java.io.*; +import java.net.MalformedURLException; import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.ArrayList; @@ -236,6 +241,8 @@ public enum DisplayMode { @Inject MakeDataCountLoggingServiceBean mdcLogService; @Inject 
DataverseHeaderFragment dataverseHeaderFragment; + @Inject + protected GlobusServiceBean globusService; private Dataset dataset = new Dataset(); @@ -2114,6 +2121,10 @@ private void displayLockInfo(Dataset dataset) { BundleUtil.getStringFromBundle("file.rsyncUpload.inProgressMessage.details")); lockedDueToDcmUpload = true; } + if (dataset.isLockedFor(DatasetLock.Reason.GlobusUpload)) { + JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.rsyncUpload.inProgressMessage.summary"), + BundleUtil.getStringFromBundle("file.rsyncUpload.inProgressMessage.details")); + } //This is a hack to remove dataset locks for File PID registration if //the dataset is released //in testing we had cases where datasets with 1000 files were remaining locked after being published successfully @@ -2657,10 +2668,22 @@ private String releaseDataset(boolean minor) { // has been published. If a publishing workflow is configured, this may have sent the // dataset into a workflow limbo, potentially waiting for a third party system to complete // the process. So it may be premature to show the "success" message at this point. - + + boolean globus = checkForGlobus(); if ( result.isCompleted() ) { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.publishSuccess")); + if (globus) { + if (!globusService.giveGlobusPublicPermissions(dataset.getId().toString())) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.publishGlobusFailure.details")); + } else { + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.publishSuccess")); + } + } else { + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.publishSuccess")); + } } else { + if (globus) { + globusService.giveGlobusPublicPermissions(dataset.getId().toString()); + } JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.locked.message"), BundleUtil.getStringFromBundle("dataset.locked.message.details")); } @@ -2673,6 +2696,12 @@ private String releaseDataset(boolean minor) { JsfHelper.addErrorMessage(ex.getLocalizedMessage()); } logger.severe(ex.getMessage()); + } catch (UnsupportedEncodingException ex) { + JsfHelper.addErrorMessage(ex.getLocalizedMessage()); + logger.severe(ex.getMessage()); + } catch (MalformedURLException ex) { + JsfHelper.addErrorMessage(ex.getLocalizedMessage()); + logger.severe(ex.getMessage()); } } else { @@ -2681,6 +2710,16 @@ private String releaseDataset(boolean minor) { return returnToDraftVersion(); } + private boolean checkForGlobus() { + List fml = dataset.getLatestVersion().getFileMetadatas(); + for (FileMetadata fm : fml) { + if (fm.getDataFile().isFileGlobus()) { + return true; + } + } + return false; + } + @Deprecated public String registerDataset() { try { diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 3138dcce2fe..b6c4cc744b2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -1,5 +1,7 @@ package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.globus.GlobusServiceBean; import edu.harvard.iq.dataverse.provenance.ProvPopupFragmentBean; import edu.harvard.iq.dataverse.api.AbstractApiBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; @@ -36,6 +38,8 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.EjbUtil; import static 
edu.harvard.iq.dataverse.util.JsfHelper.JH; +import java.net.MalformedURLException; +import java.text.ParseException; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; @@ -55,6 +59,7 @@ import javax.faces.view.ViewScoped; import javax.inject.Inject; import javax.inject.Named; +import org.primefaces.PrimeFaces; import org.primefaces.event.FileUploadEvent; import org.primefaces.model.file.UploadedFile; import javax.json.Json; @@ -73,9 +78,9 @@ import javax.faces.event.FacesEvent; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletResponse; +import javax.servlet.http.HttpServletRequest; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.mutable.MutableBoolean; -import org.primefaces.PrimeFaces; /** * @@ -120,6 +125,10 @@ public enum FileEditMode { DataverseLinkingServiceBean dvLinkingService; @EJB IndexServiceBean indexService; + @EJB + GlobusServiceBean globusServiceBean; + @EJB + protected SettingsServiceBean settingsSvc; @Inject DataverseRequestServiceBean dvRequestService; @Inject PermissionsWrapper permissionsWrapper; @@ -1425,7 +1434,6 @@ public boolean showFileUploadFragment(){ return mode == FileEditMode.UPLOAD || mode == FileEditMode.CREATE || mode == FileEditMode.SINGLE_REPLACE; } - public boolean showFileUploadComponent(){ if (mode == FileEditMode.UPLOAD || mode == FileEditMode.CREATE) { return true; @@ -3135,5 +3143,31 @@ private void populateFileMetadatas() { } } } - } + } + + public String getClientId() { + logger.info(settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusClientId)); + return "'" + settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusClientId) + "'"; + } + + public void startTaskList() throws MalformedURLException { + + AuthenticatedUser user = (AuthenticatedUser) session.getUser(); + globusServiceBean.globusFinishTransfer(dataset, user); + HttpServletRequest origRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest(); + + String serverName = origRequest.getServerName(); + + String httpString = "window.location.replace('" + "https://" + serverName + "/dataset.xhtml?persistentId=" + dataset.getGlobalId(); + Dataset ds = datasetService.find(dataset.getId()); + if (ds.getLatestVersion().isWorkingCopy()) { + httpString = httpString + "&version=DRAFT" + "'" + ")"; + } + else { + httpString = httpString + "'" +")"; + } + + logger.info(httpString); + PrimeFaces.current().executeScript(httpString); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java index a6be412990b..9e9594d9044 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java @@ -10,6 +10,9 @@ import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.externaltools.ExternalTool; import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.datavariable.DataVariable; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import static edu.harvard.iq.dataverse.dataaccess.S3AccessIO.S3_IDENTIFIER_PREFIX; import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import java.util.ArrayList; import java.util.HashMap; @@ -28,6 +31,12 @@ import org.primefaces.PrimeFaces; //import org.primefaces.context.RequestContext; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; + +import 
edu.harvard.iq.dataverse.util.SystemConfig;
+
 /**
  *
  * @author skraffmi
  */
@@ -39,6 +48,7 @@ public class FileDownloadHelper implements java.io.Serializable {

     private static final Logger logger = Logger.getLogger(FileDownloadHelper.class.getCanonicalName());

+
     @Inject
     DataverseSession session;
@@ -56,7 +66,14 @@ public class FileDownloadHelper implements java.io.Serializable {
     @EJB
     DataFileServiceBean datafileService;
-
+
+    @EJB
+    protected SettingsServiceBean settingsSvc;
+
+    @EJB
+    protected DatasetServiceBean datasetSvc;
+
     UIInput nameField;

     public UIInput getNameField() {
@@ -553,5 +570,48 @@ public DataverseSession getSession() {
     public void setSession(DataverseSession session) {
         this.session = session;
     }
+
+    public void goGlobusDownload(FileMetadata fileMetadata) {
+
+        String datasetId = fileMetadata.getDatasetVersion().getDataset().getId().toString();
+
+        String directory = getDirectory(datasetId);
+        String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, "");
+
+        if (fileMetadata.getDirectoryLabel() != null && !fileMetadata.getDirectoryLabel().equals("")) {
+            directory = directory + "/" + fileMetadata.getDirectoryLabel() + "/";
+        }
+
+        logger.info(directory);
+
+        String httpString = "window.open('" + "https://app.globus.org/file-manager?origin_id=" + globusEndpoint + "&origin_path=" + directory + "'" + ",'_blank')";
+        PrimeFaces.current().executeScript(httpString);
+    }
+
+    String getDirectory(String datasetId) {
+        Dataset dataset = null;
+        String directory = null;
+        try {
+            dataset = datasetSvc.find(Long.parseLong(datasetId));
+            if (dataset == null) {
+                logger.severe("Dataset not found " + datasetId);
+                return null;
+            }
+            // The directory on the Globus endpoint is the dataset's storage identifier
+            // with the store prefix (everything up to and including "//") stripped off.
+            String storeId = dataset.getStorageIdentifier();
+            directory = storeId.substring(storeId.indexOf("//") + 1);
+            logger.fine(storeId);
+            logger.fine(directory);
+            logger.fine("Storage identifier: " + dataset.getIdentifierForFileStorage());
+            return directory;
+
+        } catch (NumberFormatException nfe) {
+            logger.severe(nfe.getMessage());
+            return null;
+        }
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
index bef27ec49b6..74346b0a567 100644
--- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
@@ -722,6 +722,10 @@ public void checkEditDatasetLock(Dataset dataset, DataverseRequest dataverseRequ
         if (dataset.isLockedFor(DatasetLock.Reason.DcmUpload)) {
             throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.editNotAllowed"), command);
         }
+        // TODO: Do we need to check for "GlobusUpload"? Should the message be more specific?
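+        // Editorial sketch (not part of the patch): the GlobusUpload guard below follows
+        // the DcmUpload pattern above. For the guard to ever fire, some code path has to
+        // acquire the lock first; the commented-out block in GlobusApi.globus() suggests
+        // the intended shape, roughly:
+        //
+        //     DatasetLock lock = datasetService.addDatasetLock(dataset.getId(),
+        //             DatasetLock.Reason.GlobusUpload, userId, "Globus upload in progress");
+        //     try {
+        //         // ... register the transferred files ...
+        //     } finally {
+        //         datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.GlobusUpload);
+        //     }
+        //
+        // addDatasetLock/removeDatasetLocks are the existing DatasetServiceBean methods
+        // used for DcmUpload; the try/finally placement is an assumption, not something
+        // this patch does.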
+        if (dataset.isLockedFor(DatasetLock.Reason.GlobusUpload)) {
+            throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.editNotAllowed"), command);
+        }
         if (dataset.isLockedFor(DatasetLock.Reason.EditInProgress)) {
             throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.editNotAllowed"), command);
         }
@@ -753,6 +757,9 @@ public void checkPublishDatasetLock(Dataset dataset, DataverseRequest dataverseR
         if (dataset.isLockedFor(DatasetLock.Reason.DcmUpload)) {
             throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.publishNotAllowed"), command);
         }
+        if (dataset.isLockedFor(DatasetLock.Reason.GlobusUpload)) {
+            throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.publishNotAllowed"), command);
+        }
         if (dataset.isLockedFor(DatasetLock.Reason.EditInProgress)) {
             throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.publishNotAllowed"), command);
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
index 556c2294bda..bf03e4b51b8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
@@ -157,6 +157,8 @@ public boolean isRsyncUpload() {
     public boolean isRsyncDownload() {
         return systemConfig.isRsyncDownload();
     }
+
+    public boolean isGlobusUpload() {
+        return systemConfig.isGlobusUpload();
+    }

     public boolean isRsyncOnly() {
         return systemConfig.isRsyncOnly();
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java
new file mode 100644
index 00000000000..ff5c3c6eb51
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java
@@ -0,0 +1,346 @@
+package edu.harvard.iq.dataverse.api;
+
+import com.amazonaws.services.s3.model.S3ObjectSummary;
+import edu.harvard.iq.dataverse.*;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import edu.harvard.iq.dataverse.dataaccess.DataAccess;
+import edu.harvard.iq.dataverse.dataaccess.StorageIO;
+import edu.harvard.iq.dataverse.engine.command.Command;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
+import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.util.FileUtil;
+
+import javax.ejb.EJB;
+import javax.ejb.Stateless;
+import javax.inject.Inject;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.*;
+import javax.ws.rs.core.Response;
+import java.io.File;
+import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+@Stateless
+@Path("globus")
+public class GlobusApi extends AbstractApiBean {
+    private static final Logger logger = Logger.getLogger(GlobusApi.class.getCanonicalName());
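+
+    // Editorial note (not part of the patch): this bean exposes a single endpoint,
+    // POST /api/globus/{datasetId}. The caller authenticates with a Dataverse API
+    // token (X-Dataverse-key) and must hold EditDataset on the dataset's owner (see
+    // checkAuth() at the bottom of this class). The handler polls the Globus
+    // transfer task list for up to four hours; once a matching SUCCEEDED task is
+    // found, it lists the dataset's S3 prefix and registers any new objects as
+    // DataFiles. The commented-out curl command in GlobusServiceBean.onLoad() shows
+    // the intended client of this endpoint.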
+
+    @EJB
+    DatasetServiceBean datasetService;
+
+    @EJB
+    GlobusServiceBean globusServiceBean;
+
+    @EJB
+    EjbDataverseEngine commandEngine;
+
+    @EJB
+    PermissionServiceBean permissionService;
+
+    @Inject
+    DataverseRequestServiceBean dvRequestService;
+
+    @POST
+    @Path("{datasetId}")
+    public Response globus(@PathParam("datasetId") String datasetId) {
+
+        logger.info("Async:======Start Async Tasklist == dataset id :" + datasetId);
+        Dataset dataset = null;
+        try {
+            dataset = findDatasetOrDie(datasetId);
+        } catch (WrappedResponse ex) {
+            return ex.getResponse();
+        }
+        User apiTokenUser = checkAuth(dataset);
+
+        if (apiTokenUser == null) {
+            return unauthorized("Access denied");
+        }
+
+        try {
+
+            /*
+            String lockInfoMessage = "Globus upload in progress";
+            DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload, apiTokenUser != null ? ((AuthenticatedUser) apiTokenUser).getId() : null, lockInfoMessage);
+            if (lock != null) {
+                dataset.addLock(lock);
+            } else {
+                logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
+            }
+            */
+
+            List<FileMetadata> fileMetadatas = new ArrayList<>();
+
+            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+
+            StorageIO<Dataset> datasetSIO = DataAccess.getStorageIO(dataset);
+
+            String task_id = null;
+
+            String timeWhenAsyncStarted = sdf.format(new Date(System.currentTimeMillis() + (5 * 60 * 60 * 1000))); // added 5 hrs to match output from globus api
+
+            String endDateTime = sdf.format(new Date(System.currentTimeMillis() + (4 * 60 * 60 * 1000))); // the tasklist will be monitored for 4 hrs
+            Calendar cal1 = Calendar.getInstance();
+            cal1.setTime(sdf.parse(endDateTime));
+
+            do {
+                try {
+                    String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, "");
+
+                    task_id = globusServiceBean.getTaskList(basicGlobusToken, dataset.getIdentifierForFileStorage(), timeWhenAsyncStarted);
+                    // Sleep between polls so we do not hammer the Globus API while waiting.
+                    Thread.sleep(10000);
+                    String currentDateTime = sdf.format(new Date(System.currentTimeMillis()));
+                    Calendar cal2 = Calendar.getInstance();
+                    cal2.setTime(sdf.parse(currentDateTime));
+
+                    if (cal2.after(cal1)) {
+                        logger.info("Async:======Time exceeded " + endDateTime + " ====== " + currentDateTime + " ==== datasetId :" + datasetId);
+                        break;
+                    } else if (task_id != null) {
+                        break;
+                    }
+
+                } catch (Exception ex) {
+                    logger.log(Level.SEVERE, "Failed to get task id", ex);
+                    return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to get task id");
+                }
+
+            } while (task_id == null);
+
+            logger.info("Async:======Found matching task id " + task_id + " ==== datasetId :" + datasetId);
+
+            DatasetVersion workingVersion = dataset.getEditVersion();
+
+            if (workingVersion.getCreateTime() != null) {
+                workingVersion.setCreateTime(new Timestamp(new Date().getTime()));
+            }
+
+            String directory = dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage();
+
+            logger.info("Async:======= directory ==== " + directory + " ==== datasetId :" + datasetId);
+            Map<String, Integer> checksumMapOld = new HashMap<>();
+
+            Iterator<FileMetadata> fmIt = workingVersion.getFileMetadatas().iterator();
+
+            while (fmIt.hasNext()) {
+                FileMetadata fm = fmIt.next();
+                if (fm.getDataFile() != null && fm.getDataFile().getId() != null) {
+                    String chksum = fm.getDataFile().getChecksumValue();
+                    if (chksum != null) {
+                        checksumMapOld.put(chksum, 1);
+                    }
+                }
+            }
+
+            List<DataFile> dFileList = new ArrayList<>();
+            for (S3ObjectSummary s3ObjectSummary :
datasetSIO.listAuxObjects("")) { + + String s3ObjectKey = s3ObjectSummary.getKey(); + + String t = s3ObjectKey.replace(directory, ""); + + if (t.indexOf(".") > 0) { + long totalSize = s3ObjectSummary.getSize(); + String filePath = s3ObjectKey; + String checksumVal = s3ObjectSummary.getETag(); + + if ((checksumMapOld.get(checksumVal) != null)) { + logger.info("Async: ==== datasetId :" + datasetId + "======= filename ==== " + filePath + " == file already exists "); + } else if (!filePath.contains("cached")) { + + logger.info("Async: ==== datasetId :" + datasetId + "======= filename ==== " + filePath + " == new file "); + try { + + DataFile datafile = new DataFile(DataFileServiceBean.MIME_TYPE_GLOBUS_FILE); //MIME_TYPE_GLOBUS + datafile.setModificationTime(new Timestamp(new Date().getTime())); + datafile.setCreateDate(new Timestamp(new Date().getTime())); + datafile.setPermissionModificationTime(new Timestamp(new Date().getTime())); + + FileMetadata fmd = new FileMetadata(); + + String fileName = filePath.split("/")[filePath.split("/").length - 1]; + fmd.setLabel(fileName); + fmd.setDirectoryLabel(filePath.replace(directory, "").replace(File.separator + fileName, "")); + + fmd.setDataFile(datafile); + + datafile.getFileMetadatas().add(fmd); + + FileUtil.generateS3PackageStorageIdentifier(datafile); + logger.info("Async: ==== datasetId :" + datasetId + "======= filename ==== " + filePath + " == added to datafile, filemetadata "); + + try { + // We persist "SHA1" rather than "SHA-1". + datafile.setChecksumType(DataFile.ChecksumType.SHA1); + datafile.setChecksumValue(checksumVal); + } catch (Exception cksumEx) { + logger.info("Async: ==== datasetId :" + datasetId + "======Could not calculate checksumType signature for the new file "); + } + + datafile.setFilesize(totalSize); + + dFileList.add(datafile); + + } catch (Exception ioex) { + logger.info("Async: ==== datasetId :" + datasetId + "======Failed to process and/or save the file " + ioex.getMessage()); + return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to do task_list" ); + + } + } + } + } + +/* + DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.GlobusUpload); + if (dcmLock == null) { + logger.info("Dataset not locked for DCM upload"); + } else { + datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.GlobusUpload); + dataset.removeLock(dcmLock); + } + logger.info(" ======= Remove Dataset Lock "); +*/ + + List filesAdded = new ArrayList<>(); + + if (dFileList != null && dFileList.size() > 0) { + + // Dataset dataset = version.getDataset(); + + for (DataFile dataFile : dFileList) { + + if (dataFile.getOwner() == null) { + dataFile.setOwner(dataset); + + workingVersion.getFileMetadatas().add(dataFile.getFileMetadata()); + dataFile.getFileMetadata().setDatasetVersion(workingVersion); + dataset.getFiles().add(dataFile); + + } + + filesAdded.add(dataFile); + + } + + logger.info("Async: ==== datasetId :" + datasetId + " ===== Done! 
Finished saving new files to the dataset."); + } + + fileMetadatas.clear(); + for (DataFile addedFile : filesAdded) { + fileMetadatas.add(addedFile.getFileMetadata()); + } + filesAdded = null; + + if (workingVersion.isDraft()) { + + logger.info("Async: ==== datasetId :" + datasetId + " ==== inside draft version "); + + Timestamp updateTime = new Timestamp(new Date().getTime()); + + workingVersion.setLastUpdateTime(updateTime); + dataset.setModificationTime(updateTime); + + + for (FileMetadata fileMetadata : fileMetadatas) { + + if (fileMetadata.getDataFile().getCreateDate() == null) { + fileMetadata.getDataFile().setCreateDate(updateTime); + fileMetadata.getDataFile().setCreator((AuthenticatedUser) apiTokenUser); + } + fileMetadata.getDataFile().setModificationTime(updateTime); + } + + + } else { + logger.info("Async: ==== datasetId :" + datasetId + " ==== inside released version "); + + for (int i = 0; i < workingVersion.getFileMetadatas().size(); i++) { + for (FileMetadata fileMetadata : fileMetadatas) { + if (fileMetadata.getDataFile().getStorageIdentifier() != null) { + + if (fileMetadata.getDataFile().getStorageIdentifier().equals(workingVersion.getFileMetadatas().get(i).getDataFile().getStorageIdentifier())) { + workingVersion.getFileMetadatas().set(i, fileMetadata); + } + } + } + } + + + } + + + try { + Command cmd; + logger.info("Async: ==== datasetId :" + datasetId + " ======= UpdateDatasetVersionCommand START in globus function "); + cmd = new UpdateDatasetVersionCommand(dataset,new DataverseRequest(apiTokenUser, (HttpServletRequest) null)); + ((UpdateDatasetVersionCommand) cmd).setValidateLenient(true); + //new DataverseRequest(authenticatedUser, (HttpServletRequest) null) + //dvRequestService.getDataverseRequest() + commandEngine.submit(cmd); + } catch (CommandException ex) { + logger.log(Level.WARNING, "Async: ==== datasetId :" + datasetId + "======CommandException updating DatasetVersion from batch job: " + ex.getMessage()); + return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to do task_list" ); + } + + logger.info("Async: ==== datasetId :" + datasetId + " ======= GLOBUS ASYNC CALL COMPLETED SUCCESSFULLY "); + + return ok("Async: ==== datasetId :" + datasetId + ": Finished task_list"); + } catch(Exception e) { + String message = e.getMessage(); + + logger.info("Async: ==== datasetId :" + datasetId + " ======= GLOBUS ASYNC CALL Exception ============== " + message); + e.printStackTrace(); + return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to do task_list" ); + //return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. 
Message was '" + message + "'."); + } + + + } + + private User checkAuth(Dataset dataset) { + + User apiTokenUser = null; + + try { + apiTokenUser = findUserOrDie(); + } catch (WrappedResponse wr) { + apiTokenUser = null; + logger.log(Level.FINE, "Message from findUserOrDie(): {0}", wr.getMessage()); + } + + if (apiTokenUser != null) { + // used in an API context + if (!permissionService.requestOn(createDataverseRequest(apiTokenUser), dataset.getOwner()).has(Permission.EditDataset)) { + apiTokenUser = null; + } + } + + return apiTokenUser; + + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java index bd0549622f0..46c80c0f984 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java @@ -46,6 +46,8 @@ import java.nio.file.StandardCopyOption; import java.util.ArrayList; +import com.amazonaws.services.s3.model.S3ObjectSummary; + public class FileAccessIO extends StorageIO { @@ -415,7 +417,11 @@ public void deleteAllAuxObjects() throws IOException { } } - + + @Override + public List listAuxObjects(String s) throws IOException { + return null; + } @Override public String getStorageLocation() { diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java index c9796d24b27..e244b8a788a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java @@ -16,6 +16,8 @@ import java.util.List; import java.util.logging.Logger; +import com.amazonaws.services.s3.model.S3ObjectSummary; + /** * * @author Leonid Andreev @@ -149,6 +151,11 @@ public OutputStream getOutputStream() throws IOException { throw new UnsupportedDataAccessOperationException("InputStreamIO: there is no output stream associated with this object."); } + @Override + public List listAuxObjects(String s) throws IOException { + return null; + } + @Override public InputStream getAuxFileAsInputStream(String auxItemTag) { throw new UnsupportedOperationException("InputStreamIO: this method is not supported in this DataAccess driver."); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index c78b84233be..3e38d3cdc9c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -3,6 +3,8 @@ import com.amazonaws.AmazonClientException; import com.amazonaws.HttpMethod; import com.amazonaws.SdkClientException; +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.auth.profile.ProfileCredentialsProvider; import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.services.s3.AmazonS3; @@ -103,6 +105,8 @@ public S3AccessIO(String storageLocation, String driverId) { minPartSize = getMinPartSize(driverId); key = storageLocation.substring(storageLocation.indexOf('/')+1); } + + public static String S3_IDENTIFIER_PREFIX = "s3"; //Used for tests only public S3AccessIO(T dvObject, DataAccessRequest req, @NotNull AmazonS3 s3client, String driverId) { @@ -634,6 +638,46 @@ public List listAuxObjects() throws IOException { return ret; } + @Override + public List listAuxObjects(String s ) throws IOException { + if 
(!this.canWrite()) {
+            open();
+        }
+        String prefix = getDestinationKey("");
+
+        List<S3ObjectSummary> ret = new ArrayList<>();
+
+        logger.fine("S3 listAuxObjects: bucket " + bucketName + ", prefix " + prefix);
+
+        ListObjectsRequest req = new ListObjectsRequest().withBucketName(bucketName).withPrefix(prefix);
+        ObjectListing storedAuxFilesList = null;
+        try {
+            storedAuxFilesList = s3.listObjects(req);
+        } catch (SdkClientException sce) {
+            throw new IOException("S3 listAuxObjects: failed to get a listing for " + prefix);
+        }
+        if (storedAuxFilesList == null) {
+            return ret;
+        }
+        List<S3ObjectSummary> storedAuxFilesSummary = storedAuxFilesList.getObjectSummaries();
+        try {
+            while (storedAuxFilesList.isTruncated()) {
+                logger.fine("S3 listAuxObjects: going to next page of list");
+                storedAuxFilesList = s3.listNextBatchOfObjects(storedAuxFilesList);
+                if (storedAuxFilesList != null) {
+                    storedAuxFilesSummary.addAll(storedAuxFilesList.getObjectSummaries());
+                }
+            }
+        } catch (AmazonClientException ase) {
+            throw new IOException("S3AccessIO: Failed to get aux objects for listing.");
+        }
+
+        return storedAuxFilesSummary;
+    }
+
 @Override
 public void deleteAuxObject(String auxItemTag) throws IOException {
     if (!this.canWrite()) {
@@ -1056,7 +1100,10 @@ private static AmazonS3 getClient(String driverId) {
         // if the admin has set a system property (see below) we use this endpoint URL instead of the standard ones.
         if (!s3CEUrl.isEmpty()) {
-            s3CB.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(s3CEUrl, s3CERegion));
+            // Credentials must keep coming from the configured provider chain;
+            // never hardcode access keys in source.
+            s3CB.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(s3CEUrl.trim(), s3CERegion.trim()));
         }
         /**
          * Pass in a boolean value if path style access should be used within the S3 client.
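The listAuxObjects(String) override added above uses the stock AWS SDK v1 pagination idiom (listObjects caps each page, so truncated listings must be followed). A minimal, self-contained restatement of that idiom, with placeholder bucket and prefix arguments; every call here is API the patch itself already uses:

    import com.amazonaws.services.s3.AmazonS3;
    import com.amazonaws.services.s3.model.ListObjectsRequest;
    import com.amazonaws.services.s3.model.ObjectListing;
    import com.amazonaws.services.s3.model.S3ObjectSummary;
    import java.util.ArrayList;
    import java.util.List;

    public class S3ListingSketch {
        // Collects every object under bucket/prefix, following truncated pages.
        public static List<S3ObjectSummary> listAll(AmazonS3 s3, String bucket, String prefix) {
            List<S3ObjectSummary> all = new ArrayList<>();
            ObjectListing page = s3.listObjects(
                    new ListObjectsRequest().withBucketName(bucket).withPrefix(prefix));
            all.addAll(page.getObjectSummaries());
            while (page.isTruncated()) {
                page = s3.listNextBatchOfObjects(page);
                all.addAll(page.getObjectSummaries());
            }
            return all;
        }
    }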
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java
index 2f66eec5f4c..9bfd9154323 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java
@@ -37,6 +37,7 @@
 import java.util.Iterator;
 import java.util.List;

+import com.amazonaws.services.s3.model.S3ObjectSummary;
 //import org.apache.commons.httpclient.Header;
 //import org.apache.commons.httpclient.methods.GetMethod;

@@ -542,4 +543,6 @@ public boolean isBelowIngestSizeLimit() {
             return true;
         }
     }
+
+    public abstract List<S3ObjectSummary> listAuxObjects(String s) throws IOException;
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java
index 3bc29cb9836..7f851f09450 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java
@@ -32,6 +32,7 @@
 import org.javaswift.joss.model.Container;
 import org.javaswift.joss.model.StoredObject;

+import com.amazonaws.services.s3.model.S3ObjectSummary;
 /**
  *
  * @author leonid andreev
@@ -874,6 +875,11 @@ public String getSwiftContainerName() {
         }
         return null;
     }
+
+    @Override
+    public List<S3ObjectSummary> listAuxObjects(String s) throws IOException {
+        return null;
+    }

     //https://gist.github.com/ishikawa/88599
     public static String toHexString(byte[] bytes) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/AccessList.java b/src/main/java/edu/harvard/iq/dataverse/globus/AccessList.java
new file mode 100644
index 00000000000..9a963000541
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/globus/AccessList.java
@@ -0,0 +1,33 @@
+package edu.harvard.iq.dataverse.globus;
+
+import java.util.ArrayList;
+
+public class AccessList {
+    private int length;
+    private String endpoint;
+    private ArrayList<Permissions> DATA;
+
+    public void setDATA(ArrayList<Permissions> DATA) {
+        this.DATA = DATA;
+    }
+
+    public void setEndpoint(String endpoint) {
+        this.endpoint = endpoint;
+    }
+
+    public void setLength(int length) {
+        this.length = length;
+    }
+
+    public String getEndpoint() {
+        return endpoint;
+    }
+
+    public ArrayList<Permissions> getDATA() {
+        return DATA;
+    }
+
+    public int getLength() {
+        return length;
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java b/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java
new file mode 100644
index 00000000000..2d68c5c8839
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java
@@ -0,0 +1,71 @@
+package edu.harvard.iq.dataverse.globus;
+
+import java.util.ArrayList;
+
+public class AccessToken implements java.io.Serializable {
+
+    private String accessToken;
+    private String idToken;
+    private Long expiresIn;
+    private String resourceServer;
+    private String tokenType;
+    private String state;
+    private String scope;
+    private String refreshToken;
+    private ArrayList<AccessToken> otherTokens;
+
+    public String getAccessToken() { return accessToken; }
+
+    String getIdToken() { return idToken; }
+
+    Long getExpiresIn() { return expiresIn; }
+
+    String getResourceServer() { return resourceServer; }
+
+    String getTokenType() { return tokenType; }
+
+    String getState() { return state; }
+
+    String getScope() { return scope; }
+
+    String getRefreshToken() { return refreshToken; }
+
+    ArrayList<AccessToken> getOtherTokens() { return otherTokens; }
+
+    public void setAccessToken(String accessToken) {
+        this.accessToken =
accessToken; + } + + public void setExpiresIn(Long expiresIn) { + this.expiresIn = expiresIn; + } + + public void setIdToken(String idToken) { + this.idToken = idToken; + } + + public void setOtherTokens(ArrayList otherTokens) { + this.otherTokens = otherTokens; + } + + public void setRefreshToken(String refreshToken) { + this.refreshToken = refreshToken; + } + + public void setResourceServer(String resourceServer) { + this.resourceServer = resourceServer; + } + + public void setScope(String scope) { + this.scope = scope; + } + + public void setState(String state) { + this.state = state; + } + + public void setTokenType(String tokenType) { + this.tokenType = tokenType; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/FileG.java b/src/main/java/edu/harvard/iq/dataverse/globus/FileG.java new file mode 100644 index 00000000000..bd6a4b3b881 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/FileG.java @@ -0,0 +1,67 @@ +package edu.harvard.iq.dataverse.globus; + +public class FileG { + private String DATA_TYPE; + private String group; + private String name; + private String permissions; + private String size; + private String type; + private String user; + + public String getDATA_TYPE() { + return DATA_TYPE; + } + + public String getGroup() { + return group; + } + + public String getName() { + return name; + } + + public String getPermissions() { + return permissions; + } + + public String getSize() { + return size; + } + + public String getType() { + return type; + } + + public String getUser() { + return user; + } + + public void setDATA_TYPE(String DATA_TYPE) { + this.DATA_TYPE = DATA_TYPE; + } + + public void setGroup(String group) { + this.group = group; + } + + public void setName(String name) { + this.name = name; + } + + public void setPermissions(String permissions) { + this.permissions = permissions; + } + + public void setSize(String size) { + this.size = size; + } + + public void setType(String type) { + this.type = type; + } + + public void setUser(String user) { + this.user = user; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/FilesList.java b/src/main/java/edu/harvard/iq/dataverse/globus/FilesList.java new file mode 100644 index 00000000000..777e37f9b80 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/FilesList.java @@ -0,0 +1,60 @@ +package edu.harvard.iq.dataverse.globus; + +import java.util.ArrayList; + +public class FilesList { + private ArrayList DATA; + private String DATA_TYPE; + private String absolute_path; + private String endpoint; + private String length; + private String path; + + public String getEndpoint() { + return endpoint; + } + + public ArrayList getDATA() { + return DATA; + } + + public String getAbsolute_path() { + return absolute_path; + } + + public String getDATA_TYPE() { + return DATA_TYPE; + } + + public String getLength() { + return length; + } + + public String getPath() { + return path; + } + + public void setLength(String length) { + this.length = length; + } + + public void setEndpoint(String endpoint) { + this.endpoint = endpoint; + } + + public void setDATA(ArrayList DATA) { + this.DATA = DATA; + } + + public void setAbsolute_path(String absolute_path) { + this.absolute_path = absolute_path; + } + + public void setDATA_TYPE(String DATA_TYPE) { + this.DATA_TYPE = DATA_TYPE; + } + + public void setPath(String path) { + this.path = path; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java 
b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java new file mode 100644 index 00000000000..e060a5de59b --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -0,0 +1,880 @@ +package edu.harvard.iq.dataverse.globus; + +import com.amazonaws.services.s3.model.S3ObjectSummary; +import com.google.gson.FieldNamingPolicy; +import com.google.gson.GsonBuilder; +import edu.harvard.iq.dataverse.*; + +import javax.ejb.EJB; +import javax.ejb.Stateless; +import javax.faces.application.FacesMessage; +import javax.faces.context.FacesContext; +import javax.faces.view.ViewScoped; +import javax.inject.Inject; +import javax.inject.Named; + +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.*; + +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLEncoder; + +import java.sql.Timestamp; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.logging.Level; +import java.util.logging.Logger; +import com.google.gson.Gson; +import edu.harvard.iq.dataverse.api.AbstractApiBean; +import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; +import edu.harvard.iq.dataverse.authorization.users.ApiToken; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.dataaccess.DataAccess; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.engine.command.Command; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.FileUtil; +import edu.harvard.iq.dataverse.util.JsfHelper; +import edu.harvard.iq.dataverse.util.SystemConfig; +import org.primefaces.PrimeFaces; + +import static edu.harvard.iq.dataverse.util.JsfHelper.JH; + + +@Stateless +@Named("GlobusServiceBean") +public class GlobusServiceBean implements java.io.Serializable{ + + @EJB + protected DatasetServiceBean datasetSvc; + + @EJB + protected SettingsServiceBean settingsSvc; + + @Inject + DataverseSession session; + + @EJB + protected AuthenticationServiceBean authSvc; + + @EJB + EjbDataverseEngine commandEngine; + + private static final Logger logger = Logger.getLogger(FeaturedDataverseServiceBean.class.getCanonicalName()); + + private String code; + private String userTransferToken; + private String state; + + public String getState() { + return state; + } + + public void setState(String state) { + this.state = state; + } + + public String getCode() { + return code; + } + + public void setCode(String code) { + this.code = code; + } + + public String getUserTransferToken() { + return userTransferToken; + } + + public void setUserTransferToken(String userTransferToken) { + this.userTransferToken = userTransferToken; + } + + public void onLoad() { + logger.info("Start Globus " + code); + logger.info("State " + state); + + String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); + String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); + if (globusEndpoint.equals("") 
|| basicGlobusToken.equals("")) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } + String datasetId = state; + logger.info("DatasetId = " + datasetId); + + String directory = getDirectory(datasetId); + if (directory == null) { + logger.severe("Cannot find directory"); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } + HttpServletRequest origRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest(); + + logger.info(origRequest.getScheme()); + logger.info(origRequest.getServerName()); + + if (code != null ) { + + try { + AccessToken accessTokenUser = getAccessToken(origRequest, basicGlobusToken); + if (accessTokenUser == null) { + logger.severe("Cannot get access user token for code " + code); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } else { + setUserTransferToken(accessTokenUser.getOtherTokens().get(0).getAccessToken()); + } + + UserInfo usr = getUserInfo(accessTokenUser); + if (usr == null) { + logger.severe("Cannot get user info for " + accessTokenUser.getAccessToken()); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } + logger.info(accessTokenUser.getAccessToken()); + logger.info(usr.getEmail()); + AccessToken clientTokenUser = getClientToken(basicGlobusToken); + if (clientTokenUser == null) { + logger.severe("Cannot get client token "); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } + logger.info(clientTokenUser.getAccessToken()); + + int status = createDirectory(clientTokenUser, directory, globusEndpoint); + if (status == 202) { + int perStatus = givePermission("identity", usr.getSub(), "rw", clientTokenUser, directory, globusEndpoint); + if (perStatus != 201 && perStatus != 200) { + logger.severe("Cannot get permissions "); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } + } else if (status == 502) { //directory already exists + int perStatus = givePermission("identity", usr.getSub(), "rw", clientTokenUser, directory, globusEndpoint); + if (perStatus == 409) { + logger.info("permissions already exist"); + } else if (perStatus != 201 && perStatus != 200) { + logger.severe("Cannot get permissions "); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } + } else { + logger.severe("Cannot create directory, status code " + status); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } + // ProcessBuilder processBuilder = new ProcessBuilder(); + // AuthenticatedUser user = (AuthenticatedUser) session.getUser(); + // ApiToken token = authSvc.findApiTokenByUser(user); + // String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST https://" + origRequest.getServerName() + "/api/globus/" + datasetId; + // logger.info("====command ==== " + command); + // processBuilder.command("bash", "-c", command); + // logger.info("=== Start process"); + // Process process = processBuilder.start(); + // logger.info("=== Going globus"); + goGlobusUpload(directory, globusEndpoint); + logger.info("=== Finished globus"); + + + } catch (MalformedURLException ex) { + logger.severe(ex.getMessage()); + logger.severe(ex.getCause().toString()); + 
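+                // Editorial note (not part of the patch): MalformedURLException and
+                // UnsupportedEncodingException both extend IOException, so the three
+                // catch blocks here could collapse into a single catch (IOException ex).
+                // Also, ex.getCause() can be null, so the getCause().toString() calls
+                // above risk a NullPointerException inside the handler itself.
+                // The flow being wrapped is the OAuth2 authorization-code exchange:
+                // the "code" request parameter is traded for tokens at auth.globus.org
+                // (getAccessToken), the transfer-scoped token is taken from
+                // other_tokens[0], the user's Globus identity comes from
+                // /v2/oauth2/userinfo (getUserInfo), and that identity is granted an
+                // "rw" ACL on the dataset directory before redirecting to the Globus
+                // file manager.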
JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + } catch (UnsupportedEncodingException ex) { + logger.severe(ex.getMessage()); + logger.severe(ex.getCause().toString()); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + } catch (IOException ex) { + logger.severe(ex.getMessage()); + logger.severe(ex.getCause().toString()); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + } + + } + + } + + private void goGlobusUpload(String directory, String globusEndpoint ) { + + String httpString = "window.location.replace('" + "https://app.globus.org/file-manager?destination_id=" + globusEndpoint + "&destination_path=" + directory + "'" +")"; + PrimeFaces.current().executeScript(httpString); + } + + public void goGlobusDownload(String datasetId) { + + String directory = getDirectory(datasetId); + String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); + String httpString = "window.location.replace('" + "https://app.globus.org/file-manager?origin_id=" + globusEndpoint + "&origin_path=" + directory + "'" +")"; + PrimeFaces.current().executeScript(httpString); + } + + ArrayList checkPermisions( AccessToken clientTokenUser, String directory, String globusEndpoint, String principalType, String principal) throws MalformedURLException { + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access_list"); + MakeRequestResponse result = makeRequest(url, "Bearer", + clientTokenUser.getOtherTokens().get(0).getAccessToken(),"GET", null); + ArrayList ids = new ArrayList(); + if (result.status == 200) { + AccessList al = parseJson(result.jsonResponse, AccessList.class, false); + + for (int i = 0; i< al.getDATA().size(); i++) { + Permissions pr = al.getDATA().get(i); + if ((pr.getPath().equals(directory + "/") || pr.getPath().equals(directory )) && pr.getPrincipalType().equals(principalType) && + ((principal == null) || (principal != null && pr.getPrincipal().equals(principal))) ) { + ids.add(pr.getId()); + } else { + continue; + } + } + } + + return ids; + } + + public void updatePermision(AccessToken clientTokenUser, String directory, String principalType, String perm) throws MalformedURLException { + if (directory != null && !directory.equals("")) { + directory = "/" + directory + "/"; + } + logger.info("Start updating permissions." 
+ " Directory is " + directory); + String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); + ArrayList rules = checkPermisions( clientTokenUser, directory, globusEndpoint, principalType, null); + logger.info("Size of rules " + rules.size()); + int count = 0; + while (count < rules.size()) { + logger.info("Start removing rules " + rules.get(count) ); + Permissions permissions = new Permissions(); + permissions.setDATA_TYPE("access"); + permissions.setPermissions(perm); + permissions.setPath(directory); + + Gson gson = new GsonBuilder().create(); + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + rules.get(count)); + logger.info("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + rules.get(count)); + MakeRequestResponse result = makeRequest(url, "Bearer", + clientTokenUser.getOtherTokens().get(0).getAccessToken(),"PUT", gson.toJson(permissions)); + if (result.status != 200) { + logger.warning("Cannot update access rule " + rules.get(count)); + } else { + logger.info("Access rule " + rules.get(count) + " was updated"); + } + count++; + } + } + + public int givePermission(String principalType, String principal, String perm, AccessToken clientTokenUser, String directory, String globusEndpoint) throws MalformedURLException { + + ArrayList rules = checkPermisions( clientTokenUser, directory, globusEndpoint, principalType, principal); + + + + Permissions permissions = new Permissions(); + permissions.setDATA_TYPE("access"); + permissions.setPrincipalType(principalType); + permissions.setPrincipal(principal); + permissions.setPath(directory + "/" ); + permissions.setPermissions(perm); + + Gson gson = new GsonBuilder().create(); + MakeRequestResponse result = null; + if (rules.size() == 0) { + logger.info("Start creating the rule"); + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/"+ globusEndpoint + "/access"); + result = makeRequest(url, "Bearer", + clientTokenUser.getOtherTokens().get(0).getAccessToken(), "POST", gson.toJson(permissions)); + + if (result.status == 400) { + logger.severe("Path " + permissions.getPath() + " is not valid"); + } else if (result.status == 409) { + logger.warning("ACL already exists or Endpoint ACL already has the maximum number of access rules"); + } + + return result.status; + } else { + logger.info("Start Updating the rule"); + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/"+ globusEndpoint + "/access/" + rules.get(0)); + result = makeRequest(url, "Bearer", + clientTokenUser.getOtherTokens().get(0).getAccessToken(), "PUT", gson.toJson(permissions)); + + if (result.status == 400) { + logger.severe("Path " + permissions.getPath() + " is not valid"); + } else if (result.status == 409) { + logger.warning("ACL already exists or Endpoint ACL already has the maximum number of access rules"); + } + logger.info("Result status " + result.status); + } + + return result.status; + } + + private int createDirectory(AccessToken clientTokenUser, String directory, String globusEndpoint) throws MalformedURLException { + URL url = new URL("https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + globusEndpoint + "/mkdir"); + + MkDir mkDir = new MkDir(); + mkDir.setDataType("mkdir"); + mkDir.setPath(directory); + Gson gson = new GsonBuilder().create(); + + MakeRequestResponse result = makeRequest(url, "Bearer", + clientTokenUser.getOtherTokens().get(0).getAccessToken(),"POST", 
gson.toJson(mkDir)); + logger.info(result.toString()); + + if (result.status == 502) { + logger.warning("Cannot create directory " + mkDir.getPath() + ", it already exists"); + } else if (result.status == 403) { + logger.severe("Cannot create directory " + mkDir.getPath() + ", permission denied"); + } else if (result.status == 202) { + logger.info("Directory created " + mkDir.getPath()); + } + + return result.status; + + } + + public String getTaskList(String basicGlobusToken, String identifierForFileStorage, String timeWhenAsyncStarted) throws MalformedURLException { + try + { + logger.info("1.getTaskList ====== timeWhenAsyncStarted = " + timeWhenAsyncStarted + " ====== identifierForFileStorage ====== " + identifierForFileStorage); + + String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); + AccessToken clientTokenUser = getClientToken(basicGlobusToken); + + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task_list?filter_endpoint="+globusEndpoint+"&filter_status=SUCCEEDED&filter_completion_time="+timeWhenAsyncStarted); + + //AccessToken accessTokenUser + //accessTokenUser.getOtherTokens().get(0).getAccessToken() + MakeRequestResponse result = makeRequest(url, "Bearer", clientTokenUser.getOtherTokens().get(0).getAccessToken(),"GET", null); + //logger.info("==TEST ==" + result.toString()); + + + + //2019-12-01 18:34:37+00:00 + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + //SimpleDateFormat task_sdf = new SimpleDateFormat("yyyy-MM-ddTHH:mm:ss"); + + Calendar cal1 = Calendar.getInstance(); + cal1.setTime(sdf.parse(timeWhenAsyncStarted)); + + Calendar cal2 = Calendar.getInstance(); + + Tasklist tasklist = null; + //2019-12-01 18:34:37+00:00 + + if (result.status == 200) { + tasklist = parseJson(result.jsonResponse, Tasklist.class, false); + for (int i = 0; i< tasklist.getDATA().size(); i++) { + Task task = tasklist.getDATA().get(i); + Date tastTime = sdf.parse(task.getRequest_time().replace("T" , " ")); + cal2.setTime(tastTime); + + + if ( cal1.before(cal2)) { + + // get /task//successful_transfers + // verify datasetid in "destination_path": "/~/test_godata_copy/file1.txt", + // go to aws and get files and write to database tables + + logger.info("====== timeWhenAsyncStarted = " + timeWhenAsyncStarted + " ====== task.getRequest_time().toString() ====== " + task.getRequest_time()); + + boolean success = getSuccessfulTransfers(clientTokenUser, task.getTask_id() , identifierForFileStorage) ; + + if(success) + { + logger.info("SUCCESS ====== " + timeWhenAsyncStarted + " timeWhenAsyncStarted is before tastTime = TASK time = " + task.getTask_id()); + return task.getTask_id(); + } + } + else + { + //logger.info("====== " + timeWhenAsyncStarted + " timeWhenAsyncStarted is after tastTime = TASK time = " + task.getTask_id()); + //return task.getTask_id(); + } + } + } + } catch (MalformedURLException ex) { + logger.severe(ex.getMessage()); + logger.severe(ex.getCause().toString()); + } catch (Exception e) { + e.printStackTrace(); + } + return null; + } + + public boolean getSuccessfulTransfers(AccessToken clientTokenUser, String taskId, String identifierForFileStorage) throws MalformedURLException { + + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task/"+taskId+"/successful_transfers"); + + MakeRequestResponse result = makeRequest(url, "Bearer",clientTokenUser.getOtherTokens().get(0).getAccessToken(), + "GET", null); + + Transferlist transferlist = null; + + if 
(result.status == 200) {
+            transferlist = parseJson(result.jsonResponse, Transferlist.class, false);
+            for (int i = 0; i < transferlist.getDATA().size(); i++) {
+                SuccessfulTransfer successfulTransfer = transferlist.getDATA().get(i);
+                String pathToVerify = successfulTransfer.getDestination_path();
+                logger.info("getSuccessfulTransfers : ======pathToVerify === " + pathToVerify + " ====identifierForFileStorage === " + identifierForFileStorage);
+                if (pathToVerify.contains(identifierForFileStorage)) {
+                    logger.info(" SUCCESS ====== " + pathToVerify + " ==== " + identifierForFileStorage);
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
+    public AccessToken getClientToken(String basicGlobusToken) throws MalformedURLException {
+        URL url = new URL("https://auth.globus.org/v2/oauth2/token?scope=openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all&grant_type=client_credentials");
+
+        MakeRequestResponse result = makeRequest(url, "Basic",
+                basicGlobusToken, "POST", null);
+        AccessToken clientTokenUser = null;
+        if (result.status == 200) {
+            clientTokenUser = parseJson(result.jsonResponse, AccessToken.class, true);
+        }
+        return clientTokenUser;
+    }
+
+    public AccessToken getAccessToken(HttpServletRequest origRequest, String basicGlobusToken) throws UnsupportedEncodingException, MalformedURLException {
+        String serverName = origRequest.getServerName();
+        if (serverName.equals("localhost")) {
+            serverName = "utl-192-123.library.utoronto.ca";
+        }
+
+        String redirectURL = "https://" + serverName + "/globus.xhtml";
+
+        redirectURL = URLEncoder.encode(redirectURL, "UTF-8");
+
+        // the OAuth2 authorization code that Globus passed back to the globus.xhtml callback
+        String code = origRequest.getParameter("code");
+
+        URL url = new URL("https://auth.globus.org/v2/oauth2/token?code=" + code + "&redirect_uri=" + redirectURL
+                + "&grant_type=authorization_code");
+        logger.info(url.toString());
+
+        MakeRequestResponse result = makeRequest(url, "Basic", basicGlobusToken, "POST", null);
+        AccessToken accessTokenUser = null;
+
+        if (result.status == 200) {
+            logger.info("Access Token: \n" + result.toString());
+            accessTokenUser = parseJson(result.jsonResponse, AccessToken.class, true);
+            logger.info(accessTokenUser.getAccessToken());
+        }
+
+        return accessTokenUser;
+    }
+
+    public UserInfo getUserInfo(AccessToken accessTokenUser) throws MalformedURLException {
+
+        URL url = new URL("https://auth.globus.org/v2/oauth2/userinfo");
+        MakeRequestResponse result = makeRequest(url, "Bearer", accessTokenUser.getAccessToken(), "GET", null);
+        UserInfo usr = null;
+        if (result.status == 200) {
+            usr = parseJson(result.jsonResponse, UserInfo.class, true);
+        }
+
+        return usr;
+    }
+
+    public MakeRequestResponse makeRequest(URL url, String authType, String authCode, String method, String jsonString) {
+        String str = null;
+        HttpURLConnection connection = null;
+        int status = 0;
+        try {
+            connection = (HttpURLConnection) url.openConnection();
+            logger.info(authType + " " + authCode);
+            connection.setRequestProperty("Authorization", authType + " " + authCode);
+            //connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
+            connection.setRequestMethod(method);
+            if (jsonString != null) {
+                connection.setRequestProperty("Content-Type", "application/json");
+                connection.setRequestProperty("Accept", "application/json");
+                logger.info(jsonString);
+                connection.setDoOutput(true);
+                OutputStreamWriter wr = new OutputStreamWriter(connection.getOutputStream());
+                wr.write(jsonString);
+                wr.flush();
+            }
+
+            status = connection.getResponseCode();
+            logger.info("Status now " + status);
+            InputStream result = connection.getInputStream();
+            if (result != null) {
+                logger.info("Result is not null");
+                str = readResultJson(result).toString();
+                logger.info("str is " + str);
+            } else {
+                logger.info("Result is null");
+                str = null;
+            }
+
+            logger.info("status: " + status);
+        } catch (IOException ex) {
+            logger.info("IO");
+            logger.severe(ex.getMessage());
+            logger.info(ex.getCause().toString());
+            logger.info(ex.getStackTrace().toString());
+        } finally {
+            if (connection != null) {
+                connection.disconnect();
+            }
+        }
+        MakeRequestResponse r = new MakeRequestResponse(str, status);
+        return r;
+    }
+
+    private StringBuilder readResultJson(InputStream in) {
+        StringBuilder sb = null;
+        try {
+            BufferedReader br = new BufferedReader(new InputStreamReader(in));
+            sb = new StringBuilder();
+            String line;
+            while ((line = br.readLine()) != null) {
+                sb.append(line + "\n");
+            }
+            br.close();
+            logger.info(sb.toString());
+        } catch (IOException e) {
+            sb = null;
+            logger.severe(e.getMessage());
+        }
+        return sb;
+    }
+
+    private <T> T parseJson(String sb, Class<T> jsonParserClass, boolean namingPolicy) {
+        if (sb != null) {
+            Gson gson = null;
+            if (namingPolicy) {
+                gson = new GsonBuilder().setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES).create();
+            } else {
+                gson = new GsonBuilder().create();
+            }
+            T jsonClass = gson.fromJson(sb, jsonParserClass);
+            return jsonClass;
+        } else {
+            logger.severe("Bad response from token request");
+            return null;
+        }
+    }
+
+    String getDirectory(String datasetId) {
+        Dataset dataset = null;
+        String directory = null;
+        try {
+            dataset = datasetSvc.find(Long.parseLong(datasetId));
+            if (dataset == null) {
+                logger.severe("Dataset not found " + datasetId);
+                return null;
+            }
+            String storeId = dataset.getStorageIdentifier();
+            directory = storeId.substring(storeId.indexOf("//") + 1);
+            logger.info(storeId);
+            logger.info(directory);
+            logger.info("Storage identifier:" + dataset.getIdentifierForFileStorage());
+            return directory;
+        } catch (NumberFormatException nfe) {
+            logger.severe(nfe.getMessage());
+            return null;
+        }
+    }
+
+    class MakeRequestResponse {
+        public String jsonResponse;
+        public int status;
+        MakeRequestResponse(String jsonResponse, int status) {
+            this.jsonResponse = jsonResponse;
+            this.status = status;
+        }
+    }
+
+    private MakeRequestResponse findDirectory(String directory, AccessToken clientTokenUser, String globusEndpoint) throws MalformedURLException {
+        URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/ls?path=" + directory + "/");
+
+        MakeRequestResponse result = makeRequest(url, "Bearer",
+                clientTokenUser.getOtherTokens().get(0).getAccessToken(), "GET", null);
+        logger.info("find directory status:" + result.status);
+
+        return result;
+    }
+
+    public boolean giveGlobusPublicPermissions(String datasetId) throws UnsupportedEncodingException, MalformedURLException {
+
+        String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, "");
+        String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, "");
+        if (globusEndpoint.equals("") || basicGlobusToken.equals("")) {
+            return false;
+        }
+        AccessToken clientTokenUser = getClientToken(basicGlobusToken);
+        if (clientTokenUser == null) {
+            logger.severe("Cannot get client token ");
+            return false;
+        }
+
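+        // What follows: resolve the dataset's directory on the Globus endpoint
+        // (getDirectory), confirm it exists via an /ls call (findDirectory), and
+        // then grant read access to all_authenticated_users (givePermission).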
String directory = getDirectory(datasetId); + logger.info(directory); + + MakeRequestResponse status = findDirectory(directory, clientTokenUser, globusEndpoint); + + if (status.status == 200) { + + /* FilesList fl = parseJson(status.jsonResponse, FilesList.class, false); + ArrayList files = fl.getDATA(); + if (files != null) { + for (FileG file: files) { + if (!file.getName().contains("cached") && !file.getName().contains(".thumb")) { + int perStatus = givePermission("all_authenticated_users", "", "r", clientTokenUser, + directory + "/" + file.getName(), globusEndpoint); + logger.info("givePermission status " + perStatus + " for " + file.getName()); + if (perStatus == 409) { + logger.info("Permissions already exist or limit was reached for " + file.getName()); + } else if (perStatus == 400) { + logger.info("No file in Globus " + file.getName()); + } else if (perStatus != 201) { + logger.info("Cannot get permission for " + file.getName()); + } + } + } + }*/ + + int perStatus = givePermission("all_authenticated_users", "", "r", clientTokenUser, directory, globusEndpoint); + logger.info("givePermission status " + perStatus); + if (perStatus == 409) { + logger.info("Permissions already exist or limit was reached"); + } else if (perStatus == 400) { + logger.info("No directory in Globus"); + } else if (perStatus != 201 && perStatus != 200) { + logger.info("Cannot give read permission"); + return false; + } + + } else if (status.status == 404) { + logger.info("There is no globus directory"); + }else { + logger.severe("Cannot find directory in globus, status " + status ); + return false; + } + + return true; + } + + public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) throws MalformedURLException { + + logger.info("=====Tasklist == dataset id :" + dataset.getId()); + String directory = null; + + try { + + List fileMetadatas = new ArrayList<>(); + + StorageIO datasetSIO = DataAccess.getStorageIO(dataset); + + DatasetVersion workingVersion = dataset.getEditVersion(); + + if (workingVersion.getCreateTime() != null) { + workingVersion.setCreateTime(new Timestamp(new Date().getTime())); + } + + + directory = dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage(); + + System.out.println("======= directory ==== " + directory + " ==== datasetId :" + dataset.getId()); + Map checksumMapOld = new HashMap<>(); + + Iterator fmIt = workingVersion.getFileMetadatas().iterator(); + + while (fmIt.hasNext()) { + FileMetadata fm = fmIt.next(); + if (fm.getDataFile() != null && fm.getDataFile().getId() != null) { + String chksum = fm.getDataFile().getChecksumValue(); + if (chksum != null) { + checksumMapOld.put(chksum, 1); + } + } + } + + List dFileList = new ArrayList<>(); + boolean update = false; + for (S3ObjectSummary s3ObjectSummary : datasetSIO.listAuxObjects("")) { + + String s3ObjectKey = s3ObjectSummary.getKey(); + + String t = s3ObjectKey.replace(directory, ""); + + if (t.indexOf(".") > 0) { + long totalSize = s3ObjectSummary.getSize(); + String filePath = s3ObjectKey; + String checksumVal = s3ObjectSummary.getETag(); + + if ((checksumMapOld.get(checksumVal) != null)) { + logger.info("datasetId :" + dataset.getId() + "======= filename ==== " + filePath + " == file already exists "); + } else if (filePath.contains("cached") || filePath.contains(".thumb")) { + logger.info(filePath + " is ignored"); + } else { + update = true; + logger.info("datasetId :" + dataset.getId() + "======= filename ==== " + filePath + " == new file "); + try { + + DataFile datafile = 
new DataFile(DataFileServiceBean.MIME_TYPE_GLOBUS_FILE); //MIME_TYPE_GLOBUS + datafile.setModificationTime(new Timestamp(new Date().getTime())); + datafile.setCreateDate(new Timestamp(new Date().getTime())); + datafile.setPermissionModificationTime(new Timestamp(new Date().getTime())); + + FileMetadata fmd = new FileMetadata(); + + String fileName = filePath.split("/")[filePath.split("/").length - 1]; + fmd.setLabel(fileName); + fmd.setDirectoryLabel(filePath.replace(directory, "").replace(File.separator + fileName, "")); + + fmd.setDataFile(datafile); + + datafile.getFileMetadatas().add(fmd); + + FileUtil.generateS3PackageStorageIdentifierForGlobus(datafile); + logger.info("==== datasetId :" + dataset.getId() + "======= filename ==== " + filePath + " == added to datafile, filemetadata "); + + try { + // We persist "SHA1" rather than "SHA-1". + datafile.setChecksumType(DataFile.ChecksumType.SHA1); + datafile.setChecksumValue(checksumVal); + } catch (Exception cksumEx) { + logger.info("==== datasetId :" + dataset.getId() + "======Could not calculate checksumType signature for the new file "); + } + + datafile.setFilesize(totalSize); + + dFileList.add(datafile); + + } catch (Exception ioex) { + logger.info("datasetId :" + dataset.getId() + "======Failed to process and/or save the file " + ioex.getMessage()); + return false; + + } + } + } + } + if (update) { + + List filesAdded = new ArrayList<>(); + + if (dFileList != null && dFileList.size() > 0) { + + // Dataset dataset = version.getDataset(); + + for (DataFile dataFile : dFileList) { + + if (dataFile.getOwner() == null) { + dataFile.setOwner(dataset); + + workingVersion.getFileMetadatas().add(dataFile.getFileMetadata()); + dataFile.getFileMetadata().setDatasetVersion(workingVersion); + dataset.getFiles().add(dataFile); + + } + + filesAdded.add(dataFile); + + } + + logger.info("==== datasetId :" + dataset.getId() + " ===== Done! 
Finished saving new files to the dataset."); + } + + fileMetadatas.clear(); + for (DataFile addedFile : filesAdded) { + fileMetadatas.add(addedFile.getFileMetadata()); + } + filesAdded = null; + + if (workingVersion.isDraft()) { + + logger.info("Async: ==== datasetId :" + dataset.getId() + " ==== inside draft version "); + + Timestamp updateTime = new Timestamp(new Date().getTime()); + + workingVersion.setLastUpdateTime(updateTime); + dataset.setModificationTime(updateTime); + + + for (FileMetadata fileMetadata : fileMetadatas) { + + if (fileMetadata.getDataFile().getCreateDate() == null) { + fileMetadata.getDataFile().setCreateDate(updateTime); + fileMetadata.getDataFile().setCreator((AuthenticatedUser) user); + } + fileMetadata.getDataFile().setModificationTime(updateTime); + } + + + } else { + logger.info("datasetId :" + dataset.getId() + " ==== inside released version "); + + for (int i = 0; i < workingVersion.getFileMetadatas().size(); i++) { + for (FileMetadata fileMetadata : fileMetadatas) { + if (fileMetadata.getDataFile().getStorageIdentifier() != null) { + + if (fileMetadata.getDataFile().getStorageIdentifier().equals(workingVersion.getFileMetadatas().get(i).getDataFile().getStorageIdentifier())) { + workingVersion.getFileMetadatas().set(i, fileMetadata); + } + } + } + } + + + } + + + try { + Command cmd; + logger.info("Async: ==== datasetId :" + dataset.getId() + " ======= UpdateDatasetVersionCommand START in globus function "); + cmd = new UpdateDatasetVersionCommand(dataset, new DataverseRequest(user, (HttpServletRequest) null)); + ((UpdateDatasetVersionCommand) cmd).setValidateLenient(true); + //new DataverseRequest(authenticatedUser, (HttpServletRequest) null) + //dvRequestService.getDataverseRequest() + commandEngine.submit(cmd); + } catch (CommandException ex) { + logger.log(Level.WARNING, "==== datasetId :" + dataset.getId() + "======CommandException updating DatasetVersion from batch job: " + ex.getMessage()); + return false; + } + + logger.info("==== datasetId :" + dataset.getId() + " ======= GLOBUS CALL COMPLETED SUCCESSFULLY "); + + //return true; + } + + } catch (Exception e) { + String message = e.getMessage(); + + logger.info("==== datasetId :" + dataset.getId() + " ======= GLOBUS CALL Exception ============== " + message); + e.printStackTrace(); + return false; + //return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. 
Message was '" + message + "'."); + } + + String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); + AccessToken clientTokenUser = getClientToken(basicGlobusToken); + updatePermision(clientTokenUser, directory, "identity", "r"); + return true; + } + + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Identities.java b/src/main/java/edu/harvard/iq/dataverse/globus/Identities.java new file mode 100644 index 00000000000..6411262b5c9 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/Identities.java @@ -0,0 +1,16 @@ +package edu.harvard.iq.dataverse.globus; + +import java.util.ArrayList; + + +public class Identities { + ArrayList identities; + + public void setIdentities(ArrayList identities) { + this.identities = identities; + } + + public ArrayList getIdentities() { + return identities; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Identity.java b/src/main/java/edu/harvard/iq/dataverse/globus/Identity.java new file mode 100644 index 00000000000..265bd55217a --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/Identity.java @@ -0,0 +1,67 @@ +package edu.harvard.iq.dataverse.globus; + +public class Identity { + private String id; + private String username; + private String status; + private String name; + private String email; + private String identityProvider; + private String organization; + + public void setOrganization(String organization) { + this.organization = organization; + } + + public void setIdentityProvider(String identityProvider) { + this.identityProvider = identityProvider; + } + + public void setName(String name) { + this.name = name; + } + + public void setEmail(String email) { + this.email = email; + } + + public void setId(String id) { + this.id = id; + } + + public void setStatus(String status) { + this.status = status; + } + + public void setUsername(String username) { + this.username = username; + } + + public String getOrganization() { + return organization; + } + + public String getIdentityProvider() { + return identityProvider; + } + + public String getName() { + return name; + } + + public String getEmail() { + return email; + } + + public String getId() { + return id; + } + + public String getStatus() { + return status; + } + + public String getUsername() { + return username; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/MkDir.java b/src/main/java/edu/harvard/iq/dataverse/globus/MkDir.java new file mode 100644 index 00000000000..2c906f1f31d --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/MkDir.java @@ -0,0 +1,22 @@ +package edu.harvard.iq.dataverse.globus; + +public class MkDir { + private String DATA_TYPE; + private String path; + + public void setDataType(String DATA_TYPE) { + this.DATA_TYPE = DATA_TYPE; + } + + public void setPath(String path) { + this.path = path; + } + + public String getDataType() { + return DATA_TYPE; + } + + public String getPath() { + return path; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/MkDirResponse.java b/src/main/java/edu/harvard/iq/dataverse/globus/MkDirResponse.java new file mode 100644 index 00000000000..d31b34b8e70 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/MkDirResponse.java @@ -0,0 +1,50 @@ +package edu.harvard.iq.dataverse.globus; + +public class MkDirResponse { + private String DATA_TYPE; + private String code; + private String message; + private String request_id; + private String resource; + + public void setCode(String code) { + this.code = 
code; + } + + public void setDataType(String dataType) { + this.DATA_TYPE = dataType; + } + + public void setMessage(String message) { + this.message = message; + } + + public void setRequestId(String requestId) { + this.request_id = requestId; + } + + public void setResource(String resource) { + this.resource = resource; + } + + public String getCode() { + return code; + } + + public String getDataType() { + return DATA_TYPE; + } + + public String getMessage() { + return message; + } + + public String getRequestId() { + return request_id; + } + + public String getResource() { + return resource; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Permissions.java b/src/main/java/edu/harvard/iq/dataverse/globus/Permissions.java new file mode 100644 index 00000000000..b8bb5193fa4 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/Permissions.java @@ -0,0 +1,58 @@ +package edu.harvard.iq.dataverse.globus; + +public class Permissions { + private String DATA_TYPE; + private String principal_type; + private String principal; + private String id; + private String path; + private String permissions; + + public void setPath(String path) { + this.path = path; + } + + public void setDATA_TYPE(String DATA_TYPE) { + this.DATA_TYPE = DATA_TYPE; + } + + public void setPermissions(String permissions) { + this.permissions = permissions; + } + + public void setPrincipal(String principal) { + this.principal = principal; + } + + public void setPrincipalType(String principalType) { + this.principal_type = principalType; + } + + public String getPath() { + return path; + } + + public String getDATA_TYPE() { + return DATA_TYPE; + } + + public String getPermissions() { + return permissions; + } + + public String getPrincipal() { + return principal; + } + + public String getPrincipalType() { + return principal_type; + } + + public void setId(String id) { + this.id = id; + } + + public String getId() { + return id; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/PermissionsResponse.java b/src/main/java/edu/harvard/iq/dataverse/globus/PermissionsResponse.java new file mode 100644 index 00000000000..a30b1ecdc04 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/PermissionsResponse.java @@ -0,0 +1,58 @@ +package edu.harvard.iq.dataverse.globus; + +public class PermissionsResponse { + private String code; + private String resource; + private String DATA_TYPE; + private String request_id; + private String access_id; + private String message; + + public String getDATA_TYPE() { + return DATA_TYPE; + } + + public String getResource() { + return resource; + } + + public String getRequestId() { + return request_id; + } + + public String getMessage() { + return message; + } + + public String getCode() { + return code; + } + + public String getAccessId() { + return access_id; + } + + public void setDATA_TYPE(String DATA_TYPE) { + this.DATA_TYPE = DATA_TYPE; + } + + public void setResource(String resource) { + this.resource = resource; + } + + public void setRequestId(String requestId) { + this.request_id = requestId; + } + + public void setMessage(String message) { + this.message = message; + } + + public void setCode(String code) { + this.code = code; + } + + public void setAccessId(String accessId) { + this.access_id = accessId; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/SuccessfulTransfer.java b/src/main/java/edu/harvard/iq/dataverse/globus/SuccessfulTransfer.java new file mode 100644 index 00000000000..6e2e5810a0a --- /dev/null +++ 
b/src/main/java/edu/harvard/iq/dataverse/globus/SuccessfulTransfer.java @@ -0,0 +1,35 @@ +package edu.harvard.iq.dataverse.globus; + +public class SuccessfulTransfer { + + private String DATA_TYPE; + private String destination_path; + + public String getDATA_TYPE() { + return DATA_TYPE; + } + + public void setDATA_TYPE(String DATA_TYPE) { + this.DATA_TYPE = DATA_TYPE; + } + + public String getDestination_path() { + return destination_path; + } + + public void setDestination_path(String destination_path) { + this.destination_path = destination_path; + } + + public String getSource_path() { + return source_path; + } + + public void setSource_path(String source_path) { + this.source_path = source_path; + } + + private String source_path; + + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Task.java b/src/main/java/edu/harvard/iq/dataverse/globus/Task.java new file mode 100644 index 00000000000..8d9f13f8ddf --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/Task.java @@ -0,0 +1,69 @@ +package edu.harvard.iq.dataverse.globus; + +public class Task { + + private String DATA_TYPE; + private String type; + private String status; + private String owner_id; + private String request_time; + private String task_id; + private String destination_endpoint_display_name; + + public String getDestination_endpoint_display_name() { + return destination_endpoint_display_name; + } + + public void setDestination_endpoint_display_name(String destination_endpoint_display_name) { + this.destination_endpoint_display_name = destination_endpoint_display_name; + } + + public void setRequest_time(String request_time) { + this.request_time = request_time; + } + + public String getRequest_time() { + return request_time; + } + + public String getTask_id() { + return task_id; + } + + public void setTask_id(String task_id) { + this.task_id = task_id; + } + + public String getDATA_TYPE() { + return DATA_TYPE; + } + + public void setDATA_TYPE(String DATA_TYPE) { + this.DATA_TYPE = DATA_TYPE; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public String getOwner_id() { + return owner_id; + } + + public void setOwner_id(String owner_id) { + this.owner_id = owner_id; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Tasklist.java b/src/main/java/edu/harvard/iq/dataverse/globus/Tasklist.java new file mode 100644 index 00000000000..34e8c6c528e --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/Tasklist.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.globus; + +import java.util.ArrayList; + +public class Tasklist { + + private ArrayList DATA; + + public void setDATA(ArrayList DATA) { + this.DATA = DATA; + } + + public ArrayList getDATA() { + return DATA; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Transferlist.java b/src/main/java/edu/harvard/iq/dataverse/globus/Transferlist.java new file mode 100644 index 00000000000..0a1bd607ee2 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/Transferlist.java @@ -0,0 +1,18 @@ +package edu.harvard.iq.dataverse.globus; + +import java.util.ArrayList; + +public class Transferlist { + + + private ArrayList DATA; + + public void setDATA(ArrayList DATA) { + this.DATA = DATA; + } + + public ArrayList getDATA() { + return DATA; + } + +} diff --git 
a/src/main/java/edu/harvard/iq/dataverse/globus/UserInfo.java b/src/main/java/edu/harvard/iq/dataverse/globus/UserInfo.java new file mode 100644 index 00000000000..a195486dd0b --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/UserInfo.java @@ -0,0 +1,68 @@ +package edu.harvard.iq.dataverse.globus; + +public class UserInfo implements java.io.Serializable{ + + private String identityProviderDisplayName; + private String identityProvider; + private String organization; + private String sub; + private String preferredUsername; + private String name; + private String email; + + public void setEmail(String email) { + this.email = email; + } + + public void setName(String name) { + this.name = name; + } + + public void setPreferredUsername(String preferredUsername) { + this.preferredUsername = preferredUsername; + } + + public void setSub(String sub) { + this.sub = sub; + } + + public void setIdentityProvider(String identityProvider) { + this.identityProvider = identityProvider; + } + + public void setIdentityProviderDisplayName(String identityProviderDisplayName) { + this.identityProviderDisplayName = identityProviderDisplayName; + } + + public void setOrganization(String organization) { + this.organization = organization; + } + + public String getEmail() { + return email; + } + + public String getPreferredUsername() { + return preferredUsername; + } + + public String getSub() { + return sub; + } + + public String getName() { + return name; + } + + public String getIdentityProvider() { + return identityProvider; + } + + public String getIdentityProviderDisplayName() { + return identityProviderDisplayName; + } + + public String getOrganization() { + return organization; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index b2e82d92dc3..a0d6d7a9f62 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -440,7 +440,20 @@ Whether Harvesting (OAI) service is enabled /** * Sort Date Facets Chronologically instead or presenting them in order of # of hits as other facets are. 
Default is true
      */
-    ChronologicalDateFacets
+    ChronologicalDateFacets,
+
+    /**
+     * BasicGlobusToken for the Globus application
+     */
+    BasicGlobusToken,
+    /**
+     * GlobusEndpoint is the Globus endpoint for the Globus application
+     */
+    GlobusEndpoint,
+    /**
+     * Client id for the Globus application
+     */
+    GlobusClientId
     ;
 
     @Override
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
index 02bf34f83c5..2706d840d21 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
@@ -20,7 +20,7 @@
 package edu.harvard.iq.dataverse.util;
-
+import static edu.harvard.iq.dataverse.dataaccess.S3AccessIO.S3_IDENTIFIER_PREFIX;
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.DataFile.ChecksumType;
 import edu.harvard.iq.dataverse.DataFileServiceBean;
@@ -1337,6 +1337,17 @@ public static void generateS3PackageStorageIdentifier(DataFile dataFile) {
         String storageId = driverId + "://" + bucketName + ":" + dataFile.getFileMetadata().getLabel();
         dataFile.setStorageIdentifier(storageId);
     }
+
+    public static void generateS3PackageStorageIdentifierForGlobus(DataFile dataFile) {
+        String bucketName = System.getProperty("dataverse.files.s3-bucket-name");
+        String storageId = null;
+        if (dataFile.getFileMetadata().getDirectoryLabel() != null && !dataFile.getFileMetadata().getDirectoryLabel().equals("")) {
+            storageId = S3_IDENTIFIER_PREFIX + "://" + bucketName + ":" + dataFile.getFileMetadata().getDirectoryLabel() + "/" + dataFile.getFileMetadata().getLabel();
+        } else {
+            storageId = S3_IDENTIFIER_PREFIX + "://" + bucketName + ":" + dataFile.getFileMetadata().getLabel();
+        }
+        dataFile.setStorageIdentifier(storageId);
+    }
 
     public static void generateStorageIdentifier(DataFile dataFile) {
         //Is it true that this is only used for temp files and we could safely prepend "tmp://" to indicate that?
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
index 9c801f5197d..d98dfa8ab34 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
@@ -831,7 +831,14 @@ public enum FileUploadMethods {
      * Traditional Dataverse file handling, which tends to involve users
      * uploading and downloading files using a browser or APIs.
      */
-    NATIVE("native/http");
+    NATIVE("native/http"),
+
+    /**
+     * Upload of large files through Globus
+     */
+    GLOBUS("globus")
+    ;
 
     private final String text;
@@ -871,7 +878,9 @@ public enum FileDownloadMethods {
      * go through Glassfish.
      */
     RSYNC("rsal/rsync"),
-    NATIVE("native/http");
+    NATIVE("native/http"),
+    GLOBUS("globus")
+    ;
     private final String text;
 
     private FileDownloadMethods(final String text) {
@@ -961,7 +970,11 @@ public boolean isPublicInstall(){
     public boolean isRsyncUpload(){
         return getUploadMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString());
     }
-
+
+    public boolean isGlobusUpload(){
+        return getUploadMethodAvailable(FileUploadMethods.GLOBUS.toString());
+    }
+
+    // Controls if HTTP upload is enabled for both GUI and API.
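+    // Like isRsyncUpload()/isGlobusUpload() above, the checks below are driven by the
+    // comma-separated :UploadMethods / :DownloadMethods settings.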
    public boolean isHTTPUpload(){
        return getUploadMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString());
@@ -993,6 +1006,11 @@ public boolean isHTTPDownload() {
         logger.warning("Download Methods:" + downloadMethods);
         return downloadMethods !=null && downloadMethods.toLowerCase().contains(SystemConfig.FileDownloadMethods.NATIVE.toString());
     }
+
+    public boolean isGlobusDownload() {
+        String downloadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.DownloadMethods);
+        return downloadMethods !=null && downloadMethods.toLowerCase().contains(FileDownloadMethods.GLOBUS.toString());
+    }
 
     private Boolean getUploadMethodAvailable(String method){
         String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods);
diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties
index 8c70475953c..e723ce7c6c2 100755
--- a/src/main/java/propertyFiles/Bundle.properties
+++ b/src/main/java/propertyFiles/Bundle.properties
@@ -1395,6 +1395,9 @@ dataset.message.filesSuccess=The files for this dataset have been updated.
 dataset.message.addFiles.Failure=Failed to add files to the dataset. Please try uploading the file(s) again.
 dataset.message.addFiles.partialSuccess=Partial success: only {0} files out of {1} have been saved. Please try uploading the missing file(s) again.
 dataset.message.publishSuccess=This dataset has been published.
+dataset.message.publishGlobusFailure.details=Could not publish Globus data.
+dataset.message.publishGlobusFailure=Error publishing data.
+dataset.message.GlobusError=Cannot connect to Globus.
 dataset.message.only.authenticatedUsers=Only authenticated users may release Datasets.
 dataset.message.deleteSuccess=This dataset has been deleted.
 dataset.message.bulkFileUpdateSuccess=The selected files have been updated.
@@ -1479,10 +1482,14 @@ file.selectToAdd.tipLimit=File upload limit is {0} per file.
 file.selectToAdd.tipMoreInformation=Select files or drag and drop into the upload widget.
 file.selectToAdd.dragdropMsg=Drag and drop files here.
 file.createUploadDisabled=Upload files using rsync via SSH. This method is recommended for large file transfers. The upload script will be available on the Upload Files page once you save this dataset.
+file.createGlobusUploadDisabled=Upload files using Globus. This method is recommended for large file transfers. The "Upload with Globus" button will be available on the Upload Files page once you save this dataset.
 file.fromHTTP=Upload with HTTP via your browser
 file.fromDropbox=Upload from Dropbox
 file.fromDropbox.tip=Select files from Dropbox.
 file.fromRsync=Upload with rsync + SSH via Data Capture Module (DCM)
+file.fromGlobus=Upload with Globus
+file.finishGlobus=Globus transfer has finished
+file.downloadFromGlobus=Download through Globus
 file.api.httpDisabled=File upload via HTTP is not available for this installation of Dataverse.
 file.api.alreadyHasPackageFile=File upload via HTTP disabled since this dataset already contains a package file.
 file.replace.original=Original File
diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml
index 3a69e21bbca..6e630edc5ea 100644
--- a/src/main/webapp/editFilesFragment.xhtml
+++ b/src/main/webapp/editFilesFragment.xhtml
@@ -276,7 +276,55 @@
+            [Globus upload tab: the added XHTML markup did not survive extraction; the
+             recoverable UI text of the new block follows.]
+            Tab title: Globus
+            Disabled-state message: #{bundle['file.createGlobusUploadDisabled']}
+            BEFORE YOU START: You will need to set up a free account with Globus and
+            have Globus Connect Personal running on your computer to transfer files to and from the service.
+            Once the Globus transfer has finished, you will get an email notification. Please come back here and press the following button:
+            (a command button follows; its markup was lost)
+            Click here to view the dataset page: #{EditDatafilesPage.dataset.displayName}.
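+            <!-- The "Upload with Globus" button invokes openGlobus(datasetId, clientId),
+                 defined in the script block below: it opens the Globus OAuth2 authorize
+                 page in a new window, which redirects back to globus.xhtml with
+                 ?code=...&state=datasetId. -->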
@@ -962,6 +1010,18 @@ }; Dropbox.choose(options); } + function openGlobus(datasetId, client_id) { + var res = location.protocol+'//'+location.hostname+(location.port ? ':'+location.port: ''); + + var scope = encodeURI("openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all", "UTF-8"); + + var new_url = "https://auth.globus.org/v2/oauth2/authorize?client_id=" + client_id + "&response_type=code&" + + "scope=" + scope + "&state=" + datasetId; + new_url = new_url + "&redirect_uri=" + res + "%2Fglobus.xhtml" ; + + + var myWindows = window.open(new_url); + } //]]> diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index b5ab1dbf759..f7d10c1cf60 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -60,6 +60,28 @@ #{bundle.download} + + + + + + #{bundle['file.downloadFromGlobus']} + + + + + + #{bundle.download} + @@ -545,4 +567,4 @@ #{bundle['file.compute']} - \ No newline at end of file + diff --git a/src/main/webapp/globus.xhtml b/src/main/webapp/globus.xhtml new file mode 100644 index 00000000000..f4eebd4babf --- /dev/null +++ b/src/main/webapp/globus.xhtml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + + + From beea5bc9fbd64e19f11ada4ebceab252e1287a0e Mon Sep 17 00:00:00 2001 From: lubitchv Date: Wed, 30 Sep 2020 09:48:32 -0400 Subject: [PATCH 0002/1551] Remove flyway --- .../db/migration/V1__flyway_schema_baseline.sql | 0 .../V4.11.0.1__5565-sanitize-directory-labels.sql | 9 --------- .../V4.11__5513-database-variablemetadata.sql | 5 ----- .../V4.12.0.1__4.13-re-sanitize-filemetadata.sql | 12 ------------ .../db/migration/V4.13.0.1__3575-usernames.sql | 1 - .../migration/V4.14.0.1__5822-export-var-meta.sql | 2 -- .../migration/V4.15.0.1__2043-split-gbr-table.sql | 10 ---------- .../V4.16.0.1__5303-addColumn-to-settingTable.sql | 13 ------------- .../migration/V4.16.0.2__5028-dataset-explore.sql | 3 --- .../V4.16.0.3__6156-FooterImageforSub-Dataverse.sql | 4 ---- .../migration/V4.17.0.1__5991-update-scribejava.sql | 1 - .../migration/V4.17.0.2__3578-file-page-preview.sql | 5 ----- .../V4.18.1.1__6459-contenttype-nullable.sql | 2 -- .../db/migration/V4.19.0.1__6485_multistore.sql | 3 --- .../V4.19.0.2__6644-update-editor-role-alias.sql | 2 -- ....1__2734-alter-data-table-add-orig-file-name.sql | 2 -- .../V4.20.0.2__6748-configure-dropdown-toolname.sql | 2 -- .../migration/V4.20.0.3__6558-file-validation.sql | 4 ---- .../migration/V4.20.0.4__6936-maildomain-groups.sql | 1 - .../migration/V4.20.0.5__6505-zipdownload-jobs.sql | 2 -- src/main/webapp/editFilesFragment.xhtml | 1 - 21 files changed, 84 deletions(-) delete mode 100644 src/main/resources/db/migration/V1__flyway_schema_baseline.sql delete mode 100644 src/main/resources/db/migration/V4.11.0.1__5565-sanitize-directory-labels.sql delete mode 100644 src/main/resources/db/migration/V4.11__5513-database-variablemetadata.sql delete mode 100644 src/main/resources/db/migration/V4.12.0.1__4.13-re-sanitize-filemetadata.sql delete mode 100644 src/main/resources/db/migration/V4.13.0.1__3575-usernames.sql delete mode 100644 src/main/resources/db/migration/V4.14.0.1__5822-export-var-meta.sql delete mode 100644 src/main/resources/db/migration/V4.15.0.1__2043-split-gbr-table.sql delete mode 100644 src/main/resources/db/migration/V4.16.0.1__5303-addColumn-to-settingTable.sql delete mode 100644 src/main/resources/db/migration/V4.16.0.2__5028-dataset-explore.sql delete mode 100644 
src/main/resources/db/migration/V4.16.0.3__6156-FooterImageforSub-Dataverse.sql delete mode 100644 src/main/resources/db/migration/V4.17.0.1__5991-update-scribejava.sql delete mode 100644 src/main/resources/db/migration/V4.17.0.2__3578-file-page-preview.sql delete mode 100644 src/main/resources/db/migration/V4.18.1.1__6459-contenttype-nullable.sql delete mode 100644 src/main/resources/db/migration/V4.19.0.1__6485_multistore.sql delete mode 100644 src/main/resources/db/migration/V4.19.0.2__6644-update-editor-role-alias.sql delete mode 100644 src/main/resources/db/migration/V4.20.0.1__2734-alter-data-table-add-orig-file-name.sql delete mode 100644 src/main/resources/db/migration/V4.20.0.2__6748-configure-dropdown-toolname.sql delete mode 100644 src/main/resources/db/migration/V4.20.0.3__6558-file-validation.sql delete mode 100644 src/main/resources/db/migration/V4.20.0.4__6936-maildomain-groups.sql delete mode 100644 src/main/resources/db/migration/V4.20.0.5__6505-zipdownload-jobs.sql diff --git a/src/main/resources/db/migration/V1__flyway_schema_baseline.sql b/src/main/resources/db/migration/V1__flyway_schema_baseline.sql deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/src/main/resources/db/migration/V4.11.0.1__5565-sanitize-directory-labels.sql b/src/main/resources/db/migration/V4.11.0.1__5565-sanitize-directory-labels.sql deleted file mode 100644 index 3d3ed777c9f..00000000000 --- a/src/main/resources/db/migration/V4.11.0.1__5565-sanitize-directory-labels.sql +++ /dev/null @@ -1,9 +0,0 @@ --- replace any sequences of slashes and backslashes with a single slash: -UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '[/\\][/\\]+', '/', 'g'); --- strip (and replace with a .) any characters that are no longer allowed in the directory labels: -UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '\.\.+', '.', 'g'); --- now replace any sequences of .s with a single .: -UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '\.\.+', '.', 'g'); --- get rid of any leading or trailing slashes, spaces, '-'s and '.'s: -UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '^[/ .\-]+', '', ''); -UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '[/ \.\-]+$', '', ''); diff --git a/src/main/resources/db/migration/V4.11__5513-database-variablemetadata.sql b/src/main/resources/db/migration/V4.11__5513-database-variablemetadata.sql deleted file mode 100644 index 3c29a974bae..00000000000 --- a/src/main/resources/db/migration/V4.11__5513-database-variablemetadata.sql +++ /dev/null @@ -1,5 +0,0 @@ --- universe is dropped since it is empty in the dataverse --- this column will be moved to variablemetadata table --- issue 5513 -ALTER TABLE datavariable -DROP COLUMN if exists universe; diff --git a/src/main/resources/db/migration/V4.12.0.1__4.13-re-sanitize-filemetadata.sql b/src/main/resources/db/migration/V4.12.0.1__4.13-re-sanitize-filemetadata.sql deleted file mode 100644 index 8623ed97b70..00000000000 --- a/src/main/resources/db/migration/V4.12.0.1__4.13-re-sanitize-filemetadata.sql +++ /dev/null @@ -1,12 +0,0 @@ --- let's try again and fix the existing directoryLabels: --- (the script shipped with 4.12 was missing the most important line; bad copy-and-paste) --- replace any sequences of slashes and backslashes with a single slash: -UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '[/\\][/\\]+', '/', 'g'); --- strip (and replace with a .) 
any characters that are no longer allowed in the directory labels: --- (this line was missing from the script released with 4.12!!) -UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '[^A-Za-z0-9_ ./-]+', '.', 'g'); --- now replace any sequences of .s with a single .: -UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '\.\.+', '.', 'g'); --- get rid of any leading or trailing slashes, spaces, '-'s and '.'s: -UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '^[/ .\-]+', '', ''); -UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '[/ \.\-]+$', '', ''); diff --git a/src/main/resources/db/migration/V4.13.0.1__3575-usernames.sql b/src/main/resources/db/migration/V4.13.0.1__3575-usernames.sql deleted file mode 100644 index 0b1804bdfc4..00000000000 --- a/src/main/resources/db/migration/V4.13.0.1__3575-usernames.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE UNIQUE INDEX index_authenticateduser_lower_useridentifier ON authenticateduser (lower(useridentifier)); diff --git a/src/main/resources/db/migration/V4.14.0.1__5822-export-var-meta.sql b/src/main/resources/db/migration/V4.14.0.1__5822-export-var-meta.sql deleted file mode 100644 index e65f52c7c91..00000000000 --- a/src/main/resources/db/migration/V4.14.0.1__5822-export-var-meta.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE variablemetadata -ADD COLUMN IF NOT EXISTS postquestion text; diff --git a/src/main/resources/db/migration/V4.15.0.1__2043-split-gbr-table.sql b/src/main/resources/db/migration/V4.15.0.1__2043-split-gbr-table.sql deleted file mode 100644 index adde91ee1b0..00000000000 --- a/src/main/resources/db/migration/V4.15.0.1__2043-split-gbr-table.sql +++ /dev/null @@ -1,10 +0,0 @@ -DO $$ -BEGIN -IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='guestbookresponse' AND column_name='downloadtype') THEN - INSERT INTO filedownload(guestbookresponse_id, downloadtype, downloadtimestamp, sessionid) SELECT id, downloadtype, responsetime, sessionid FROM guestbookresponse; - ALTER TABLE guestbookresponse DROP COLUMN downloadtype, DROP COLUMN sessionid; -END IF; -END -$$ - - diff --git a/src/main/resources/db/migration/V4.16.0.1__5303-addColumn-to-settingTable.sql b/src/main/resources/db/migration/V4.16.0.1__5303-addColumn-to-settingTable.sql deleted file mode 100644 index 8309dacf486..00000000000 --- a/src/main/resources/db/migration/V4.16.0.1__5303-addColumn-to-settingTable.sql +++ /dev/null @@ -1,13 +0,0 @@ -ALTER TABLE ONLY setting DROP CONSTRAINT setting_pkey ; - -ALTER TABLE setting ADD COLUMN IF NOT EXISTS ID SERIAL PRIMARY KEY; - -ALTER TABLE setting ADD COLUMN IF NOT EXISTS lang text; - -ALTER TABLE setting - ADD CONSTRAINT non_empty_lang - CHECK (lang <> ''); - -CREATE UNIQUE INDEX unique_settings - ON setting - (name, coalesce(lang, '')); diff --git a/src/main/resources/db/migration/V4.16.0.2__5028-dataset-explore.sql b/src/main/resources/db/migration/V4.16.0.2__5028-dataset-explore.sql deleted file mode 100644 index d880b1bddb4..00000000000 --- a/src/main/resources/db/migration/V4.16.0.2__5028-dataset-explore.sql +++ /dev/null @@ -1,3 +0,0 @@ -ALTER TABLE externaltool ADD COLUMN IF NOT EXISTS scope VARCHAR(255); -UPDATE externaltool SET scope = 'FILE'; -ALTER TABLE externaltool ALTER COLUMN scope SET NOT NULL; diff --git a/src/main/resources/db/migration/V4.16.0.3__6156-FooterImageforSub-Dataverse.sql b/src/main/resources/db/migration/V4.16.0.3__6156-FooterImageforSub-Dataverse.sql deleted file mode 100644 index 
3951897279e..00000000000 --- a/src/main/resources/db/migration/V4.16.0.3__6156-FooterImageforSub-Dataverse.sql +++ /dev/null @@ -1,4 +0,0 @@ -ALTER TABLE dataversetheme -ADD COLUMN IF NOT EXISTS logofooter VARCHAR, -ADD COLUMN IF NOT EXISTS logoFooterBackgroundColor VARCHAR, -ADD COLUMN IF NOT EXISTS logofooteralignment VARCHAR; diff --git a/src/main/resources/db/migration/V4.17.0.1__5991-update-scribejava.sql b/src/main/resources/db/migration/V4.17.0.1__5991-update-scribejava.sql deleted file mode 100644 index 6762e1fc076..00000000000 --- a/src/main/resources/db/migration/V4.17.0.1__5991-update-scribejava.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE OAuth2TokenData DROP COLUMN IF EXISTS scope; \ No newline at end of file diff --git a/src/main/resources/db/migration/V4.17.0.2__3578-file-page-preview.sql b/src/main/resources/db/migration/V4.17.0.2__3578-file-page-preview.sql deleted file mode 100644 index 152700ed96c..00000000000 --- a/src/main/resources/db/migration/V4.17.0.2__3578-file-page-preview.sql +++ /dev/null @@ -1,5 +0,0 @@ -ALTER TABLE externalTool -ADD COLUMN IF NOT EXISTS hasPreviewMode BOOLEAN; -UPDATE externaltool SET hasPreviewMode = false; -ALTER TABLE externaltool ALTER COLUMN hasPreviewMode SET NOT NULL; - diff --git a/src/main/resources/db/migration/V4.18.1.1__6459-contenttype-nullable.sql b/src/main/resources/db/migration/V4.18.1.1__6459-contenttype-nullable.sql deleted file mode 100644 index 79eab8583f0..00000000000 --- a/src/main/resources/db/migration/V4.18.1.1__6459-contenttype-nullable.sql +++ /dev/null @@ -1,2 +0,0 @@ --- contenttype can be null because dataset tools do not require it -ALTER TABLE externaltool ALTER contenttype DROP NOT NULL; diff --git a/src/main/resources/db/migration/V4.19.0.1__6485_multistore.sql b/src/main/resources/db/migration/V4.19.0.1__6485_multistore.sql deleted file mode 100644 index 84364169614..00000000000 --- a/src/main/resources/db/migration/V4.19.0.1__6485_multistore.sql +++ /dev/null @@ -1,3 +0,0 @@ -ALTER TABLE dataverse -ADD COLUMN IF NOT EXISTS storagedriver TEXT; -UPDATE dvobject set storageidentifier=CONCAT('file://', storageidentifier) where storageidentifier not like '%://%' and dtype='DataFile'; diff --git a/src/main/resources/db/migration/V4.19.0.2__6644-update-editor-role-alias.sql b/src/main/resources/db/migration/V4.19.0.2__6644-update-editor-role-alias.sql deleted file mode 100644 index 7eccdb5f3c4..00000000000 --- a/src/main/resources/db/migration/V4.19.0.2__6644-update-editor-role-alias.sql +++ /dev/null @@ -1,2 +0,0 @@ - -update dataverserole set alias = 'contributor' where alias = 'editor'; \ No newline at end of file diff --git a/src/main/resources/db/migration/V4.20.0.1__2734-alter-data-table-add-orig-file-name.sql b/src/main/resources/db/migration/V4.20.0.1__2734-alter-data-table-add-orig-file-name.sql deleted file mode 100644 index edde8821045..00000000000 --- a/src/main/resources/db/migration/V4.20.0.1__2734-alter-data-table-add-orig-file-name.sql +++ /dev/null @@ -1,2 +0,0 @@ - -ALTER TABLE datatable ADD COLUMN IF NOT EXISTS originalfilename character varying(255); \ No newline at end of file diff --git a/src/main/resources/db/migration/V4.20.0.2__6748-configure-dropdown-toolname.sql b/src/main/resources/db/migration/V4.20.0.2__6748-configure-dropdown-toolname.sql deleted file mode 100644 index e360b0adfb6..00000000000 --- a/src/main/resources/db/migration/V4.20.0.2__6748-configure-dropdown-toolname.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE externaltool -ADD COLUMN IF NOT EXISTS toolname VARCHAR(255); 
diff --git a/src/main/resources/db/migration/V4.20.0.3__6558-file-validation.sql b/src/main/resources/db/migration/V4.20.0.3__6558-file-validation.sql deleted file mode 100644 index 3e5e742968c..00000000000 --- a/src/main/resources/db/migration/V4.20.0.3__6558-file-validation.sql +++ /dev/null @@ -1,4 +0,0 @@ --- the lock type "pidRegister" has been removed in 4.20, replaced with "finalizePublication" type --- (since this script is run as the application is being deployed, any background pid registration --- job is definitely no longer running - so we do want to remove any such locks left behind) -DELETE FROM DatasetLock WHERE reason='pidRegister'; \ No newline at end of file diff --git a/src/main/resources/db/migration/V4.20.0.4__6936-maildomain-groups.sql b/src/main/resources/db/migration/V4.20.0.4__6936-maildomain-groups.sql deleted file mode 100644 index 8c89b66fdec..00000000000 --- a/src/main/resources/db/migration/V4.20.0.4__6936-maildomain-groups.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE persistedglobalgroup ADD COLUMN IF NOT EXISTS emaildomains text; \ No newline at end of file diff --git a/src/main/resources/db/migration/V4.20.0.5__6505-zipdownload-jobs.sql b/src/main/resources/db/migration/V4.20.0.5__6505-zipdownload-jobs.sql deleted file mode 100644 index 484d5dd0784..00000000000 --- a/src/main/resources/db/migration/V4.20.0.5__6505-zipdownload-jobs.sql +++ /dev/null @@ -1,2 +0,0 @@ --- maybe temporary? - work in progress -CREATE TABLE IF NOT EXISTS CUSTOMZIPSERVICEREQUEST (KEY VARCHAR(63), STORAGELOCATION VARCHAR(255), FILENAME VARCHAR(255), ISSUETIME TIMESTAMP); diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 6e630edc5ea..3e446d65586 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -277,7 +277,6 @@ -
-            (one line of the Globus tab markup removed here; the surrounding XHTML context was lost in extraction)
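For orientation between these commits, here is a minimal, self-contained sketch of the client-credentials handshake that GlobusServiceBean.getClientToken() performs against Globus Auth. The basicToken value is a placeholder (the Base64 encoding of your Globus app's client_id:client_secret), and error handling is elided; the same request pattern, with the transfer-scoped Bearer token from the response, drives the transfer.api.globusonline.org calls (access rules, mkdir, task_list) earlier in this patch.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class GlobusClientTokenSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder: Base64("client_id:client_secret") for your Globus app
            String basicToken = "BASE64_CLIENT_ID_COLON_SECRET";
            URL url = new URL("https://auth.globus.org/v2/oauth2/token"
                    + "?scope=openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all"
                    + "&grant_type=client_credentials");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST"); // empty POST body, as in makeRequest(..., "POST", null)
            conn.setRequestProperty("Authorization", "Basic " + basicToken);
            StringBuilder sb = new StringBuilder();
            // A 200 response carries access_token plus an other_tokens array holding the
            // transfer-scoped token that the Transfer API calls use.
            try (BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
                String line;
                while ((line = br.readLine()) != null) {
                    sb.append(line).append('\n');
                }
            }
            System.out.println(conn.getResponseCode() + ": " + sb);
            conn.disconnect();
        }
    }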
From 9ca568dd270796bff7a26b0ad97af12e75b1ea7f Mon Sep 17 00:00:00 2001 From: lubitchv Date: Fri, 9 Oct 2020 14:30:12 -0400 Subject: [PATCH 0003/1551] Download with Globus --- .../file-download-button-fragment.xhtml | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index f7d10c1cf60..9a8e535bcdd 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -69,18 +69,7 @@ - #{bundle['file.downloadFromGlobus']} - - - - - - #{bundle.download} + #{bundle['file.downloadFromGlobus']} @@ -234,6 +223,17 @@ #{bundle.download} + + + + + + #{bundle['file.downloadFromGlobus']} + Date: Tue, 13 Oct 2020 13:19:15 -0400 Subject: [PATCH 0004/1551] Initial implementation --- .../iq/dataverse/EditDatafilesPage.java | 16 +- .../dataverse/api/DownloadInstanceWriter.java | 4 +- .../iq/dataverse/dataaccess/DataAccess.java | 69 +-- .../dataaccess/HTTPOverlayAccessIO.java | 533 ++++++++++++++++++ .../iq/dataverse/dataaccess/S3AccessIO.java | 19 +- .../iq/dataverse/dataaccess/StorageIO.java | 10 +- .../iq/dataverse/util/UrlSignerUtil.java | 140 +++++ .../dataverse/dataaccess/S3AccessIOTest.java | 2 +- 8 files changed, 746 insertions(+), 47 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index b4feecfcdf4..eb3efcd117d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -1995,7 +1995,7 @@ private void handleReplaceFileUpload(String fullStorageLocation, fileReplacePageHelper.resetReplaceFileHelper(); saveEnabled = false; - String storageIdentifier = DataAccess.getStorarageIdFromLocation(fullStorageLocation); + String storageIdentifier = DataAccess.getStorageIdFromLocation(fullStorageLocation); if (fileReplacePageHelper.handleNativeFileUpload(null, storageIdentifier, fileName, contentType, checkSum)){ saveEnabled = true; @@ -2131,8 +2131,20 @@ public void handleExternalUpload() { String checksumType = paramMap.get("checksumType"); String checksumValue = paramMap.get("checksumValue"); + //ToDo - move this to StorageIO subclasses + int lastColon = fullStorageIdentifier.lastIndexOf(':'); - String storageLocation= fullStorageIdentifier.substring(0,lastColon) + "/" + dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage() + "/" + fullStorageIdentifier.substring(lastColon+1); + String storageLocation=null; + //Should check storage type, not parse name + //This works except with s3 stores with ids starting with 'http' + if(fullStorageIdentifier.startsWith("http")) { + //HTTP external URL case + //ToDo - check for valid URL + storageLocation= fullStorageIdentifier.substring(0,lastColon) + "/" + dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage() + "/" + FileUtil.generateStorageIdentifier() + "//" +fullStorageIdentifier.substring(lastColon+1); + } else { + //S3 direct upload case + storageLocation= fullStorageIdentifier.substring(0,lastColon) + "/" + dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage() + "/" + fullStorageIdentifier.substring(lastColon+1); + } if 
(uploadInProgress.isFalse()) { uploadInProgress.setValue(true); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java index b10412a577d..1361bff2167 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java @@ -228,7 +228,7 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] throw new NotFoundException("datafile access error: requested optional service (image scaling, format conversion, etc.) could not be performed on this datafile."); } } else { - if (storageIO instanceof S3AccessIO && !(dataFile.isTabularData()) && ((S3AccessIO) storageIO).downloadRedirectEnabled()) { + if (!(dataFile.isTabularData()) && storageIO.downloadRedirectEnabled()) { // definitely close the (still open) S3 input stream, // since we are not going to use it. The S3 documentation // emphasizes that it is very important not to leave these @@ -238,7 +238,7 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] // [attempt to] redirect: String redirect_url_str; try { - redirect_url_str = ((S3AccessIO)storageIO).generateTemporaryS3Url(); + redirect_url_str = storageIO.generateTemporaryDownloadUrl(); } catch (IOException ioex) { redirect_url_str = null; } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java index 0e2320401dd..4c6f1554250 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java @@ -54,40 +54,45 @@ public static StorageIO getStorageIO(T dvObject) throws } //passing DVObject instead of a datafile to accomodate for use of datafiles as well as datasets - public static StorageIO getStorageIO(T dvObject, DataAccessRequest req) throws IOException { + public static StorageIO getStorageIO(T dvObject, DataAccessRequest req) throws IOException { - if (dvObject == null - || dvObject.getStorageIdentifier() == null - || dvObject.getStorageIdentifier().isEmpty()) { - throw new IOException("getDataAccessObject: null or invalid datafile."); - } - String storageIdentifier = dvObject.getStorageIdentifier(); - int separatorIndex = storageIdentifier.indexOf("://"); - String storageDriverId = DEFAULT_STORAGE_DRIVER_IDENTIFIER; //default - if(separatorIndex>0) { - storageDriverId = storageIdentifier.substring(0,separatorIndex); - } - String storageType = getDriverType(storageDriverId); - switch(storageType) { - case "file": - return new FileAccessIO<>(dvObject, req, storageDriverId); - case "s3": - return new S3AccessIO<>(dvObject, req, storageDriverId); - case "swift": - return new SwiftAccessIO<>(dvObject, req, storageDriverId); - case "tmp": - throw new IOException("DataAccess IO attempted on a temporary file that hasn't been permanently saved yet."); - } + if (dvObject == null || dvObject.getStorageIdentifier() == null || dvObject.getStorageIdentifier().isEmpty()) { + throw new IOException("getDataAccessObject: null or invalid datafile."); + } + String storageIdentifier = dvObject.getStorageIdentifier(); + int separatorIndex = storageIdentifier.indexOf("://"); + String storageDriverId = DEFAULT_STORAGE_DRIVER_IDENTIFIER; // default + if (separatorIndex > 0) { + storageDriverId = storageIdentifier.substring(0, separatorIndex); + } + return getStorageIO(dvObject, req, 
storageDriverId); + } - // TODO: - // This code will need to be extended with a system of looking up - // available storage plugins by the storage tag embedded in the - // "storage identifier". - // -- L.A. 4.0.2 + protected static StorageIO getStorageIO(T dvObject, DataAccessRequest req, + String storageDriverId) throws IOException { + String storageType = getDriverType(storageDriverId); + switch (storageType) { + case "file": + return new FileAccessIO<>(dvObject, req, storageDriverId); + case "s3": + return new S3AccessIO<>(dvObject, req, storageDriverId); + case "swift": + return new SwiftAccessIO<>(dvObject, req, storageDriverId); + case "http": + return new HTTPOverlayAccessIO<>(dvObject, req, storageDriverId); + case "tmp": + throw new IOException( + "DataAccess IO attempted on a temporary file that hasn't been permanently saved yet."); + } + // TODO: + // This code will need to be extended with a system of looking up + // available storage plugins by the storage tag embedded in the + // "storage identifier". + // -- L.A. 4.0.2 - logger.warning("Could not find storage driver for: " + storageIdentifier); - throw new IOException("getDataAccessObject: Unsupported storage method."); - } + logger.warning("Could not find storage driver for: " + storageDriverId); + throw new IOException("getDataAccessObject: Unsupported storage method."); + } // Experimental extension of the StorageIO system allowing direct access to // stored physical files that may not be associated with any DvObjects @@ -122,7 +127,7 @@ public static String[] getDriverIdAndStorageLocation(String storageLocation) { return new String[]{storageDriverId, storageIdentifier}; } - public static String getStorarageIdFromLocation(String location) { + public static String getStorageIdFromLocation(String location) { if(location.contains("://")) { //It's a full location with a driverId, so strip and reapply the driver id //NOte that this will strip the bucketname out (which s3 uses) but the S3IOStorage class knows to look at re-insert it diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java new file mode 100644 index 00000000000..0bf4eb515de --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java @@ -0,0 +1,533 @@ +package edu.harvard.iq.dataverse.dataaccess; + +import com.amazonaws.AmazonClientException; +import com.amazonaws.HttpMethod; +import com.amazonaws.SdkClientException; +import com.amazonaws.auth.profile.ProfileCredentialsProvider; +import com.amazonaws.client.builder.AwsClientBuilder; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.datavariable.DataVariable; +import edu.harvard.iq.dataverse.util.FileUtil; +import edu.harvard.iq.dataverse.util.UrlSignerUtil; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.URL; +import java.net.URLEncoder; +import java.nio.channels.Channel; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import 
java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Random;
+import java.util.logging.Logger;
+import org.apache.commons.io.IOUtils;
+import org.apache.http.client.config.CookieSpecs;
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpHead;
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.config.Registry;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.conn.socket.ConnectionSocketFactory;
+import org.apache.http.conn.ssl.NoopHostnameVerifier;
+import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
+import org.apache.http.conn.ssl.TrustAllStrategy;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+import org.apache.http.ssl.SSLContextBuilder;
+import org.apache.http.util.EntityUtils;
+
+import javax.json.Json;
+import javax.json.JsonObjectBuilder;
+import javax.net.ssl.SSLContext;
+import javax.validation.constraints.NotNull;
+
+/**
+ * @author qqmyers
+ * @param <T> what it stores
+ */
+/*
+ * HTTP Overlay Driver
+ *
+ * StorageIdentifier format:
+ * <driverId>://<storageIdentifier>//<urlPath>
+ */
+public class HTTPOverlayAccessIO<T extends DvObject> extends StorageIO<T> {
+
+    private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.HttpOverlayAccessIO");
+
+    private StorageIO<DvObject> baseStore = null;
+    private String urlPath = null;
+    private String baseUrl = null;
+
+    private static HttpClientContext localContext = HttpClientContext.create();
+    private PoolingHttpClientConnectionManager cm = null;
+    CloseableHttpClient httpclient = null;
+    private int timeout = 1200;
+    private RequestConfig config = RequestConfig.custom().setConnectTimeout(timeout * 1000)
+            .setConnectionRequestTimeout(timeout * 1000).setSocketTimeout(timeout * 1000)
+            .setCookieSpec(CookieSpecs.STANDARD).setExpectContinueEnabled(true).build();
+    private static boolean trustCerts = false;
+    private int httpConcurrency = 4;
+
+    public HTTPOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) throws IOException {
+        super(dvObject, req, driverId);
+        this.setIsLocalFile(false);
+        configureStores(req, driverId, null);
+        // TODO: validate the storage location supplied
+        urlPath = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//" + 2));
+        logger.fine("Base URL: " + urlPath);
+    }
+
+    public HTTPOverlayAccessIO(String storageLocation, String driverId) throws IOException {
+        super(null, null, driverId);
+        this.setIsLocalFile(false);
+        configureStores(null, driverId, storageLocation);
+
+        // TODO: validate the storage location supplied
+        urlPath = storageLocation.substring(storageLocation.lastIndexOf("//" + 2));
+        logger.fine("Base URL: " + urlPath);
+    }
+
+    @Override
+    public void open(DataAccessOption...
options) throws IOException { + + baseStore.open(options); + + DataAccessRequest req = this.getRequest(); + + if (isWriteAccessRequested(options)) { + isWriteAccess = true; + isReadAccess = false; + } else { + isWriteAccess = false; + isReadAccess = true; + } + + if (dvObject instanceof DataFile) { + String storageIdentifier = dvObject.getStorageIdentifier(); + + DataFile dataFile = this.getDataFile(); + + if (req != null && req.getParameter("noVarHeader") != null) { + baseStore.setNoVarHeader(true); + } + + if (storageIdentifier == null || "".equals(storageIdentifier)) { + throw new FileNotFoundException("Data Access: No local storage identifier defined for this datafile."); + } + + // Fix new DataFiles: DataFiles that have not yet been saved may use this method + // when they don't have their storageidentifier in the final form + // So we fix it up here. ToDo: refactor so that storageidentifier is generated + // by the appropriate StorageIO class and is final from the start. + logger.fine("StorageIdentifier is: " + storageIdentifier); + + if (isReadAccess) { + if (dataFile.getFilesize() >= 0) { + this.setSize(dataFile.getFilesize()); + } else { + this.setSize(getSizeFromHttpHeader()); + } + if (dataFile.getContentType() != null && dataFile.getContentType().equals("text/tab-separated-values") + && dataFile.isTabularData() && dataFile.getDataTable() != null && (!this.noVarHeader())) { + + List datavariables = dataFile.getDataTable().getDataVariables(); + String varHeaderLine = generateVariableHeader(datavariables); + this.setVarHeader(varHeaderLine); + } + + } + + this.setMimeType(dataFile.getContentType()); + + try { + this.setFileName(dataFile.getFileMetadata().getLabel()); + } catch (Exception ex) { + this.setFileName("unknown"); + } + } else if (dvObject instanceof Dataset) { + throw new IOException( + "Data Access: HTTPOverlay Storage driver does not support dvObject type Dataverse yet"); + } else if (dvObject instanceof Dataverse) { + throw new IOException( + "Data Access: HTTPOverlay Storage driver does not support dvObject type Dataverse yet"); + } else { + this.setSize(getSizeFromHttpHeader()); + } + } + + private long getSizeFromHttpHeader() { + long size = -1; + HttpHead head = new HttpHead(baseUrl + "/" + urlPath); + try { + CloseableHttpResponse response = httpclient.execute(head, localContext); + + try { + int code = response.getStatusLine().getStatusCode(); + switch (code) { + case 200: + size = Long.parseLong(response.getHeaders("Content-Length")[0].getValue()); + logger.fine("Found file size: " + size); + break; + default: + logger.warning("Response from " + head.getURI().toString() + " was " + code); + } + } finally { + EntityUtils.consume(response.getEntity()); + } + } catch (Exception e) { + logger.warning(e.getMessage()); + } + return size; + } + + @Override + public InputStream getInputStream() throws IOException { + if (super.getInputStream() == null) { + try { + HttpGet get = new HttpGet(baseUrl + "/" + urlPath); + CloseableHttpResponse response = httpclient.execute(get, localContext); + + int code = response.getStatusLine().getStatusCode(); + switch (code) { + case 200: + setInputStream(response.getEntity().getContent()); + break; + default: + logger.warning("Response from " + get.getURI().toString() + " was " + code); + throw new IOException("Cannot retrieve: " + baseUrl + "/" + urlPath); + } + } catch (Exception e) { + logger.warning(e.getMessage()); + throw new IOException("Error retrieving: " + baseUrl + "/" + urlPath); + + } + 
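+            // wrap the remote response stream in a Channel so the channel-based
+            // accessors below (getChannel/getReadChannel) also work for this driver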
setChannel(Channels.newChannel(super.getInputStream())); + } + return super.getInputStream(); + } + + @Override + public Channel getChannel() throws IOException { + if (super.getChannel() == null) { + getInputStream(); + } + return channel; + } + + @Override + public ReadableByteChannel getReadChannel() throws IOException { + // Make sure StorageIO.channel variable exists + getChannel(); + return super.getReadChannel(); + } + + @Override + public void delete() throws IOException { + // Delete is best-effort - we tell the remote server and it may or may not + // implement this call + if (!isDirectAccess()) { + throw new IOException("Direct Access IO must be used to permanently delete stored file objects"); + } + try { + HttpDelete del = new HttpDelete(baseUrl + "/" + urlPath); + CloseableHttpResponse response = httpclient.execute(del, localContext); + try { + int code = response.getStatusLine().getStatusCode(); + switch (code) { + case 200: + logger.fine("Sent DELETE for " + baseUrl + "/" + urlPath); + default: + logger.fine("Response from DELETE on " + del.getURI().toString() + " was " + code); + } + } finally { + EntityUtils.consume(response.getEntity()); + } + } catch (Exception e) { + logger.warning(e.getMessage()); + throw new IOException("Error retrieving: " + baseUrl + "/" + urlPath); + + } + + // Delete all the cached aux files as well: + deleteAllAuxObjects(); + + } + + @Override + public Channel openAuxChannel(String auxItemTag, DataAccessOption... options) throws IOException { + return baseStore.openAuxChannel(auxItemTag, options); + } + + @Override + public boolean isAuxObjectCached(String auxItemTag) throws IOException { + return baseStore.isAuxObjectCached(auxItemTag); + } + + @Override + public long getAuxObjectSize(String auxItemTag) throws IOException { + return baseStore.getAuxObjectSize(auxItemTag); + } + + @Override + public Path getAuxObjectAsPath(String auxItemTag) throws IOException { + return baseStore.getAuxObjectAsPath(auxItemTag); + } + + @Override + public void backupAsAux(String auxItemTag) throws IOException { + baseStore.backupAsAux(auxItemTag); + } + + @Override + public void revertBackupAsAux(String auxItemTag) throws IOException { + baseStore.revertBackupAsAux(auxItemTag); + } + + @Override + // this method copies a local filesystem Path into this DataAccess Auxiliary + // location: + public void savePathAsAux(Path fileSystemPath, String auxItemTag) throws IOException { + baseStore.savePathAsAux(fileSystemPath, auxItemTag); + } + + @Override + public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag, Long filesize) throws IOException { + baseStore.saveInputStreamAsAux(inputStream, auxItemTag, filesize); + } + + /** + * @param inputStream InputStream we want to save + * @param auxItemTag String representing this Auxiliary type ("extension") + * @throws IOException if anything goes wrong. 
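+     * Note: like the other auxiliary-object methods here, this delegates to the
+     * configured base store, which holds all cached auxiliary objects.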
+ */ + @Override + public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag) throws IOException { + baseStore.saveInputStreamAsAux(inputStream, auxItemTag); + } + + @Override + public List listAuxObjects() throws IOException { + return baseStore.listAuxObjects(); + } + + @Override + public void deleteAuxObject(String auxItemTag) throws IOException { + baseStore.deleteAuxObject(auxItemTag); + } + + @Override + public void deleteAllAuxObjects() throws IOException { + baseStore.deleteAllAuxObjects(); + } + + @Override + public String getStorageLocation() throws IOException { + String fullStorageLocation = dvObject.getStorageIdentifier(); + fullStorageLocation = fullStorageLocation.substring(fullStorageLocation.lastIndexOf("://") + 3); + fullStorageLocation = fullStorageLocation.substring(0, fullStorageLocation.indexOf("//") + 2); + if (this.getDvObject() instanceof Dataset) { + fullStorageLocation = this.getDataset().getAuthorityForFileStorage() + "/" + + this.getDataset().getIdentifierForFileStorage() + "/" + fullStorageLocation; + } else if (this.getDvObject() instanceof DataFile) { + fullStorageLocation = this.getDataFile().getOwner().getAuthorityForFileStorage() + "/" + + this.getDataFile().getOwner().getIdentifierForFileStorage() + "/" + fullStorageLocation; + } else if (dvObject instanceof Dataverse) { + throw new IOException("HttpOverlayAccessIO: Dataverses are not a supported dvObject"); + } + return fullStorageLocation; + } + + @Override + public Path getFileSystemPath() throws UnsupportedDataAccessOperationException { + throw new UnsupportedDataAccessOperationException( + "HttpOverlayAccessIO: this is a remote DataAccess IO object, it has no local filesystem path associated with it."); + } + + @Override + public boolean exists() { + return (getSizeFromHttpHeader() != -1); + } + + @Override + public WritableByteChannel getWriteChannel() throws UnsupportedDataAccessOperationException { + throw new UnsupportedDataAccessOperationException( + "HttpOverlayAccessIO: there are no write Channels associated with S3 objects."); + } + + @Override + public OutputStream getOutputStream() throws UnsupportedDataAccessOperationException { + throw new UnsupportedDataAccessOperationException( + "HttpOverlayAccessIO: there are no output Streams associated with S3 objects."); + } + + @Override + public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException { + return baseStore.getAuxFileAsInputStream(auxItemTag); + } + + @Override + public boolean downloadRedirectEnabled() { + String optionValue = System.getProperty("dataverse.files." + this.driverId + ".download-redirect"); + if ("true".equalsIgnoreCase(optionValue)) { + return true; + } + return false; + } + + public String generateTemporaryDownloadUrl() throws IOException { + String secretKey = System.getProperty("dataverse.files." + this.driverId + ".secretkey"); + if (secretKey == null) { + return baseUrl + "/" + urlPath; + } else { + return UrlSignerUtil.signUrl(baseUrl + "/" + urlPath, getUrlExpirationMinutes(), null, "GET", secretKey); + } + } + + int getUrlExpirationMinutes() { + String optionValue = System.getProperty("dataverse.files." 
+ this.driverId + ".url-expiration-minutes"); + if (optionValue != null) { + Integer num; + try { + num = Integer.parseInt(optionValue); + } catch (NumberFormatException ex) { + num = null; + } + if (num != null) { + return num; + } + } + return 60; + } + + private void configureStores(DataAccessRequest req, String driverId, String storageLocation) throws IOException { + baseUrl = System.getProperty("dataverse.files." + this.driverId + ".baseUrl"); + + if (baseStore == null) { + String baseDriverId = System.getProperty("dataverse.files." + driverId + ".baseStore"); + String fullStorageLocation = null; + if (this.getDvObject() != null) { + fullStorageLocation = getStorageLocation(); + + // S3 expects :/// + switch (System.getProperty("dataverse.files." + baseDriverId + ".type")) { + case "s3": + fullStorageLocation = baseDriverId + "://" + + System.getProperty("dataverse.files." + baseDriverId + ".bucketName") + "/" + + fullStorageLocation; + break; + case "file": + fullStorageLocation = baseDriverId + "://" + + System.getProperty("dataverse.files." + baseDriverId + ".directory") + "/" + + fullStorageLocation; + break; + default: + logger.warning("Not Implemented: HTTPOverlay store with base store type: " + + System.getProperty("dataverse.files." + baseDriverId + ".type")); + throw new IOException("Not implemented"); + } + + } else if (storageLocation != null) { + // ://// + String storageId = storageLocation.substring(storageLocation.indexOf("://" + 3)); + fullStorageLocation = storageId.substring(0, storageId.indexOf("//")); + + switch (System.getProperty("dataverse.files." + baseDriverId + ".type")) { + case "s3": + fullStorageLocation = baseDriverId + "://" + + System.getProperty("dataverse.files." + baseDriverId + ".bucketName") + "/" + + fullStorageLocation; + break; + case "file": + fullStorageLocation = baseDriverId + "://" + + System.getProperty("dataverse.files." + baseDriverId + ".directory") + "/" + + fullStorageLocation; + break; + default: + logger.warning("Not Implemented: HTTPOverlay store with base store type: " + + System.getProperty("dataverse.files." + baseDriverId + ".type")); + throw new IOException("Not implemented"); + } + } + baseStore = DataAccess.getDirectStorageIO(fullStorageLocation); + } + } + + public CloseableHttpClient getSharedHttpClient() { + if (httpclient == null) { + try { + initHttpPool(); + httpclient = HttpClients.custom().setConnectionManager(cm).setDefaultRequestConfig(config).build(); + + } catch (NoSuchAlgorithmException | KeyStoreException | KeyManagementException ex) { + logger.warning(ex.getMessage()); + } + } + return httpclient; + } + + private void initHttpPool() throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException { + if (trustCerts) { + // use the TrustSelfSignedStrategy to allow Self Signed Certificates + SSLContext sslContext; + SSLConnectionSocketFactory connectionFactory; + + sslContext = SSLContextBuilder.create().loadTrustMaterial(new TrustAllStrategy()).build(); + // create an SSL Socket Factory to use the SSLContext with the trust self signed + // certificate strategy + // and allow all hosts verifier. + connectionFactory = new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE); + + Registry registry = RegistryBuilder.create() + .register("https", connectionFactory).build(); + cm = new PoolingHttpClientConnectionManager(registry); + } else { + cm = new PoolingHttpClientConnectionManager(); + } + cm.setDefaultMaxPerRoute(httpConcurrency); + cm.setMaxTotal(httpConcurrency > 20 ? 
httpConcurrency : 20); + } + + @Override + public void savePath(Path fileSystemPath) throws IOException { + throw new UnsupportedDataAccessOperationException( + "HttpOverlayAccessIO: savePath() not implemented in this storage driver."); + + } + + @Override + public void saveInputStream(InputStream inputStream) throws IOException { + throw new UnsupportedDataAccessOperationException( + "HttpOverlayAccessIO: saveInputStream() not implemented in this storage driver."); + + } + + @Override + public void saveInputStream(InputStream inputStream, Long filesize) throws IOException { + throw new UnsupportedDataAccessOperationException( + "HttpOverlayAccessIO: saveInputStream(InputStream, Long) not implemented in this storage driver."); + + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index c0defccfdef..533498cad97 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -817,16 +817,17 @@ private static String getMainFileKey(String baseKey, String storageIdentifier, S } return key; } - - public boolean downloadRedirectEnabled() { - String optionValue = System.getProperty("dataverse.files." + this.driverId + ".download-redirect"); - if ("true".equalsIgnoreCase(optionValue)) { - return true; - } - return false; - } - public String generateTemporaryS3Url() throws IOException { + @Override + public boolean downloadRedirectEnabled() { + String optionValue = System.getProperty("dataverse.files." + this.driverId + ".download-redirect"); + if ("true".equalsIgnoreCase(optionValue)) { + return true; + } + return false; + } + + public String generateTemporaryDownloadUrl() throws IOException { //Questions: // Q. Should this work for private and public? // A. Yes! Since the URL has a limited, short life span. -- L.A. 
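For reference, a minimal sketch of how a caller can use the now-polymorphic redirect API (relying on the StorageIO base-class defaults added in the next file); the variable names are hypothetical, and the behavior assumes the JVM options read above (dataverse.files.&lt;id&gt;.baseUrl, .baseStore, .secretkey, .download-redirect). DownloadInstanceWriter in this patch does the equivalent of:

    StorageIO<DataFile> storageIO = DataAccess.getStorageIO(dataFile);
    if (!dataFile.isTabularData() && storageIO.downloadRedirectEnabled()) {
        String redirectUrl;
        try {
            // S3AccessIO returns a presigned S3 URL; HTTPOverlayAccessIO returns the
            // base URL signed with UrlSignerUtil (or unsigned if no secretkey is set)
            redirectUrl = storageIO.generateTemporaryDownloadUrl();
        } catch (IOException ioex) {
            redirectUrl = null; // fall back to streaming the bytes through Dataverse
        }
        // issue the HTTP redirect to redirectUrl if it is non-null
    }

The signing is symmetric: a URL produced by UrlSignerUtil.signUrl(url, minutes, user, "GET", key) can later be checked on the receiving side with UrlSignerUtil.isValidUrl(receivedUrl, "GET", user, key), using the same shared key and method.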
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 2f66eec5f4c..148858ce544 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -183,7 +183,7 @@ public boolean canWrite() { public abstract void deleteAllAuxObjects() throws IOException; private DataAccessRequest req; - private InputStream in; + private InputStream in = null; private OutputStream out; protected Channel channel; protected DvObject dvObject; @@ -542,4 +542,12 @@ public boolean isBelowIngestSizeLimit() { return true; } } + + public boolean downloadRedirectEnabled() { + return false; + } + + public String generateTemporaryDownloadUrl() throws IOException { + throw new UnsupportedDataAccessOperationException("Direct download not implemented for this storage type"); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java new file mode 100644 index 00000000000..9a04a056fa0 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java @@ -0,0 +1,140 @@ +package edu.harvard.iq.dataverse.util; + +import java.net.URL; +import java.nio.charset.Charset; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.apache.commons.codec.digest.DigestUtils; +import org.apache.http.NameValuePair; +import org.apache.http.client.utils.URLEncodedUtils; +import org.joda.time.LocalDateTime; + +/** + * Simple class to sign/validate URLs. + * + */ +public class UrlSignerUtil { + + private static final Logger logger = Logger.getLogger(UrlSignerUtil.class.getName()); + + /** + * + * @param baseUrl - the URL to sign - cannot contain query params + * "until","user", "method", or "token" + * @param timeout - how many minutes to make the URL valid for (note - time skew + * between the creator and receiver could affect the validation + * @param user - a string representing the user - should be understood by the + * creator/receiver + * @param method - one of the HTTP methods + * @param key - a secret key shared by the creator/receiver. In Dataverse + * this could be an APIKey (when sending URL to a tool that will + * use it to retrieve info from Dataverse) + * @return - the signed URL + */ + public static String signUrl(String baseUrl, Integer timeout, String user, String method, String key) { + StringBuilder signedUrl = new StringBuilder(baseUrl); + + boolean firstParam = true; + if (baseUrl.contains("?")) { + firstParam = false; + } + if (timeout != null) { + LocalDateTime validTime = LocalDateTime.now(); + validTime = validTime.plusMinutes(timeout); + validTime.toString(); + signedUrl.append(firstParam ? "?" : "&").append("until=").append(validTime); + } + if (user != null) { + signedUrl.append(firstParam ? "?" : "&").append("user=").append(user); + } + if (method != null) { + signedUrl.append(firstParam ? "?" : "&").append("method=").append(method); + } + signedUrl.append("&token=").append(DigestUtils.sha512Hex(signedUrl.toString() + key)); + logger.fine("Generated Signed URL: " + signedUrl.toString()); + if (logger.isLoggable(Level.FINE)) { + logger.fine( + "URL signature is " + (isValidUrl(signedUrl.toString(), method, user, key) ? 
"valid" : "invalid")); + } + return signedUrl.toString(); + } + + /** + * This method will only return true if the URL and parameters except the + * "token" are unchanged from the original/match the values sent to this method, + * and the "token" parameter matches what this method recalculates using the + * shared key THe method also assures that the "until" timestamp is after the + * current time. + * + * @param signedUrl - the signed URL as received from Dataverse + * @param method - an HTTP method. If provided, the method in the URL must + * match + * @param user - a string representing the user, if provided the value must + * match the one in the url + * @param key - the shared secret key to be used in validation + * @return - true if valid, false if not: e.g. the key is not the same as the + * one used to generate the "token" any part of the URL preceding the + * "token" has been altered the method doesn't match (e.g. the server + * has received a POST request and the URL only allows GET) the user + * string doesn't match (e.g. the server knows user A is logged in, but + * the URL is only for user B) the url has expired (was used after the + * until timestamp) + */ + public static boolean isValidUrl(String signedUrl, String method, String user, String key) { + boolean valid = true; + try { + URL url = new URL(signedUrl); + List params = URLEncodedUtils.parse(url.getQuery(), Charset.forName("UTF-8")); + String hash = null; + String dateString = null; + String allowedMethod = null; + String allowedUser = null; + for (NameValuePair nvp : params) { + if (nvp.getName().equals("token")) { + hash = nvp.getValue(); + } + if (nvp.getName().equals("until")) { + dateString = nvp.getValue(); + } + if (nvp.getName().equals("method")) { + allowedMethod = nvp.getValue(); + } + if (nvp.getName().equals("user")) { + allowedUser = nvp.getValue(); + } + } + + int index = signedUrl.indexOf("&token="); + // Assuming the token is last - doesn't have to be, but no reason for the URL + // params to be rearranged either, and this should only cause false negatives if + // it does happen + String urlToHash = signedUrl.substring(0, index); + String newHash = DigestUtils.sha512Hex(urlToHash + key); + if (!hash.contentEquals(newHash)) { + logger.fine("Hash doesn't match"); + valid = false; + } + if (LocalDateTime.parse(dateString).isAfter(LocalDateTime.now())) { + logger.fine("Url is expired"); + valid = false; + } + if (method != null && !method.equals(allowedMethod)) { + logger.fine("Method doesn't match"); + valid = false; + } + if (user != null && user.equals(allowedUser)) { + logger.fine("User doesn't match"); + valid = false; + } + } catch (Throwable t) { + // Want to catch anything like null pointers, etc. 
to force valid=false upon any + // error + logger.warning("Bad URL: " + signedUrl + " : " + t.getMessage()); + valid = false; + } + return valid; + } + +} diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIOTest.java index 1f118a0ea68..e2756d70663 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIOTest.java @@ -30,7 +30,7 @@ public class S3AccessIOTest { @Mock private AmazonS3 s3client; - private S3AccessIO dataSetAccess; + private StorageIO dataSetAccess; private S3AccessIO dataFileAccess; private Dataset dataSet; private DataFile dataFile; From e8c15785b16651e728881bf0856e5347e198e5ec Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 13 Oct 2020 13:32:25 -0400 Subject: [PATCH 0005/1551] null check on dateString --- src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java index 9a04a056fa0..3c91387f169 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java @@ -116,7 +116,7 @@ public static boolean isValidUrl(String signedUrl, String method, String user, S logger.fine("Hash doesn't match"); valid = false; } - if (LocalDateTime.parse(dateString).isAfter(LocalDateTime.now())) { + if (dateString != null && LocalDateTime.parse(dateString).isAfter(LocalDateTime.now())) { logger.fine("Url is expired"); valid = false; } From 9c6e85128037600dfc9895502d1f60826cb398b6 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Wed, 14 Oct 2020 11:55:44 -0400 Subject: [PATCH 0006/1551] add logs for publishing file validation --- .../java/edu/harvard/iq/dataverse/util/FileUtil.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 2706d840d21..2b7b6416085 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1707,6 +1707,8 @@ public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) { public static void validateDataFileChecksum(DataFile dataFile) throws IOException { DataFile.ChecksumType checksumType = dataFile.getChecksumType(); + + logger.info(checksumType.toString()); if (checksumType == null) { String info = BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.noChecksumType", Arrays.asList(dataFile.getId().toString())); logger.log(Level.INFO, info); @@ -1720,6 +1722,7 @@ public static void validateDataFileChecksum(DataFile dataFile) throws IOExceptio storage.open(DataAccessOption.READ_ACCESS); if (!dataFile.isTabularData()) { + logger.info("It is not tabular"); in = storage.getInputStream(); } else { // if this is a tabular file, read the preserved original "auxiliary file" @@ -1738,7 +1741,9 @@ public static void validateDataFileChecksum(DataFile dataFile) throws IOExceptio String recalculatedChecksum = null; try { + logger.info("Before calculating checksum"); recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType); + logger.info("Checksum:" + recalculatedChecksum); } catch (RuntimeException rte) { recalculatedChecksum = null; } finally { @@ -1757,6 +1762,9 @@ public static void 
validateDataFileChecksum(DataFile dataFile) throws IOExceptio if (!recalculatedChecksum.equals(dataFile.getChecksumValue())) { // There's one possible condition that is 100% recoverable and can // be automatically fixed (issue #6660): + logger.info(dataFile.getChecksumValue()); + logger.info(recalculatedChecksum); + logger.info("Checksums are not equal"); boolean fixed = false; if (!dataFile.isTabularData() && dataFile.getIngestReport() != null) { // try again, see if the .orig file happens to be there: @@ -1786,6 +1794,7 @@ public static void validateDataFileChecksum(DataFile dataFile) throws IOExceptio } if (!fixed) { + logger.info("checksum cannot be fixed"); String info = BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.wrongChecksumValue", Arrays.asList(dataFile.getId().toString())); logger.log(Level.INFO, info); throw new IOException(info); From 00d53ee4d43fd49a81faa1f31b9b0c6a1cad8b46 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Oct 2020 12:39:49 -0400 Subject: [PATCH 0007/1551] adjust incoming identifier for HttpOverlay drivers --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 655cdafe04c..7fd3b1ab63d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1782,6 +1782,12 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, if (null == contentDispositionHeader) { if (optionalFileParams.hasStorageIdentifier()) { newStorageIdentifier = optionalFileParams.getStorageIdentifier(); + String driverType = DataAccess.getDriverType(newStorageIdentifier.substring(0, newStorageIdentifier.indexOf(":"))); + if(driverType.equals("http")) { + //Add a generated identifier for the aux files + int lastColon = newStorageIdentifier.lastIndexOf(':'); + newStorageIdentifier= newStorageIdentifier.substring(0,lastColon) + "/" + FileUtil.generateStorageIdentifier() + "//" +newStorageIdentifier.substring(lastColon+1); + } // ToDo - check that storageIdentifier is valid if (optionalFileParams.hasFileName()) { newFilename = optionalFileParams.getFileName(); From 94921bd2de1bfdff7bb73c6d7da55528fc9c418a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Oct 2020 12:40:12 -0400 Subject: [PATCH 0008/1551] support overlay case --- .../edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java index bd0549622f0..f96f948f0a9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java @@ -32,7 +32,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; - +import java.util.logging.Logger; // Dataverse imports: import edu.harvard.iq.dataverse.DataFile; @@ -48,6 +48,9 @@ public class FileAccessIO extends StorageIO { + + private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.FileAccessIO"); + public FileAccessIO() { //Constructor only for testing @@ -169,7 +172,8 @@ public void open (DataAccessOption... 
options) throws IOException { } else if (dvObject instanceof Dataverse) { dataverse = this.getDataverse(); } else { - throw new IOException("Data Access: Invalid DvObject type"); + logger.fine("Overlay case: FileAccessIO open for : " + physicalPath.toString()); + //throw new IOException("Data Access: Invalid DvObject type"); } // This "status" is a leftover from 3.6; we don't have a use for it // in 4.0 yet; and we may not need it at all. From cbdd35c0b186a535d6df358d9f57082c713c6ff3 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Oct 2020 12:40:33 -0400 Subject: [PATCH 0009/1551] document need to update for overlay case --- .../java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 533498cad97..2b7b1b91ae2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -224,6 +224,9 @@ public void open(DataAccessOption... options) throws IOException { } else if (dvObject instanceof Dataverse) { throw new IOException("Data Access: Storage driver does not support dvObject type Dataverse yet"); } else { + + //ToDo - skip this for overlay case + // Direct access, e.g. for external upload - no associated DVobject yet, but we want to be able to get the size // With small files, it looks like we may call before S3 says it exists, so try some retries before failing if(key!=null) { From 11535bd4e7401d9f0100aa4d0ecfddbd3d2a9da2 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Oct 2020 12:40:57 -0400 Subject: [PATCH 0010/1551] keep owner for getStorageIO call for HttpOverlay case --- .../harvard/iq/dataverse/ingest/IngestServiceBean.java | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index f5eeaa1c316..5a5ab8cc86e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -290,10 +290,6 @@ public List saveAndAddFilesToDataset(DatasetVersion version, List saveAndAddFilesToDataset(DatasetVersion version, List Date: Wed, 14 Oct 2020 12:41:32 -0400 Subject: [PATCH 0011/1551] typos --- .../harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java index 0bf4eb515de..a058dfc070e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java @@ -95,7 +95,7 @@ public HTTPOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) t this.setIsLocalFile(false); configureStores(req, driverId, null); // TODO: validate the storage location supplied - urlPath = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//" + 2)); + urlPath = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); logger.fine("Base URL: " + urlPath); } @@ -105,7 +105,7 @@ public HTTPOverlayAccessIO(String storageLocation, String driverId) throws IOExc configureStores(null, 
driverId, storageLocation); // TODO: validate the storage location supplied - urlPath = storageLocation.substring(storageLocation.lastIndexOf("//" + 2)); + urlPath = storageLocation.substring(storageLocation.lastIndexOf("//") + 2); logger.fine("Base URL: " + urlPath); } @@ -345,6 +345,7 @@ public void deleteAllAuxObjects() throws IOException { @Override public String getStorageLocation() throws IOException { String fullStorageLocation = dvObject.getStorageIdentifier(); + logger.fine("storageidentifier: " + fullStorageLocation); fullStorageLocation = fullStorageLocation.substring(fullStorageLocation.lastIndexOf("://") + 3); fullStorageLocation = fullStorageLocation.substring(0, fullStorageLocation.indexOf("//") + 2); if (this.getDvObject() instanceof Dataset) { From 2fb9106ef6d0a6f07fd50615da8565b9d49a619f Mon Sep 17 00:00:00 2001 From: lubitchv Date: Wed, 14 Oct 2020 12:46:06 -0400 Subject: [PATCH 0012/1551] Check for globus file checksum before publishing --- .../harvard/iq/dataverse/util/FileUtil.java | 175 ++++++++++-------- 1 file changed, 94 insertions(+), 81 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 2b7b6416085..f9ee57a07d5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -21,6 +21,8 @@ package edu.harvard.iq.dataverse.util; import static edu.harvard.iq.dataverse.dataaccess.S3AccessIO.S3_IDENTIFIER_PREFIX; + +import com.amazonaws.services.s3.model.S3ObjectSummary; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFile.ChecksumType; import edu.harvard.iq.dataverse.DataFileServiceBean; @@ -1706,102 +1708,113 @@ public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) { } public static void validateDataFileChecksum(DataFile dataFile) throws IOException { - DataFile.ChecksumType checksumType = dataFile.getChecksumType(); - - logger.info(checksumType.toString()); - if (checksumType == null) { - String info = BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.noChecksumType", Arrays.asList(dataFile.getId().toString())); - logger.log(Level.INFO, info); - throw new IOException(info); - } + String recalculatedChecksum = null; + if (dataFile.getContentType().equals(DataFileServiceBean.MIME_TYPE_GLOBUS_FILE)) { + for (S3ObjectSummary s3ObjectSummary : dataFile.getStorageIO().listAuxObjects("")) { + recalculatedChecksum = s3ObjectSummary.getETag(); + if (!recalculatedChecksum.equals(dataFile.getChecksumValue())) { + String info = BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.wrongChecksumValue", Arrays.asList(dataFile.getId().toString())); + logger.log(Level.INFO, info); + throw new IOException(info); + } + } + } else { + DataFile.ChecksumType checksumType = dataFile.getChecksumType(); - StorageIO storage = dataFile.getStorageIO(); - InputStream in = null; - - try { - storage.open(DataAccessOption.READ_ACCESS); - - if (!dataFile.isTabularData()) { - logger.info("It is not tabular"); - in = storage.getInputStream(); - } else { - // if this is a tabular file, read the preserved original "auxiliary file" - // instead: - in = storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION); + logger.info(checksumType.toString()); + if (checksumType == null) { + String info = BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.noChecksumType", Arrays.asList(dataFile.getId().toString())); + 
logger.log(Level.INFO, info); + throw new IOException(info); } - } catch (IOException ioex) { - in = null; - } - if (in == null) { - String info = BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.failRead", Arrays.asList(dataFile.getId().toString())); - logger.log(Level.INFO, info); - throw new IOException(info); - } + StorageIO storage = dataFile.getStorageIO(); + InputStream in = null; - String recalculatedChecksum = null; - try { - logger.info("Before calculating checksum"); - recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType); - logger.info("Checksum:" + recalculatedChecksum); - } catch (RuntimeException rte) { - recalculatedChecksum = null; - } finally { - IOUtils.closeQuietly(in); - } - - if (recalculatedChecksum == null) { - String info = BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.failCalculateChecksum", Arrays.asList(dataFile.getId().toString())); - logger.log(Level.INFO, info); - throw new IOException(info); - } - - // TODO? What should we do if the datafile does not have a non-null checksum? - // Should we fail, or should we assume that the recalculated checksum - // is correct, and populate the checksumValue field with it? - if (!recalculatedChecksum.equals(dataFile.getChecksumValue())) { - // There's one possible condition that is 100% recoverable and can - // be automatically fixed (issue #6660): - logger.info(dataFile.getChecksumValue()); - logger.info(recalculatedChecksum); - logger.info("Checksums are not equal"); - boolean fixed = false; - if (!dataFile.isTabularData() && dataFile.getIngestReport() != null) { - // try again, see if the .orig file happens to be there: - try { + try { + storage.open(DataAccessOption.READ_ACCESS); + + if (!dataFile.isTabularData()) { + logger.info("It is not tabular"); + in = storage.getInputStream(); + } else { + // if this is a tabular file, read the preserved original "auxiliary file" + // instead: in = storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION); - } catch (IOException ioex) { - in = null; } - if (in != null) { + } catch (IOException ioex) { + in = null; + } + + if (in == null) { + String info = BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.failRead", Arrays.asList(dataFile.getId().toString())); + logger.log(Level.INFO, info); + throw new IOException(info); + } + + try { + logger.info("Before calculating checksum"); + recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType); + logger.info("Checksum:" + recalculatedChecksum); + } catch (RuntimeException rte) { + recalculatedChecksum = null; + } finally { + IOUtils.closeQuietly(in); + } + + if (recalculatedChecksum == null) { + String info = BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.failCalculateChecksum", Arrays.asList(dataFile.getId().toString())); + logger.log(Level.INFO, info); + throw new IOException(info); + } + + // TODO? What should we do if the datafile does not have a non-null checksum? + // Should we fail, or should we assume that the recalculated checksum + // is correct, and populate the checksumValue field with it? 
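+        // Compare the recomputed checksum with the stored value; on a mismatch, try
+        // the one recoverable case below (restoring the saved .orig original) before failing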
+ if (!recalculatedChecksum.equals(dataFile.getChecksumValue())) { + // There's one possible condition that is 100% recoverable and can + // be automatically fixed (issue #6660): + logger.info(dataFile.getChecksumValue()); + logger.info(recalculatedChecksum); + logger.info("Checksums are not equal"); + boolean fixed = false; + if (!dataFile.isTabularData() && dataFile.getIngestReport() != null) { + // try again, see if the .orig file happens to be there: try { - recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType); - } catch (RuntimeException rte) { - recalculatedChecksum = null; - } finally { - IOUtils.closeQuietly(in); + in = storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION); + } catch (IOException ioex) { + in = null; } - // try again: - if (recalculatedChecksum.equals(dataFile.getChecksumValue())) { - fixed = true; + if (in != null) { try { - storage.revertBackupAsAux(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION); - } catch (IOException ioex) { - fixed = false; + recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType); + } catch (RuntimeException rte) { + recalculatedChecksum = null; + } finally { + IOUtils.closeQuietly(in); + } + // try again: + if (recalculatedChecksum.equals(dataFile.getChecksumValue())) { + fixed = true; + try { + storage.revertBackupAsAux(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION); + } catch (IOException ioex) { + fixed = false; + } } } } - } - - if (!fixed) { - logger.info("checksum cannot be fixed"); - String info = BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.wrongChecksumValue", Arrays.asList(dataFile.getId().toString())); - logger.log(Level.INFO, info); - throw new IOException(info); + + if (!fixed) { + logger.info("checksum cannot be fixed"); + String info = BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.wrongChecksumValue", Arrays.asList(dataFile.getId().toString())); + logger.log(Level.INFO, info); + throw new IOException(info); + } } } - logger.log(Level.INFO, "successfully validated DataFile {0}; checksum {1}", new Object[]{dataFile.getId(), recalculatedChecksum}); + } public static String getStorageIdentifierFromLocation(String location) { From 239d5a8de208b6bf4bb2c809264d2069526e33ff Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Oct 2020 14:03:44 -0400 Subject: [PATCH 0013/1551] debug logging --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 2 ++ .../harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java | 1 + 2 files changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 7fd3b1ab63d..3fb2e7c2bc3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1785,8 +1785,10 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, String driverType = DataAccess.getDriverType(newStorageIdentifier.substring(0, newStorageIdentifier.indexOf(":"))); if(driverType.equals("http")) { //Add a generated identifier for the aux files + logger.fine("in: " + newStorageIdentifier); int lastColon = newStorageIdentifier.lastIndexOf(':'); newStorageIdentifier= newStorageIdentifier.substring(0,lastColon) + "/" + FileUtil.generateStorageIdentifier() + "//" +newStorageIdentifier.substring(lastColon+1); + logger.fine("out: " + newStorageIdentifier); } // ToDo - check that storageIdentifier is valid if (optionalFileParams.hasFileName()) { diff --git 
a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java index a058dfc070e..3ebc5f807ab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java @@ -94,6 +94,7 @@ public HTTPOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) t super(dvObject, req, driverId); this.setIsLocalFile(false); configureStores(req, driverId, null); + logger.fine("Parsing storageidentifier: " + dvObject.getStorageIdentifier()); // TODO: validate the storage location supplied urlPath = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); logger.fine("Base URL: " + urlPath); From e86c2d0fca0693614c3e8d90adf89dfd5f1dd1da Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Oct 2020 15:27:51 -0400 Subject: [PATCH 0014/1551] more logging --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 3fb2e7c2bc3..0a8adc31591 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1782,7 +1782,9 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, if (null == contentDispositionHeader) { if (optionalFileParams.hasStorageIdentifier()) { newStorageIdentifier = optionalFileParams.getStorageIdentifier(); + logger.fine("found: " + newStorageIdentifier); String driverType = DataAccess.getDriverType(newStorageIdentifier.substring(0, newStorageIdentifier.indexOf(":"))); + logger.fine("drivertype: " + driverType); if(driverType.equals("http")) { //Add a generated identifier for the aux files logger.fine("in: " + newStorageIdentifier); From 0062c681c124c40a016e775b8da6acb9646a9c43 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Oct 2020 15:59:00 -0400 Subject: [PATCH 0015/1551] fix storageidentifier parsing/updating --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 0a8adc31591..bd52ff1bece 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1788,8 +1788,8 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, if(driverType.equals("http")) { //Add a generated identifier for the aux files logger.fine("in: " + newStorageIdentifier); - int lastColon = newStorageIdentifier.lastIndexOf(':'); - newStorageIdentifier= newStorageIdentifier.substring(0,lastColon) + "/" + FileUtil.generateStorageIdentifier() + "//" +newStorageIdentifier.substring(lastColon+1); + int lastColon = newStorageIdentifier.lastIndexOf("://"); + newStorageIdentifier= newStorageIdentifier.substring(0,lastColon +3) + FileUtil.generateStorageIdentifier() + "//" +newStorageIdentifier.substring(lastColon+3); logger.fine("out: " + newStorageIdentifier); } // ToDo - check that storageIdentifier is valid From d6a5f65379ed6db78f896cddf0f3c88f5162a509 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Oct 2020 17:46:28 -0400 Subject: [PATCH 0016/1551] more info about errors handled by ThrowableHandler --- 
.../edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java b/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java index d3c6fd2df50..0f6be9c4dfa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java @@ -215,6 +215,7 @@ public JsonResponseBuilder log(Logger logger, Level level, Optional e metadata.deleteCharAt(metadata.length()-1); if (ex.isPresent()) { + ex.get().printStackTrace(); metadata.append("|"); logger.log(level, metadata.toString(), ex); } else { From d821b626a804aedd3bba4f1bea99539649fb4a48 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Oct 2020 17:46:54 -0400 Subject: [PATCH 0017/1551] fine debug to show size --- .../java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 5a5ab8cc86e..6b79a3079f4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -298,6 +298,7 @@ public List saveAndAddFilesToDataset(DatasetVersion version, List)dataAccess).removeTempTag(); From 1a8f0f12003322104b6130d12d2495afe5d6ae2c Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Oct 2020 17:47:08 -0400 Subject: [PATCH 0018/1551] actually instantiate an HttpClient ! --- .../dataaccess/HTTPOverlayAccessIO.java | 26 ++++++++++++++----- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java index 3ebc5f807ab..b3f095b7bda 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java @@ -37,7 +37,10 @@ import java.util.List; import java.util.Random; import java.util.logging.Logger; + + import org.apache.commons.io.IOUtils; +import org.apache.http.Header; import org.apache.http.client.config.CookieSpecs; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.CloseableHttpResponse; @@ -54,6 +57,7 @@ import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.apache.http.protocol.HTTP; import org.apache.http.ssl.SSLContextBuilder; import org.apache.http.util.EntityUtils; @@ -148,6 +152,7 @@ public void open(DataAccessOption... 
options) throws IOException { if (dataFile.getFilesize() >= 0) { this.setSize(dataFile.getFilesize()); } else { + logger.fine("Setting size"); this.setSize(getSizeFromHttpHeader()); } if (dataFile.getContentType() != null && dataFile.getContentType().equals("text/tab-separated-values") @@ -182,13 +187,18 @@ private long getSizeFromHttpHeader() { long size = -1; HttpHead head = new HttpHead(baseUrl + "/" + urlPath); try { - CloseableHttpResponse response = httpclient.execute(head, localContext); + CloseableHttpResponse response = getSharedHttpClient().execute(head, localContext); try { int code = response.getStatusLine().getStatusCode(); + logger.fine("Response for HEAD: " + code); switch (code) { case 200: - size = Long.parseLong(response.getHeaders("Content-Length")[0].getValue()); + Header[] headers =response.getHeaders(HTTP.CONTENT_LEN); + logger.fine("Num headers: " + headers.length); + String sizeString = response.getHeaders(HTTP.CONTENT_LEN )[0].getValue(); + logger.fine("Content-Length: " + sizeString); + size = Long.parseLong(response.getHeaders(HTTP.CONTENT_LEN )[0].getValue()); logger.fine("Found file size: " + size); break; default: @@ -208,7 +218,7 @@ public InputStream getInputStream() throws IOException { if (super.getInputStream() == null) { try { HttpGet get = new HttpGet(baseUrl + "/" + urlPath); - CloseableHttpResponse response = httpclient.execute(get, localContext); + CloseableHttpResponse response = getSharedHttpClient().execute(get, localContext); int code = response.getStatusLine().getStatusCode(); switch (code) { @@ -217,11 +227,12 @@ public InputStream getInputStream() throws IOException { break; default: logger.warning("Response from " + get.getURI().toString() + " was " + code); - throw new IOException("Cannot retrieve: " + baseUrl + "/" + urlPath); + throw new IOException("Cannot retrieve: " + baseUrl + "/" + urlPath + " code: " + code); } } catch (Exception e) { logger.warning(e.getMessage()); - throw new IOException("Error retrieving: " + baseUrl + "/" + urlPath); + e.printStackTrace(); + throw new IOException("Error retrieving: " + baseUrl + "/" + urlPath + " " + e.getMessage()); } setChannel(Channels.newChannel(super.getInputStream())); @@ -253,7 +264,7 @@ public void delete() throws IOException { } try { HttpDelete del = new HttpDelete(baseUrl + "/" + urlPath); - CloseableHttpResponse response = httpclient.execute(del, localContext); + CloseableHttpResponse response = getSharedHttpClient().execute(del, localContext); try { int code = response.getStatusLine().getStatusCode(); switch (code) { @@ -267,7 +278,7 @@ public void delete() throws IOException { } } catch (Exception e) { logger.warning(e.getMessage()); - throw new IOException("Error retrieving: " + baseUrl + "/" + urlPath); + throw new IOException("Error deleting: " + baseUrl + "/" + urlPath); } @@ -369,6 +380,7 @@ public Path getFileSystemPath() throws UnsupportedDataAccessOperationException { @Override public boolean exists() { + logger.fine("Exists called"); return (getSizeFromHttpHeader() != -1); } From ad86e4cdc68e3c518aac2f603439198bf192d304 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Oct 2020 18:59:20 -0400 Subject: [PATCH 0019/1551] algorithm fixes and logging --- .../iq/dataverse/util/UrlSignerUtil.java | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java index 3c91387f169..233b94ce007 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java @@ -45,14 +45,18 @@ public static String signUrl(String baseUrl, Integer timeout, String user, Strin validTime = validTime.plusMinutes(timeout); validTime.toString(); signedUrl.append(firstParam ? "?" : "&").append("until=").append(validTime); + firstParam=false; } if (user != null) { signedUrl.append(firstParam ? "?" : "&").append("user=").append(user); + firstParam=false; } if (method != null) { signedUrl.append(firstParam ? "?" : "&").append("method=").append(method); } - signedUrl.append("&token=").append(DigestUtils.sha512Hex(signedUrl.toString() + key)); + signedUrl.append("&token="); + logger.fine("String to sign: " + signedUrl.toString() + ""); + signedUrl.append(DigestUtils.sha512Hex(signedUrl.toString() + key)); logger.fine("Generated Signed URL: " + signedUrl.toString()); if (logger.isLoggable(Level.FINE)) { logger.fine( @@ -94,15 +98,19 @@ public static boolean isValidUrl(String signedUrl, String method, String user, S for (NameValuePair nvp : params) { if (nvp.getName().equals("token")) { hash = nvp.getValue(); + logger.fine("Hash: " + hash); } if (nvp.getName().equals("until")) { dateString = nvp.getValue(); + logger.fine("Until: " + dateString); } if (nvp.getName().equals("method")) { allowedMethod = nvp.getValue(); + logger.fine("Method: " + allowedMethod); } if (nvp.getName().equals("user")) { allowedUser = nvp.getValue(); + logger.fine("User: " + allowedUser); } } @@ -110,13 +118,15 @@ public static boolean isValidUrl(String signedUrl, String method, String user, S // Assuming the token is last - doesn't have to be, but no reason for the URL // params to be rearranged either, and this should only cause false negatives if // it does happen - String urlToHash = signedUrl.substring(0, index); + String urlToHash = signedUrl.substring(0, index + 7); + logger.fine("String to hash: " + urlToHash + ""); String newHash = DigestUtils.sha512Hex(urlToHash + key); - if (!hash.contentEquals(newHash)) { + logger.fine("Calculated Hash: " + newHash); + if (!hash.equals(newHash)) { logger.fine("Hash doesn't match"); valid = false; } - if (dateString != null && LocalDateTime.parse(dateString).isAfter(LocalDateTime.now())) { + if (dateString != null && LocalDateTime.parse(dateString).isBefore(LocalDateTime.now())) { logger.fine("Url is expired"); valid = false; } From 4a9f2098640b305dee37d17da5d84e331b9ec620 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 15 Oct 2020 09:34:51 -0400 Subject: [PATCH 0020/1551] log exception --- .../harvard/iq/dataverse/dataaccess/ImageThumbConverter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index ec18f23a5a0..01ee19bf2d0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -416,7 +416,7 @@ private static boolean isThumbnailCached(StorageIO storageIO, int size try { cached = storageIO.isAuxObjectCached(THUMBNAIL_SUFFIX + size); } catch (Exception ioex) { - logger.fine("caught Exception while checking for a cached thumbnail (file " + storageIO.getDataFile().getStorageIdentifier() + ")"); + logger.fine("caught Exception while checking for a cached thumbnail (file " + storageIO.getDataFile().getStorageIdentifier() + "): " + 
ioex.getMessage()); return false; } From b33958307e2726daa89e816bc5bccd7d341f52c4 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 15 Oct 2020 09:35:37 -0400 Subject: [PATCH 0021/1551] support auxPath for direct/overlay case --- .../edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java index f96f948f0a9..4ac28713ec8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java @@ -69,6 +69,7 @@ public FileAccessIO(T dvObject, DataAccessRequest req, String driverId ) { public FileAccessIO(String storageLocation, String driverId) { super(storageLocation, driverId); this.setIsLocalFile(true); + logger.fine("Storage path: " + storageLocation); physicalPath = Paths.get(storageLocation); } @@ -297,7 +298,10 @@ public Path getAuxObjectAsPath(String auxItemTag) throws IOException { if (auxItemTag == null || "".equals(auxItemTag)) { throw new IOException("Null or invalid Auxiliary Object Tag."); } - + if(isDirectAccess()) { + //Overlay case + return Paths.get(physicalPath.toString() + "." + auxItemTag); + } String datasetDirectory = getDatasetDirectory(); if (dvObject.getStorageIdentifier() == null || "".equals(dvObject.getStorageIdentifier())) { @@ -549,7 +553,7 @@ public FileOutputStream openLocalFileAsOutputStream () { } private String getDatasetDirectory() throws IOException { - if (dvObject == null) { + if (isDirectAccess()) { throw new IOException("No DvObject defined in the Data Access Object"); } From 5131e5edf9e3e8eb5e83b142793861942054ed8a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 15 Oct 2020 12:03:14 -0400 Subject: [PATCH 0022/1551] create dir when needed for aux --- .../edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java index 4ac28713ec8..91701418240 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java @@ -174,6 +174,10 @@ public void open (DataAccessOption... options) throws IOException { dataverse = this.getDataverse(); } else { logger.fine("Overlay case: FileAccessIO open for : " + physicalPath.toString()); + Path datasetPath= physicalPath.getParent(); + if (datasetPath != null && !Files.exists(datasetPath)) { + Files.createDirectories(datasetPath); + } //throw new IOException("Data Access: Invalid DvObject type"); } // This "status" is a leftover from 3.6; we don't have a use for it @@ -237,7 +241,7 @@ public Channel openAuxChannel(String auxItemTag, DataAccessOption... options) th Path auxPath = getAuxObjectAsPath(auxItemTag); if (isWriteAccessRequested(options)) { - if (dvObject instanceof Dataset && !this.canWrite()) { + if (((dvObject instanceof Dataset) || isDirectAccess()) && !this.canWrite()) { // If this is a dataset-level auxilary file (a cached metadata export, // dataset logo, etc.) 
there's a chance that no "real" files // have been saved for this dataset yet, and thus the filesystem From afa37ef03ffb42995782177c58bf3cbaaf37f780 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 15 Oct 2020 13:32:13 -0400 Subject: [PATCH 0023/1551] S3 flag to distinguish overlap and direct-upload cases --- .../dataaccess/HTTPOverlayAccessIO.java | 8 +- .../iq/dataverse/dataaccess/S3AccessIO.java | 78 +++++++++++-------- 2 files changed, 52 insertions(+), 34 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java index b3f095b7bda..6d218d1800c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java @@ -441,11 +441,12 @@ private void configureStores(DataAccessRequest req, String driverId, String stor if (baseStore == null) { String baseDriverId = System.getProperty("dataverse.files." + driverId + ".baseStore"); String fullStorageLocation = null; + String baseDriverType= System.getProperty("dataverse.files." + baseDriverId + ".type"); if (this.getDvObject() != null) { fullStorageLocation = getStorageLocation(); // S3 expects :/// - switch (System.getProperty("dataverse.files." + baseDriverId + ".type")) { + switch (baseDriverType) { case "s3": fullStorageLocation = baseDriverId + "://" + System.getProperty("dataverse.files." + baseDriverId + ".bucketName") + "/" @@ -467,7 +468,7 @@ private void configureStores(DataAccessRequest req, String driverId, String stor String storageId = storageLocation.substring(storageLocation.indexOf("://" + 3)); fullStorageLocation = storageId.substring(0, storageId.indexOf("//")); - switch (System.getProperty("dataverse.files." + baseDriverId + ".type")) { + switch (baseDriverType) { case "s3": fullStorageLocation = baseDriverId + "://" + System.getProperty("dataverse.files." + baseDriverId + ".bucketName") + "/" @@ -485,6 +486,9 @@ private void configureStores(DataAccessRequest req, String driverId, String stor } } baseStore = DataAccess.getDirectStorageIO(fullStorageLocation); + if(baseDriverType.contentEquals("s3")) { + ((S3AccessIO)baseStore).setMainDriver(false); + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 2b7b1b91ae2..672d9b11aa7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -76,6 +76,8 @@ public class S3AccessIO extends StorageIO { private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.S3AccessIO"); + + private boolean mainDriver = true; private static HashMap driverClientMap = new HashMap(); private static HashMap driverTMMap = new HashMap(); @@ -225,38 +227,41 @@ public void open(DataAccessOption... options) throws IOException { throw new IOException("Data Access: Storage driver does not support dvObject type Dataverse yet"); } else { - //ToDo - skip this for overlay case - - // Direct access, e.g. 
for external upload - no associated DVobject yet, but we want to be able to get the size - // With small files, it looks like we may call before S3 says it exists, so try some retries before failing - if(key!=null) { - ObjectMetadata objectMetadata = null; - int retries = 20; - while(retries > 0) { - try { - objectMetadata = s3.getObjectMetadata(bucketName, key); - if(retries != 20) { - logger.warning("Success for key: " + key + " after " + ((20-retries)*3) + " seconds"); - } - retries = 0; - } catch (SdkClientException sce) { - if(retries > 1) { - retries--; - try { - Thread.sleep(3000); - } catch (InterruptedException e) { - e.printStackTrace(); - } - logger.warning("Retrying after: " + sce.getMessage()); - } else { - throw new IOException("Cannot get S3 object " + key + " ("+sce.getMessage()+")"); - } - } - } - this.setSize(objectMetadata.getContentLength()); - }else { - throw new IOException("Data Access: Invalid DvObject type"); - } + if (isMainDriver()) { + // Direct access, e.g. for external upload - no associated DVobject yet, but we + // want to be able to get the size + // With small files, it looks like we may call before S3 says it exists, so try + // some retries before failing + if (key != null) { + ObjectMetadata objectMetadata = null; + int retries = 20; + while (retries > 0) { + try { + objectMetadata = s3.getObjectMetadata(bucketName, key); + if (retries != 20) { + logger.warning( + "Success for key: " + key + " after " + ((20 - retries) * 3) + " seconds"); + } + retries = 0; + } catch (SdkClientException sce) { + if (retries > 1) { + retries--; + try { + Thread.sleep(3000); + } catch (InterruptedException e) { + e.printStackTrace(); + } + logger.warning("Retrying after: " + sce.getMessage()); + } else { + throw new IOException("Cannot get S3 object " + key + " (" + sce.getMessage() + ")"); + } + } + } + this.setSize(objectMetadata.getContentLength()); + } else { + throw new IOException("Data Access: Invalid DvObject type"); + } + } } } @@ -425,6 +430,7 @@ public void delete() throws IOException { @Override public Channel openAuxChannel(String auxItemTag, DataAccessOption... options) throws IOException { if (isWriteAccessRequested(options)) { + //Need size to write to S3 throw new UnsupportedDataAccessOperationException("S3AccessIO: write mode openAuxChannel() not yet implemented in this storage driver."); } @@ -1171,4 +1177,12 @@ public static void completeMultipartUpload(String globalId, String storageIdenti s3Client.completeMultipartUpload(req); } + public boolean isMainDriver() { + return mainDriver; + } + + public void setMainDriver(boolean mainDriver) { + this.mainDriver = mainDriver; + } + } From 6aaabe23796f3ac11b60ef9cae5be5f590a4b76f Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 15 Oct 2020 14:30:08 -0400 Subject: [PATCH 0024/1551] fix s3 storagelocation --- .../harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java index 6d218d1800c..eb97acb21ea 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java @@ -449,7 +449,7 @@ private void configureStores(DataAccessRequest req, String driverId, String stor switch (baseDriverType) { case "s3": fullStorageLocation = baseDriverId + "://" - + System.getProperty("dataverse.files." 
+ baseDriverId + ".bucketName") + "/" + + System.getProperty("dataverse.files." + baseDriverId + ".bucketName") + ":" + fullStorageLocation; break; case "file": @@ -471,7 +471,7 @@ private void configureStores(DataAccessRequest req, String driverId, String stor switch (baseDriverType) { case "s3": fullStorageLocation = baseDriverId + "://" - + System.getProperty("dataverse.files." + baseDriverId + ".bucketName") + "/" + + System.getProperty("dataverse.files." + baseDriverId + ".bucketName") + ":" + fullStorageLocation; break; case "file": From bd37c2e93fa2e0c74bc94648b9aa40026a176a9a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 15 Oct 2020 14:36:29 -0400 Subject: [PATCH 0025/1551] Revert "fix s3 storagelocation" This reverts commit 6aaabe23796f3ac11b60ef9cae5be5f590a4b76f. --- .../harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java index eb97acb21ea..6d218d1800c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java @@ -449,7 +449,7 @@ private void configureStores(DataAccessRequest req, String driverId, String stor switch (baseDriverType) { case "s3": fullStorageLocation = baseDriverId + "://" - + System.getProperty("dataverse.files." + baseDriverId + ".bucketName") + ":" + + System.getProperty("dataverse.files." + baseDriverId + ".bucketName") + "/" + fullStorageLocation; break; case "file": @@ -471,7 +471,7 @@ private void configureStores(DataAccessRequest req, String driverId, String stor switch (baseDriverType) { case "s3": fullStorageLocation = baseDriverId + "://" - + System.getProperty("dataverse.files." + baseDriverId + ".bucketName") + ":" + + System.getProperty("dataverse.files." 
+ baseDriverId + ".bucketName") + "/" + fullStorageLocation; break; case "file": From 14a119612f65e8de0d2026d1ea25c5cc5dfee652 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 15 Oct 2020 14:48:44 -0400 Subject: [PATCH 0026/1551] fine logging --- .../java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 672d9b11aa7..adcc8ae95fa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -109,6 +109,7 @@ public S3AccessIO(T dvObject, DataAccessRequest req, String driverId) { public S3AccessIO(String storageLocation, String driverId) { this(null, null, driverId); // TODO: validate the storage location supplied + logger.fine("Instantiating with location: " + storageLocation); bucketName = storageLocation.substring(0,storageLocation.indexOf('/')); minPartSize = getMinPartSize(driverId); key = storageLocation.substring(storageLocation.indexOf('/')+1); From e47eed7a75717f8538a0061baa022442d7798836 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 15 Oct 2020 14:49:09 -0400 Subject: [PATCH 0027/1551] fix storagelocation issues --- .../iq/dataverse/dataaccess/HTTPOverlayAccessIO.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java index 6d218d1800c..79f7d6b23a7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/HTTPOverlayAccessIO.java @@ -359,7 +359,7 @@ public String getStorageLocation() throws IOException { String fullStorageLocation = dvObject.getStorageIdentifier(); logger.fine("storageidentifier: " + fullStorageLocation); fullStorageLocation = fullStorageLocation.substring(fullStorageLocation.lastIndexOf("://") + 3); - fullStorageLocation = fullStorageLocation.substring(0, fullStorageLocation.indexOf("//") + 2); + fullStorageLocation = fullStorageLocation.substring(0, fullStorageLocation.indexOf("//")); if (this.getDvObject() instanceof Dataset) { fullStorageLocation = this.getDataset().getAuthorityForFileStorage() + "/" + this.getDataset().getIdentifierForFileStorage() + "/" + fullStorageLocation; @@ -449,7 +449,7 @@ private void configureStores(DataAccessRequest req, String driverId, String stor switch (baseDriverType) { case "s3": fullStorageLocation = baseDriverId + "://" - + System.getProperty("dataverse.files." + baseDriverId + ".bucketName") + "/" + + System.getProperty("dataverse.files." + baseDriverId + ".bucket-name") + "/" + fullStorageLocation; break; case "file": @@ -471,7 +471,7 @@ private void configureStores(DataAccessRequest req, String driverId, String stor switch (baseDriverType) { case "s3": fullStorageLocation = baseDriverId + "://" - + System.getProperty("dataverse.files." + baseDriverId + ".bucketName") + "/" + + System.getProperty("dataverse.files." 
+ baseDriverId + ".bucket-name") + "/" + fullStorageLocation; break; case "file": From 230013bef5a341025146d3a9ccf046b8d6dd8d3d Mon Sep 17 00:00:00 2001 From: lubitchv Date: Mon, 19 Oct 2020 11:05:54 -0400 Subject: [PATCH 0028/1551] applied manually remove flyway script --- .../V5.0.0.1__6872-assign-storage-drivers-to-datasets.sql | 1 - 1 file changed, 1 deletion(-) delete mode 100644 src/main/resources/db/migration/V5.0.0.1__6872-assign-storage-drivers-to-datasets.sql diff --git a/src/main/resources/db/migration/V5.0.0.1__6872-assign-storage-drivers-to-datasets.sql b/src/main/resources/db/migration/V5.0.0.1__6872-assign-storage-drivers-to-datasets.sql deleted file mode 100644 index 453b2054c43..00000000000 --- a/src/main/resources/db/migration/V5.0.0.1__6872-assign-storage-drivers-to-datasets.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE dataset ADD COLUMN IF NOT EXISTS storagedriver VARCHAR(255); \ No newline at end of file From e1ad7d671bbf33e4d43c46c8525503de7ca55e09 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Mon, 19 Oct 2020 11:55:57 -0400 Subject: [PATCH 0029/1551] add logs for publishing --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 4ffd7d05d3f..85c95ef5d15 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -1862,11 +1862,12 @@ private String init(boolean initFull) { return permissionsWrapper.notFound(); } logger.fine("retrieved dataset, id="+dataset.getId()); - + logger.info("retrieved dataset, id="+dataset.getId()); retrieveDatasetVersionResponse = datasetVersionService.selectRequestedVersion(dataset.getVersions(), version); //retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionByPersistentId(persistentId, version); this.workingVersion = retrieveDatasetVersionResponse.getDatasetVersion(); logger.fine("retrieved version: id: " + workingVersion.getId() + ", state: " + this.workingVersion.getVersionState()); + logger.info("retrieved version: id: " + workingVersion.getId() + ", state: " + this.workingVersion.getVersionState()); } else if (this.getId() != null) { // Set Working Version and Dataset by Datasaet Id and Version From 5f754d15d5381eb305e884ffa6ab995cb1c0f50d Mon Sep 17 00:00:00 2001 From: lubitchv Date: Mon, 19 Oct 2020 13:06:29 -0400 Subject: [PATCH 0030/1551] Removr SiteMapUtilTest --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 3 +-- .../edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 85c95ef5d15..af3b60fca91 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -1862,12 +1862,11 @@ private String init(boolean initFull) { return permissionsWrapper.notFound(); } logger.fine("retrieved dataset, id="+dataset.getId()); - logger.info("retrieved dataset, id="+dataset.getId()); + retrieveDatasetVersionResponse = datasetVersionService.selectRequestedVersion(dataset.getVersions(), version); //retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionByPersistentId(persistentId, version); this.workingVersion = retrieveDatasetVersionResponse.getDatasetVersion(); 
logger.fine("retrieved version: id: " + workingVersion.getId() + ", state: " + this.workingVersion.getVersionState()); - logger.info("retrieved version: id: " + workingVersion.getId() + ", state: " + this.workingVersion.getVersionState()); } else if (this.getId() != null) { // Set Working Version and Dataset by Datasaet Id and Version diff --git a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java index cc691f0a3b5..09acb0e3bf1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java @@ -31,7 +31,7 @@ public class SiteMapUtilTest { @Test public void testUpdateSiteMap() throws IOException, ParseException { - List dataverses = new ArrayList<>(); + /* List dataverses = new ArrayList<>(); String publishedDvString = "publishedDv1"; Dataverse publishedDataverse = new Dataverse(); publishedDataverse.setAlias(publishedDvString); @@ -115,7 +115,7 @@ public void testUpdateSiteMap() throws IOException, ParseException { assertFalse(sitemapString.contains(deaccessionedPid)); System.clearProperty("com.sun.aas.instanceRoot"); - +*/ } } From f443c7328d2a574afeba58cddf9eb8884cfc7457 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Tue, 20 Oct 2020 13:00:05 -0400 Subject: [PATCH 0031/1551] MD5 checksum --- .../edu/harvard/iq/dataverse/globus/GlobusServiceBean.java | 3 ++- src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java | 6 +++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index e060a5de59b..23e4435e6f3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -755,7 +755,8 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th try { // We persist "SHA1" rather than "SHA-1". 
- datafile.setChecksumType(DataFile.ChecksumType.SHA1); + //datafile.setChecksumType(DataFile.ChecksumType.SHA1); + datafile.setChecksumType(DataFile.ChecksumType.MD5); datafile.setChecksumValue(checksumVal); } catch (Exception cksumEx) { logger.info("==== datasetId :" + dataset.getId() + "======Could not calculate checksumType signature for the new file "); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 33d1ec51da2..96006bdf735 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1735,7 +1735,7 @@ public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) { public static void validateDataFileChecksum(DataFile dataFile) throws IOException { String recalculatedChecksum = null; - if (dataFile.getContentType().equals(DataFileServiceBean.MIME_TYPE_GLOBUS_FILE)) { + /* if (dataFile.getContentType().equals(DataFileServiceBean.MIME_TYPE_GLOBUS_FILE)) { for (S3ObjectSummary s3ObjectSummary : dataFile.getStorageIO().listAuxObjects("")) { recalculatedChecksum = s3ObjectSummary.getETag(); if (!recalculatedChecksum.equals(dataFile.getChecksumValue())) { @@ -1744,7 +1744,7 @@ public static void validateDataFileChecksum(DataFile dataFile) throws IOExceptio throw new IOException(info); } } - } else { + } else {*/ DataFile.ChecksumType checksumType = dataFile.getChecksumType(); logger.info(checksumType.toString()); @@ -1838,7 +1838,7 @@ public static void validateDataFileChecksum(DataFile dataFile) throws IOExceptio throw new IOException(info); } } - } + //} logger.log(Level.INFO, "successfully validated DataFile {0}; checksum {1}", new Object[]{dataFile.getId(), recalculatedChecksum}); } From f799c7b18e70385289037c4331ea240c4804508c Mon Sep 17 00:00:00 2001 From: lubitchv Date: Wed, 21 Oct 2020 11:25:51 -0400 Subject: [PATCH 0032/1551] add back SiteMap test --- .../edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java index 09acb0e3bf1..cc691f0a3b5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java @@ -31,7 +31,7 @@ public class SiteMapUtilTest { @Test public void testUpdateSiteMap() throws IOException, ParseException { - /* List dataverses = new ArrayList<>(); + List dataverses = new ArrayList<>(); String publishedDvString = "publishedDv1"; Dataverse publishedDataverse = new Dataverse(); publishedDataverse.setAlias(publishedDvString); @@ -115,7 +115,7 @@ public void testUpdateSiteMap() throws IOException, ParseException { assertFalse(sitemapString.contains(deaccessionedPid)); System.clearProperty("com.sun.aas.instanceRoot"); -*/ + } } From 40de0afd18d427c30ebdd683cb771d10c4a38362 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Wed, 21 Oct 2020 15:58:16 -0400 Subject: [PATCH 0033/1551] downloadPopupRequired removed globus --- src/main/webapp/file-download-button-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index 9a8e535bcdd..d543723fe6b 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -223,7 +223,7 @@ 
#{bundle.download} - Date: Wed, 21 Oct 2020 16:13:03 -0400 Subject: [PATCH 0034/1551] downloadPopupRequired filelevel globus removed --- src/main/webapp/file-download-button-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index d543723fe6b..64b36fcf39e 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -60,7 +60,7 @@ #{bundle.download} - Date: Thu, 22 Oct 2020 12:05:01 -0400 Subject: [PATCH 0035/1551] New checksum test --- .../edu/harvard/iq/dataverse/globus/GlobusServiceBean.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 23e4435e6f3..15a43301c55 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -724,7 +724,9 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th if (t.indexOf(".") > 0) { long totalSize = s3ObjectSummary.getSize(); String filePath = s3ObjectKey; - String checksumVal = s3ObjectSummary.getETag(); + logger.info("File Path " + filePath); + String checksumVal = FileUtil.calculateChecksum(filePath, DataFile.ChecksumType.MD5); + //String checksumVal = s3ObjectSummary.getETag(); if ((checksumMapOld.get(checksumVal) != null)) { logger.info("datasetId :" + dataset.getId() + "======= filename ==== " + filePath + " == file already exists "); From 0905db577d9ddcc3468e44040c41311461a7b60c Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 22 Oct 2020 12:40:25 -0400 Subject: [PATCH 0036/1551] New checksum test 2 --- .../edu/harvard/iq/dataverse/globus/GlobusServiceBean.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 15a43301c55..e73b2cea7b3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -689,6 +689,7 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th StorageIO datasetSIO = DataAccess.getStorageIO(dataset); + DatasetVersion workingVersion = dataset.getEditVersion(); if (workingVersion.getCreateTime() != null) { @@ -724,8 +725,9 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th if (t.indexOf(".") > 0) { long totalSize = s3ObjectSummary.getSize(); String filePath = s3ObjectKey; - logger.info("File Path " + filePath); - String checksumVal = FileUtil.calculateChecksum(filePath, DataFile.ChecksumType.MD5); + String fullPath = dataset.getStorageIdentifier() + filePath; + logger.info("File Path " + fullPath); + String checksumVal = FileUtil.calculateChecksum(fullPath, DataFile.ChecksumType.MD5); //String checksumVal = s3ObjectSummary.getETag(); if ((checksumMapOld.get(checksumVal) != null)) { From 254c4b77c7d5ce1331bb3c70a1246522163aaf27 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 22 Oct 2020 12:57:17 -0400 Subject: [PATCH 0037/1551] Storage location test --- .../java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java | 1 + 1 file changed, 1 insertion(+) diff --git
a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index e73b2cea7b3..5b07bcb6616 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -725,6 +725,7 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th if (t.indexOf(".") > 0) { long totalSize = s3ObjectSummary.getSize(); String filePath = s3ObjectKey; + logger.info("Storage location " + datasetSIO.getStorageLocation()); String fullPath = dataset.getStorageIdentifier() + filePath; logger.info("File Path " + fullPath); String checksumVal = FileUtil.calculateChecksum(fullPath, DataFile.ChecksumType.MD5); From 718d0eb96f5163f3239d7cd95d66f9419c7ba679 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 22 Oct 2020 13:10:07 -0400 Subject: [PATCH 0038/1551] Storage location test 3 --- .../edu/harvard/iq/dataverse/globus/GlobusServiceBean.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 5b07bcb6616..dbd790ac3ad 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -726,7 +726,9 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th long totalSize = s3ObjectSummary.getSize(); String filePath = s3ObjectKey; logger.info("Storage location " + datasetSIO.getStorageLocation()); - String fullPath = dataset.getStorageIdentifier() + filePath; + String fileName = s3ObjectKey.substring(s3ObjectKey.lastIndexOf("/")); + logger.info("fileName " + fileName); + String fullPath = datasetSIO.getStorageLocation() + "/" + fileName; logger.info("File Path " + fullPath); String checksumVal = FileUtil.calculateChecksum(fullPath, DataFile.ChecksumType.MD5); //String checksumVal = s3ObjectSummary.getETag(); From 5418fb85b07c17bd59c6e25f7e72cd69cd5cb9a0 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 22 Oct 2020 13:16:21 -0400 Subject: [PATCH 0039/1551] Storage location test 4 --- .../edu/harvard/iq/dataverse/globus/GlobusServiceBean.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index dbd790ac3ad..6adab874601 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -726,7 +726,7 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th long totalSize = s3ObjectSummary.getSize(); String filePath = s3ObjectKey; logger.info("Storage location " + datasetSIO.getStorageLocation()); - String fileName = s3ObjectKey.substring(s3ObjectKey.lastIndexOf("/")); + String fileName = filePath.split("/")[filePath.split("/").length - 1]; logger.info("fileName " + fileName); String fullPath = datasetSIO.getStorageLocation() + "/" + fileName; logger.info("File Path " + fullPath); @@ -749,7 +749,7 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th FileMetadata fmd = new FileMetadata(); - String fileName = filePath.split("/")[filePath.split("/").length - 1]; + fmd.setLabel(fileName); fmd.setDirectoryLabel(filePath.replace(directory,
"").replace(File.separator + fileName, "")); From 25bedba4faba402836e802997559a33a4ee8f7bd Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 22 Oct 2020 14:24:35 -0400 Subject: [PATCH 0040/1551] s3 input stream test --- .../edu/harvard/iq/dataverse/globus/GlobusServiceBean.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 6adab874601..5ceff270eeb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.globus; +import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3ObjectSummary; import com.google.gson.FieldNamingPolicy; import com.google.gson.GsonBuilder; @@ -720,6 +721,7 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th String s3ObjectKey = s3ObjectSummary.getKey(); + String t = s3ObjectKey.replace(directory, ""); if (t.indexOf(".") > 0) { @@ -730,7 +732,10 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th logger.info("fileName " + fileName); String fullPath = datasetSIO.getStorageLocation() + "/" + fileName; logger.info("File Path " + fullPath); - String checksumVal = FileUtil.calculateChecksum(fullPath, DataFile.ChecksumType.MD5); + logger.info("Get storage class " + s3ObjectSummary.getStorageClass()); + InputStream in = datasetSIO.getAuxFileAsInputStream(s3ObjectSummary.getETag()); + + String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); //String checksumVal = s3ObjectSummary.getETag(); if ((checksumMapOld.get(checksumVal) != null)) { From 3c27aea78b08c4f57fd1f45a4d35204475c602b2 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 22 Oct 2020 14:39:34 -0400 Subject: [PATCH 0041/1551] test --- .../java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java | 1 + .../java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 0c4558edb30..0107de28d54 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -792,6 +792,7 @@ public OutputStream getOutputStream() throws UnsupportedDataAccessOperationExcep @Override public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException { String destinationKey = getDestinationKey(auxItemTag); + logger.info("Destination key " + destinationKey); try { S3Object s3object = s3.getObject(new GetObjectRequest(bucketName, destinationKey)); if (s3object != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 5ceff270eeb..27518e7f3d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -733,7 +733,7 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th String fullPath = datasetSIO.getStorageLocation() + "/" + fileName; logger.info("File Path " + fullPath); logger.info("Get storage class " + s3ObjectSummary.getStorageClass()); - InputStream in = 
datasetSIO.getAuxFileAsInputStream(s3ObjectSummary.getETag()); + InputStream in = datasetSIO.getAuxFileAsInputStream(filePath); String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); //String checksumVal = s3ObjectSummary.getETag(); From bef7e3cd8f0be1c5784eac48a68eac2a1ba6c2a8 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 22 Oct 2020 17:05:42 -0400 Subject: [PATCH 0042/1551] test --- .../iq/dataverse/dataaccess/S3AccessIO.java | 14 ++++++++++++++ .../iq/dataverse/globus/GlobusServiceBean.java | 5 ++--- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 0107de28d54..22ac0c86d07 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -805,6 +805,20 @@ public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException } } + public InputStream getFileAsInputStream(String destinationKey) throws IOException { + + try { + S3Object s3object = s3.getObject(new GetObjectRequest(bucketName, destinationKey)); + if (s3object != null) { + return s3object.getObjectContent(); + } + return null; + } catch (AmazonClientException ase) { + logger.fine("Caught an AmazonClientException in S3AccessIO.getAuxFileAsInputStream() (object not cached?): " + ase.getMessage()); + return null; + } + } + String getDestinationKey(String auxItemTag) throws IOException { if (isDirectAccess() || dvObject instanceof DataFile) { return getMainFileKey() + "." + auxItemTag; diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 27518e7f3d8..d4398e85b30 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -731,9 +731,8 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th String fileName = filePath.split("/")[filePath.split("/").length - 1]; logger.info("fileName " + fileName); String fullPath = datasetSIO.getStorageLocation() + "/" + fileName; - logger.info("File Path " + fullPath); - logger.info("Get storage class " + s3ObjectSummary.getStorageClass()); - InputStream in = datasetSIO.getAuxFileAsInputStream(filePath); + logger.info("Key " + s3ObjectKey); + InputStream in = datasetSIO.getAuxFileAsInputStream(s3ObjectKey); String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); //String checksumVal = s3ObjectSummary.getETag(); From 7752cdfa00dbd876b06afb46ecca9d377f876228 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 22 Oct 2020 17:16:51 -0400 Subject: [PATCH 0043/1551] test --- .../java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 22ac0c86d07..79d5a9ba84a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -808,7 +808,10 @@ public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException public InputStream getFileAsInputStream(String destinationKey) throws IOException { try { - S3Object s3object = s3.getObject(new GetObjectRequest(bucketName, 
destinationKey)); + GetObjectRequest o = new GetObjectRequest(bucketName, destinationKey; + logger.info("Bucket name " + o.getBucketName()); + S3Object s3object = s3.getObject(o); + logger.info("Key " + s3object.getKey()); if (s3object != null) { return s3object.getObjectContent(); } From 9cf09f7c2cd13295a2ded1e9fa270cb0037df4de Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 22 Oct 2020 17:18:16 -0400 Subject: [PATCH 0044/1551] test --- .../java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 79d5a9ba84a..b700e01b83d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -808,7 +808,7 @@ public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException public InputStream getFileAsInputStream(String destinationKey) throws IOException { try { - GetObjectRequest o = new GetObjectRequest(bucketName, destinationKey; + GetObjectRequest o = new GetObjectRequest(bucketName, destinationKey); logger.info("Bucket name " + o.getBucketName()); S3Object s3object = s3.getObject(o); logger.info("Key " + s3object.getKey()); From d6a7561acc9f23649be0bb8f91faf1cfa436fde1 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 22 Oct 2020 18:10:03 -0400 Subject: [PATCH 0045/1551] test --- .../iq/dataverse/dataaccess/S3AccessIO.java | 16 ---------------- .../iq/dataverse/globus/GlobusServiceBean.java | 5 ++++- 2 files changed, 4 insertions(+), 17 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index b700e01b83d..31f074d5c19 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -805,22 +805,6 @@ public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException } } - public InputStream getFileAsInputStream(String destinationKey) throws IOException { - - try { - GetObjectRequest o = new GetObjectRequest(bucketName, destinationKey); - logger.info("Bucket name " + o.getBucketName()); - S3Object s3object = s3.getObject(o); - logger.info("Key " + s3object.getKey()); - if (s3object != null) { - return s3object.getObjectContent(); - } - return null; - } catch (AmazonClientException ase) { - logger.fine("Caught an AmazonClientException in S3AccessIO.getAuxFileAsInputStream() (object not cached?): " + ase.getMessage()); - return null; - } - } String getDestinationKey(String auxItemTag) throws IOException { if (isDirectAccess() || dvObject instanceof DataFile) { diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index d4398e85b30..4971802307e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -691,6 +691,7 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th StorageIO datasetSIO = DataAccess.getStorageIO(dataset); + DatasetVersion workingVersion = dataset.getEditVersion(); if (workingVersion.getCreateTime() != null) { @@ -731,8 +732,10 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th String fileName = 
filePath.split("/")[filePath.split("/").length - 1]; logger.info("fileName " + fileName); String fullPath = datasetSIO.getStorageLocation() + "/" + fileName; + logger.info("Key " + s3ObjectKey); - InputStream in = datasetSIO.getAuxFileAsInputStream(s3ObjectKey); + StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); + InputStream in = dataFileStorageIO.getInputStream(); String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); //String checksumVal = s3ObjectSummary.getETag(); From 432f9cbba611e9fe6793212ecbed3145dc2ac016 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Fri, 23 Oct 2020 10:45:24 -0400 Subject: [PATCH 0046/1551] add logs --- .../java/edu/harvard/iq/dataverse/EditDatafilesPage.java | 1 + .../edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java | 2 -- .../edu/harvard/iq/dataverse/globus/GlobusServiceBean.java | 6 ++---- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index b28d5f2c471..a485ca125ca 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -3168,6 +3168,7 @@ public void startTaskList() throws MalformedURLException { } logger.info(httpString); + logger.info("Moving to Dataset page"); PrimeFaces.current().executeScript(httpString); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 31f074d5c19..0c4558edb30 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -792,7 +792,6 @@ public OutputStream getOutputStream() throws UnsupportedDataAccessOperationExcep @Override public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException { String destinationKey = getDestinationKey(auxItemTag); - logger.info("Destination key " + destinationKey); try { S3Object s3object = s3.getObject(new GetObjectRequest(bucketName, destinationKey)); if (s3object != null) { @@ -805,7 +804,6 @@ public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException } } - String getDestinationKey(String auxItemTag) throws IOException { if (isDirectAccess() || dvObject instanceof DataFile) { return getMainFileKey() + "." 
+ auxItemTag; diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 4971802307e..82b22e87020 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -728,18 +728,16 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th if (t.indexOf(".") > 0) { long totalSize = s3ObjectSummary.getSize(); String filePath = s3ObjectKey; - logger.info("Storage location " + datasetSIO.getStorageLocation()); String fileName = filePath.split("/")[filePath.split("/").length - 1]; - logger.info("fileName " + fileName); String fullPath = datasetSIO.getStorageLocation() + "/" + fileName; - logger.info("Key " + s3ObjectKey); + logger.info("Full path " + fullPath); StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); InputStream in = dataFileStorageIO.getInputStream(); String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); //String checksumVal = s3ObjectSummary.getETag(); - + logger.info("The checksum is " + checksumVal); if ((checksumMapOld.get(checksumVal) != null)) { logger.info("datasetId :" + dataset.getId() + "======= filename ==== " + filePath + " == file already exists "); } else if (filePath.contains("cached") || filePath.contains(".thumb")) { From 0591f7ff1c2c84b2e4fc7dbf4a5d150bcb919c76 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Fri, 23 Oct 2020 12:50:39 -0400 Subject: [PATCH 0047/1551] publishing globus not minor --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index af3b60fca91..ab7e553c7af 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2671,7 +2671,7 @@ private String releaseDataset(boolean minor) { boolean globus = checkForGlobus(); if ( result.isCompleted() ) { - if (globus) { + if (!minor && globus) { if (!globusService.giveGlobusPublicPermissions(dataset.getId().toString())) { JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.publishGlobusFailure.details")); } else { @@ -2681,7 +2681,7 @@ private String releaseDataset(boolean minor) { JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.publishSuccess")); } } else { - if (globus) { + if (!minor && globus) { globusService.giveGlobusPublicPermissions(dataset.getId().toString()); } JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.locked.message"), BundleUtil.getStringFromBundle("dataset.locked.message.details")); From e7e0742a1dacd383cd287ca82edabd69bff850a2 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Fri, 23 Oct 2020 13:37:38 -0400 Subject: [PATCH 0048/1551] add message --- src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index a485ca125ca..37eff2ea8a3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -3152,6 +3152,8 @@ public String getClientId() { public void startTaskList() throws MalformedURLException { + 
JH.addMessage(FacesMessage.SEVERITY_WARN, "Registering files in Dataset", + "In progress"); AuthenticatedUser user = (AuthenticatedUser) session.getUser(); globusServiceBean.globusFinishTransfer(dataset, user); HttpServletRequest origRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest(); From f16711856c30f9e67b1536b8a73cae576d561296 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Fri, 23 Oct 2020 14:07:59 -0400 Subject: [PATCH 0049/1551] remove message --- src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 37eff2ea8a3..5b73de0fbf4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -3151,9 +3151,7 @@ public String getClientId() { } public void startTaskList() throws MalformedURLException { - - JH.addMessage(FacesMessage.SEVERITY_WARN, "Registering files in Dataset", - "In progress"); + AuthenticatedUser user = (AuthenticatedUser) session.getUser(); globusServiceBean.globusFinishTransfer(dataset, user); HttpServletRequest origRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest(); From 28c7ba0dbdcc6a02cee676629b5790a870a132a3 Mon Sep 17 00:00:00 2001 From: chenganj Date: Thu, 26 Nov 2020 09:36:29 -0500 Subject: [PATCH 0050/1551] testing S3 url connection --- .../iq/dataverse/dataaccess/S3AccessIO.java | 22 ++++++++++++++----- 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 0c4558edb30..75d47fd0228 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -1106,12 +1106,22 @@ private static AmazonS3 getClient(String driverId) { String s3CERegion = System.getProperty("dataverse.files." + driverId + ".custom-endpoint-region", "dataverse"); // if the admin has set a system property (see below) we use this endpoint URL instead of the standard ones. 
- if (!s3CEUrl.isEmpty()) { - //s3CB.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(s3CEUrl, s3CERegion)); - BasicAWSCredentials creds = new BasicAWSCredentials("14e4f8b986874272894d527a16c06473", "f7b28fbec4984588b0da7d0288ce67f6"); - s3CB.withCredentials(new AWSStaticCredentialsProvider(creds)); - s3CB.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(s3CEUrl.trim(), s3CERegion.trim())); - } + if (!s3CEUrl.isEmpty()) { + logger.info("s3CEURL =============== " + s3CEUrl); + logger.info("s3CERegion =============== " + s3CERegion); + try { + s3CB.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(s3CEUrl, s3CERegion)); + logger.info(" ==================== Successfully connected ================== "); + } + catch(Exception e) { + logger.info(" ==================== Read the exception ================== "); + e.printStackTrace(); + BasicAWSCredentials creds = new BasicAWSCredentials("14e4f8b986874272894d527a16c06473", "f7b28fbec4984588b0da7d0288ce67f6"); + s3CB.withCredentials(new AWSStaticCredentialsProvider(creds)); + s3CB.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(s3CEUrl.trim(), s3CERegion.trim())); + logger.info(" ==================== Read the exception ================== "); + } + } /** * Pass in a boolean value if path style access should be used within the S3 client. * Anything but case-insensitive "true" will lead to value of false, which is default value, too. From f5bdbaf6bf838ae0cfd552a049e19e31e757f98e Mon Sep 17 00:00:00 2001 From: chenganj Date: Thu, 26 Nov 2020 10:26:22 -0500 Subject: [PATCH 0051/1551] testing S3 url connection --- .../java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 75d47fd0228..585ee18f978 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -1107,7 +1107,7 @@ private static AmazonS3 getClient(String driverId) { // if the admin has set a system property (see below) we use this endpoint URL instead of the standard ones. if (!s3CEUrl.isEmpty()) { - logger.info("s3CEURL =============== " + s3CEUrl); + logger.info("test s3CEURL =============== " + s3CEUrl); logger.info("s3CERegion =============== " + s3CERegion); try { s3CB.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(s3CEUrl, s3CERegion)); From 615c1ffebe8a9c072a928b92a60b7436d5eb0f68 Mon Sep 17 00:00:00 2001 From: chenganj Date: Thu, 26 Nov 2020 10:27:47 -0500 Subject: [PATCH 0052/1551] testing S3 url connection --- .../java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 585ee18f978..75d47fd0228 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -1107,7 +1107,7 @@ private static AmazonS3 getClient(String driverId) { // if the admin has set a system property (see below) we use this endpoint URL instead of the standard ones. 
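The endpoint experiments in this cluster of patches circle the stock AWS SDK v1 builder pattern. A minimal sketch of the working shape, assuming an S3-compatible service at a caller-supplied endpoint and credentials resolved by the SDK's default provider chain rather than hardcoded BasicAWSCredentials; the S3ClientSketch class is illustrative, not Dataverse code:

    import com.amazonaws.client.builder.AwsClientBuilder;
    import com.amazonaws.services.s3.AmazonS3;
    import com.amazonaws.services.s3.AmazonS3ClientBuilder;

    // Hypothetical factory: endpoint URL and region are placeholders
    // supplied by the caller, not Dataverse settings.
    public final class S3ClientSketch {
        public static AmazonS3 build(String endpointUrl, String region) {
            return AmazonS3ClientBuilder.standard()
                    .withEndpointConfiguration(
                            new AwsClientBuilder.EndpointConfiguration(endpointUrl, region))
                    .withPathStyleAccessEnabled(true) // many non-AWS stores need path-style URLs
                    .build();
        }
    }

Path-style access matters here because many non-AWS object stores do not support virtual-hosted-style bucket addressing.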
if (!s3CEUrl.isEmpty()) { - logger.info("test s3CEURL =============== " + s3CEUrl); + logger.info("s3CEURL =============== " + s3CEUrl); logger.info("s3CERegion =============== " + s3CERegion); try { s3CB.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(s3CEUrl, s3CERegion)); From 21174758ed3f7964599819d9a06570dc775f6e32 Mon Sep 17 00:00:00 2001 From: chenganj Date: Mon, 30 Nov 2020 16:21:18 -0500 Subject: [PATCH 0053/1551] DAT353 - removed hardcoded credential information --- .../edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 75d47fd0228..bf3365330ff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -1116,9 +1116,9 @@ private static AmazonS3 getClient(String driverId) { catch(Exception e) { logger.info(" ==================== Read the exception ================== "); e.printStackTrace(); - BasicAWSCredentials creds = new BasicAWSCredentials("14e4f8b986874272894d527a16c06473", "f7b28fbec4984588b0da7d0288ce67f6"); - s3CB.withCredentials(new AWSStaticCredentialsProvider(creds)); - s3CB.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(s3CEUrl.trim(), s3CERegion.trim())); + //BasicAWSCredentials creds = new BasicAWSCredentials("14e4f8b986874272894d527a16c06473", "f7b28fbec4984588b0da7d0288ce67f6"); + //s3CB.withCredentials(new AWSStaticCredentialsProvider(creds)); + //s3CB.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(s3CEUrl.trim(), s3CERegion.trim())); logger.info(" ==================== Read the exception ================== "); } } From fc2adb460495403794a648f89d85becb28ee494b Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 22 Dec 2020 10:54:01 -0500 Subject: [PATCH 0054/1551] GlobusAPI call refactored --- .../harvard/iq/dataverse/api/GlobusApi.java | 370 ++++++------------ .../dataverse/globus/GlobusServiceBean.java | 16 + 2 files changed, 145 insertions(+), 241 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java index ff5c3c6eb51..5eca9345b20 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.PermissionServiceBean; import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataaccess.DataAccess; @@ -16,20 +17,40 @@ import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; +import edu.harvard.iq.dataverse.globus.AccessToken; import edu.harvard.iq.dataverse.globus.GlobusServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; - +import edu.harvard.iq.dataverse.util.json.JsonParseException; +import org.apache.http.HttpEntity; +import 
org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.mime.MultipartEntityBuilder; +import org.apache.http.util.EntityUtils; +import org.glassfish.jersey.media.multipart.FormDataBodyPart; +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; +import org.glassfish.jersey.media.multipart.FormDataParam; +import org.json.JSONObject; import javax.ejb.EJB; import javax.ejb.EJBException; import javax.ejb.Stateless; import javax.inject.Inject; +import javax.json.Json; +import javax.json.JsonArray; +import javax.json.JsonObject; +import javax.json.JsonPatch; +import javax.json.stream.JsonParsingException; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.*; +import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.StringReader; import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.*; @@ -58,289 +79,156 @@ public class GlobusApi extends AbstractApiBean { @POST - @Path("{datasetId}") - public Response globus(@PathParam("datasetId") String datasetId ) { - - logger.info("Async:======Start Async Tasklist == dataset id :"+ datasetId ); - Dataset dataset = null; + @Path("{id}/add") + @Consumes(MediaType.MULTIPART_FORM_DATA) + public Response globus(@PathParam("id") String datasetId, + @FormDataParam("jsonData") String jsonData + ) { + + // ------------------------------------- + // (1) Get the user from the API key + // ------------------------------------- + User authUser; try { - dataset = findDatasetOrDie(datasetId); - + authUser = findUserOrDie(); } catch (WrappedResponse ex) { - return ex.getResponse(); - } - User apiTokenUser = checkAuth(dataset); - - if (apiTokenUser == null) { - return unauthorized("Access denied"); + return error(Response.Status.FORBIDDEN, + BundleUtil.getStringFromBundle("file.addreplace.error.auth") + ); } - try { - - - /* - String lockInfoMessage = "Globus upload in progress"; - DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload, apiTokenUser != null ? 
((AuthenticatedUser)apiTokenUser).getId() : null, lockInfoMessage); - if (lock != null) { - dataset.addLock(lock); - } else { - logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId()); - } - */ - - List fileMetadatas = new ArrayList<>(); - - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - - StorageIO datasetSIO = DataAccess.getStorageIO(dataset); - - - String task_id = null; - - String timeWhenAsyncStarted = sdf.format(new Date(System.currentTimeMillis() + (5 * 60 * 60 * 1000))); // added 5 hrs to match output from globus api - - String endDateTime = sdf.format(new Date(System.currentTimeMillis() + (4 * 60 * 60 * 1000))); // the tasklist will be monitored for 4 hrs - Calendar cal1 = Calendar.getInstance(); - cal1.setTime(sdf.parse(endDateTime)); - - - do { - try { - String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); - - task_id = globusServiceBean.getTaskList(basicGlobusToken, dataset.getIdentifierForFileStorage(), timeWhenAsyncStarted); - //Thread.sleep(10000); - String currentDateTime = sdf.format(new Date(System.currentTimeMillis())); - Calendar cal2 = Calendar.getInstance(); - cal2.setTime(sdf.parse(currentDateTime)); - - if (cal2.after(cal1)) { - logger.info("Async:======Time exceeded " + endDateTime + " ====== " + currentDateTime + " ==== datasetId :" + datasetId); - break; - } else if (task_id != null) { - break; - } - - } catch (Exception ex) { - ex.printStackTrace(); - logger.info(ex.getMessage()); - return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to get task id" ); - } - - } while (task_id == null); - - - logger.info("Async:======Found matching task id " + task_id + " ==== datasetId :" + datasetId); - - - DatasetVersion workingVersion = dataset.getEditVersion(); - - if (workingVersion.getCreateTime() != null) { - workingVersion.setCreateTime(new Timestamp(new Date().getTime())); - } - - - String directory = dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage(); - - System.out.println("Async:======= directory ==== " + directory+ " ==== datasetId :" + datasetId); - Map checksumMapOld = new HashMap<>(); - - Iterator fmIt = workingVersion.getFileMetadatas().iterator(); - - while (fmIt.hasNext()) { - FileMetadata fm = fmIt.next(); - if (fm.getDataFile() != null && fm.getDataFile().getId() != null) { - String chksum = fm.getDataFile().getChecksumValue(); - if (chksum != null) { - checksumMapOld.put(chksum, 1); - } - } - } - - List dFileList = new ArrayList<>(); - for (S3ObjectSummary s3ObjectSummary : datasetSIO.listAuxObjects("")) { - - String s3ObjectKey = s3ObjectSummary.getKey(); - - String t = s3ObjectKey.replace(directory, ""); - - if (t.indexOf(".") > 0) { - long totalSize = s3ObjectSummary.getSize(); - String filePath = s3ObjectKey; - String checksumVal = s3ObjectSummary.getETag(); - - if ((checksumMapOld.get(checksumVal) != null)) { - logger.info("Async: ==== datasetId :" + datasetId + "======= filename ==== " + filePath + " == file already exists "); - } else if (!filePath.contains("cached")) { + // ------------------------------------- + // (2) Get the User ApiToken + // ------------------------------------- + ApiToken token = authSvc.findApiTokenByUser((AuthenticatedUser)authUser); - logger.info("Async: ==== datasetId :" + datasetId + "======= filename ==== " + filePath + " == new file "); - try { + // ------------------------------------- + // (3) Get the Dataset Id + // ------------------------------------- + Dataset dataset; - 
DataFile datafile = new DataFile(DataFileServiceBean.MIME_TYPE_GLOBUS_FILE); //MIME_TYPE_GLOBUS - datafile.setModificationTime(new Timestamp(new Date().getTime())); - datafile.setCreateDate(new Timestamp(new Date().getTime())); - datafile.setPermissionModificationTime(new Timestamp(new Date().getTime())); + try { + dataset = findDatasetOrDie(datasetId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } - FileMetadata fmd = new FileMetadata(); + // ------------------------------------- + // (4) Parse JsonData + // ------------------------------------- - String fileName = filePath.split("/")[filePath.split("/").length - 1]; - fmd.setLabel(fileName); - fmd.setDirectoryLabel(filePath.replace(directory, "").replace(File.separator + fileName, "")); + String taskIdentifier = null; - fmd.setDataFile(datafile); + msgt("******* (api) jsonData: " + jsonData); - datafile.getFileMetadatas().add(fmd); + JsonObject jsonObject = null; + try (StringReader rdr = new StringReader(jsonData)) { + jsonObject = Json.createReader(rdr).readObject(); + } catch (Exception jpe) { + jpe.printStackTrace(); + logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}"); + } - FileUtil.generateS3PackageStorageIdentifier(datafile); - logger.info("Async: ==== datasetId :" + datasetId + "======= filename ==== " + filePath + " == added to datafile, filemetadata "); + // ------------------------------------- + // (5) Get taskIdentifier + // ------------------------------------- - try { - // We persist "SHA1" rather than "SHA-1". - datafile.setChecksumType(DataFile.ChecksumType.SHA1); - datafile.setChecksumValue(checksumVal); - } catch (Exception cksumEx) { - logger.info("Async: ==== datasetId :" + datasetId + "======Could not calculate checksumType signature for the new file "); - } - datafile.setFilesize(totalSize); + taskIdentifier = jsonObject.getString("taskIdentifier"); + msgt("******* (api) newTaskIdentifier: " + taskIdentifier); - dFileList.add(datafile); + // ------------------------------------- + // (6) Wait until task completion + // ------------------------------------- - } catch (Exception ioex) { - logger.info("Async: ==== datasetId :" + datasetId + "======Failed to process and/or save the file " + ioex.getMessage()); - return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to do task_list" ); + boolean success = false; - } - } - } + do { + try { + String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); + basicGlobusToken = "ODA0ODBhNzEtODA5ZC00ZTJhLWExNmQtY2JkMzA1NTk0ZDdhOmQvM3NFd1BVUGY0V20ra2hkSkF3NTZMWFJPaFZSTVhnRmR3TU5qM2Q3TjA9"; + msgt("******* (api) basicGlobusToken: " + basicGlobusToken); + AccessToken clientTokenUser = globusServiceBean.getClientToken(basicGlobusToken); + + success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskIdentifier ) ; + msgt("******* (api) success: " + success); + + } catch (Exception ex) { + ex.printStackTrace(); + logger.info(ex.getMessage()); + return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to get task id" ); } -/* - DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.GlobusUpload); - if (dcmLock == null) { - logger.info("Dataset not locked for DCM upload"); - } else { - datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.GlobusUpload); - dataset.removeLock(dcmLock); - } - logger.info(" ======= Remove Dataset Lock "); -*/ + } while (!success); - List filesAdded = new ArrayList<>(); + // ------------------------------------- + // (6) Parse files information from 
jsondata and add to dataset + // ------------------------------------- - if (dFileList != null && dFileList.size() > 0) { + try { + String directory = null; + StorageIO datasetSIO = DataAccess.getStorageIO(dataset); - // Dataset dataset = version.getDataset(); + directory = dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage(); - for (DataFile dataFile : dFileList) { + JsonArray filesJson = jsonObject.getJsonArray("files"); - if (dataFile.getOwner() == null) { - dataFile.setOwner(dataset); + if (filesJson != null) { + for (JsonObject fileJson : filesJson.getValuesAs(JsonObject.class)) { - workingVersion.getFileMetadatas().add(dataFile.getFileMetadata()); - dataFile.getFileMetadata().setDatasetVersion(workingVersion); - dataset.getFiles().add(dataFile); + for (S3ObjectSummary s3ObjectSummary : datasetSIO.listAuxObjects("")) { } - filesAdded.add(dataFile); + String storageIdentifier = fileJson.getString("storageIdentifier"); - } + String s = datasetSIO.getStorageLocation(); - logger.info("Async: ==== datasetId :" + datasetId + " ===== Done! Finished saving new files to the dataset."); - } - - fileMetadatas.clear(); - for (DataFile addedFile : filesAdded) { - fileMetadatas.add(addedFile.getFileMetadata()); - } - filesAdded = null; + String fullPath = s + "/" + storageIdentifier.replace("s3://", ""); - if (workingVersion.isDraft()) { + StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); + InputStream in = dataFileStorageIO.getInputStream(); - logger.info("Async: ==== datasetId :" + datasetId + " ==== inside draft version "); + String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); - Timestamp updateTime = new Timestamp(new Date().getTime()); + JsonPatch path = Json.createPatchBuilder().add("/md5Hash",checksumVal).build(); + fileJson = path.apply(fileJson); - workingVersion.setLastUpdateTime(updateTime); - dataset.setModificationTime(updateTime); + String requestUrl = httpRequest.getRequestURL().toString() ; - - for (FileMetadata fileMetadata : fileMetadatas) { - - if (fileMetadata.getDataFile().getCreateDate() == null) { - fileMetadata.getDataFile().setCreateDate(updateTime); - fileMetadata.getDataFile().setCreator((AuthenticatedUser) apiTokenUser); - } - fileMetadata.getDataFile().setModificationTime(updateTime); + ProcessBuilder processBuilder = new ProcessBuilder(); + String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST " + requestUrl.substring(0, requestUrl.indexOf("/globus")) + "/datasets/:persistentId/add?persistentId=doi:"+ directory + " -F jsonData='"+fileJson.toString() +"'"; + msgt("*******====command ==== " + command); + processBuilder.command("bash", "-c", command); + msgt("*******=== Start api/datasets/:persistentId/add call"); + Process process = processBuilder.start(); } - - - } else { - logger.info("Async: ==== datasetId :" + datasetId + " ==== inside released version "); - - for (int i = 0; i < workingVersion.getFileMetadatas().size(); i++) { - for (FileMetadata fileMetadata : fileMetadatas) { - if (fileMetadata.getDataFile().getStorageIdentifier() != null) { - - if (fileMetadata.getDataFile().getStorageIdentifier().equals(workingVersion.getFileMetadatas().get(i).getDataFile().getStorageIdentifier())) { - workingVersion.getFileMetadatas().set(i, fileMetadata); - } - } - } - } - - } - - try { - Command cmd; - logger.info("Async: ==== datasetId :" + datasetId + " ======= UpdateDatasetVersionCommand START in globus function "); - cmd = new 
UpdateDatasetVersionCommand(dataset,new DataverseRequest(apiTokenUser, (HttpServletRequest) null)); - ((UpdateDatasetVersionCommand) cmd).setValidateLenient(true); - //new DataverseRequest(authenticatedUser, (HttpServletRequest) null) - //dvRequestService.getDataverseRequest() - commandEngine.submit(cmd); - } catch (CommandException ex) { - logger.log(Level.WARNING, "Async: ==== datasetId :" + datasetId + "======CommandException updating DatasetVersion from batch job: " + ex.getMessage()); - return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to do task_list" ); - } - - logger.info("Async: ==== datasetId :" + datasetId + " ======= GLOBUS ASYNC CALL COMPLETED SUCCESSFULLY "); - - return ok("Async: ==== datasetId :" + datasetId + ": Finished task_list"); - } catch(Exception e) { + } catch (Exception e) { String message = e.getMessage(); - - logger.info("Async: ==== datasetId :" + datasetId + " ======= GLOBUS ASYNC CALL Exception ============== " + message); + msgt("******* UNsuccessfully completed " + message); + msgt("******* datasetId :" + dataset.getId() + " ======= GLOBUS CALL Exception ============== " + message); e.printStackTrace(); - return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to do task_list" ); - //return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. Message was '" + message + "'."); - } - + } + msgt("******* successfully completed " ); + return ok("Async: ==== datasetId :" + dataset.getId() + ": will add files to the table"); } - private User checkAuth(Dataset dataset) { - - User apiTokenUser = null; - - try { - apiTokenUser = findUserOrDie(); - } catch (WrappedResponse wr) { - apiTokenUser = null; - logger.log(Level.FINE, "Message from findUserOrDie(): {0}", wr.getMessage()); - } - - if (apiTokenUser != null) { - // used in an API context - if (!permissionService.requestOn(createDataverseRequest(apiTokenUser), dataset.getOwner()).has(Permission.EditDataset)) { - apiTokenUser = null; - } - } + private void msg(String m) { + //System.out.println(m); + logger.fine(m); + } - return apiTokenUser; + private void dashes() { + msg("----------------"); + } + private void msgt(String m) { + //dashes(); + msg(m); + //dashes(); } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 82b22e87020..25ea9735087 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -435,6 +435,22 @@ public boolean getSuccessfulTransfers(AccessToken clientTokenUser, String taskId return false; } + public boolean getSuccessfulTransfers(AccessToken clientTokenUser, String taskId ) throws MalformedURLException { + + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task/"+taskId+"/successful_transfers"); + + MakeRequestResponse result = makeRequest(url, "Bearer",clientTokenUser.getOtherTokens().get(0).getAccessToken(), + "GET", null); + + Transferlist transferlist = null; + + if (result.status == 200) { + logger.info(" SUCCESS ====== " ); + return true; + } + return false; + } + public AccessToken getClientToken(String basicGlobusToken) throws MalformedURLException { From 6eae5e4fec9be0435a91921881e0a64fab46dffd Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 15:12:32 -0500 Subject: [PATCH 0055/1551] implement batch 
processing of new versions to archive --- .../dataverse/DatasetVersionServiceBean.java | 26 ++++++++- .../edu/harvard/iq/dataverse/api/Admin.java | 57 +++++++++++++++++++ 2 files changed, 82 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index e4eb6aac88e..ea6a05a2c3c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -987,7 +987,7 @@ public List> getBasicDatasetVersionInfo(Dataset dataset) - public HashMap getFileMetadataHistory(DataFile df){ + public HashMap getFileMetadataHistory(DataFile df){ if (df == null){ throw new NullPointerException("DataFile 'df' cannot be null"); @@ -1165,4 +1165,28 @@ private DatasetVersion getPreviousVersionWithUnf(DatasetVersion datasetVersion) return null; } + /** + * Execute a query to return DatasetVersion + * + * @param queryString + * @return + */ + public List getUnarchivedDatasetVersions(){ + + String queryString = "select * from datasetversion where releasetime is not null and archivalcopylocation is null;"; + + try{ + TypedQuery query = em.createQuery(queryString, DatasetVersion.class); + List dsl = query.getResultList(); + return dsl; + + } catch (javax.persistence.NoResultException e) { + logger.log(Level.FINE, "No unarchived DatasetVersions found: {0}", queryString); + return null; + } catch (EJBException e) { + logger.log(Level.WARNING, "EJBException exception: {0}", e.getMessage()); + return null; + } + } // end getUnarchivedDatasetVersions + } // end class diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index b52665a7747..81fe1ecd2a9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1722,6 +1722,63 @@ public void run() { } } + + @GET + @Path("/archiveAllUnarchivedDataVersions") + public Response archiveAllUnarchivedDatasetVersions() { + + try { + AuthenticatedUser au = findAuthenticatedUserOrDie(); + // Note - the user is being set in the session so it becomes part of the + // DataverseRequest and is sent to the back-end command where it is used to get + // the API Token which is then used to retrieve files (e.g. 
via S3 direct + // downloads) to create the Bag + session.setUser(au); + List dsl = datasetversionService.getUnarchivedDatasetVersions(); + if (dsl != null) { + String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName); + AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dsl.get(0)); + + if (cmd != null) { + new Thread(new Runnable() { + public void run() { + int total = dsl.size(); + int successes = 0; + int failures = 0; + for (DatasetVersion dv : dsl) { + try { + AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv); + + dv = commandEngine.submit(cmd); + if (dv.getArchivalCopyLocation() != null) { + successes++; + logger.info("DatasetVersion id=" + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber() + " submitted to Archive at: " + + dv.getArchivalCopyLocation()); + } else { + failures++; + logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber()); + } + } catch (CommandException ex) { + logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex); + } + logger.fine(successes + failures + " of " + total + " archive submissions complete"); + } + logger.info("Archiving complete: " + successes + " Successes, " + failures + " Failures. See prior log messages for details."); + } + }).start(); + return ok("Archiving all unarchived published dataset versions using " + cmd.getClass().getCanonicalName() + ". Processing can take significant time for large datasets/ large numbers of dataset versions. View log and/or check archive for results."); + } else { + logger.log(Level.SEVERE, "Could not find Archiver class: " + className); + return error(Status.INTERNAL_SERVER_ERROR, "Could not find Archiver class: " + className); + } + } else { + return error(Status.BAD_REQUEST, "No unarchived published dataset versions found"); + } + } catch (WrappedResponse e1) { + return error(Status.UNAUTHORIZED, "api key required"); + } + } + @DELETE @Path("/clearMetricsCache") public Response clearMetricsCache() { From 8313404e6604daba3ee53d32d9b09e83ebaae9f2 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 15:26:19 -0500 Subject: [PATCH 0056/1551] add listonly and limit options, count commandEx as failure --- .../edu/harvard/iq/dataverse/api/Admin.java | 24 ++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 81fe1ecd2a9..3c61d2e8919 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -46,6 +46,7 @@ import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; import javax.ws.rs.DELETE; +import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; @@ -1723,9 +1724,16 @@ public void run() { } + /** + * Iteratively archives all unarchived dataset versions + * @param + * listonly - don't archive, just list unarchived versions + * limit - max number to process + * @return + */ @GET @Path("/archiveAllUnarchivedDataVersions") - public Response archiveAllUnarchivedDatasetVersions() { + public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") boolean listonly, 
@QueryParam("limit") Integer limit) { try { AuthenticatedUser au = findAuthenticatedUserOrDie(); @@ -1736,6 +1744,16 @@ public Response archiveAllUnarchivedDatasetVersions() { session.setUser(au); List dsl = datasetversionService.getUnarchivedDatasetVersions(); if (dsl != null) { + if (listonly) { + logger.info("Unarchived versions found: "); + int current = 0; + for (DatasetVersion dv : dsl) { + if (limit != null && current > limit) { + break; + } + logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); + } + } String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName); AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dsl.get(0)); @@ -1746,6 +1764,9 @@ public void run() { int successes = 0; int failures = 0; for (DatasetVersion dv : dsl) { + if (limit != null && (successes + failures) > limit) { + break; + } try { AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv); @@ -1759,6 +1780,7 @@ public void run() { logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber()); } } catch (CommandException ex) { + failures++; logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex); } logger.fine(successes + failures + " of " + total + " archive submissions complete"); From 70d923ae08b80d6248acc062ec836ed5812fa645 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 15:36:50 -0500 Subject: [PATCH 0057/1551] send list in response for listonly --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 3c61d2e8919..4fd3f43b127 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1745,14 +1745,17 @@ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") bool List dsl = datasetversionService.getUnarchivedDatasetVersions(); if (dsl != null) { if (listonly) { + JsonArrayBuilder jab = Json.createArrayBuilder(); logger.info("Unarchived versions found: "); int current = 0; for (DatasetVersion dv : dsl) { if (limit != null && current > limit) { break; } + jab.add(dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); } + return ok(jab); } String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName); AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dsl.get(0)); From 96d3723307c26668e5687f4ba61fb80d0d207a16 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 15:51:02 -0500 Subject: [PATCH 0058/1551] fix query --- .../edu/harvard/iq/dataverse/DatasetVersionServiceBean.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index ea6a05a2c3c..344f8af3b87 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -1173,10 +1173,10 @@ private DatasetVersion getPreviousVersionWithUnf(DatasetVersion datasetVersion) */ public List getUnarchivedDatasetVersions(){ - String queryString = "select * from datasetversion where releasetime is not null and archivalcopylocation is null;"; + String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releasetime IS NOT NULL and o.archivalcopylocation IS NULL"; try{ - TypedQuery query = em.createQuery(queryString, DatasetVersion.class); + TypedQuery query = em.createQuery(queryString, DatasetVersion.class); List dsl = query.getResultList(); return dsl; From cb9f374e6452cffa5069ef941a0a5f65a8248ca7 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 16:00:54 -0500 Subject: [PATCH 0059/1551] case sensitive in query --- .../edu/harvard/iq/dataverse/DatasetVersionServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 344f8af3b87..3f46a25c91e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -1173,7 +1173,7 @@ private DatasetVersion getPreviousVersionWithUnf(DatasetVersion datasetVersion) */ public List getUnarchivedDatasetVersions(){ - String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releasetime IS NOT NULL and o.archivalcopylocation IS NULL"; + String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releaseTime IS NOT NULL and o.archivalCopyLocation IS NULL"; try{ TypedQuery query = em.createQuery(queryString, DatasetVersion.class); From 76e23960219f7cdf0cde5bede1cf8fda55fddd9e Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 16:24:13 -0500 Subject: [PATCH 0060/1551] param to only archive latest version --- .../edu/harvard/iq/dataverse/api/Admin.java | 38 +++++++++++-------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 4fd3f43b127..e06289dfac8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1729,11 +1729,12 @@ public void run() { * @param * listonly - don't archive, just list unarchived versions * limit - max number to process + * latestonly - only archive the latest versions * @return */ @GET @Path("/archiveAllUnarchivedDataVersions") - public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit) { + public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit, @QueryParam("latestonly") boolean latestonly) { try { AuthenticatedUser au = findAuthenticatedUserOrDie(); @@ -1752,8 +1753,11 @@ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") bool if (limit != null && current > limit) { break; } - jab.add(dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); - logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); + if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) { + jab.add(dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); +
logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); + current++; + } } return ok(jab); } @@ -1770,21 +1774,23 @@ public void run() { if (limit != null && (successes + failures) > limit) { break; } - try { - AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv); - - dv = commandEngine.submit(cmd); - if (dv.getArchivalCopyLocation() != null) { - successes++; - logger.info("DatasetVersion id=" + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber() + " submitted to Archive at: " - + dv.getArchivalCopyLocation()); - } else { + if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) { + try { + AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv); + + dv = commandEngine.submit(cmd); + if (dv.getArchivalCopyLocation() != null) { + successes++; + logger.info("DatasetVersion id=" + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber() + " submitted to Archive at: " + + dv.getArchivalCopyLocation()); + } else { + failures++; + logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber()); + } + } catch (CommandException ex) { failures++; - logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber()); + logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex); } - } catch (CommandException ex) { - failures++; - logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex); } logger.fine(successes + failures + " of " + total + " archive submissions complete"); } From 2e8d990ad4b75719c2d8e6b35a0f3d104822f3c3 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 16:41:58 -0500 Subject: [PATCH 0061/1551] off by one in limit --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index e06289dfac8..9f819ff13a5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1750,7 +1750,7 @@ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") bool logger.info("Unarchived versions found: "); int current = 0; for (DatasetVersion dv : dsl) { - if (limit != null && current > limit) { + if (limit != null && current >= limit) { break; } if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) { @@ -1771,7 +1771,7 @@ public void run() { int successes = 0; int failures = 0; for (DatasetVersion dv : dsl) { - if (limit != null && (successes + failures) > limit) { + if (limit != null && (successes + failures) >= limit) { break; } if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) { From b7968333b5950f44bbf086ebc1d020ee4ca4535f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 23 Dec 2020 11:52:43 -0500 Subject: [PATCH 0062/1551] documentation --- doc/sphinx-guides/source/installation/config.rst | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 
4a877eabff7..5b9433d7c31 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -866,9 +866,9 @@ For example: ``cp /usr/local/payara5/glassfish/domains/domain1/files/googlecloudkey.json`` -.. _Archiving API Call: +.. _Archiving API Calls: -API Call +API Calls ++++++++ Once this configuration is complete, you, as a user with the *PublishDataset* permission, should be able to use the API call to manually submit a DatasetVersion for processing: @@ -881,6 +881,18 @@ where: ``{version}`` is the friendly version number, e.g. "1.2". +A batch API call is also available that will attempt to archive any currently unarchived dataset versions: + +``curl -H "X-Dataverse-key: " http://localhost:8080/api/admin/archiveAllUnarchivedDataVersions`` + +The call supports three optional query parameters that can be used in combination: + +``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any + +``latestonly={true/false}`` default is false. Using true only lists/processes the most recently published version of a given dataset (instead of all published versions) + +``limit={n}`` default is no limit/process all unarchived versions (subject to other parameters). Defines a maximum number of versions to attempt to archive in response to one invocation of the API call. + The submitDataVersionToArchive API (and the workflow discussed below) attempts to archive the dataset version via an archive-specific method. For Chronopolis, a DuraCloud space named for the dataset (its DOI with ':' and '.' replaced with '-') is created and two files are uploaded to it: a version-specific datacite.xml metadata file and a BagIt bag containing the data and an OAI-ORE map file. (The datacite.xml file, stored outside the Bag as well as inside, is intended to aid in discovery while the ORE map file is 'complete', containing all user-entered metadata and is intended as an archival record.) In the Chronopolis case, since the transfer from the DuraCloud front-end to archival storage in Chronopolis can take significant time, it is currently up to the admin/curator to submit a 'snap-shot' of the space within DuraCloud and to monitor its successful transfer. Once transfer is complete, the space should be deleted, at which point the Dataverse API call can be used to submit a Bag for other versions of the same Dataset. (The space is reused, so that archival copies of different Dataset versions correspond to different snapshots of the same DuraCloud space.).
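As a usage sketch for the batch call documented above (the endpoint and query parameters are the ones added in this hunk; the token variable and host are hypothetical placeholders), the three options can be combined in a single invocation that lists, without archiving, at most ten latest-version-only unarchived versions:

    curl -H "X-Dataverse-key: $API_TOKEN" "http://localhost:8080/api/admin/archiveAllUnarchivedDataVersions?listonly=true&latestonly=true&limit=10"

Quoting the URL keeps the shell from interpreting the & between query parameters.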
From d9eaeede17397089e2f8b5a81c1be8a0788c204c Mon Sep 17 00:00:00 2001 From: chenganj Date: Mon, 4 Jan 2021 14:06:17 -0500 Subject: [PATCH 0063/1551] DAT353 - removed hardcoded credential information --- src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java index 5eca9345b20..9ab66c27162 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java @@ -218,7 +218,7 @@ public Response globus(@PathParam("id") String datasetId, private void msg(String m) { //System.out.println(m); - logger.fine(m); + logger.info(m); } private void dashes() { From c89400db0103bea1d922e62a6dcdaba4e11352ad Mon Sep 17 00:00:00 2001 From: chenganj Date: Mon, 4 Jan 2021 15:08:29 -0500 Subject: [PATCH 0064/1551] correction to api/datasets/$id/add call --- src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java index 9ab66c27162..6eb83d2ce25 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java @@ -197,7 +197,8 @@ public Response globus(@PathParam("id") String datasetId, String requestUrl = httpRequest.getRequestURL().toString() ; ProcessBuilder processBuilder = new ProcessBuilder(); - String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST " + requestUrl.substring(0, requestUrl.indexOf("/globus")) + "/datasets/:persistentId/add?persistentId=doi:"+ directory + " -F jsonData='"+fileJson.toString() +"'"; + + String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST " + httpRequest.getProtocol() +"//" + httpRequest.getServerName() + "/api/datasets/:persistentId/add?persistentId=doi:"+ directory + " -F jsonData='"+fileJson.toString() +"'"; msgt("*******====command ==== " + command); processBuilder.command("bash", "-c", command); msgt("*******=== Start api/datasets/:persistentId/add call"); From dea2dad734ed2f6d5a1964fb2155ce8699e1b7b3 Mon Sep 17 00:00:00 2001 From: chenganj Date: Mon, 4 Jan 2021 15:28:44 -0500 Subject: [PATCH 0065/1551] correction to api/datasets/$id/add call --- src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java index 6eb83d2ce25..be05d5389f3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java @@ -198,7 +198,7 @@ public Response globus(@PathParam("id") String datasetId, ProcessBuilder processBuilder = new ProcessBuilder(); - String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST " + httpRequest.getProtocol() +"//" + httpRequest.getServerName() + "/api/datasets/:persistentId/add?persistentId=doi:"+ directory + " -F jsonData='"+fileJson.toString() +"'"; + String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST https://" + httpRequest.getServerName() + "/api/datasets/:persistentId/add?persistentId=doi:"+ directory + " -F jsonData='"+fileJson.toString() +"'"; msgt("*******====command ==== " + command); 
processBuilder.command("bash", "-c", command); msgt("*******=== Start api/datasets/:persistentId/add call"); From d9be3685d231cbe22ed575a4a0a93d3d1ba630ac Mon Sep 17 00:00:00 2001 From: chenganj Date: Mon, 4 Jan 2021 15:30:22 -0500 Subject: [PATCH 0066/1551] DAT353 - removed hardcoded credential information --- src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java index be05d5389f3..2e4f475ae90 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java @@ -214,7 +214,7 @@ public Response globus(@PathParam("id") String datasetId, } msgt("******* successfully completed " ); - return ok("Async: ==== datasetId :" + dataset.getId() + ": will add files to the table"); + return ok(" dataset Name :" + dataset.getDisplayName() + ": Files to this dataset will be added to the table and will display in the UI."); } private void msg(String m) { From 15362206545851a8252d0599442c6d53192eb8ac Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 5 Jan 2021 10:30:14 -0500 Subject: [PATCH 0067/1551] calculate mimeType --- .../harvard/iq/dataverse/api/GlobusApi.java | 21 ++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java index 2e4f475ae90..9d4384fd117 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java @@ -23,6 +23,7 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; +import org.apache.commons.lang.StringUtils; import org.apache.http.HttpEntity; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPost; @@ -189,9 +190,27 @@ public Response globus(@PathParam("id") String datasetId, StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); InputStream in = dataFileStorageIO.getInputStream(); + + String suppliedContentType = fileJson.getString("contentType"); + String fileName = fileJson.getString("fileName"); + // Default to suppliedContentType if set or the overall undetermined default if a contenttype isn't supplied + String finalType = StringUtils.isBlank(suppliedContentType) ? 
FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; + String type = FileUtil.determineFileTypeByExtension(fileName); + if (!StringUtils.isBlank(type)) { + //Use rules for deciding when to trust browser supplied type + if (FileUtil.useRecognizedType(finalType, type)) { + finalType = type; + } + logger.info("Supplied type: " + suppliedContentType + ", finalType: " + finalType); + } + + JsonPatch path = Json.createPatchBuilder().add("/mimeType",finalType).build(); + fileJson = path.apply(fileJson); + + + String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); - JsonPatch path = Json.createPatchBuilder().add("/md5Hash",checksumVal).build(); + path = Json.createPatchBuilder().add("/md5Hash",checksumVal).build(); fileJson = path.apply(fileJson); String requestUrl = httpRequest.getRequestURL().toString() ; From 99a58235f78b4f79ea1e14faa590fe651c7d5d0a Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 5 Jan 2021 10:30:40 -0500 Subject: [PATCH 0068/1551] changed method to public --- src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 96006bdf735..88c175db8f3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1133,7 +1133,7 @@ public static List createDataFiles(DatasetVersion version, InputStream } // end createDataFiles - private static boolean useRecognizedType(String suppliedContentType, String recognizedType) { + public static boolean useRecognizedType(String suppliedContentType, String recognizedType) { // is it any better than the type that was supplied to us, // if any? // This is not as trivial a task as one might expect...
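The two commits above compute a final content type and an MD5 checksum server-side and splice both into the incoming files JSON before it is forwarded to the native add-file API. A minimal, self-contained sketch of that javax.json patching step follows; the class name and all literal values are hypothetical, standing in for the FileUtil helpers used in the diff:

    import javax.json.Json;
    import javax.json.JsonObject;
    import javax.json.JsonPatch;

    public class FileJsonPatchSketch {
        public static void main(String[] args) {
            // One "files" entry as it might arrive in the jsonData form parameter.
            JsonObject fileJson = Json.createObjectBuilder()
                    .add("storageIdentifier", "s3://1932f8c-aa2e47")  // hypothetical value
                    .add("fileName", "data.csv")
                    .add("contentType", "text/csv")
                    .build();

            // JsonObjects are immutable, so JsonPatch.apply() returns a new, amended
            // object. The diff applies two single-operation patches in sequence; a
            // single two-operation patch, as here, is equivalent.
            JsonPatch patch = Json.createPatchBuilder()
                    .add("/mimeType", "text/csv")                        // finalType after the extension check
                    .add("/md5Hash", "9e107d9d372bb6826bd81d3542a419d6") // checksumVal (hypothetical)
                    .build();
            fileJson = patch.apply(fileJson);

            // The amended entry is what gets posted to /api/datasets/:persistentId/add.
            System.out.println(fileJson);
        }
    }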
From 73942b96bd4a78451d7c88895cdf2dc66e57f826 Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 5 Jan 2021 17:04:57 -0500 Subject: [PATCH 0069/1551] dataset lock issue while submitting multiple files to datasets/:persistentid/add api - Debugging --- .../harvard/iq/dataverse/api/GlobusApi.java | 48 +++++++++++++------ 1 file changed, 34 insertions(+), 14 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java index 9d4384fd117..c39f65fa497 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java @@ -101,7 +101,7 @@ public Response globus(@PathParam("id") String datasetId, // ------------------------------------- // (2) Get the User ApiToken // ------------------------------------- - ApiToken token = authSvc.findApiTokenByUser((AuthenticatedUser)authUser); + ApiToken token = authSvc.findApiTokenByUser((AuthenticatedUser) authUser); // ------------------------------------- // (3) Get the Dataset Id @@ -151,13 +151,13 @@ public Response globus(@PathParam("id") String datasetId, msgt("******* (api) basicGlobusToken: " + basicGlobusToken); AccessToken clientTokenUser = globusServiceBean.getClientToken(basicGlobusToken); - success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskIdentifier ) ; + success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskIdentifier); msgt("******* (api) success: " + success); } catch (Exception ex) { ex.printStackTrace(); logger.info(ex.getMessage()); - return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to get task id" ); + return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to get task id"); } } while (!success); @@ -204,38 +204,58 @@ public Response globus(@PathParam("id") String datasetId, logger.info("Supplied type: " + suppliedContentType + ", finalType: " + finalType); } - JsonPatch path = Json.createPatchBuilder().add("/mimeType",finalType).build(); + JsonPatch path = Json.createPatchBuilder().add("/mimeType", finalType).build(); fileJson = path.apply(fileJson); - String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); + String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); - path = Json.createPatchBuilder().add("/md5Hash",checksumVal).build(); + path = Json.createPatchBuilder().add("/md5Hash", checksumVal).build(); fileJson = path.apply(fileJson); - String requestUrl = httpRequest.getRequestURL().toString() ; + String requestUrl = httpRequest.getRequestURL().toString(); ProcessBuilder processBuilder = new ProcessBuilder(); - String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST https://" + httpRequest.getServerName() + "/api/datasets/:persistentId/add?persistentId=doi:"+ directory + " -F jsonData='"+fileJson.toString() +"'"; + String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST https://" + httpRequest.getServerName() + "/api/datasets/:persistentId/add?persistentId=doi:" + directory + " -F jsonData='" + fileJson.toString() + "'"; msgt("*******====command ==== " + command); - processBuilder.command("bash", "-c", command); + + + //processBuilder.command("bash", "-c", command); msgt("*******=== Start api/datasets/:persistentId/add call"); - Process process = processBuilder.start(); + //Process process = processBuilder.start(); + + + new Thread(new Runnable() { + public void run() { + try { + processBuilder.command("bash", "-c", command); + 
Process process = processBuilder.start(); + } catch (Exception ex) { + logger.log(Level.SEVERE, "******* Unexpected Exception while executing api/datasets/:persistentId/add call ", ex); + } + } + }).start(); + + } } + } catch (Exception e) { String message = e.getMessage(); - msgt("******* UNsuccessfully completed " + message); + msgt("******* Exception from globus API call " + message); msgt("******* datasetId :" + dataset.getId() + " ======= GLOBUS CALL Exception ============== " + message); e.printStackTrace(); - } + } + //msgt("******* successfully completed " ); + return ok(" dataset Name :" + dataset.getDisplayName() + ": Files to this dataset will be added to the table and will display in the UI. Processing can take significant time for large datasets."); + - msgt("******* successfully completed " ); - return ok(" dataset Name :" + dataset.getDisplayName() + ": Files to this dataset will be added to the table and will display in the UI."); } + + private void msg(String m) { //System.out.println(m); logger.info(m); From 006a4baff870ebd1c11c86caaacaf96511fadd0c Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 8 Jan 2021 12:28:55 -0500 Subject: [PATCH 0070/1551] Update doc/sphinx-guides/source/installation/config.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 5b9433d7c31..84ec0699d62 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -869,7 +869,7 @@ For example: .. _Archiving API Calls: API Calls -++++++++ ++++++++++ Once this configuration is complete, you, as a user with the *PublishDataset* permission, should be able to use the API call to manually submit a DatasetVersion for processing: From bba8ba0a13703410a9196713c6920150291d4643 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 8 Jan 2021 12:29:20 -0500 Subject: [PATCH 0071/1551] Update doc/sphinx-guides/source/installation/config.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 84ec0699d62..a997f0e353f 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -887,7 +887,7 @@ A batch API call is also available that will attempt to archive any currently un The call supports three optional query parameters that can be used in combination: -``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any +``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any. ``latestonly={true/false}`` default is false. 
Using true only lists/processes the most recently published version of a given dataset (instead of all published versions) From 011c97a4b73775cf152e0cf06127d8da9e8d2780 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 8 Jan 2021 12:29:46 -0500 Subject: [PATCH 0072/1551] Update doc/sphinx-guides/source/installation/config.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index a997f0e353f..67ee66af763 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -889,7 +889,7 @@ The call supports three optional query parameters that can be used in combinatio ``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any. -``latestonly={true/false}`` default is false. Using true only lists/processes the most recently published version of a given dataset (instead of all published versions) +``latestonly={true/false}`` default is false. Using true only lists/processes the most recently published version of a given dataset (instead of all published versions). ``limit={n}`` default is no limit/process all unarchived versions (subject to other parameters). Defines a maximum number of versions to attempt to archive in response to one invocation of the API call. From 1a1c28ccb7a6c0427f349cd8569c516bca43bf68 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 8 Jan 2021 13:10:22 -0500 Subject: [PATCH 0073/1551] updates per review --- .../dataverse/DatasetVersionServiceBean.java | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 3f46a25c91e..33cc236b902 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -986,8 +986,8 @@ public List> getBasicDatasetVersionInfo(Dataset dataset) } // end getBasicDatasetVersionInfo - - public HashMap getFileMetadataHistory(DataFile df){ + //Not used? 
+ public HashMap getFileMetadataHistory(DataFile df){ if (df == null){ throw new NullPointerException("DataFile 'df' cannot be null"); @@ -1175,18 +1175,18 @@ public List getUnarchivedDatasetVersions(){ String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releaseTime IS NOT NULL and o.archivalCopyLocation IS NULL"; - try{ + try { TypedQuery query = em.createQuery(queryString, DatasetVersion.class); List dsl = query.getResultList(); return dsl; - + } catch (javax.persistence.NoResultException e) { logger.log(Level.FINE, "No unarchived DatasetVersions found: {0}", queryString); return null; - } catch (EJBException e) { - logger.log(Level.WARNING, "EJBException exception: {0}", e.getMessage()); - return null; - } + } catch (EJBException e) { + logger.log(Level.WARNING, "EJBException exception: {0}", e.getMessage()); + return null; + } } // end getUnarchivedDatasetVersions - + } // end class From fca67ffa0da72255fc291cfb7e0ffbabad52f71e Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 12 Jan 2021 11:01:59 -0500 Subject: [PATCH 0074/1551] DAT353 - removed hardcoded credential information --- .../harvard/iq/dataverse/api/GlobusApi.java | 229 +++++++++++++----- 1 file changed, 165 insertions(+), 64 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java index c39f65fa497..f68498a502d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java @@ -13,21 +13,30 @@ import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper; +import edu.harvard.iq.dataverse.datasetutility.DataFileTagException; +import edu.harvard.iq.dataverse.datasetutility.NoFilesException; +import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.globus.AccessToken; import edu.harvard.iq.dataverse.globus.GlobusServiceBean; +import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; +import edu.harvard.iq.dataverse.util.json.JsonPrinter; import org.apache.commons.lang.StringUtils; import org.apache.http.HttpEntity; +import org.apache.http.HttpResponse; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.StringEntity; import org.apache.http.entity.mime.MultipartEntityBuilder; +import org.apache.http.entity.mime.content.ContentBody; import org.apache.http.util.EntityUtils; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; @@ -39,11 +48,10 @@ import javax.ejb.EJBException; import javax.ejb.Stateless; import javax.inject.Inject; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonObject; -import javax.json.JsonPatch; +import javax.json.*; import javax.json.stream.JsonParsingException; +import javax.persistence.NoResultException; +import 
javax.persistence.Query; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.*; import javax.ws.rs.core.MediaType; @@ -55,9 +63,16 @@ import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.*; +import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; + +import edu.harvard.iq.dataverse.api.Datasets; + +import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; +import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; + @Stateless @Path("globus") public class GlobusApi extends AbstractApiBean { @@ -75,6 +90,10 @@ public class GlobusApi extends AbstractApiBean { @EJB PermissionServiceBean permissionService; + @EJB + IngestServiceBean ingestService; + + @Inject DataverseRequestServiceBean dvRequestService; @@ -84,7 +103,9 @@ public class GlobusApi extends AbstractApiBean { @Consumes(MediaType.MULTIPART_FORM_DATA) public Response globus(@PathParam("id") String datasetId, @FormDataParam("jsonData") String jsonData - ) { + ) + { + JsonArrayBuilder jarr = Json.createArrayBuilder(); // ------------------------------------- // (1) Get the user from the API key @@ -99,12 +120,7 @@ public Response globus(@PathParam("id") String datasetId, } // ------------------------------------- - // (2) Get the User ApiToken - // ------------------------------------- - ApiToken token = authSvc.findApiTokenByUser((AuthenticatedUser) authUser); - - // ------------------------------------- - // (3) Get the Dataset Id + // (2) Get the Dataset Id // ------------------------------------- Dataset dataset; @@ -114,13 +130,14 @@ public Response globus(@PathParam("id") String datasetId, return wr.getResponse(); } + // ------------------------------------- - // (4) Parse JsonData + // (3) Parse JsonData // ------------------------------------- String taskIdentifier = null; - msgt("******* (api) jsonData: " + jsonData); + msgt("******* (api) jsonData 1: " + jsonData); JsonObject jsonObject = null; try (StringReader rdr = new StringReader(jsonData)) { @@ -131,7 +148,7 @@ public Response globus(@PathParam("id") String datasetId, } // ------------------------------------- - // (5) Get taskIdentifier + // (4) Get taskIdentifier // ------------------------------------- @@ -139,7 +156,7 @@ public Response globus(@PathParam("id") String datasetId, msgt("******* (api) newTaskIdentifier: " + taskIdentifier); // ------------------------------------- - // (6) Wait until task completion + // (5) Wait until task completion // ------------------------------------- boolean success = false; @@ -162,15 +179,25 @@ public Response globus(@PathParam("id") String datasetId, } while (!success); - // ------------------------------------- - // (6) Parse files information from jsondata and add to dataset - // ------------------------------------- - try { - String directory = null; + try + { StorageIO datasetSIO = DataAccess.getStorageIO(dataset); - directory = dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage(); + DataverseRequest dvRequest2 = createDataverseRequest(authUser); + AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2, + ingestService, + datasetService, + fileService, + permissionSvc, + commandEngine, + systemConfig); + + // ------------------------------------- + // (6) Parse files information from jsondata + // calculate checksum + // determine mimetype + // ------------------------------------- JsonArray filesJson = jsonObject.getJsonArray("files"); @@ -182,75 
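The `do { ... } while (!success)` wait in the hunk above polls `getSuccessfulTransfers` in a tight loop, with no delay between attempts and no upper bound, so a stalled Globus task would spin this request thread indefinitely. A sketch of a bounded variant, assuming the `globusServiceBean`, `logger`, and `AccessToken` shown in the diff; the interval and attempt limit are illustrative values, not taken from the patch:

    import java.util.concurrent.TimeUnit;
    import java.util.logging.Level;

    // Poll the Globus task, sleeping between attempts and giving up after
    // a fixed budget instead of spinning until the task succeeds.
    private boolean waitForTransfer(AccessToken clientTokenUser, String taskIdentifier)
            throws InterruptedException {
        final int maxAttempts = 120;              // illustrative: ~1 hour total
        for (int attempt = 0; attempt < maxAttempts; attempt++) {
            try {
                if (globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskIdentifier)) {
                    return true;
                }
            } catch (Exception ex) {
                logger.log(Level.WARNING, "Globus task status lookup failed", ex);
                return false;
            }
            TimeUnit.SECONDS.sleep(30);           // illustrative polling interval
        }
        return false;                             // budget exhausted; caller reports an error
    }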
+209,70 @@ public Response globus(@PathParam("id") String datasetId, } String storageIdentifier = fileJson.getString("storageIdentifier"); + String suppliedContentType = fileJson.getString("contentType"); + String fileName = fileJson.getString("fileName"); - String s = datasetSIO.getStorageLocation(); + String fullPath = datasetSIO.getStorageLocation() + "/" + storageIdentifier.replace("s3://", ""); - String fullPath = s + "/" + storageIdentifier.replace("s3://", ""); + String bucketName = System.getProperty("dataverse.files." + storageIdentifier.split(":")[0] + ".bucket-name"); - StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); - InputStream in = dataFileStorageIO.getInputStream(); + String dbstorageIdentifier = storageIdentifier.split(":")[0] + "://" + bucketName + ":" + storageIdentifier.replace("s3://", ""); + Query query = em.createQuery("select object(o) from DvObject as o where o.storageIdentifier = :storageIdentifier"); + query.setParameter("storageIdentifier", dbstorageIdentifier); - String suppliedContentType = fileJson.getString("contentType"); - String fileName = fileJson.getString("fileName"); - // Default to suppliedContentType if set or the overall undetermined default if a contenttype isn't supplied - String finalType = StringUtils.isBlank(suppliedContentType) ? FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; - String type = FileUtil.determineFileTypeByExtension(fileName); - if (!StringUtils.isBlank(type)) { - //Use rules for deciding when to trust browser supplied type - if (FileUtil.useRecognizedType(finalType, type)) { - finalType = type; - } - logger.info("Supplied type: " + suppliedContentType + ", finalType: " + finalType); - } + msgt("******* dbstorageIdentifier :" + dbstorageIdentifier + " ======= query.getResultList().size()============== " + query.getResultList().size()); - JsonPatch path = Json.createPatchBuilder().add("/mimeType", finalType).build(); - fileJson = path.apply(fileJson); + if (query.getResultList().size() > 0) { - String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); + JsonObjectBuilder fileoutput= Json.createObjectBuilder() + .add("storageIdentifier " , storageIdentifier) + .add("Result " , " The datatable is not updated since the Storage Identifier already exists in dvObject. "); - path = Json.createPatchBuilder().add("/md5Hash", checksumVal).build(); - fileJson = path.apply(fileJson); + jarr.add(fileoutput); + } else { - String requestUrl = httpRequest.getRequestURL().toString(); + // Default to suppliedContentType if set or the overall undetermined default if a contenttype isn't supplied + String finalType = StringUtils.isBlank(suppliedContentType) ? 
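The duplicate-identifier guard above loads every matching `DvObject` just to test `getResultList().size() > 0`. A count query expresses the same existence check without hydrating entities; a sketch assuming the entity and column names used in the hunk:

    import javax.persistence.EntityManager;

    // True when some DvObject already claims this storage identifier,
    // e.g. "s3://<bucket>:<object-key>" as assembled in the hunk above.
    private boolean storageIdentifierInUse(EntityManager em, String dbStorageIdentifier) {
        Long matches = em.createQuery(
                "SELECT COUNT(o) FROM DvObject o WHERE o.storageIdentifier = :storageIdentifier",
                Long.class)
                .setParameter("storageIdentifier", dbStorageIdentifier)
                .getSingleResult();
        return matches > 0;
    }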
FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; + String type = FileUtil.determineFileTypeByExtension(fileName); + if (!StringUtils.isBlank(type)) { + //Use rules for deciding when to trust browser supplied type + if (FileUtil.useRecognizedType(finalType, type)) { + finalType = type; + } + logger.info("Supplied type: " + suppliedContentType + ", finalType: " + finalType); + } - ProcessBuilder processBuilder = new ProcessBuilder(); + JsonPatch path = Json.createPatchBuilder().add("/mimeType", finalType).build(); + fileJson = path.apply(fileJson); - String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST https://" + httpRequest.getServerName() + "/api/datasets/:persistentId/add?persistentId=doi:" + directory + " -F jsonData='" + fileJson.toString() + "'"; - msgt("*******====command ==== " + command); + StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); + InputStream in = dataFileStorageIO.getInputStream(); + String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); + path = Json.createPatchBuilder().add("/md5Hash", checksumVal).build(); + fileJson = path.apply(fileJson); - //processBuilder.command("bash", "-c", command); - msgt("*******=== Start api/datasets/:persistentId/add call"); - //Process process = processBuilder.start(); + addGlobusFileToDataset(dataset, fileJson.toString(), addFileHelper, fileName, finalType, storageIdentifier); + JsonObject a1 = addFileHelper.getSuccessResultAsJsonObjectBuilder().build(); - new Thread(new Runnable() { - public void run() { - try { - processBuilder.command("bash", "-c", command); - Process process = processBuilder.start(); - } catch (Exception ex) { - logger.log(Level.SEVERE, "******* Unexpected Exception while executing api/datasets/:persistentId/add call ", ex); - } - } - }).start(); + JsonArray f1 = a1.getJsonArray("files"); + JsonObject file1 = f1.getJsonObject(0); + jarr.add(file1); + } } } - - } catch (Exception e) { String message = e.getMessage(); msgt("******* Exception from globus API call " + message); msgt("******* datasetId :" + dataset.getId() + " ======= GLOBUS CALL Exception ============== " + message); e.printStackTrace(); } - //msgt("******* successfully completed " ); - return ok(" dataset Name :" + dataset.getDisplayName() + ": Files to this dataset will be added to the table and will display in the UI. 
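The mime-type and checksum handling above works by applying `javax.json` patches to the caller-supplied file object, since `JsonObject` is immutable. A self-contained illustration of that mechanism, with placeholder values:

    import javax.json.Json;
    import javax.json.JsonObject;
    import javax.json.JsonPatch;

    public class JsonPatchDemo {
        public static void main(String[] args) {
            JsonObject fileJson = Json.createObjectBuilder()
                    .add("storageIdentifier", "s3://176ce6992af-208dea3661bb50")
                    .add("fileName", "data.csv")
                    .build();
            // RFC 6902 "add" creates the member when absent and replaces it
            // when present; apply() returns a new object, so reassign.
            JsonPatch patch = Json.createPatchBuilder()
                    .add("/mimeType", "text/csv")
                    .build();
            fileJson = patch.apply(fileJson);
            System.out.println(fileJson); // now carries "mimeType":"text/csv"
        }
    }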
Processing can take significant time for large datasets."); - + return ok(Json.createObjectBuilder().add("Files", jarr)); } @@ -271,4 +293,83 @@ private void msgt(String m) { //dashes(); } + public Response addGlobusFileToDataset( Dataset dataset, + String jsonData, AddReplaceFileHelper addFileHelper,String fileName, + String finalType, + String storageIdentifier + ){ + + + if (!systemConfig.isHTTPUpload()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); + } + + + //------------------------------------ + // (1) Make sure dataset does not have package file + // -------------------------------------- + + for (DatasetVersion dv : dataset.getVersions()) { + if (dv.isHasPackageFile()) { + return error(Response.Status.FORBIDDEN, + BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") + ); + } + } + + //--------------------------------------- + // (2) Load up optional params via JSON + //--------------------------------------- + + OptionalFileParams optionalFileParams = null; + msgt("(api) jsonData 2: " + jsonData); + + try { + optionalFileParams = new OptionalFileParams(jsonData); + } catch (DataFileTagException ex) { + return error( Response.Status.BAD_REQUEST, ex.getMessage()); + } + + + //------------------- + // (3) Create the AddReplaceFileHelper object + //------------------- + msg("ADD!"); + + //------------------- + // (4) Run "runAddFileByDatasetId" + //------------------- + addFileHelper.runAddFileByDataset(dataset, + fileName, + finalType, + storageIdentifier, + null, + optionalFileParams); + + + if (addFileHelper.hasError()){ + return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n")); + }else{ + String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add"); + try { + //msgt("as String: " + addFileHelper.getSuccessResult()); + + logger.fine("successMsg: " + successMsg); + String duplicateWarning = addFileHelper.getDuplicateFileWarning(); + if (duplicateWarning != null && !duplicateWarning.isEmpty()) { + return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder()); + } else { + return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder()); + } + + //"Look at that! You added a file! (hey hey, it may have worked)"); + } catch (NoFilesException ex) { + Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); + return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!"); + + } + } + + } // end: addFileToDataset + } From 073d97e0cfc72301e9df2077f7832217ef4daaa7 Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 12 Jan 2021 13:15:07 -0500 Subject: [PATCH 0075/1551] restructured the API response object --- .../harvard/iq/dataverse/api/GlobusApi.java | 100 ++++++++++++++++-- 1 file changed, 93 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java index f68498a502d..078da050f28 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java @@ -228,10 +228,10 @@ public Response globus(@PathParam("id") String datasetId, JsonObjectBuilder fileoutput= Json.createObjectBuilder() .add("storageIdentifier " , storageIdentifier) - .add("Result " , " The datatable is not updated since the Storage Identifier already exists in dvObject. 
"); + .add("message " , " The datatable is not updated since the Storage Identifier already exists in dvObject. "); jarr.add(fileoutput); - } else { + } else { // Default to suppliedContentType if set or the overall undetermined default if a contenttype isn't supplied String finalType = StringUtils.isBlank(suppliedContentType) ? FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; @@ -254,15 +254,99 @@ public Response globus(@PathParam("id") String datasetId, path = Json.createPatchBuilder().add("/md5Hash", checksumVal).build(); fileJson = path.apply(fileJson); - addGlobusFileToDataset(dataset, fileJson.toString(), addFileHelper, fileName, finalType, storageIdentifier); + //addGlobusFileToDataset(dataset, fileJson.toString(), addFileHelper, fileName, finalType, storageIdentifier); - JsonObject a1 = addFileHelper.getSuccessResultAsJsonObjectBuilder().build(); - JsonArray f1 = a1.getJsonArray("files"); - JsonObject file1 = f1.getJsonObject(0); + if (!systemConfig.isHTTPUpload()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); + } + + + //------------------------------------ + // (1) Make sure dataset does not have package file + // -------------------------------------- + + for (DatasetVersion dv : dataset.getVersions()) { + if (dv.isHasPackageFile()) { + return error(Response.Status.FORBIDDEN, + BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") + ); + } + } + + //--------------------------------------- + // (2) Load up optional params via JSON + //--------------------------------------- + + OptionalFileParams optionalFileParams = null; + msgt("(api) jsonData 2: " + fileJson.toString()); + + try { + optionalFileParams = new OptionalFileParams(fileJson.toString()); + } catch (DataFileTagException ex) { + return error( Response.Status.BAD_REQUEST, ex.getMessage()); + } + + + //------------------- + // (3) Create the AddReplaceFileHelper object + //------------------- + msg("ADD!"); + + //------------------- + // (4) Run "runAddFileByDatasetId" + //------------------- + addFileHelper.runAddFileByDataset(dataset, + fileName, + finalType, + storageIdentifier, + null, + optionalFileParams); - jarr.add(file1); + if (addFileHelper.hasError()){ + + JsonObjectBuilder fileoutput= Json.createObjectBuilder() + .add("storageIdentifier " , storageIdentifier) + .add("error Code: " ,addFileHelper.getHttpErrorCode().toString()) + .add("message " , addFileHelper.getErrorMessagesAsString("\n")); + + jarr.add(fileoutput); + + }else{ + String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add"); + + JsonObject a1 = addFileHelper.getSuccessResultAsJsonObjectBuilder().build(); + + JsonArray f1 = a1.getJsonArray("files"); + JsonObject file1 = f1.getJsonObject(0); + + try { + //msgt("as String: " + addFileHelper.getSuccessResult()); + + logger.fine("successMsg: " + successMsg); + String duplicateWarning = addFileHelper.getDuplicateFileWarning(); + if (duplicateWarning != null && !duplicateWarning.isEmpty()) { + // return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder()); + JsonObjectBuilder fileoutput= Json.createObjectBuilder() + .add("storageIdentifier " , storageIdentifier) + .add("warning message: " ,addFileHelper.getDuplicateFileWarning()) + .add("message " , file1); + jarr.add(fileoutput); + + } else { + JsonObjectBuilder fileoutput= Json.createObjectBuilder() + .add("storageIdentifier " , storageIdentifier) + .add("message " , file1); + 
jarr.add(fileoutput); + } + + //"Look at that! You added a file! (hey hey, it may have worked)"); + } catch (Exception ex) { + Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); + return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!"); + } + } } } } @@ -370,6 +454,8 @@ public Response addGlobusFileToDataset( Dataset dataset, } } + + } // end: addFileToDataset } From b84587bf01ec7ccd08e0a9b0ede0b2c881702cd9 Mon Sep 17 00:00:00 2001 From: chenganj Date: Mon, 18 Jan 2021 11:47:51 -0500 Subject: [PATCH 0076/1551] moved the globus api into Datasets.java --- .../harvard/iq/dataverse/api/Datasets.java | 291 +++++++++++++++++- 1 file changed, 287 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 655cdafe04c..25c80f48e47 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -25,6 +25,9 @@ import edu.harvard.iq.dataverse.UserNotification; import edu.harvard.iq.dataverse.UserNotificationServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; +import edu.harvard.iq.dataverse.globus.AccessToken; +import edu.harvard.iq.dataverse.globus.GlobusServiceBean; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.RoleAssignee; @@ -107,6 +110,7 @@ import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; +import com.amazonaws.services.s3.model.S3ObjectSummary; import java.io.IOException; import java.io.InputStream; import java.io.StringReader; @@ -132,6 +136,7 @@ import javax.json.JsonObject; import javax.json.JsonObjectBuilder; import javax.json.JsonReader; +import javax.json.JsonPatch; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.Consumes; @@ -157,6 +162,8 @@ import com.amazonaws.services.s3.model.PartETag; import java.util.Map.Entry; +import javax.persistence.Query; +import org.apache.commons.lang.StringUtils; @Path("datasets") public class Datasets extends AbstractApiBean { @@ -170,6 +177,9 @@ public class Datasets extends AbstractApiBean { @EJB DataverseServiceBean dataverseService; + + @EJB + GlobusServiceBean globusServiceBean; @EJB UserNotificationServiceBean userNotificationService; @@ -1727,16 +1737,20 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, // ------------------------------------- // (1) Get the user from the API key // ------------------------------------- + + msgt("**** BEFORE STEP 1 " ); User authUser; try { authUser = findUserOrDie(); + msgt("**** IN STEP 1 : " + authUser.getIdentifier() + " : "); } catch (WrappedResponse ex) { return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth") ); } - - + + msgt("**** AFTER STEP 1 " ); + msgt("**** BEFORE STEP 2 " ); // ------------------------------------- // (2) Get the Dataset Id // @@ -1748,7 +1762,7 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, } catch (WrappedResponse wr) { return wr.getResponse(); } - + msgt("**** AFTER STEP 2 " ); //------------------------------------ // (2a) Make 
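One detail of the response objects built above: several JSON keys carry trailing spaces or embedded colons (`"storageIdentifier "`, `"error Code: "`, `"warning message: "`), so clients must look the values up with those exact strings, whitespace included. A sketch of one per-file entry with conventional keys; the key names here are a suggestion, not what the patch emits:

    import javax.json.Json;
    import javax.json.JsonObject;

    // One entry of the "Files" array in the endpoint's response.
    static JsonObject fileResult(String storageIdentifier, String message) {
        return Json.createObjectBuilder()
                .add("storageIdentifier", storageIdentifier) // no trailing space
                .add("message", message)
                .build();
    }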
sure dataset does not have package file // @@ -1857,7 +1871,6 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, } // end: addFileToDataset - private void msg(String m){ //System.out.println(m); logger.fine(m); @@ -1872,6 +1885,9 @@ private void msgt(String m){ public static T handleVersion( String versionId, DsVersionHandler hdl ) throws WrappedResponse { + + logger.info("**** DEBUG handleVersion " ); + switch (versionId) { case ":latest": return hdl.handleLatest(); case ":draft": return hdl.handleDraft(); @@ -1894,6 +1910,8 @@ public static T handleVersion( String versionId, DsVersionHandler hdl ) } private DatasetVersion getDatasetVersionOrDie( final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers) throws WrappedResponse { + logger.info("**** DEBUG getDatasetVersionOrDie " ); + DatasetVersion dsv = execCommand( handleVersion(versionNumber, new DsVersionHandler>(){ @Override @@ -2287,5 +2305,270 @@ public Response resetFileStore(@PathParam("identifier") String dvIdtf, datasetService.merge(dataset); return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); } + + + + @POST + @Path("{id}/addglobusFiles") + @Consumes(MediaType.MULTIPART_FORM_DATA) + public Response globus(@PathParam("id") String datasetId, + @FormDataParam("jsonData") String jsonData + ) + { + JsonArrayBuilder jarr = Json.createArrayBuilder(); + + // ------------------------------------- + // (1) Get the user from the API key + // ------------------------------------- + User authUser; + try { + authUser = findUserOrDie(); + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, + BundleUtil.getStringFromBundle("file.addreplace.error.auth") + ); + } + + // ------------------------------------- + // (2) Get the Dataset Id + // ------------------------------------- + Dataset dataset; + + try { + dataset = findDatasetOrDie(datasetId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + + // ------------------------------------- + // (3) Parse JsonData + // ------------------------------------- + + String taskIdentifier = null; + + msgt("******* (api) jsonData 1: " + jsonData); + + JsonObject jsonObject = null; + try (StringReader rdr = new StringReader(jsonData)) { + jsonObject = Json.createReader(rdr).readObject(); + } catch (Exception jpe) { + jpe.printStackTrace(); + logger.log(Level.SEVERE, "Error parsing dataset json. 
Json: {0}"); + } + + // ------------------------------------- + // (4) Get taskIdentifier + // ------------------------------------- + + + taskIdentifier = jsonObject.getString("taskIdentifier"); + msgt("******* (api) newTaskIdentifier: " + taskIdentifier); + + // ------------------------------------- + // (5) Wait until task completion + // ------------------------------------- + + boolean success = false; + + do { + try { + String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); + basicGlobusToken = "ODA0ODBhNzEtODA5ZC00ZTJhLWExNmQtY2JkMzA1NTk0ZDdhOmQvM3NFd1BVUGY0V20ra2hkSkF3NTZMWFJPaFZSTVhnRmR3TU5qM2Q3TjA9"; + msgt("******* (api) basicGlobusToken: " + basicGlobusToken); + AccessToken clientTokenUser = globusServiceBean.getClientToken(basicGlobusToken); + + success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskIdentifier); + msgt("******* (api) success: " + success); + + } catch (Exception ex) { + ex.printStackTrace(); + logger.info(ex.getMessage()); + return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to get task id"); + } + + } while (!success); + + + try + { + StorageIO datasetSIO = DataAccess.getStorageIO(dataset); + + DataverseRequest dvRequest2 = createDataverseRequest(authUser); + AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2, + ingestService, + datasetService, + fileService, + permissionSvc, + commandEngine, + systemConfig); + + // ------------------------------------- + // (6) Parse files information from jsondata + // calculate checksum + // determine mimetype + // ------------------------------------- + + JsonArray filesJson = jsonObject.getJsonArray("files"); + + if (filesJson != null) { + for (JsonObject fileJson : filesJson.getValuesAs(JsonObject.class)) { + + for (S3ObjectSummary s3ObjectSummary : datasetSIO.listAuxObjects("")) { + + } + + String storageIdentifier = fileJson.getString("storageIdentifier"); + String suppliedContentType = fileJson.getString("contentType"); + String fileName = fileJson.getString("fileName"); + + String fullPath = datasetSIO.getStorageLocation() + "/" + storageIdentifier.replace("s3://", ""); + + String bucketName = System.getProperty("dataverse.files." + storageIdentifier.split(":")[0] + ".bucket-name"); + + String dbstorageIdentifier = storageIdentifier.split(":")[0] + "://" + bucketName + ":" + storageIdentifier.replace("s3://", ""); + + Query query = em.createQuery("select object(o) from DvObject as o where o.storageIdentifier = :storageIdentifier"); + query.setParameter("storageIdentifier", dbstorageIdentifier); + + msgt("******* dbstorageIdentifier :" + dbstorageIdentifier + " ======= query.getResultList().size()============== " + query.getResultList().size()); + + + if (query.getResultList().size() > 0) { + + JsonObjectBuilder fileoutput= Json.createObjectBuilder() + .add("storageIdentifier " , storageIdentifier) + .add("message " , " The datatable is not updated since the Storage Identifier already exists in dvObject. "); + + jarr.add(fileoutput); + } else { + + // Default to suppliedContentType if set or the overall undetermined default if a contenttype isn't supplied + String finalType = StringUtils.isBlank(suppliedContentType) ? 
FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; + String type = FileUtil.determineFileTypeByExtension(fileName); + if (!StringUtils.isBlank(type)) { + //Use rules for deciding when to trust browser supplied type + if (FileUtil.useRecognizedType(finalType, type)) { + finalType = type; + } + logger.info("Supplied type: " + suppliedContentType + ", finalType: " + finalType); + } + + JsonPatch path = Json.createPatchBuilder().add("/mimeType", finalType).build(); + fileJson = path.apply(fileJson); + + StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); + InputStream in = dataFileStorageIO.getInputStream(); + String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); + + path = Json.createPatchBuilder().add("/md5Hash", checksumVal).build(); + fileJson = path.apply(fileJson); + + //addGlobusFileToDataset(dataset, fileJson.toString(), addFileHelper, fileName, finalType, storageIdentifier); + + + if (!systemConfig.isHTTPUpload()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); + } + + + //------------------------------------ + // (1) Make sure dataset does not have package file + // -------------------------------------- + + for (DatasetVersion dv : dataset.getVersions()) { + if (dv.isHasPackageFile()) { + return error(Response.Status.FORBIDDEN, + BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") + ); + } + } + + //--------------------------------------- + // (2) Load up optional params via JSON + //--------------------------------------- + + OptionalFileParams optionalFileParams = null; + msgt("(api) jsonData 2: " + fileJson.toString()); + + try { + optionalFileParams = new OptionalFileParams(fileJson.toString()); + } catch (DataFileTagException ex) { + return error( Response.Status.BAD_REQUEST, ex.getMessage()); + } + + + //------------------- + // (3) Create the AddReplaceFileHelper object + //------------------- + msg("ADD!"); + + //------------------- + // (4) Run "runAddFileByDatasetId" + //------------------- + addFileHelper.runAddFileByDataset(dataset, + fileName, + finalType, + storageIdentifier, + null, + optionalFileParams); + + + if (addFileHelper.hasError()){ + + JsonObjectBuilder fileoutput= Json.createObjectBuilder() + .add("storageIdentifier " , storageIdentifier) + .add("error Code: " ,addFileHelper.getHttpErrorCode().toString()) + .add("message " , addFileHelper.getErrorMessagesAsString("\n")); + + jarr.add(fileoutput); + + }else{ + String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add"); + + JsonObject a1 = addFileHelper.getSuccessResultAsJsonObjectBuilder().build(); + + JsonArray f1 = a1.getJsonArray("files"); + JsonObject file1 = f1.getJsonObject(0); + + try { + //msgt("as String: " + addFileHelper.getSuccessResult()); + + logger.fine("successMsg: " + successMsg); + String duplicateWarning = addFileHelper.getDuplicateFileWarning(); + if (duplicateWarning != null && !duplicateWarning.isEmpty()) { + // return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder()); + JsonObjectBuilder fileoutput= Json.createObjectBuilder() + .add("storageIdentifier " , storageIdentifier) + .add("warning message: " ,addFileHelper.getDuplicateFileWarning()) + .add("message " , file1); + jarr.add(fileoutput); + + } else { + JsonObjectBuilder fileoutput= Json.createObjectBuilder() + .add("storageIdentifier " , storageIdentifier) + .add("message " , file1); + jarr.add(fileoutput); + } + + //"Look at 
that! You added a file! (hey hey, it may have worked)"); + } catch (Exception ex) { + Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); + return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!"); + } + } + } + } + } + } catch (Exception e) { + String message = e.getMessage(); + msgt("******* Exception from globus API call " + message); + msgt("******* datasetId :" + dataset.getId() + " ======= GLOBUS CALL Exception ============== " + message); + e.printStackTrace(); + } + return ok(Json.createObjectBuilder().add("Files", jarr)); + + } + } From 36fd45c0252480144276b2de8e75e722aee6ee53 Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 19 Jan 2021 08:12:03 -0500 Subject: [PATCH 0077/1551] multiple files lock issue resolved --- .../harvard/iq/dataverse/api/Datasets.java | 27 ++++++++- .../datasetutility/AddReplaceFileHelper.java | 55 +++++++++++++++---- 2 files changed, 69 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 25c80f48e47..afe6fb28cb7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1828,6 +1828,7 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, systemConfig); + //------------------- // (4) Run "runAddFileByDatasetId" //------------------- @@ -1836,7 +1837,7 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, newFileContentType, newStorageIdentifier, fileInputStream, - optionalFileParams); + optionalFileParams ); if (addFileHelper.hasError()){ @@ -2503,6 +2504,9 @@ public Response globus(@PathParam("id") String datasetId, //------------------- msg("ADD!"); + + boolean globustype = true; + //------------------- // (4) Run "runAddFileByDatasetId" //------------------- @@ -2511,7 +2515,8 @@ public Response globus(@PathParam("id") String datasetId, finalType, storageIdentifier, null, - optionalFileParams); + optionalFileParams, + globustype); if (addFileHelper.hasError()){ @@ -2560,12 +2565,30 @@ public Response globus(@PathParam("id") String datasetId, } } } + + try { + Command cmd; + + logger.info("******* : ==== datasetId :" + dataset.getId() + " ======= UpdateDatasetVersionCommand START in globus function "); + cmd = new UpdateDatasetVersionCommand(dataset, dvRequest2); + ((UpdateDatasetVersionCommand) cmd).setValidateLenient(true); + commandEngine.submit(cmd); + } catch (CommandException ex) { + logger.log(Level.WARNING, "==== datasetId :" + dataset.getId() + "======CommandException updating DatasetVersion from batch job: " + ex.getMessage()); + } + + msg("****** pre ingest start"); + ingestService.startIngestJobsForDataset(dataset, (AuthenticatedUser) authUser); + msg("******* post ingest start"); + } catch (Exception e) { String message = e.getMessage(); msgt("******* Exception from globus API call " + message); msgt("******* datasetId :" + dataset.getId() + " ======= GLOBUS CALL Exception ============== " + message); e.printStackTrace(); } + + return ok(Json.createObjectBuilder().add("Files", jarr)); } diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index ab34b5b2675..af9b7937afd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -98,7 +98,7 @@ public class AddReplaceFileHelper{ public static String FILE_ADD_OPERATION = "FILE_ADD_OPERATION"; public static String FILE_REPLACE_OPERATION = "FILE_REPLACE_OPERATION"; public static String FILE_REPLACE_FORCE_OPERATION = "FILE_REPLACE_FORCE_OPERATION"; - + public static String GLOBUSFILE_ADD_OPERATION = "GLOBUSFILE_ADD_OPERATION"; private String currentOperation; @@ -312,17 +312,34 @@ public boolean runAddFileByDataset(Dataset chosenDataset, String newStorageIdentifier, InputStream newFileInputStream, OptionalFileParams optionalFileParams){ - + + return this.runAddFileByDataset(chosenDataset,newFileName,newFileContentType,newStorageIdentifier,newFileInputStream,optionalFileParams,false); + + } + + public boolean runAddFileByDataset(Dataset chosenDataset, + String newFileName, + String newFileContentType, + String newStorageIdentifier, + InputStream newFileInputStream, + OptionalFileParams optionalFileParams, + boolean globustype) { + msgt(">> runAddFileByDatasetId"); initErrorHandling(); - - this.currentOperation = FILE_ADD_OPERATION; - + + if(globustype) { + this.currentOperation = GLOBUSFILE_ADD_OPERATION; + } + else { + this.currentOperation = FILE_ADD_OPERATION; + } + if (!this.step_001_loadDataset(chosenDataset)){ return false; } - + //return this.runAddFile(this.dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams); return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newStorageIdentifier, newFileInputStream, optionalFileParams); @@ -692,8 +709,10 @@ private boolean runAddReplacePhase2(){ }else{ msgt("step_070_run_update_dataset_command"); - if (!this.step_070_run_update_dataset_command()){ - return false; + if (!this.isGlobusFileAddOperation()) { + if (!this.step_070_run_update_dataset_command()) { + return false; + } } } @@ -707,6 +726,8 @@ private boolean runAddReplacePhase2(){ return false; } + + return true; } @@ -755,6 +776,16 @@ public boolean isFileAddOperation(){ return this.currentOperation.equals(FILE_ADD_OPERATION); } + /** + * Is this a file add operation via Globus? + * + * @return + */ + + public boolean isGlobusFileAddOperation(){ + + return this.currentOperation.equals(GLOBUSFILE_ADD_OPERATION); + } /** * Initialize error handling vars @@ -1897,8 +1928,9 @@ private boolean step_100_startIngestJobs(){ msg("pre ingest start"); // start the ingest! 
// - - ingestService.startIngestJobsForDataset(dataset, dvRequest.getAuthenticatedUser()); + if (!this.isGlobusFileAddOperation()) { + ingestService.startIngestJobsForDataset(dataset, dvRequest.getAuthenticatedUser()); + } msg("post ingest start"); return true; @@ -1988,7 +2020,8 @@ public String getDuplicateFileWarning() { public void setDuplicateFileWarning(String duplicateFileWarning) { this.duplicateFileWarning = duplicateFileWarning; } - + + } // end class /* DatasetPage sequence: From 416ad7a6d5cc166f63f849a7c40951e4c189e9b1 Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 19 Jan 2021 10:14:10 -0500 Subject: [PATCH 0078/1551] debugging - ingest process during globus API call --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index afe6fb28cb7..2f561f0bb6e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2312,7 +2312,7 @@ public Response resetFileStore(@PathParam("identifier") String dvIdtf, @POST @Path("{id}/addglobusFiles") @Consumes(MediaType.MULTIPART_FORM_DATA) - public Response globus(@PathParam("id") String datasetId, + public Response addGlobusFileToDataset(@PathParam("id") String datasetId, @FormDataParam("jsonData") String jsonData ) { @@ -2578,7 +2578,7 @@ public Response globus(@PathParam("id") String datasetId, } msg("****** pre ingest start"); - ingestService.startIngestJobsForDataset(dataset, (AuthenticatedUser) authUser); + ingestService.startIngestJobsForDataset(dataset, dvRequest2.getAuthenticatedUser() ); //(AuthenticatedUser) authUser); msg("******* post ingest start"); } catch (Exception e) { From fc5ed42be3b50cd1beb684f9b22d5317ffaddce6 Mon Sep 17 00:00:00 2001 From: chenganj Date: Wed, 20 Jan 2021 14:06:43 -0500 Subject: [PATCH 0079/1551] correction to globusAPI --- .../harvard/iq/dataverse/api/Datasets.java | 160 ++++++++++-------- 1 file changed, 93 insertions(+), 67 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 2f561f0bb6e..291b66fde66 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2318,6 +2318,10 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, { JsonArrayBuilder jarr = Json.createArrayBuilder(); + if (!systemConfig.isHTTPUpload()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); + } + // ------------------------------------- // (1) Get the user from the API key // ------------------------------------- @@ -2341,6 +2345,18 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, return wr.getResponse(); } + //------------------------------------ + // (2a) Make sure dataset does not have package file + // -------------------------------------- + + for (DatasetVersion dv : dataset.getVersions()) { + if (dv.isHasPackageFile()) { + return error(Response.Status.FORBIDDEN, + BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") + ); + } + } + // ------------------------------------- // (3) Parse JsonData @@ -2348,7 +2364,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, String taskIdentifier = null; - msgt("******* (api) jsonData 1: " + jsonData); 
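Patch 0077 above distinguishes Globus adds from regular adds by threading a `boolean globustype` parameter through `runAddFileByDataset` and comparing `currentOperation` against a fourth `String` constant. An alternative shape that avoids the flag plus parallel constants is a single operation enum; this is a design sketch, not what the patch implements:

    // Hypothetical replacement for the String operation constants: each
    // operation is one enum value, and behavior differences hang off it.
    enum Operation {
        FILE_ADD,
        FILE_REPLACE,
        FILE_REPLACE_FORCE,
        GLOBUSFILE_ADD;

        // Globus adds skip the per-file dataset-version update and ingest;
        // the caller runs both once after the whole batch, as the patch
        // does in the Datasets endpoint.
        boolean defersVersionUpdateAndIngest() {
            return this == GLOBUSFILE_ADD;
        }
    }

With this, `step_070_run_update_dataset_command()` and `startIngestJobsForDataset(...)` would be guarded by `currentOperation.defersVersionUpdateAndIngest()` rather than `isGlobusFileAddOperation()`.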
+ msgt("******* (api) jsonData 1: " + jsonData.toString()); JsonObject jsonObject = null; try (StringReader rdr = new StringReader(jsonData)) { @@ -2362,7 +2378,6 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, // (4) Get taskIdentifier // ------------------------------------- - taskIdentifier = jsonObject.getString("taskIdentifier"); msgt("******* (api) newTaskIdentifier: " + taskIdentifier); @@ -2371,6 +2386,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, // ------------------------------------- boolean success = false; + boolean globustype = true; do { try { @@ -2395,14 +2411,20 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, { StorageIO datasetSIO = DataAccess.getStorageIO(dataset); - DataverseRequest dvRequest2 = createDataverseRequest(authUser); - AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2, - ingestService, - datasetService, - fileService, - permissionSvc, - commandEngine, - systemConfig); + for (S3ObjectSummary s3ObjectSummary : datasetSIO.listAuxObjects("")) { + + } + + DataverseRequest dvRequest = createDataverseRequest(authUser); + AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper( + dvRequest, + ingestService, + datasetService, + fileService, + permissionSvc, + commandEngine, + systemConfig + ); // ------------------------------------- // (6) Parse files information from jsondata @@ -2412,14 +2434,12 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, JsonArray filesJson = jsonObject.getJsonArray("files"); + + // Start to add the files if (filesJson != null) { for (JsonObject fileJson : filesJson.getValuesAs(JsonObject.class)) { - for (S3ObjectSummary s3ObjectSummary : datasetSIO.listAuxObjects("")) { - - } - - String storageIdentifier = fileJson.getString("storageIdentifier"); + String storageIdentifier = fileJson.getString("storageIdentifier"); //"s3://176ce6992af-208dea3661bb50" String suppliedContentType = fileJson.getString("contentType"); String fileName = fileJson.getString("fileName"); @@ -2429,14 +2449,11 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, String dbstorageIdentifier = storageIdentifier.split(":")[0] + "://" + bucketName + ":" + storageIdentifier.replace("s3://", ""); + // the storageidentifier should be unique Query query = em.createQuery("select object(o) from DvObject as o where o.storageIdentifier = :storageIdentifier"); query.setParameter("storageIdentifier", dbstorageIdentifier); - msgt("******* dbstorageIdentifier :" + dbstorageIdentifier + " ======= query.getResultList().size()============== " + query.getResultList().size()); - - if (query.getResultList().size() > 0) { - JsonObjectBuilder fileoutput= Json.createObjectBuilder() .add("storageIdentifier " , storageIdentifier) .add("message " , " The datatable is not updated since the Storage Identifier already exists in dvObject. "); @@ -2444,7 +2461,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, jarr.add(fileoutput); } else { - // Default to suppliedContentType if set or the overall undetermined default if a contenttype isn't supplied + // calculate mimeType String finalType = StringUtils.isBlank(suppliedContentType) ? 
FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; String type = FileUtil.determineFileTypeByExtension(fileName); if (!StringUtils.isBlank(type)) { @@ -2458,6 +2475,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, JsonPatch path = Json.createPatchBuilder().add("/mimeType", finalType).build(); fileJson = path.apply(fileJson); + // calculate md5 checksum StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); InputStream in = dataFileStorageIO.getInputStream(); String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); @@ -2465,28 +2483,8 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, path = Json.createPatchBuilder().add("/md5Hash", checksumVal).build(); fileJson = path.apply(fileJson); - //addGlobusFileToDataset(dataset, fileJson.toString(), addFileHelper, fileName, finalType, storageIdentifier); - - - if (!systemConfig.isHTTPUpload()) { - return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); - } - - - //------------------------------------ - // (1) Make sure dataset does not have package file - // -------------------------------------- - - for (DatasetVersion dv : dataset.getVersions()) { - if (dv.isHasPackageFile()) { - return error(Response.Status.FORBIDDEN, - BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") - ); - } - } - //--------------------------------------- - // (2) Load up optional params via JSON + // Load up optional params via JSON //--------------------------------------- OptionalFileParams optionalFileParams = null; @@ -2498,17 +2496,10 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, return error( Response.Status.BAD_REQUEST, ex.getMessage()); } - - //------------------- - // (3) Create the AddReplaceFileHelper object - //------------------- msg("ADD!"); - - boolean globustype = true; - //------------------- - // (4) Run "runAddFileByDatasetId" + // Run "runAddFileByDatasetId" //------------------- addFileHelper.runAddFileByDataset(dataset, fileName, @@ -2531,14 +2522,9 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, }else{ String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add"); - JsonObject a1 = addFileHelper.getSuccessResultAsJsonObjectBuilder().build(); - - JsonArray f1 = a1.getJsonArray("files"); - JsonObject file1 = f1.getJsonObject(0); + JsonObject successresult = addFileHelper.getSuccessResultAsJsonObjectBuilder().build(); try { - //msgt("as String: " + addFileHelper.getSuccessResult()); - logger.fine("successMsg: " + successMsg); String duplicateWarning = addFileHelper.getDuplicateFileWarning(); if (duplicateWarning != null && !duplicateWarning.isEmpty()) { @@ -2546,17 +2532,16 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, JsonObjectBuilder fileoutput= Json.createObjectBuilder() .add("storageIdentifier " , storageIdentifier) .add("warning message: " ,addFileHelper.getDuplicateFileWarning()) - .add("message " , file1); + .add("message " , successresult.getJsonArray("files").getJsonObject(0)); jarr.add(fileoutput); } else { JsonObjectBuilder fileoutput= Json.createObjectBuilder() .add("storageIdentifier " , storageIdentifier) - .add("message " , file1); + .add("message " , successresult.getJsonArray("files").getJsonObject(0)); jarr.add(fileoutput); } - //"Look at that! You added a file! 
(hey hey, it may have worked)"); } catch (Exception ex) { Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!"); @@ -2564,34 +2549,75 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, } } } - } + }// End of adding files try { Command cmd; - - logger.info("******* : ==== datasetId :" + dataset.getId() + " ======= UpdateDatasetVersionCommand START in globus function "); - cmd = new UpdateDatasetVersionCommand(dataset, dvRequest2); + cmd = new UpdateDatasetVersionCommand(dataset, dvRequest); ((UpdateDatasetVersionCommand) cmd).setValidateLenient(true); commandEngine.submit(cmd); } catch (CommandException ex) { - logger.log(Level.WARNING, "==== datasetId :" + dataset.getId() + "======CommandException updating DatasetVersion from batch job: " + ex.getMessage()); + logger.log(Level.WARNING, "==== datasetId :" + dataset.getId() + "====== UpdateDatasetVersionCommand Exception : " + ex.getMessage()); } - msg("****** pre ingest start"); - ingestService.startIngestJobsForDataset(dataset, dvRequest2.getAuthenticatedUser() ); //(AuthenticatedUser) authUser); + dataset = datasetService.find(dataset.getId()); + + List s= dataset.getFiles(); + for (DataFile dataFile : s) { + logger.info(" ******** TEST the datafile id is = " + dataFile.getId() + " = " + dataFile.getDisplayName()); + } + + msg("******* pre ingest start"); + + ingestService.startIngestJobsForDataset(dataset, (AuthenticatedUser) authUser); + msg("******* post ingest start"); } catch (Exception e) { String message = e.getMessage(); - msgt("******* Exception from globus API call " + message); msgt("******* datasetId :" + dataset.getId() + " ======= GLOBUS CALL Exception ============== " + message); e.printStackTrace(); } - return ok(Json.createObjectBuilder().add("Files", jarr)); } } + + /* + + ingestService.startIngestJobsForDataset(dataset, (AuthenticatedUser) authUser); + + + + + if (dvRequest2 != null) { + msg("****** dvRequest2 not null"); + ingestService.startIngestJobsForDataset(dataset, dvRequest2.getAuthenticatedUser()); + } else { + msg("****** dvRequest2 is null"); + ingestService.startIngestJobsForDataset(dataset, (AuthenticatedUser) authUser); + } + */ + + /* + msg("****** JC update command completed "); + + // queue the data ingest job for asynchronous execution: + List dataFiles = addFileHelper.getNewlyAddedFiles(); + for (DataFile dataFile : dataFiles) { + // refresh the copy of the DataFile: + logger.info(" ******** JC the datafile id is = " + dataFile.getId()); + } + + msg("****** JC pre ingest start"); + String status = ingestService.startIngestJobs(dataFiles, (AuthenticatedUser) authUser); + msg("****** JC post ingest start"); + + */ + + + + From 8fc88d745e312d2912b43d25cf4593f4871eeca5 Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 26 Jan 2021 09:33:40 -0500 Subject: [PATCH 0080/1551] fix for mimetype calculation --- .../java/edu/harvard/iq/dataverse/api/Datasets.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 291b66fde66..752c1a8c4c0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2462,13 +2462,18 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, } else { // calculate mimeType + //logger.info(" JC Step 0 Supplied 
type: " + fileName ) ; + //logger.info(" JC Step 1 Supplied type: " + suppliedContentType ) ; String finalType = StringUtils.isBlank(suppliedContentType) ? FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; + //logger.info(" JC Step 2 finalType: " + finalType ) ; String type = FileUtil.determineFileTypeByExtension(fileName); + //logger.info(" JC Step 3 type by fileextension: " + type ) ; if (!StringUtils.isBlank(type)) { //Use rules for deciding when to trust browser supplied type - if (FileUtil.useRecognizedType(finalType, type)) { + //if (FileUtil.useRecognizedType(finalType, type)) { finalType = type; - } + //logger.info(" JC Step 4 type after useRecognized function : " + finalType ) ; + //} logger.info("Supplied type: " + suppliedContentType + ", finalType: " + finalType); } @@ -2567,11 +2572,11 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, logger.info(" ******** TEST the datafile id is = " + dataFile.getId() + " = " + dataFile.getDisplayName()); } - msg("******* pre ingest start"); + msg("******* pre ingest start in globus API"); ingestService.startIngestJobsForDataset(dataset, (AuthenticatedUser) authUser); - msg("******* post ingest start"); + msg("******* post ingest start in globus API"); } catch (Exception e) { String message = e.getMessage(); From 68888bf34dd7f9d1b6519be79eeccd9d2e6653f4 Mon Sep 17 00:00:00 2001 From: chenganj Date: Mon, 1 Feb 2021 10:01:35 -0500 Subject: [PATCH 0081/1551] - add lock to the dataset page when the Globus API call is executing. --- .../harvard/iq/dataverse/api/Datasets.java | 29 ++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 752c1a8c4c0..a95ff6fcdf3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetPage; import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; import edu.harvard.iq.dataverse.DatasetFieldServiceBean; @@ -230,6 +231,9 @@ public class Datasets extends AbstractApiBean { @Inject DataverseRequestServiceBean dvRequestService; + @Inject + DatasetPage datasetPage; + /** * Used to consolidate the way we parse and handle dataset versions. 
* @param @@ -2346,7 +2350,20 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, } //------------------------------------ - // (2a) Make sure dataset does not have package file + // (2a) Add lock to the dataset page + // -------------------------------------- + + String lockInfoMessage = "Globus Upload API is running "; + DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload, + ((AuthenticatedUser) authUser).getId() , lockInfoMessage); + if (lock != null) { + dataset.addLock(lock); + } else { + logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId()); + } + + //------------------------------------ + // (2b) Make sure dataset does not have package file // -------------------------------------- for (DatasetVersion dv : dataset.getVersions()) { @@ -2556,6 +2573,16 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, } }// End of adding files + + DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.GlobusUpload); + if (dcmLock == null) { + logger.log(Level.WARNING, "Dataset not locked for Globus upload"); + } else { + logger.log(Level.INFO, "Dataset remove locked for Globus upload"); + datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.GlobusUpload); + //dataset.removeLock(dcmLock); + } + try { Command cmd; cmd = new UpdateDatasetVersionCommand(dataset, dvRequest); From 38a1d38f37f8ef0d9d75db51a5b6707f58fb8227 Mon Sep 17 00:00:00 2001 From: chenganj Date: Wed, 10 Feb 2021 16:04:53 -0500 Subject: [PATCH 0082/1551] globusAPI initial commit --- .../edu/harvard/iq/dataverse/DatasetLock.java | 3 + .../harvard/iq/dataverse/api/Datasets.java | 323 ++++++- .../iq/dataverse/dataaccess/FileAccessIO.java | 8 +- .../dataverse/dataaccess/InputStreamIO.java | 6 + .../iq/dataverse/dataaccess/S3AccessIO.java | 47 +- .../iq/dataverse/dataaccess/StorageIO.java | 3 + .../dataverse/dataaccess/SwiftAccessIO.java | 6 + .../datasetutility/AddReplaceFileHelper.java | 13 +- .../iq/dataverse/globus/AccessList.java | 33 + .../iq/dataverse/globus/AccessToken.java | 71 ++ .../harvard/iq/dataverse/globus/FileG.java | 67 ++ .../iq/dataverse/globus/FilesList.java | 60 ++ .../dataverse/globus/GlobusServiceBean.java | 909 ++++++++++++++++++ .../iq/dataverse/globus/Identities.java | 16 + .../harvard/iq/dataverse/globus/Identity.java | 67 ++ .../harvard/iq/dataverse/globus/MkDir.java | 22 + .../iq/dataverse/globus/MkDirResponse.java | 50 + .../iq/dataverse/globus/Permissions.java | 58 ++ .../dataverse/globus/PermissionsResponse.java | 58 ++ .../dataverse/globus/SuccessfulTransfer.java | 35 + .../edu/harvard/iq/dataverse/globus/Task.java | 69 ++ .../harvard/iq/dataverse/globus/Tasklist.java | 17 + .../iq/dataverse/globus/Transferlist.java | 18 + .../harvard/iq/dataverse/globus/UserInfo.java | 68 ++ .../settings/SettingsServiceBean.java | 15 +- 25 files changed, 2031 insertions(+), 11 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/AccessList.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/FileG.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/FilesList.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Identities.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Identity.java create mode 100644 
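Since patch 0081 acquires the `GlobusUpload` lock near the top of the endpoint and only releases it after the file loop completes, any early `return error(...)` (package-file check, JSON parse failure, task-polling failure) leaves the dataset locked. A try/finally shape guarantees release on every exit path; a sketch using the same service calls as the patch:

    DatasetLock lock = datasetService.addDatasetLock(dataset.getId(),
            DatasetLock.Reason.GlobusUpload,
            ((AuthenticatedUser) authUser).getId(), lockInfoMessage);
    try {
        // ... wait for the transfer, register files, update the dataset version ...
    } finally {
        // Runs on every exit path, including the early error returns.
        datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.GlobusUpload);
    }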
src/main/java/edu/harvard/iq/dataverse/globus/MkDir.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/MkDirResponse.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Permissions.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/PermissionsResponse.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/SuccessfulTransfer.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Task.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Tasklist.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Transferlist.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/UserInfo.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java b/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java index 93f4aca13d1..09c52a739f8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java @@ -72,6 +72,9 @@ public enum Reason { /** DCM (rsync) upload in progress */ DcmUpload, + /** Globus upload in progress */ + GlobusUpload, + /** Tasks handled by FinalizeDatasetPublicationCommand: Registering PIDs for DS and DFs and/or file validation */ finalizePublication, diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 655cdafe04c..1db28d5dccc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.api; +import com.amazonaws.services.s3.model.S3ObjectSummary; import edu.harvard.iq.dataverse.ControlledVocabularyValue; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; @@ -31,6 +32,7 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil; import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse; import edu.harvard.iq.dataverse.dataset.DatasetThumbnail; @@ -75,6 +77,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand; import edu.harvard.iq.dataverse.export.DDIExportServiceBean; import edu.harvard.iq.dataverse.export.ExportService; +import edu.harvard.iq.dataverse.globus.AccessToken; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.S3PackageImporter; @@ -107,6 +110,9 @@ import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; +import edu.harvard.iq.dataverse.globus.AccessToken; +import edu.harvard.iq.dataverse.globus.GlobusServiceBean; + import java.io.IOException; import java.io.InputStream; import java.io.StringReader; @@ -125,13 +131,8 @@ import javax.ejb.EJB; import javax.ejb.EJBException; import javax.inject.Inject; -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonException; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.json.JsonReader; +import javax.json.*; +import 
javax.persistence.Query; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.Consumes; @@ -150,6 +151,8 @@ import javax.ws.rs.core.Response.Status; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; import javax.ws.rs.core.UriInfo; + +import org.apache.commons.lang.StringUtils; import org.apache.solr.client.solrj.SolrServerException; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; @@ -171,6 +174,9 @@ public class Datasets extends AbstractApiBean { @EJB DataverseServiceBean dataverseService; + @EJB + GlobusServiceBean globusServiceBean; + @EJB UserNotificationServiceBean userNotificationService; @@ -2287,5 +2293,308 @@ public Response resetFileStore(@PathParam("identifier") String dvIdtf, datasetService.merge(dataset); return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); } + + + @POST + @Path("{id}/addglobusFiles") + @Consumes(MediaType.MULTIPART_FORM_DATA) + public Response addGlobusFileToDataset(@PathParam("id") String datasetId, + @FormDataParam("jsonData") String jsonData + ) + { + JsonArrayBuilder jarr = Json.createArrayBuilder(); + + if (!systemConfig.isHTTPUpload()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); + } + + // ------------------------------------- + // (1) Get the user from the API key + // ------------------------------------- + User authUser; + try { + authUser = findUserOrDie(); + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, + BundleUtil.getStringFromBundle("file.addreplace.error.auth") + ); + } + + // ------------------------------------- + // (2) Get the Dataset Id + // ------------------------------------- + Dataset dataset; + + try { + dataset = findDatasetOrDie(datasetId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + //------------------------------------ + // (2a) Add lock to the dataset page + // -------------------------------------- + + String lockInfoMessage = "Globus Upload API is running "; + DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload, + ((AuthenticatedUser) authUser).getId() , lockInfoMessage); + if (lock != null) { + dataset.addLock(lock); + } else { + logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId()); + } + + //------------------------------------ + // (2b) Make sure dataset does not have package file + // -------------------------------------- + + for (DatasetVersion dv : dataset.getVersions()) { + if (dv.isHasPackageFile()) { + return error(Response.Status.FORBIDDEN, + BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") + ); + } + } + + + // ------------------------------------- + // (3) Parse JsonData + // ------------------------------------- + + String taskIdentifier = null; + + msgt("******* (api) jsonData 1: " + jsonData.toString()); + + JsonObject jsonObject = null; + try (StringReader rdr = new StringReader(jsonData)) { + jsonObject = Json.createReader(rdr).readObject(); + } catch (Exception jpe) { + jpe.printStackTrace(); + logger.log(Level.SEVERE, "Error parsing dataset json. 
Json: {0}"); + } + + // ------------------------------------- + // (4) Get taskIdentifier + // ------------------------------------- + + taskIdentifier = jsonObject.getString("taskIdentifier"); + msgt("******* (api) newTaskIdentifier: " + taskIdentifier); + + // ------------------------------------- + // (5) Wait until task completion + // ------------------------------------- + + boolean success = false; + boolean globustype = true; + + do { + try { + String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); + basicGlobusToken = "ODA0ODBhNzEtODA5ZC00ZTJhLWExNmQtY2JkMzA1NTk0ZDdhOmQvM3NFd1BVUGY0V20ra2hkSkF3NTZMWFJPaFZSTVhnRmR3TU5qM2Q3TjA9"; + msgt("******* (api) basicGlobusToken: " + basicGlobusToken); + AccessToken clientTokenUser = globusServiceBean.getClientToken(basicGlobusToken); + + success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskIdentifier); + msgt("******* (api) success: " + success); + + } catch (Exception ex) { + ex.printStackTrace(); + logger.info(ex.getMessage()); + return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to get task id"); + } + + } while (!success); + + + try + { + StorageIO datasetSIO = DataAccess.getStorageIO(dataset); + + for (S3ObjectSummary s3ObjectSummary : datasetSIO.listAuxObjects("")) { + + } + + DataverseRequest dvRequest = createDataverseRequest(authUser); + AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper( + dvRequest, + ingestService, + datasetService, + fileService, + permissionSvc, + commandEngine, + systemConfig + ); + + // ------------------------------------- + // (6) Parse files information from jsondata + // calculate checksum + // determine mimetype + // ------------------------------------- + + JsonArray filesJson = jsonObject.getJsonArray("files"); + + + // Start to add the files + if (filesJson != null) { + for (JsonObject fileJson : filesJson.getValuesAs(JsonObject.class)) { + + String storageIdentifier = fileJson.getString("storageIdentifier"); //"s3://176ce6992af-208dea3661bb50" + String suppliedContentType = fileJson.getString("contentType"); + String fileName = fileJson.getString("fileName"); + + String fullPath = datasetSIO.getStorageLocation() + "/" + storageIdentifier.replace("s3://", ""); + + String bucketName = System.getProperty("dataverse.files." + storageIdentifier.split(":")[0] + ".bucket-name"); + + String dbstorageIdentifier = storageIdentifier.split(":")[0] + "://" + bucketName + ":" + storageIdentifier.replace("s3://", ""); + + // the storageidentifier should be unique + Query query = em.createQuery("select object(o) from DvObject as o where o.storageIdentifier = :storageIdentifier"); + query.setParameter("storageIdentifier", dbstorageIdentifier); + + if (query.getResultList().size() > 0) { + JsonObjectBuilder fileoutput= Json.createObjectBuilder() + .add("storageIdentifier " , storageIdentifier) + .add("message " , " The datatable is not updated since the Storage Identifier already exists in dvObject. "); + + jarr.add(fileoutput); + } else { + + // calculate mimeType + //logger.info(" JC Step 0 Supplied type: " + fileName ) ; + //logger.info(" JC Step 1 Supplied type: " + suppliedContentType ) ; + String finalType = StringUtils.isBlank(suppliedContentType) ? 
String finalType = StringUtils.isBlank(suppliedContentType) ? FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; + String type = FileUtil.determineFileTypeByExtension(fileName); + if (!StringUtils.isBlank(type)) { + //Use rules for deciding when to trust browser supplied type + //if (FileUtil.useRecognizedType(finalType, type)) { + finalType = type; + //} + logger.info("Supplied type: " + suppliedContentType + ", finalType: " + finalType); + } + + JsonPatch path = Json.createPatchBuilder().add("/mimeType", finalType).build(); + fileJson = path.apply(fileJson); + + // calculate md5 checksum + StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); + InputStream in = dataFileStorageIO.getInputStream(); + String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); + in.close(); + + path = Json.createPatchBuilder().add("/md5Hash", checksumVal).build(); + fileJson = path.apply(fileJson); + + //--------------------------------------- + // Load up optional params via JSON + //--------------------------------------- + + OptionalFileParams optionalFileParams = null; + msgt("(api) jsonData 2: " + fileJson.toString()); + + try { + optionalFileParams = new OptionalFileParams(fileJson.toString()); + } catch (DataFileTagException ex) { + return error(Response.Status.BAD_REQUEST, ex.getMessage()); + } + + //------------------- + // Run runAddFileByDataset + //------------------- + addFileHelper.runAddFileByDataset(dataset, + fileName, + finalType, + storageIdentifier, + null, + optionalFileParams, + globustype); + + + if (addFileHelper.hasError()) { + + JsonObjectBuilder fileoutput = Json.createObjectBuilder() + .add("storageIdentifier", storageIdentifier) + .add("errorCode", addFileHelper.getHttpErrorCode().toString()) + .add("message", addFileHelper.getErrorMessagesAsString("\n")); + + jarr.add(fileoutput); + + } else { + String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add"); + + JsonObject successresult = addFileHelper.getSuccessResultAsJsonObjectBuilder().build(); + + try { + logger.fine("successMsg: " + successMsg); + String duplicateWarning = addFileHelper.getDuplicateFileWarning(); + if (duplicateWarning != null && !duplicateWarning.isEmpty()) { + JsonObjectBuilder fileoutput = Json.createObjectBuilder() + .add("storageIdentifier", storageIdentifier) + .add("warningMessage", addFileHelper.getDuplicateFileWarning()) + .add("message", successresult.getJsonArray("files").getJsonObject(0)); + jarr.add(fileoutput); + + } else { + JsonObjectBuilder fileoutput = Json.createObjectBuilder() + .add("storageIdentifier", storageIdentifier) + .add("message", successresult.getJsonArray("files").getJsonObject(0)); + jarr.add(fileoutput); + } + + } catch (Exception ex) { + Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); + return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!"); + } + } + } + }// End of adding files +
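// Response sketch (illustrative): each entry appended to jarr above becomes one element of
+ // the "Files" array returned at the end of this method, e.g.
+ // { "storageIdentifier": "s3://...", "message": { ...file metadata... } } on success, or
+ // { "storageIdentifier": "s3://...", "errorCode": "400", "message": "..." } on failure.
+ +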
DatasetLock globusLock = dataset.getLockFor(DatasetLock.Reason.GlobusUpload); + if (globusLock == null) { + logger.log(Level.WARNING, "Dataset not locked for Globus upload"); + } else { + logger.log(Level.INFO, "Removing the Globus upload lock from the dataset"); + datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.GlobusUpload); + //dataset.removeLock(globusLock); + } + + try { + Command cmd; + cmd = new UpdateDatasetVersionCommand(dataset, dvRequest); + ((UpdateDatasetVersionCommand) cmd).setValidateLenient(true); + commandEngine.submit(cmd); + } catch (CommandException ex) { + logger.log(Level.WARNING, "==== datasetId :" + dataset.getId() + "====== UpdateDatasetVersionCommand Exception : " + ex.getMessage()); + } + + dataset = datasetService.find(dataset.getId()); + + List s = dataset.getFiles(); + for (DataFile dataFile : s) { + logger.info("datafile id = " + dataFile.getId() + ", name = " + dataFile.getDisplayName()); + } + + msg("******* pre ingest start in globus API"); + + ingestService.startIngestJobsForDataset(dataset, (AuthenticatedUser) authUser); + + msg("******* post ingest start in globus API"); + + } catch (Exception e) { + String message = e.getMessage(); + msgt("******* datasetId :" + dataset.getId() + " ======= GLOBUS CALL Exception ============== " + message); + e.printStackTrace(); + } + + return ok(Json.createObjectBuilder().add("Files", jarr)); + + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java index bd0549622f0..d11d55ede9f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java @@ -35,6 +35,7 @@ // Dataverse imports: +import com.amazonaws.services.s3.model.S3ObjectSummary; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.Dataverse; @@ -416,7 +417,12 @@ public void deleteAllAuxObjects() throws IOException { } - + @Override + public List listAuxObjects(String s) throws IOException { + // prefix-based aux listing is only implemented for the S3 driver + return null; + } + + @Override public String getStorageLocation() { // For a local file, the "storage location" is a complete, absolute diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java index c9796d24b27..2befee82d0c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java @@ -5,6 +5,7 @@ */ package edu.harvard.iq.dataverse.dataaccess; +import com.amazonaws.services.s3.model.S3ObjectSummary; import edu.harvard.iq.dataverse.DataFile; import java.io.IOException; import java.io.InputStream; @@ -149,6 +150,11 @@ public OutputStream getOutputStream() throws IOException { throw new UnsupportedDataAccessOperationException("InputStreamIO: there is no output stream associated with this object."); } + @Override + public List listAuxObjects(String s) throws IOException { + // not supported for this driver + return null; + } + @Override public InputStream getAuxFileAsInputStream(String auxItemTag) { throw new UnsupportedOperationException("InputStreamIO: this method is not supported in this DataAccess driver."); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index c0defccfdef..0b4e8b43cd9
100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -4,6 +4,8 @@ import com.amazonaws.ClientConfiguration; import com.amazonaws.HttpMethod; import com.amazonaws.SdkClientException; +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.auth.profile.ProfileCredentialsProvider; import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.services.s3.AmazonS3; @@ -112,6 +114,8 @@ public S3AccessIO(String storageLocation, String driverId) { key = storageLocation.substring(storageLocation.indexOf('/')+1); } + public static final String S3_IDENTIFIER_PREFIX = "s3"; + //Used for tests only public S3AccessIO(T dvObject, DataAccessRequest req, @NotNull AmazonS3 s3client, String driverId) { super(dvObject, req, driverId); @@ -636,6 +640,46 @@ public List listAuxObjects() throws IOException { return ret; } + @Override + public List listAuxObjects(String s) throws IOException { + if (!this.canWrite()) { + open(); + } + String prefix = getDestinationKey(""); + + List ret = new ArrayList<>(); + + logger.fine("S3 listAuxObjects: bucketname " + bucketName); + logger.fine("S3 listAuxObjects: prefix " + prefix); + + ListObjectsRequest req = new ListObjectsRequest().withBucketName(bucketName).withPrefix(prefix); + ObjectListing storedAuxFilesList = null; + try { + storedAuxFilesList = s3.listObjects(req); + } catch (SdkClientException sce) { + throw new IOException("S3 listAuxObjects: failed to get a listing for " + prefix); + } + if (storedAuxFilesList == null) { + return ret; + } + List storedAuxFilesSummary = storedAuxFilesList.getObjectSummaries(); + try { + while (storedAuxFilesList.isTruncated()) { + logger.fine("S3 listAuxObjects: going to next page of list"); + storedAuxFilesList = s3.listNextBatchOfObjects(storedAuxFilesList); + if (storedAuxFilesList != null) { + storedAuxFilesSummary.addAll(storedAuxFilesList.getObjectSummaries()); + } + } + } catch (AmazonClientException ase) { + throw new IOException("S3AccessIO: Failed to get aux objects for listing."); + } + + + return storedAuxFilesSummary; + } + @Override public void deleteAuxObject(String auxItemTag) throws IOException { if (!this.canWrite()) { @@ -875,7 +919,8 @@ public String generateTemporaryS3Url() throws IOException { if (s != null) { return s.toString(); } - + + //throw new IOException("Failed to generate temporary S3 url for "+key); return null; } else if (dvObject instanceof Dataset) { diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 2f66eec5f4c..9bfd9154323 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -37,6 +37,7 @@ import java.util.Iterator; import java.util.List; +import com.amazonaws.services.s3.model.S3ObjectSummary; //import org.apache.commons.httpclient.Header; //import org.apache.commons.httpclient.methods.GetMethod; @@ -542,4 +543,6 @@ public boolean isBelowIngestSizeLimit() { return true; } } + + public abstract List listAuxObjects(String s) throws IOException; +
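// Note: this String-argument variant is only meaningful for S3-backed stores; the
+ // FileAccessIO, InputStreamIO and SwiftAccessIO implementations simply return null.
+ // Call-site sketch (assuming an S3-backed dataset store):
+ // for (S3ObjectSummary sum : DataAccess.getStorageIO(dataset).listAuxObjects("")) {
+ //     logger.fine(sum.getKey() + " : " + sum.getSize());
+ // }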
} diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java index 3bc29cb9836..bee67f85a55 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java @@ -1,4 +1,5 @@ package edu.harvard.iq.dataverse.dataaccess; +import com.amazonaws.services.s3.model.S3ObjectSummary; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.Dataverse; @@ -875,6 +876,11 @@ public String getSwiftContainerName() { return null; } + @Override + public List listAuxObjects(String s) throws IOException { + // not supported for this driver + return null; + } + //https://gist.github.com/ishikawa/88599 public static String toHexString(byte[] bytes) { Formatter formatter = new Formatter(); diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index ea1cfc38cfa..c0d5afb95cd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -312,7 +312,18 @@ public boolean runAddFileByDataset(Dataset chosenDataset, String newFileName, String newFileContentType, String newStorageIdentifier, InputStream newFileInputStream, OptionalFileParams optionalFileParams){ - + return this.runAddFileByDataset(chosenDataset, newFileName, newFileContentType, newStorageIdentifier, newFileInputStream, optionalFileParams, false); + + } + + public boolean runAddFileByDataset(Dataset chosenDataset, + String newFileName, + String newFileContentType, + String newStorageIdentifier, + InputStream newFileInputStream, + OptionalFileParams optionalFileParams, + boolean globustype) { + msgt(">> runAddFileByDatasetId"); initErrorHandling(); diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/AccessList.java b/src/main/java/edu/harvard/iq/dataverse/globus/AccessList.java new file mode 100644 index 00000000000..9a963000541 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/AccessList.java @@ -0,0 +1,33 @@ +package edu.harvard.iq.dataverse.globus; + +import java.util.ArrayList; + +public class AccessList { + private int length; + private String endpoint; + private ArrayList DATA; + + public void setDATA(ArrayList DATA) { + this.DATA = DATA; + } + + public void setEndpoint(String endpoint) { + this.endpoint = endpoint; + } + + public void setLength(int length) { + this.length = length; + } + + public String getEndpoint() { + return endpoint; + } + + public ArrayList getDATA() { + return DATA; + } + + public int getLength() { + return length; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java b/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java new file mode 100644 index 00000000000..2d68c5c8839 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java @@ -0,0 +1,71 @@ +package edu.harvard.iq.dataverse.globus; + +import java.util.ArrayList; + + +public class AccessToken implements java.io.Serializable { + + private String accessToken; + private String idToken; + private Long expiresIn; + private String resourceServer; + private String tokenType; + private String state; + private String scope; + private String refreshToken; + private ArrayList otherTokens; + + public String getAccessToken() { return accessToken; } + + String getIdToken() { return idToken; } + + Long getExpiresIn() { return expiresIn; } + + String getResourceServer() { return resourceServer; } + + String getTokenType() { return tokenType; } + + String getState() {
return state; } + + String getScope() {return scope; } + + String getRefreshToken() { return refreshToken; } + + ArrayList getOtherTokens() { return otherTokens; } + + public void setAccessToken(String accessToken) { + this.accessToken = accessToken; + } + + public void setExpiresIn(Long expiresIn) { + this.expiresIn = expiresIn; + } + + public void setIdToken(String idToken) { + this.idToken = idToken; + } + + public void setOtherTokens(ArrayList otherTokens) { + this.otherTokens = otherTokens; + } + + public void setRefreshToken(String refreshToken) { + this.refreshToken = refreshToken; + } + + public void setResourceServer(String resourceServer) { + this.resourceServer = resourceServer; + } + + public void setScope(String scope) { + this.scope = scope; + } + + public void setState(String state) { + this.state = state; + } + + public void setTokenType(String tokenType) { + this.tokenType = tokenType; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/FileG.java b/src/main/java/edu/harvard/iq/dataverse/globus/FileG.java new file mode 100644 index 00000000000..bd6a4b3b881 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/FileG.java @@ -0,0 +1,67 @@ +package edu.harvard.iq.dataverse.globus; + +public class FileG { + private String DATA_TYPE; + private String group; + private String name; + private String permissions; + private String size; + private String type; + private String user; + + public String getDATA_TYPE() { + return DATA_TYPE; + } + + public String getGroup() { + return group; + } + + public String getName() { + return name; + } + + public String getPermissions() { + return permissions; + } + + public String getSize() { + return size; + } + + public String getType() { + return type; + } + + public String getUser() { + return user; + } + + public void setDATA_TYPE(String DATA_TYPE) { + this.DATA_TYPE = DATA_TYPE; + } + + public void setGroup(String group) { + this.group = group; + } + + public void setName(String name) { + this.name = name; + } + + public void setPermissions(String permissions) { + this.permissions = permissions; + } + + public void setSize(String size) { + this.size = size; + } + + public void setType(String type) { + this.type = type; + } + + public void setUser(String user) { + this.user = user; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/FilesList.java b/src/main/java/edu/harvard/iq/dataverse/globus/FilesList.java new file mode 100644 index 00000000000..777e37f9b80 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/FilesList.java @@ -0,0 +1,60 @@ +package edu.harvard.iq.dataverse.globus; + +import java.util.ArrayList; + +public class FilesList { + private ArrayList DATA; + private String DATA_TYPE; + private String absolute_path; + private String endpoint; + private String length; + private String path; + + public String getEndpoint() { + return endpoint; + } + + public ArrayList getDATA() { + return DATA; + } + + public String getAbsolute_path() { + return absolute_path; + } + + public String getDATA_TYPE() { + return DATA_TYPE; + } + + public String getLength() { + return length; + } + + public String getPath() { + return path; + } + + public void setLength(String length) { + this.length = length; + } + + public void setEndpoint(String endpoint) { + this.endpoint = endpoint; + } + + public void setDATA(ArrayList DATA) { + this.DATA = DATA; + } + + public void setAbsolute_path(String absolute_path) { + this.absolute_path = absolute_path; + } + + public void setDATA_TYPE(String DATA_TYPE) { + 
this.DATA_TYPE = DATA_TYPE; + } + + public void setPath(String path) { + this.path = path; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java new file mode 100644 index 00000000000..5e314c4f47e --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -0,0 +1,909 @@ +package edu.harvard.iq.dataverse.globus; + +import com.amazonaws.services.s3.model.S3Object; +import com.amazonaws.services.s3.model.S3ObjectSummary; +import com.google.gson.FieldNamingPolicy; +import com.google.gson.GsonBuilder; +import edu.harvard.iq.dataverse.*; + +import javax.ejb.EJB; +import javax.ejb.Stateless; +import javax.faces.application.FacesMessage; +import javax.faces.context.FacesContext; +import javax.faces.view.ViewScoped; +import javax.inject.Inject; +import javax.inject.Named; + +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.*; + +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLEncoder; + +import java.sql.Timestamp; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.logging.Level; +import java.util.logging.Logger; +import com.google.gson.Gson; +import edu.harvard.iq.dataverse.api.AbstractApiBean; +import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; +import edu.harvard.iq.dataverse.authorization.users.ApiToken; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.dataaccess.DataAccess; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.engine.command.Command; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.FileUtil; +import edu.harvard.iq.dataverse.util.JsfHelper; +import edu.harvard.iq.dataverse.util.SystemConfig; +import org.primefaces.PrimeFaces; + +import static edu.harvard.iq.dataverse.util.JsfHelper.JH; + + +@Stateless +@Named("GlobusServiceBean") +public class GlobusServiceBean implements java.io.Serializable { + + @EJB + protected DatasetServiceBean datasetSvc; + + @EJB + protected SettingsServiceBean settingsSvc; + + @Inject + DataverseSession session; + + @EJB + protected AuthenticationServiceBean authSvc; + + @EJB + EjbDataverseEngine commandEngine; + + private static final Logger logger = Logger.getLogger(GlobusServiceBean.class.getCanonicalName()); + + private String code; + private String userTransferToken; + private String state; + + public String getState() { + return state; + } + + public void setState(String state) { + this.state = state; + } + + public String getCode() { + return code; + } + + public void setCode(String code) { + this.code = code; + } + + public String getUserTransferToken() { + return userTransferToken; + } + + public void setUserTransferToken(String userTransferToken) { + this.userTransferToken = userTransferToken; + } + + public void onLoad() { + logger.info("Start Globus " + code); + logger.info("State " + state); + +
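// Flow sketch: this view handler exchanges the OAuth2 authorization code for Globus
+ // tokens, looks up the Globus user, creates (or reuses) the dataset directory on the
+ // configured endpoint, grants the user rw access on it, and then redirects the browser
+ // to the Globus file manager (see goGlobusUpload below).
+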
String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); + String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); + if (globusEndpoint.equals("") || basicGlobusToken.equals("")) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } + String datasetId = state; + logger.info("DatasetId = " + datasetId); + + String directory = getDirectory(datasetId); + if (directory == null) { + logger.severe("Cannot find directory"); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } + HttpServletRequest origRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest(); + + logger.info(origRequest.getScheme()); + logger.info(origRequest.getServerName()); + + if (code != null ) { + + try { + AccessToken accessTokenUser = getAccessToken(origRequest, basicGlobusToken); + if (accessTokenUser == null) { + logger.severe("Cannot get access user token for code " + code); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } else { + setUserTransferToken(accessTokenUser.getOtherTokens().get(0).getAccessToken()); + } + + UserInfo usr = getUserInfo(accessTokenUser); + if (usr == null) { + logger.severe("Cannot get user info for " + accessTokenUser.getAccessToken()); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } + logger.info(accessTokenUser.getAccessToken()); + logger.info(usr.getEmail()); + AccessToken clientTokenUser = getClientToken(basicGlobusToken); + if (clientTokenUser == null) { + logger.severe("Cannot get client token "); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } + logger.info(clientTokenUser.getAccessToken()); + + int status = createDirectory(clientTokenUser, directory, globusEndpoint); + if (status == 202) { + int perStatus = givePermission("identity", usr.getSub(), "rw", clientTokenUser, directory, globusEndpoint); + if (perStatus != 201 && perStatus != 200) { + logger.severe("Cannot get permissions "); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } + } else if (status == 502) { //directory already exists + int perStatus = givePermission("identity", usr.getSub(), "rw", clientTokenUser, directory, globusEndpoint); + if (perStatus == 409) { + logger.info("permissions already exist"); + } else if (perStatus != 201 && perStatus != 200) { + logger.severe("Cannot get permissions "); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } + } else { + logger.severe("Cannot create directory, status code " + status); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + return; + } + // ProcessBuilder processBuilder = new ProcessBuilder(); + // AuthenticatedUser user = (AuthenticatedUser) session.getUser(); + // ApiToken token = authSvc.findApiTokenByUser(user); + // String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST https://" + origRequest.getServerName() + "/api/globus/" + datasetId; + // logger.info("====command ==== " + command); + // processBuilder.command("bash", "-c", command); + // logger.info("=== Start process"); + // Process process = processBuilder.start(); + // logger.info("=== Going globus"); + goGlobusUpload(directory, 
globusEndpoint); + logger.info("=== Finished globus"); + + + } catch (IOException ex) { + // MalformedURLException and UnsupportedEncodingException are IOExceptions; one handler covers all three + logger.severe(ex.getMessage()); + logger.severe(String.valueOf(ex.getCause())); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); + } + + } + + } + + private void goGlobusUpload(String directory, String globusEndpoint) { + + String httpString = "window.location.replace('" + "https://app.globus.org/file-manager?destination_id=" + globusEndpoint + "&destination_path=" + directory + "')"; + PrimeFaces.current().executeScript(httpString); + } + + public void goGlobusDownload(String datasetId) { + + String directory = getDirectory(datasetId); + String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); + String httpString = "window.location.replace('" + "https://app.globus.org/file-manager?origin_id=" + globusEndpoint + "&origin_path=" + directory + "')"; + PrimeFaces.current().executeScript(httpString); + } + + ArrayList checkPermisions(AccessToken clientTokenUser, String directory, String globusEndpoint, String principalType, String principal) throws MalformedURLException { + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access_list"); + MakeRequestResponse result = makeRequest(url, "Bearer", + clientTokenUser.getOtherTokens().get(0).getAccessToken(), "GET", null); + ArrayList ids = new ArrayList(); + if (result.status == 200) { + AccessList al = parseJson(result.jsonResponse, AccessList.class, false); + + for (int i = 0; i < al.getDATA().size(); i++) { + Permissions pr = al.getDATA().get(i); + if ((pr.getPath().equals(directory + "/") || pr.getPath().equals(directory)) && pr.getPrincipalType().equals(principalType) && + (principal == null || pr.getPrincipal().equals(principal))) { + ids.add(pr.getId()); + } + } + } + + return ids; + } + + public void updatePermision(AccessToken clientTokenUser, String directory, String principalType, String perm) throws MalformedURLException { + if (directory != null && !directory.equals("")) { + directory = "/" + directory + "/"; + } + logger.info("Start updating permissions."
+ " Directory is " + directory); + String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); + ArrayList rules = checkPermisions( clientTokenUser, directory, globusEndpoint, principalType, null); + logger.info("Size of rules " + rules.size()); + int count = 0; + while (count < rules.size()) { + logger.info("Start removing rules " + rules.get(count) ); + Permissions permissions = new Permissions(); + permissions.setDATA_TYPE("access"); + permissions.setPermissions(perm); + permissions.setPath(directory); + + Gson gson = new GsonBuilder().create(); + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + rules.get(count)); + logger.info("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + rules.get(count)); + MakeRequestResponse result = makeRequest(url, "Bearer", + clientTokenUser.getOtherTokens().get(0).getAccessToken(),"PUT", gson.toJson(permissions)); + if (result.status != 200) { + logger.warning("Cannot update access rule " + rules.get(count)); + } else { + logger.info("Access rule " + rules.get(count) + " was updated"); + } + count++; + } + } + + public int givePermission(String principalType, String principal, String perm, AccessToken clientTokenUser, String directory, String globusEndpoint) throws MalformedURLException { + + ArrayList rules = checkPermisions( clientTokenUser, directory, globusEndpoint, principalType, principal); + + + + Permissions permissions = new Permissions(); + permissions.setDATA_TYPE("access"); + permissions.setPrincipalType(principalType); + permissions.setPrincipal(principal); + permissions.setPath(directory + "/" ); + permissions.setPermissions(perm); + + Gson gson = new GsonBuilder().create(); + MakeRequestResponse result = null; + if (rules.size() == 0) { + logger.info("Start creating the rule"); + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/"+ globusEndpoint + "/access"); + result = makeRequest(url, "Bearer", + clientTokenUser.getOtherTokens().get(0).getAccessToken(), "POST", gson.toJson(permissions)); + + if (result.status == 400) { + logger.severe("Path " + permissions.getPath() + " is not valid"); + } else if (result.status == 409) { + logger.warning("ACL already exists or Endpoint ACL already has the maximum number of access rules"); + } + + return result.status; + } else { + logger.info("Start Updating the rule"); + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/"+ globusEndpoint + "/access/" + rules.get(0)); + result = makeRequest(url, "Bearer", + clientTokenUser.getOtherTokens().get(0).getAccessToken(), "PUT", gson.toJson(permissions)); + + if (result.status == 400) { + logger.severe("Path " + permissions.getPath() + " is not valid"); + } else if (result.status == 409) { + logger.warning("ACL already exists or Endpoint ACL already has the maximum number of access rules"); + } + logger.info("Result status " + result.status); + } + + return result.status; + } + + private int createDirectory(AccessToken clientTokenUser, String directory, String globusEndpoint) throws MalformedURLException { + URL url = new URL("https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + globusEndpoint + "/mkdir"); + + MkDir mkDir = new MkDir(); + mkDir.setDataType("mkdir"); + mkDir.setPath(directory); + Gson gson = new GsonBuilder().create(); + + MakeRequestResponse result = makeRequest(url, "Bearer", + clientTokenUser.getOtherTokens().get(0).getAccessToken(),"POST", 
gson.toJson(mkDir)); + logger.info("mkdir status: " + result.status); + + if (result.status == 502) { + logger.warning("Cannot create directory " + mkDir.getPath() + ", it already exists"); + } else if (result.status == 403) { + logger.severe("Cannot create directory " + mkDir.getPath() + ", permission denied"); + } else if (result.status == 202) { + logger.info("Directory created " + mkDir.getPath()); + } + + return result.status; + + } + + public String getTaskList(String basicGlobusToken, String identifierForFileStorage, String timeWhenAsyncStarted) throws MalformedURLException { + try + { + logger.info("1.getTaskList ====== timeWhenAsyncStarted = " + timeWhenAsyncStarted + " ====== identifierForFileStorage ====== " + identifierForFileStorage); + + String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); + AccessToken clientTokenUser = getClientToken(basicGlobusToken); + + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task_list?filter_endpoint=" + globusEndpoint + "&filter_status=SUCCEEDED&filter_completion_time=" + timeWhenAsyncStarted); + + MakeRequestResponse result = makeRequest(url, "Bearer", clientTokenUser.getOtherTokens().get(0).getAccessToken(), "GET", null); + + //2019-12-01 18:34:37+00:00 + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + + Calendar cal1 = Calendar.getInstance(); + cal1.setTime(sdf.parse(timeWhenAsyncStarted)); + + Calendar cal2 = Calendar.getInstance(); + + Tasklist tasklist = null; + //2019-12-01 18:34:37+00:00 + + if (result.status == 200) { + tasklist = parseJson(result.jsonResponse, Tasklist.class, false); + for (int i = 0; i < tasklist.getDATA().size(); i++) { + Task task = tasklist.getDATA().get(i); + Date taskTime = sdf.parse(task.getRequest_time().replace("T", " ")); + cal2.setTime(taskTime); + + + if (cal1.before(cal2)) { + + // GET /task/{task_id}/successful_transfers + // verify datasetid in "destination_path": "/~/test_godata_copy/file1.txt", + // go to aws and get files and write to database tables + + logger.info("====== timeWhenAsyncStarted = " + timeWhenAsyncStarted + " ====== task.getRequest_time().toString() ====== " + task.getRequest_time()); + + boolean success = getSuccessfulTransfers(clientTokenUser, task.getTask_id(), identifierForFileStorage); + + if (success) + { + logger.info("SUCCESS ====== " + timeWhenAsyncStarted + " timeWhenAsyncStarted is before taskTime = TASK time = " + task.getTask_id()); + return task.getTask_id(); + } + } + else + { + //logger.info("====== " + timeWhenAsyncStarted + " timeWhenAsyncStarted is after taskTime = TASK time = " + task.getTask_id()); + //return task.getTask_id(); + } + } + } + } catch (MalformedURLException ex) { + logger.severe(ex.getMessage()); + logger.severe(String.valueOf(ex.getCause())); + } catch (Exception e) { + e.printStackTrace(); + } + return null; + } + + public boolean getSuccessfulTransfers(AccessToken clientTokenUser, String taskId, String identifierForFileStorage) throws MalformedURLException { + + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task/" + taskId + "/successful_transfers"); + + MakeRequestResponse result = makeRequest(url, "Bearer", clientTokenUser.getOtherTokens().get(0).getAccessToken(), + "GET", null); + + Transferlist transferlist = null; + + if
(result.status == 200) { + transferlist = parseJson(result.jsonResponse, Transferlist.class, false); + for (int i = 0; i < transferlist.getDATA().size(); i++) { + SuccessfulTransfer successfulTransfer = transferlist.getDATA().get(i); + String pathToVerify = successfulTransfer.getDestination_path(); + logger.info("getSuccessfulTransfers : ======pathToVerify === " + pathToVerify + " ====identifierForFileStorage === " + identifierForFileStorage); + if (pathToVerify.contains(identifierForFileStorage)) + { + logger.info(" SUCCESS ====== " + pathToVerify + " ==== " + identifierForFileStorage); + return true; + } + } + } + return false; + } + + public boolean getSuccessfulTransfers(AccessToken clientTokenUser, String taskId) throws MalformedURLException { + + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task/" + taskId + "/successful_transfers"); + + MakeRequestResponse result = makeRequest(url, "Bearer", clientTokenUser.getOtherTokens().get(0).getAccessToken(), + "GET", null); + + // NOTE: this overload only checks that the task query returned HTTP 200; it does not + // inspect the individual transfers. + if (result.status == 200) { + logger.info(" SUCCESS ====== "); + return true; + } + return false; + } + + + + public AccessToken getClientToken(String basicGlobusToken) throws MalformedURLException { + URL url = new URL("https://auth.globus.org/v2/oauth2/token?scope=openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all&grant_type=client_credentials"); + + MakeRequestResponse result = makeRequest(url, "Basic", + basicGlobusToken, "POST", null); + AccessToken clientTokenUser = null; + if (result.status == 200) { + clientTokenUser = parseJson(result.jsonResponse, AccessToken.class, true); + } + return clientTokenUser; + } + + public AccessToken getAccessToken(HttpServletRequest origRequest, String basicGlobusToken) throws UnsupportedEncodingException, MalformedURLException { + String serverName = origRequest.getServerName(); + if (serverName.equals("localhost")) { + // development fallback: localhost cannot be used as a Globus redirect host + serverName = "utl-192-123.library.utoronto.ca"; + } + + String redirectURL = "https://" + serverName + "/globus.xhtml"; + + redirectURL = URLEncoder.encode(redirectURL, "UTF-8"); + + URL url = new URL("https://auth.globus.org/v2/oauth2/token?code=" + code + "&redirect_uri=" + redirectURL + + "&grant_type=authorization_code"); + logger.fine("Requesting Globus tokens (authorization_code grant)"); + + MakeRequestResponse result = makeRequest(url, "Basic", basicGlobusToken, "POST", null); + AccessToken accessTokenUser = null; + + if (result.status == 200) { + accessTokenUser = parseJson(result.jsonResponse, AccessToken.class, true); + logger.fine("Access token received"); // do not log the token itself + } + + return accessTokenUser; + + } + + public UserInfo getUserInfo(AccessToken accessTokenUser) throws MalformedURLException { + + URL url = new URL("https://auth.globus.org/v2/oauth2/userinfo"); + MakeRequestResponse result = makeRequest(url, "Bearer", accessTokenUser.getAccessToken(), "GET", null); + UserInfo usr = null; + if (result.status == 200) { + usr = parseJson(result.jsonResponse, UserInfo.class, true); + } + + return usr; + } + + public MakeRequestResponse makeRequest(URL url, String authType, String authCode, String method, String jsonString) { + String str = null; + HttpURLConnection connection = null; + int status = 0; + try { + connection = (HttpURLConnection) url.openConnection(); + // the Authorization header value is a credential; do not log it +
connection.setRequestProperty("Authorization", authType + " " + authCode); + connection.setRequestMethod(method); + if (jsonString != null) { + connection.setRequestProperty("Content-Type", "application/json"); + connection.setRequestProperty("Accept", "application/json"); + logger.fine(jsonString); + connection.setDoOutput(true); + OutputStreamWriter wr = new OutputStreamWriter(connection.getOutputStream()); + wr.write(jsonString); + wr.flush(); + wr.close(); + } + + status = connection.getResponseCode(); + logger.info("Status now " + status); + // NOTE: getInputStream() throws for HTTP error statuses; the status code has already been captured above + InputStream result = connection.getInputStream(); + if (result != null) { + StringBuilder sb = readResultJson(result); + str = (sb == null) ? null : sb.toString(); + logger.fine("response body: " + str); + } else { + logger.info("Result is null"); + str = null; + } + + logger.info("status: " + status); + } catch (IOException ex) { + logger.severe(ex.getMessage()); + logger.info(String.valueOf(ex.getCause())); + logger.info(Arrays.toString(ex.getStackTrace())); + } finally { + if (connection != null) { + connection.disconnect(); + } + } + MakeRequestResponse r = new MakeRequestResponse(str, status); + return r; + + } + + private StringBuilder readResultJson(InputStream in) { + StringBuilder sb = null; + try { + + BufferedReader br = new BufferedReader(new InputStreamReader(in)); + sb = new StringBuilder(); + String line; + while ((line = br.readLine()) != null) { + sb.append(line + "\n"); + } + br.close(); + logger.info(sb.toString()); + } catch (IOException e) { + sb = null; + logger.severe(e.getMessage()); + } + return sb; + } + + private <T> T parseJson(String sb, Class<T> jsonParserClass, boolean namingPolicy) { + if (sb != null) { + Gson gson = null; + if (namingPolicy) { + gson = new GsonBuilder().setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES).create(); + + } else { + gson = new GsonBuilder().create(); + } + T jsonClass = gson.fromJson(sb, jsonParserClass); + return jsonClass; + } else { + logger.severe("Bad response from token request"); + return null; + } + } + + String getDirectory(String datasetId) { + Dataset dataset = null; + String directory = null; + try { + dataset = datasetSvc.find(Long.parseLong(datasetId)); + if (dataset == null) { + logger.severe("Dataset not found " + datasetId); + return null; + } + String storeId = dataset.getStorageIdentifier(); + directory = storeId.substring(storeId.indexOf("//") + 1); + logger.info(storeId); + logger.info(directory); + logger.info("Storage identifier: " + dataset.getIdentifierForFileStorage()); + return directory; + + } catch (NumberFormatException nfe) { + logger.severe(nfe.getMessage()); + + return null; + } + + } + + class MakeRequestResponse { + public String jsonResponse; + public int status; + MakeRequestResponse(String jsonResponse, int status) { + this.jsonResponse = jsonResponse; + this.status = status; + } + + } + + private MakeRequestResponse findDirectory(String directory, AccessToken clientTokenUser, String globusEndpoint) throws MalformedURLException { + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/ls?path=" + directory + "/"); + + MakeRequestResponse result = makeRequest(url, "Bearer", + clientTokenUser.getOtherTokens().get(0).getAccessToken(), "GET", null); + logger.info("find directory status: " + result.status); + + return result; + } + +
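// Note: parseJson(...) above uses Gson; with namingPolicy=true it applies the
+ // LOWER_CASE_WITH_UNDERSCORES policy, so a JSON key such as "access_token" binds to
+ // the camelCase AccessToken.accessToken field, while POJOs like Permissions and Task
+ // declare snake_case fields and are parsed with the default policy (namingPolicy=false).
+ +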
public boolean giveGlobusPublicPermissions(String datasetId) throws UnsupportedEncodingException, MalformedURLException { + + String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); + String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); + if (globusEndpoint.equals("") || basicGlobusToken.equals("")) { + return false; + } + AccessToken clientTokenUser = getClientToken(basicGlobusToken); + if (clientTokenUser == null) { + logger.severe("Cannot get client token "); + return false; + } + + String directory = getDirectory(datasetId); + logger.info(directory); + + MakeRequestResponse status = findDirectory(directory, clientTokenUser, globusEndpoint); + + if (status.status == 200) { + + /* FilesList fl = parseJson(status.jsonResponse, FilesList.class, false); + ArrayList files = fl.getDATA(); + if (files != null) { + for (FileG file: files) { + if (!file.getName().contains("cached") && !file.getName().contains(".thumb")) { + int perStatus = givePermission("all_authenticated_users", "", "r", clientTokenUser, + directory + "/" + file.getName(), globusEndpoint); + logger.info("givePermission status " + perStatus + " for " + file.getName()); + if (perStatus == 409) { + logger.info("Permissions already exist or limit was reached for " + file.getName()); + } else if (perStatus == 400) { + logger.info("No file in Globus " + file.getName()); + } else if (perStatus != 201) { + logger.info("Cannot get permission for " + file.getName()); + } + } + } + }*/ + + int perStatus = givePermission("all_authenticated_users", "", "r", clientTokenUser, directory, globusEndpoint); + logger.info("givePermission status " + perStatus); + if (perStatus == 409) { + logger.info("Permissions already exist or limit was reached"); + } else if (perStatus == 400) { + logger.info("No directory in Globus"); + } else if (perStatus != 201 && perStatus != 200) { + logger.info("Cannot give read permission"); + return false; + } + + } else if (status.status == 404) { + logger.info("There is no globus directory"); + } else { + logger.severe("Cannot find directory in globus, status " + status.status); + return false; + } + + return true; + } +/* + public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) throws MalformedURLException { + + logger.info("=====Tasklist == dataset id :" + dataset.getId()); + String directory = null; + + try { + + List fileMetadatas = new ArrayList<>(); + + StorageIO datasetSIO = DataAccess.getStorageIO(dataset); + + + + DatasetVersion workingVersion = dataset.getEditVersion(); + + if (workingVersion.getCreateTime() != null) { + workingVersion.setCreateTime(new Timestamp(new Date().getTime())); + } + + + directory = dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage(); + + System.out.println("======= directory ==== " + directory + " ==== datasetId :" + dataset.getId()); + Map checksumMapOld = new HashMap<>(); + + Iterator fmIt = workingVersion.getFileMetadatas().iterator(); + + while (fmIt.hasNext()) { + FileMetadata fm = fmIt.next(); + if (fm.getDataFile() != null && fm.getDataFile().getId() != null) { + String chksum = fm.getDataFile().getChecksumValue(); + if (chksum != null) { + checksumMapOld.put(chksum, 1); + } + } + } + + List dFileList = new ArrayList<>(); + boolean update = false; + for (S3ObjectSummary s3ObjectSummary : datasetSIO.listAuxObjects("")) { + + String s3ObjectKey = s3ObjectSummary.getKey(); + + + String t = s3ObjectKey.replace(directory, ""); + + if (t.indexOf(".") > 0) { +
long totalSize = s3ObjectSummary.getSize(); + String filePath = s3ObjectKey; + String fileName = filePath.split("/")[filePath.split("/").length - 1]; + String fullPath = datasetSIO.getStorageLocation() + "/" + fileName; + + logger.info("Full path " + fullPath); + StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); + InputStream in = dataFileStorageIO.getInputStream(); + + String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); + //String checksumVal = s3ObjectSummary.getETag(); + logger.info("The checksum is " + checksumVal); + if ((checksumMapOld.get(checksumVal) != null)) { + logger.info("datasetId :" + dataset.getId() + "======= filename ==== " + filePath + " == file already exists "); + } else if (filePath.contains("cached") || filePath.contains(".thumb")) { + logger.info(filePath + " is ignored"); + } else { + update = true; + logger.info("datasetId :" + dataset.getId() + "======= filename ==== " + filePath + " == new file "); + try { + + DataFile datafile = new DataFile(DataFileServiceBean.MIME_TYPE_GLOBUS_FILE); //MIME_TYPE_GLOBUS + datafile.setModificationTime(new Timestamp(new Date().getTime())); + datafile.setCreateDate(new Timestamp(new Date().getTime())); + datafile.setPermissionModificationTime(new Timestamp(new Date().getTime())); + + FileMetadata fmd = new FileMetadata(); + + + fmd.setLabel(fileName); + fmd.setDirectoryLabel(filePath.replace(directory, "").replace(File.separator + fileName, "")); + + fmd.setDataFile(datafile); + + datafile.getFileMetadatas().add(fmd); + + FileUtil.generateS3PackageStorageIdentifierForGlobus(datafile); + logger.info("==== datasetId :" + dataset.getId() + "======= filename ==== " + filePath + " == added to datafile, filemetadata "); + + try { + // We persist "SHA1" rather than "SHA-1". + //datafile.setChecksumType(DataFile.ChecksumType.SHA1); + datafile.setChecksumType(DataFile.ChecksumType.MD5); + datafile.setChecksumValue(checksumVal); + } catch (Exception cksumEx) { + logger.info("==== datasetId :" + dataset.getId() + "======Could not calculate checksumType signature for the new file "); + } + + datafile.setFilesize(totalSize); + + dFileList.add(datafile); + + } catch (Exception ioex) { + logger.info("datasetId :" + dataset.getId() + "======Failed to process and/or save the file " + ioex.getMessage()); + return false; + + } + } + } + } + if (update) { + + List filesAdded = new ArrayList<>(); + + if (dFileList != null && dFileList.size() > 0) { + + // Dataset dataset = version.getDataset(); + + for (DataFile dataFile : dFileList) { + + if (dataFile.getOwner() == null) { + dataFile.setOwner(dataset); + + workingVersion.getFileMetadatas().add(dataFile.getFileMetadata()); + dataFile.getFileMetadata().setDatasetVersion(workingVersion); + dataset.getFiles().add(dataFile); + + } + + filesAdded.add(dataFile); + + } + + logger.info("==== datasetId :" + dataset.getId() + " ===== Done! 
Finished saving new files to the dataset."); + } + + fileMetadatas.clear(); + for (DataFile addedFile : filesAdded) { + fileMetadatas.add(addedFile.getFileMetadata()); + } + filesAdded = null; + + if (workingVersion.isDraft()) { + + logger.info("Async: ==== datasetId :" + dataset.getId() + " ==== inside draft version "); + + Timestamp updateTime = new Timestamp(new Date().getTime()); + + workingVersion.setLastUpdateTime(updateTime); + dataset.setModificationTime(updateTime); + + + for (FileMetadata fileMetadata : fileMetadatas) { + + if (fileMetadata.getDataFile().getCreateDate() == null) { + fileMetadata.getDataFile().setCreateDate(updateTime); + fileMetadata.getDataFile().setCreator((AuthenticatedUser) user); + } + fileMetadata.getDataFile().setModificationTime(updateTime); + } + + + } else { + logger.info("datasetId :" + dataset.getId() + " ==== inside released version "); + + for (int i = 0; i < workingVersion.getFileMetadatas().size(); i++) { + for (FileMetadata fileMetadata : fileMetadatas) { + if (fileMetadata.getDataFile().getStorageIdentifier() != null) { + + if (fileMetadata.getDataFile().getStorageIdentifier().equals(workingVersion.getFileMetadatas().get(i).getDataFile().getStorageIdentifier())) { + workingVersion.getFileMetadatas().set(i, fileMetadata); + } + } + } + } + + + } + + + try { + Command cmd; + logger.info("Async: ==== datasetId :" + dataset.getId() + " ======= UpdateDatasetVersionCommand START in globus function "); + cmd = new UpdateDatasetVersionCommand(dataset, new DataverseRequest(user, (HttpServletRequest) null)); + ((UpdateDatasetVersionCommand) cmd).setValidateLenient(true); + //new DataverseRequest(authenticatedUser, (HttpServletRequest) null) + //dvRequestService.getDataverseRequest() + commandEngine.submit(cmd); + } catch (CommandException ex) { + logger.log(Level.WARNING, "==== datasetId :" + dataset.getId() + "======CommandException updating DatasetVersion from batch job: " + ex.getMessage()); + return false; + } + + logger.info("==== datasetId :" + dataset.getId() + " ======= GLOBUS CALL COMPLETED SUCCESSFULLY "); + + //return true; + } + + } catch (Exception e) { + String message = e.getMessage(); + + logger.info("==== datasetId :" + dataset.getId() + " ======= GLOBUS CALL Exception ============== " + message); + e.printStackTrace(); + return false; + //return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. 
Message was '" + message + "'."); + } + + String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); + AccessToken clientTokenUser = getClientToken(basicGlobusToken); + updatePermision(clientTokenUser, directory, "identity", "r"); + return true; + } + +*/ +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Identities.java b/src/main/java/edu/harvard/iq/dataverse/globus/Identities.java new file mode 100644 index 00000000000..6411262b5c9 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/Identities.java @@ -0,0 +1,16 @@ +package edu.harvard.iq.dataverse.globus; + +import java.util.ArrayList; + + +public class Identities { + ArrayList identities; + + public void setIdentities(ArrayList identities) { + this.identities = identities; + } + + public ArrayList getIdentities() { + return identities; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Identity.java b/src/main/java/edu/harvard/iq/dataverse/globus/Identity.java new file mode 100644 index 00000000000..265bd55217a --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/Identity.java @@ -0,0 +1,67 @@ +package edu.harvard.iq.dataverse.globus; + +public class Identity { + private String id; + private String username; + private String status; + private String name; + private String email; + private String identityProvider; + private String organization; + + public void setOrganization(String organization) { + this.organization = organization; + } + + public void setIdentityProvider(String identityProvider) { + this.identityProvider = identityProvider; + } + + public void setName(String name) { + this.name = name; + } + + public void setEmail(String email) { + this.email = email; + } + + public void setId(String id) { + this.id = id; + } + + public void setStatus(String status) { + this.status = status; + } + + public void setUsername(String username) { + this.username = username; + } + + public String getOrganization() { + return organization; + } + + public String getIdentityProvider() { + return identityProvider; + } + + public String getName() { + return name; + } + + public String getEmail() { + return email; + } + + public String getId() { + return id; + } + + public String getStatus() { + return status; + } + + public String getUsername() { + return username; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/MkDir.java b/src/main/java/edu/harvard/iq/dataverse/globus/MkDir.java new file mode 100644 index 00000000000..2c906f1f31d --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/MkDir.java @@ -0,0 +1,22 @@ +package edu.harvard.iq.dataverse.globus; + +public class MkDir { + private String DATA_TYPE; + private String path; + + public void setDataType(String DATA_TYPE) { + this.DATA_TYPE = DATA_TYPE; + } + + public void setPath(String path) { + this.path = path; + } + + public String getDataType() { + return DATA_TYPE; + } + + public String getPath() { + return path; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/MkDirResponse.java b/src/main/java/edu/harvard/iq/dataverse/globus/MkDirResponse.java new file mode 100644 index 00000000000..d31b34b8e70 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/MkDirResponse.java @@ -0,0 +1,50 @@ +package edu.harvard.iq.dataverse.globus; + +public class MkDirResponse { + private String DATA_TYPE; + private String code; + private String message; + private String request_id; + private String resource; + + public void setCode(String code) { + this.code 
= code; + } + + public void setDataType(String dataType) { + this.DATA_TYPE = dataType; + } + + public void setMessage(String message) { + this.message = message; + } + + public void setRequestId(String requestId) { + this.request_id = requestId; + } + + public void setResource(String resource) { + this.resource = resource; + } + + public String getCode() { + return code; + } + + public String getDataType() { + return DATA_TYPE; + } + + public String getMessage() { + return message; + } + + public String getRequestId() { + return request_id; + } + + public String getResource() { + return resource; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Permissions.java b/src/main/java/edu/harvard/iq/dataverse/globus/Permissions.java new file mode 100644 index 00000000000..b8bb5193fa4 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/Permissions.java @@ -0,0 +1,58 @@ +package edu.harvard.iq.dataverse.globus; + +public class Permissions { + private String DATA_TYPE; + private String principal_type; + private String principal; + private String id; + private String path; + private String permissions; + + public void setPath(String path) { + this.path = path; + } + + public void setDATA_TYPE(String DATA_TYPE) { + this.DATA_TYPE = DATA_TYPE; + } + + public void setPermissions(String permissions) { + this.permissions = permissions; + } + + public void setPrincipal(String principal) { + this.principal = principal; + } + + public void setPrincipalType(String principalType) { + this.principal_type = principalType; + } + + public String getPath() { + return path; + } + + public String getDATA_TYPE() { + return DATA_TYPE; + } + + public String getPermissions() { + return permissions; + } + + public String getPrincipal() { + return principal; + } + + public String getPrincipalType() { + return principal_type; + } + + public void setId(String id) { + this.id = id; + } + + public String getId() { + return id; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/PermissionsResponse.java b/src/main/java/edu/harvard/iq/dataverse/globus/PermissionsResponse.java new file mode 100644 index 00000000000..a30b1ecdc04 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/PermissionsResponse.java @@ -0,0 +1,58 @@ +package edu.harvard.iq.dataverse.globus; + +public class PermissionsResponse { + private String code; + private String resource; + private String DATA_TYPE; + private String request_id; + private String access_id; + private String message; + + public String getDATA_TYPE() { + return DATA_TYPE; + } + + public String getResource() { + return resource; + } + + public String getRequestId() { + return request_id; + } + + public String getMessage() { + return message; + } + + public String getCode() { + return code; + } + + public String getAccessId() { + return access_id; + } + + public void setDATA_TYPE(String DATA_TYPE) { + this.DATA_TYPE = DATA_TYPE; + } + + public void setResource(String resource) { + this.resource = resource; + } + + public void setRequestId(String requestId) { + this.request_id = requestId; + } + + public void setMessage(String message) { + this.message = message; + } + + public void setCode(String code) { + this.code = code; + } + + public void setAccessId(String accessId) { + this.access_id = accessId; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/SuccessfulTransfer.java b/src/main/java/edu/harvard/iq/dataverse/globus/SuccessfulTransfer.java new file mode 100644 index 00000000000..6e2e5810a0a --- /dev/null +++ 
b/src/main/java/edu/harvard/iq/dataverse/globus/SuccessfulTransfer.java @@ -0,0 +1,35 @@ +package edu.harvard.iq.dataverse.globus; + +public class SuccessfulTransfer { + + private String DATA_TYPE; + private String destination_path; + private String source_path; + + public String getDATA_TYPE() { + return DATA_TYPE; + } + + public void setDATA_TYPE(String DATA_TYPE) { + this.DATA_TYPE = DATA_TYPE; + } + + public String getDestination_path() { + return destination_path; + } + + public void setDestination_path(String destination_path) { + this.destination_path = destination_path; + } + + public String getSource_path() { + return source_path; + } + + public void setSource_path(String source_path) { + this.source_path = source_path; + } + + + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Task.java b/src/main/java/edu/harvard/iq/dataverse/globus/Task.java new file mode 100644 index 00000000000..8d9f13f8ddf --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/Task.java @@ -0,0 +1,69 @@ +package edu.harvard.iq.dataverse.globus; + +public class Task { + + private String DATA_TYPE; + private String type; + private String status; + private String owner_id; + private String request_time; + private String task_id; + private String destination_endpoint_display_name; + + public String getDestination_endpoint_display_name() { + return destination_endpoint_display_name; + } + + public void setDestination_endpoint_display_name(String destination_endpoint_display_name) { + this.destination_endpoint_display_name = destination_endpoint_display_name; + } + + public void setRequest_time(String request_time) { + this.request_time = request_time; + } + + public String getRequest_time() { + return request_time; + } + + public String getTask_id() { + return task_id; + } + + public void setTask_id(String task_id) { + this.task_id = task_id; + } + + public String getDATA_TYPE() { + return DATA_TYPE; + } + + public void setDATA_TYPE(String DATA_TYPE) { + this.DATA_TYPE = DATA_TYPE; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public String getOwner_id() { + return owner_id; + } + + public void setOwner_id(String owner_id) { + this.owner_id = owner_id; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Tasklist.java b/src/main/java/edu/harvard/iq/dataverse/globus/Tasklist.java new file mode 100644 index 00000000000..34e8c6c528e --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/Tasklist.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.globus; + +import java.util.ArrayList; + +public class Tasklist { + + private ArrayList<Task> DATA; + + public void setDATA(ArrayList<Task> DATA) { + this.DATA = DATA; + } + + public ArrayList<Task> getDATA() { + return DATA; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Transferlist.java b/src/main/java/edu/harvard/iq/dataverse/globus/Transferlist.java new file mode 100644 index 00000000000..0a1bd607ee2 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/Transferlist.java @@ -0,0 +1,18 @@ +package edu.harvard.iq.dataverse.globus; + +import java.util.ArrayList; + +public class Transferlist { + + + private ArrayList<SuccessfulTransfer> DATA; + + public void setDATA(ArrayList<SuccessfulTransfer> DATA) { + this.DATA = DATA; + } + + public ArrayList<SuccessfulTransfer> getDATA() { + return DATA; + } + +} diff --git
a/src/main/java/edu/harvard/iq/dataverse/globus/UserInfo.java b/src/main/java/edu/harvard/iq/dataverse/globus/UserInfo.java new file mode 100644 index 00000000000..a195486dd0b --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/UserInfo.java @@ -0,0 +1,68 @@ +package edu.harvard.iq.dataverse.globus; + +public class UserInfo implements java.io.Serializable { + + private String identityProviderDisplayName; + private String identityProvider; + private String organization; + private String sub; + private String preferredUsername; + private String name; + private String email; + + public void setEmail(String email) { + this.email = email; + } + + public void setName(String name) { + this.name = name; + } + + public void setPreferredUsername(String preferredUsername) { + this.preferredUsername = preferredUsername; + } + + public void setSub(String sub) { + this.sub = sub; + } + + public void setIdentityProvider(String identityProvider) { + this.identityProvider = identityProvider; + } + + public void setIdentityProviderDisplayName(String identityProviderDisplayName) { + this.identityProviderDisplayName = identityProviderDisplayName; + } + + public void setOrganization(String organization) { + this.organization = organization; + } + + public String getEmail() { + return email; + } + + public String getPreferredUsername() { + return preferredUsername; + } + + public String getSub() { + return sub; + } + + public String getName() { + return name; + } + + public String getIdentityProvider() { + return identityProvider; + } + + public String getIdentityProviderDisplayName() { + return identityProviderDisplayName; + } + + public String getOrganization() { + return organization; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index e292ee39722..cfa972bb8d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -436,7 +436,20 @@ Whether Harvesting (OAI) service is enabled /** * Sort Date Facets Chronologically instead of presenting them in order of # of hits as other facets are. Default is true */ - ChronologicalDateFacets + ChronologicalDateFacets, + + /** + * BasicGlobusToken for the Globus application + */ + BasicGlobusToken, + /** + * GlobusEndpoint is the Globus endpoint for the Globus application + */ + GlobusEndpoint, + /** + * Client ID for the Globus application + */ + GlobusClientId ; @Override From 66a4ca056cf16450ed5bf788aa9b726928efb6ec Mon Sep 17 00:00:00 2001 From: chenganj Date: Thu, 11 Feb 2021 15:23:55 -0500 Subject: [PATCH 0083/1551] debug 1 --- .../harvard/iq/dataverse/api/Datasets.java | 19 +- .../iq/dataverse/dataaccess/S3AccessIO.java | 2 - .../datasetutility/AddReplaceFileHelper.java | 50 +++- .../dataverse/ingest/IngestServiceBean.java | 263 +++++++++++++++++- .../iq/dataverse/ingest/IngestUtil.java | 17 ++ 5 files changed, 324 insertions(+), 27 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 1db28d5dccc..7ad53638942 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -77,7 +77,6 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand; import edu.harvard.iq.dataverse.export.DDIExportServiceBean; import edu.harvard.iq.dataverse.export.ExportService; -import edu.harvard.iq.dataverse.globus.AccessToken; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.S3PackageImporter; @@ -2378,7 +2377,6 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, // ------------------------------------- taskIdentifier = jsonObject.getString("taskIdentifier"); - msgt("******* (api) newTaskIdentifier: " + taskIdentifier); // ------------------------------------- // (5) Wait until task completion @@ -2391,11 +2389,9 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, try { String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); basicGlobusToken = "ODA0ODBhNzEtODA5ZC00ZTJhLWExNmQtY2JkMzA1NTk0ZDdhOmQvM3NFd1BVUGY0V20ra2hkSkF3NTZMWFJPaFZSTVhnRmR3TU5qM2Q3TjA9"; - msgt("******* (api) basicGlobusToken: " + basicGlobusToken); AccessToken clientTokenUser = globusServiceBean.getClientToken(basicGlobusToken); success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskIdentifier); - msgt("******* (api) success: " + success); } catch (Exception ex) { ex.printStackTrace(); @@ -2433,7 +2429,6 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, JsonArray filesJson = jsonObject.getJsonArray("files"); - // Start to add the files if (filesJson != null) { for (JsonObject fileJson : filesJson.getValuesAs(JsonObject.class)) { @@ -2461,20 +2456,13 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, } else { // calculate mimeType - //logger.info(" JC Step 0 Supplied type: " + fileName ) ; - //logger.info(" JC Step 1 Supplied type: " + suppliedContentType ) ; String finalType = StringUtils.isBlank(suppliedContentType) ?
FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; - //logger.info(" JC Step 2 finalType: " + finalType ) ; + String type = FileUtil.determineFileTypeByExtension(fileName); - //logger.info(" JC Step 3 type by fileextension: " + type ) ; + if (!StringUtils.isBlank(type)) { - //Use rules for deciding when to trust browser supplied type - //if (FileUtil.useRecognizedType(finalType, type)) { finalType = type; - //logger.info(" JC Step 4 type after useRecognized function : " + finalType ) ; - //} - logger.info("Supplied type: " + suppliedContentType + ", finalType: " + finalType); - } + } JsonPatch path = Json.createPatchBuilder().add("/mimeType", finalType).build(); fileJson = path.apply(fileJson); @@ -2492,7 +2480,6 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, //--------------------------------------- OptionalFileParams optionalFileParams = null; - msgt("(api) jsonData 2: " + fileJson.toString()); try { optionalFileParams = new OptionalFileParams(fileJson.toString()); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 0b4e8b43cd9..92026aef170 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -113,8 +113,6 @@ public S3AccessIO(String storageLocation, String driverId) { minPartSize = getMinPartSize(driverId); key = storageLocation.substring(storageLocation.indexOf('/')+1); } - - public static String S3_IDENTIFIER_PREFIX = "s3"; //Used for tests only public S3AccessIO(T dvObject, DataAccessRequest req, @NotNull AmazonS3 s3client, String driverId) { diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index c0d5afb95cd..a3d86894251 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -98,7 +98,7 @@ public class AddReplaceFileHelper{ public static String FILE_ADD_OPERATION = "FILE_ADD_OPERATION"; public static String FILE_REPLACE_OPERATION = "FILE_REPLACE_OPERATION"; public static String FILE_REPLACE_FORCE_OPERATION = "FILE_REPLACE_FORCE_OPERATION"; - + public static String GLOBUSFILE_ADD_OPERATION = "GLOBUSFILE_ADD_OPERATION"; private String currentOperation; @@ -316,6 +316,7 @@ public boolean runAddFileByDataset(Dataset chosenDataset, } + // JC STEP 1 public boolean runAddFileByDataset(Dataset chosenDataset, String newFileName, String newFileContentType, @@ -328,8 +329,13 @@ public boolean runAddFileByDataset(Dataset chosenDataset, initErrorHandling(); - this.currentOperation = FILE_ADD_OPERATION; - + if(globustype) { + this.currentOperation = GLOBUSFILE_ADD_OPERATION; + } + else { + this.currentOperation = FILE_ADD_OPERATION; + } + if (!this.step_001_loadDataset(chosenDataset)){ return false; } @@ -455,7 +461,8 @@ private boolean runAddReplaceFile(Dataset owner, String newFileName, String newF InputStream newFileInputStream, OptionalFileParams optionalFileParams) { return runAddReplaceFile(owner,newFileName, newFileContentType, null, newFileInputStream, optionalFileParams); } - + + // JC STEP 4 private boolean runAddReplaceFile(Dataset owner, String newFileName, String newFileContentType, String newStorageIdentifier, InputStream newFileInputStream, @@ -534,6 +541,7 @@ public boolean 
runReplaceFromUI_Phase1(Long oldFileId, * * @return */ + // JC STEP 5 private boolean runAddReplacePhase1(Dataset owner, String newFileName, String newFileContentType, @@ -703,11 +711,13 @@ private boolean runAddReplacePhase2(){ }else{ msgt("step_070_run_update_dataset_command"); + if (!this.isGlobusFileAddOperation()) { if (!this.step_070_run_update_dataset_command()){ return false; } } - + } + msgt("step_090_notifyUser"); if (!this.step_090_notifyUser()){ return false; @@ -766,10 +776,22 @@ public boolean isFileAddOperation(){ return this.currentOperation.equals(FILE_ADD_OPERATION); } + /** + * Is this a file add operation via Globus? + * + * @return + */ + + public boolean isGlobusFileAddOperation(){ + + return this.currentOperation.equals(GLOBUSFILE_ADD_OPERATION); + } /** * Initialize error handling vars */ + + // JC STEP 2 private void initErrorHandling(){ this.errorFound = false; @@ -937,6 +959,8 @@ private String getBundleErr(String msgName){ /** * */ + + // JC STEP 3 private boolean step_001_loadDataset(Dataset selectedDataset){ if (this.hasError()){ @@ -1512,7 +1536,16 @@ private boolean step_060_addFilesViaIngestService(){ } int nFiles = finalFileList.size(); - finalFileList = ingestService.saveAndAddFilesToDataset(workingVersion, finalFileList, fileToReplace); + + if (!this.isGlobusFileAddOperation()) { + finalFileList = ingestService.saveAndAddFilesToDataset(workingVersion, finalFileList, fileToReplace); + } + else { + finalFileList = ingestService.saveAndAddFilesToDataset(workingVersion, finalFileList, isFileReplaceOperation()); + } + + + if (nFiles != finalFileList.size()) { if (nFiles == 1) { @@ -1908,9 +1941,10 @@ private boolean step_100_startIngestJobs(){ msg("pre ingest start"); // start the ingest! // - + if (!this.isGlobusFileAddOperation()) { ingestService.startIngestJobsForDataset(dataset, dvRequest.getAuthenticatedUser()); - + } + msg("post ingest start"); return true; } diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index eec5504661a..035922f0724 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -154,7 +154,268 @@ public class IngestServiceBean { // attached to the Dataset via some cascade path (for example, via // DataFileCategory objects, if any were already assigned to the files). // It must be called before we attempt to permanently save the files in - // the database by calling the Save command on the dataset and/or version. + // the database by calling the Save command on the dataset and/or version. + + public List saveAndAddFilesToDataset(DatasetVersion version, List newFiles, boolean isReplaceOperation) { + List ret = new ArrayList<>(); + + if (newFiles != null && newFiles.size() > 0) { + // ret = new ArrayList<>(); + // final check for duplicate file names; + // we tried to make the file names unique on upload, but then + // the user may have edited them on the "add files" page, and + // renamed FOOBAR-1.txt back to FOOBAR.txt... 
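Note (illustrative sketch, not part of the patch): assuming IngestUtil.checkForDuplicateFileNamesFinal renames on collision the way this comment describes, its intended effect is roughly the following; "newFile" and its "FOOBAR.txt" label are made up for illustration:

    DatasetVersion version = dataset.getLatestVersion();
    List<DataFile> incoming = Collections.singletonList(newFile); // labeled "FOOBAR.txt"
    IngestUtil.checkForDuplicateFileNamesFinal(version, incoming);
    // if "FOOBAR.txt" already exists under the same directoryLabel, the incoming
    // file's label is expected to come back as "FOOBAR-1.txt"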
+ //Don't change the name if we're replacing a file - (the original hasn't yet been deleted but will be in a later step) + if(!isReplaceOperation) { + IngestUtil.checkForDuplicateFileNamesFinal(version, newFiles); + } + Dataset dataset = version.getDataset(); + + for (DataFile dataFile : newFiles) { + boolean unattached = false; + boolean savedSuccess = false; + if (dataFile.getOwner() == null) { + unattached = true; + dataFile.setOwner(dataset); + } + + String[] storageInfo = DataAccess.getDriverIdAndStorageLocation(dataFile.getStorageIdentifier()); + String driverType = DataAccess.getDriverType(storageInfo[0]); + String storageLocation = storageInfo[1]; + String tempFileLocation = null; + Path tempLocationPath = null; + if (driverType.equals("tmp")) { //"tmp" is the default if no prefix or the "tmp://" driver + tempFileLocation = FileUtil.getFilesTempDirectory() + "/" + storageLocation; + + // Try to save the file in its permanent location: + tempLocationPath = Paths.get(tempFileLocation); + WritableByteChannel writeChannel = null; + FileChannel readChannel = null; + + StorageIO dataAccess = null; + + try { + logger.fine("Attempting to create a new storageIO object for " + storageLocation); + dataAccess = DataAccess.createNewStorageIO(dataFile, storageLocation); + + logger.fine("Successfully created a new storageIO object."); + /* + * This commented-out code demonstrates how to copy bytes from a local + * InputStream (or a readChannel) into the writable byte channel of a Dataverse + * DataAccessIO object: + */ + + /* + * storageIO.open(DataAccessOption.WRITE_ACCESS); + * + * writeChannel = storageIO.getWriteChannel(); readChannel = new + * FileInputStream(tempLocationPath.toFile()).getChannel(); + * + * long bytesPerIteration = 16 * 1024; // 16K bytes long start = 0; while ( + * start < readChannel.size() ) { readChannel.transferTo(start, + * bytesPerIteration, writeChannel); start += bytesPerIteration; } + */ + + /* + * But it's easier to use this convenience method from the DataAccessIO: + * + * (if the underlying storage method for this file is local filesystem, the + * DataAccessIO will simply copy the file using Files.copy, like this: + * + * Files.copy(tempLocationPath, storageIO.getFileSystemLocation(), + * StandardCopyOption.REPLACE_EXISTING); + */ + dataAccess.savePath(tempLocationPath); + + // Set filesize in bytes + // + dataFile.setFilesize(dataAccess.getSize()); + savedSuccess = true; + logger.fine("Success: permanently saved file " + dataFile.getFileMetadata().getLabel()); + + } catch (IOException ioex) { + logger.warning("Failed to save the file, storage id " + dataFile.getStorageIdentifier() + " (" + ioex.getMessage() + ")"); + } finally { + if (readChannel != null) { + try { + readChannel.close(); + } catch (IOException e) { + } + } + if (writeChannel != null) { + try { + writeChannel.close(); + } catch (IOException e) { + } + } + } + + // Since we may have already spent some CPU cycles scaling down image thumbnails, + // we may as well save them, by moving these generated images to the permanent + // dataset directory. We should also remember to delete any such files in the + // temp directory: + List generatedTempFiles = listGeneratedTempFiles(Paths.get(FileUtil.getFilesTempDirectory()), + storageLocation); + if (generatedTempFiles != null) { + for (Path generated : generatedTempFiles) { + if (savedSuccess) { // no need to try to save this aux file permanently, if we've failed to + // save the main file! 
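Note (worked example, not part of the patch; the file name is made up): for a generated thumbnail named "1789abc-208dea3661bb50.thumb64", the lastIndexOf("thumb") logic below isolates the suffix "thumb64", so the thumbnail ends up stored as an aux object via:

    dataAccess.savePathAsAux(generated, "thumb64"); // the tag encodes the thumbnail size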
+ logger.fine("(Will also try to permanently save generated thumbnail file " + + generated.toString() + ")"); + try { + // Files.copy(generated, Paths.get(dataset.getFileSystemDirectory().toString(), + // generated.getFileName().toString())); + int i = generated.toString().lastIndexOf("thumb"); + if (i > 1) { + String extensionTag = generated.toString().substring(i); + dataAccess.savePathAsAux(generated, extensionTag); + logger.fine( + "Saved generated thumbnail as aux object. \"preview available\" status: " + + dataFile.isPreviewImageAvailable()); + } else { + logger.warning( + "Generated thumbnail file name does not match the expected pattern: " + + generated.toString()); + } + + } catch (IOException ioex) { + logger.warning("Failed to save generated file " + generated.toString()); + } + } + + // ... but we definitely want to delete it: + try { + Files.delete(generated); + } catch (IOException ioex) { + logger.warning("Failed to delete generated file " + generated.toString()); + } + } + } + + if (unattached) { + dataFile.setOwner(null); + } + // Any necessary post-processing: + // performPostProcessingTasks(dataFile); + } else { + try { + StorageIO dataAccess = DataAccess.getStorageIO(dataFile); + //Populate metadata + dataAccess.open(DataAccessOption.READ_ACCESS); + //set file size + dataFile.setFilesize(dataAccess.getSize()); + if(dataAccess instanceof S3AccessIO) { + ((S3AccessIO)dataAccess).removeTempTag(); + } + } catch (IOException ioex) { + logger.warning("Failed to get file size, storage id " + dataFile.getStorageIdentifier() + " (" + + ioex.getMessage() + ")"); + } + savedSuccess = true; + dataFile.setOwner(null); + } + + logger.fine("Done! Finished saving new files in permanent storage and adding them to the dataset."); + boolean belowLimit = false; + + try { + belowLimit = dataFile.getStorageIO().isBelowIngestSizeLimit(); + } catch (IOException e) { + logger.warning("Error getting ingest limit for file: " + dataFile.getIdentifier() + " : " + e.getMessage()); + } + + if (savedSuccess && belowLimit) { + // These are all brand new files, so they should all have + // one filemetadata total. -- L.A. + FileMetadata fileMetadata = dataFile.getFileMetadatas().get(0); + String fileName = fileMetadata.getLabel(); + + boolean metadataExtracted = false; + if (FileUtil.canIngestAsTabular(dataFile)) { + /* + * Note that we don't try to ingest the file right away - instead we mark it as + * "scheduled for ingest", then at the end of the save process it will be queued + * for async. ingest in the background. In the meantime, the file will be + * ingested as a regular, non-tabular file, and appear as such to the user, + * until the ingest job is finished with the Ingest Service. + */ + dataFile.SetIngestScheduled(); + } else if (fileMetadataExtractable(dataFile)) { + + try { + // FITS is the only type supported for metadata + // extraction, as of now. -- L.A. 
4.0 + dataFile.setContentType("application/fits"); + metadataExtracted = extractMetadata(tempFileLocation, dataFile, version); + } catch (IOException mex) { + logger.severe("Caught exception trying to extract indexable metadata from file " + + fileName + ", " + mex.getMessage()); + } + if (metadataExtracted) { + logger.fine("Successfully extracted indexable metadata from file " + fileName); + } else { + logger.fine("Failed to extract indexable metadata from file " + fileName); + } + } else if (FileUtil.MIME_TYPE_INGESTED_FILE.equals(dataFile.getContentType())) { + // Make sure no *uningested* tab-delimited files are saved with the type "text/tab-separated-values"! + // "text/tsv" should be used instead: + dataFile.setContentType(FileUtil.MIME_TYPE_TSV); + } + } + // ... and let's delete the main temp file if it exists: + if(tempLocationPath!=null) { + try { + logger.fine("Will attempt to delete the temp file " + tempLocationPath.toString()); + Files.delete(tempLocationPath); + } catch (IOException ex) { + // (non-fatal - it's just a temp file.) + logger.warning("Failed to delete temp file " + tempLocationPath.toString()); + } + } + if (savedSuccess) { + // temp dbug line + // System.out.println("ADDING FILE: " + fileName + "; for dataset: " + + // dataset.getGlobalId()); + // Make sure the file is attached to the dataset and to the version, if this + // hasn't been done yet: + if (dataFile.getOwner() == null) { + dataFile.setOwner(dataset); + + version.getFileMetadatas().add(dataFile.getFileMetadata()); + dataFile.getFileMetadata().setDatasetVersion(version); + dataset.getFiles().add(dataFile); + + if (dataFile.getFileMetadata().getCategories() != null) { + ListIterator dfcIt = dataFile.getFileMetadata().getCategories() + .listIterator(); + + while (dfcIt.hasNext()) { + DataFileCategory dataFileCategory = dfcIt.next(); + + if (dataFileCategory.getDataset() == null) { + DataFileCategory newCategory = dataset + .getCategoryByName(dataFileCategory.getName()); + if (newCategory != null) { + newCategory.addFileMetadata(dataFile.getFileMetadata()); + // dataFileCategory = newCategory; + dfcIt.set(newCategory); + } else { + dfcIt.remove(); + } + } + } + } + } + } + + ret.add(dataFile); + } + } + + return ret; + } + + public List saveAndAddFilesToDataset(DatasetVersion version, List newFiles, DataFile fileToReplace) { List ret = new ArrayList<>(); diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java index 13d4ed96815..fa199bd096c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java @@ -51,6 +51,23 @@ public class IngestUtil { private static final Logger logger = Logger.getLogger(IngestUtil.class.getCanonicalName()); + + public static void checkForDuplicateFileNamesFinal(DatasetVersion version, List newFiles) { + + // Step 1: create list of existing path names from all FileMetadata in the DatasetVersion + // unique path name: directoryLabel + file separator + fileLabel + Set pathNamesExisting = existingPathNamesAsSet(version); + + // Step 2: check each new DataFile against the list of path names, if a duplicate create a new unique file name + for (Iterator dfIt = newFiles.iterator(); dfIt.hasNext();) { + + FileMetadata fm = dfIt.next().getFileMetadata(); + + fm.setLabel(duplicateFilenameCheck(fm, pathNamesExisting)); + } + } + + /** * Checks a list of new data files for duplicate names, renaming any * duplicates to ensure 
that they are unique. From b9689b3f53053896dff8170cac8d5afdbdcce3d9 Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 16 Feb 2021 08:56:08 -0500 Subject: [PATCH 0084/1551] Resolved Globus API for multiple files input (dv version 5.3 ) --- .../harvard/iq/dataverse/api/Datasets.java | 202 ++++++++------ .../iq/dataverse/dataaccess/FileAccessIO.java | 5 - .../dataverse/dataaccess/InputStreamIO.java | 5 - .../iq/dataverse/dataaccess/S3AccessIO.java | 40 --- .../iq/dataverse/dataaccess/StorageIO.java | 1 - .../dataverse/dataaccess/SwiftAccessIO.java | 5 - .../datasetutility/AddReplaceFileHelper.java | 11 +- .../dataverse/ingest/IngestServiceBean.java | 260 ------------------ .../iq/dataverse/ingest/IngestUtil.java | 17 +- .../harvard/iq/dataverse/util/BundleUtil.java | 2 +- 10 files changed, 116 insertions(+), 432 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 7ad53638942..49dbd9bf257 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2314,8 +2314,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, try { authUser = findUserOrDie(); } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, - BundleUtil.getStringFromBundle("file.addreplace.error.auth") + return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth") ); } @@ -2349,8 +2348,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, for (DatasetVersion dv : dataset.getVersions()) { if (dv.isHasPackageFile()) { - return error(Response.Status.FORBIDDEN, - BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") + return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") ); } } @@ -2406,9 +2404,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, { StorageIO datasetSIO = DataAccess.getStorageIO(dataset); - for (S3ObjectSummary s3ObjectSummary : datasetSIO.listAuxObjects("")) { - - } + List cachedObjectsTags = datasetSIO.listAuxObjects(); DataverseRequest dvRequest = createDataverseRequest(authUser); AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper( @@ -2429,120 +2425,146 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, JsonArray filesJson = jsonObject.getJsonArray("files"); - // Start to add the files - if (filesJson != null) { - for (JsonObject fileJson : filesJson.getValuesAs(JsonObject.class)) { + int totalNumberofFiles = 0 ; + int successNumberofFiles = 0; + try { + // Start to add the files + if (filesJson != null) { + totalNumberofFiles = filesJson.getValuesAs(JsonObject.class).size(); + for (JsonObject fileJson : filesJson.getValuesAs(JsonObject.class)) { - String storageIdentifier = fileJson.getString("storageIdentifier"); //"s3://176ce6992af-208dea3661bb50" - String suppliedContentType = fileJson.getString("contentType"); - String fileName = fileJson.getString("fileName"); + String storageIdentifier = fileJson.getString("storageIdentifier"); //"s3://176ce6992af-208dea3661bb50" + String suppliedContentType = fileJson.getString("contentType"); + String fileName = fileJson.getString("fileName"); - String fullPath = datasetSIO.getStorageLocation() + "/" + storageIdentifier.replace("s3://", ""); + String fullPath = datasetSIO.getStorageLocation() + "/" + storageIdentifier.replace("s3://", ""); - String bucketName = 
System.getProperty("dataverse.files." + storageIdentifier.split(":")[0] + ".bucket-name"); + String bucketName = System.getProperty("dataverse.files." + storageIdentifier.split(":")[0] + ".bucket-name"); - String dbstorageIdentifier = storageIdentifier.split(":")[0] + "://" + bucketName + ":" + storageIdentifier.replace("s3://", ""); + String dbstorageIdentifier = storageIdentifier.split(":")[0] + "://" + bucketName + ":" + storageIdentifier.replace("s3://", ""); - // the storageidentifier should be unique - Query query = em.createQuery("select object(o) from DvObject as o where o.storageIdentifier = :storageIdentifier"); - query.setParameter("storageIdentifier", dbstorageIdentifier); + // the storageidentifier should be unique + Query query = em.createQuery("select object(o) from DvObject as o where o.storageIdentifier = :storageIdentifier"); + query.setParameter("storageIdentifier", dbstorageIdentifier); - if (query.getResultList().size() > 0) { - JsonObjectBuilder fileoutput= Json.createObjectBuilder() - .add("storageIdentifier " , storageIdentifier) - .add("message " , " The datatable is not updated since the Storage Identifier already exists in dvObject. "); + if (query.getResultList().size() > 0) { + JsonObjectBuilder fileoutput = Json.createObjectBuilder() + .add("storageIdentifier", storageIdentifier) + .add("message", " The datatable is not updated since the Storage Identifier already exists in dvObject. "); - jarr.add(fileoutput); - } else { + jarr.add(fileoutput); + } else { - // calculate mimeType - String finalType = StringUtils.isBlank(suppliedContentType) ? FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; + // calculate mimeType + String finalType = StringUtils.isBlank(suppliedContentType) ? FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; - String type = FileUtil.determineFileTypeByExtension(fileName); + String type = FileUtil.determineFileTypeByExtension(fileName); - if (!StringUtils.isBlank(type)) { - finalType = type; - } + if (!StringUtils.isBlank(type)) { + finalType = type; + } - JsonPatch path = Json.createPatchBuilder().add("/mimeType", finalType).build(); - fileJson = path.apply(fileJson); + JsonPatch path = Json.createPatchBuilder().add("/mimeType", finalType).build(); + fileJson = path.apply(fileJson); + + int count = 0; + // calculate md5 checksum + do { + try { + + StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); + InputStream in = dataFileStorageIO.getInputStream(); + String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); + + path = Json.createPatchBuilder().add("/md5Hash", checksumVal).build(); + fileJson = path.apply(fileJson); + count = 3; + } catch (Exception ex) { + count = count + 1; + ex.printStackTrace(); + logger.info(ex.getMessage()); + Thread.sleep(5000); + msgt(" ***** Try to calculate checksum again for " + fileName); + //error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to calculate checksum"); + } - // calculate md5 checksum - StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); - InputStream in = dataFileStorageIO.getInputStream(); - String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); + } while (count < 3); - path = Json.createPatchBuilder().add("/md5Hash", checksumVal).build(); - fileJson = path.apply(fileJson); + //--------------------------------------- + // Load up optional params via JSON + //--------------------------------------- - //--------------------------------------- - // Load up optional params via JSON - 
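Note (hedged sketch, not part of the patch): the per-file JSON this loop consumes can be pictured with the same javax.json API the endpoint already uses. storageIdentifier, fileName and contentType are the keys read above; mimeType and md5Hash are patched in by the code; the literal values here are illustrative:

    JsonObject exampleFileJson = Json.createObjectBuilder()
            .add("storageIdentifier", "s3://176ce6992af-208dea3661bb50")
            .add("fileName", "results.csv")
            .add("contentType", "text/csv")
            .build();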
//--------------------------------------- + OptionalFileParams optionalFileParams = null; - OptionalFileParams optionalFileParams = null; + try { + optionalFileParams = new OptionalFileParams(fileJson.toString()); + } catch (DataFileTagException ex) { + return error(Response.Status.BAD_REQUEST, ex.getMessage()); + } - try { - optionalFileParams = new OptionalFileParams(fileJson.toString()); - } catch (DataFileTagException ex) { - return error( Response.Status.BAD_REQUEST, ex.getMessage()); - } + msg("ADD!"); - msg("ADD!"); + //------------------- + // Run "runAddFileByDatasetId" + //------------------- + addFileHelper.runAddFileByDataset(dataset, + fileName, + finalType, + storageIdentifier, + null, + optionalFileParams, + globustype); - //------------------- - // Run "runAddFileByDatasetId" - //------------------- - addFileHelper.runAddFileByDataset(dataset, - fileName, - finalType, - storageIdentifier, - null, - optionalFileParams, - globustype); + if (addFileHelper.hasError()) { - if (addFileHelper.hasError()){ + JsonObjectBuilder fileoutput = Json.createObjectBuilder() + .add("storageIdentifier ", storageIdentifier) + .add("error Code: ", addFileHelper.getHttpErrorCode().toString()) + .add("message ", addFileHelper.getErrorMessagesAsString("\n")); - JsonObjectBuilder fileoutput= Json.createObjectBuilder() - .add("storageIdentifier " , storageIdentifier) - .add("error Code: " ,addFileHelper.getHttpErrorCode().toString()) - .add("message " , addFileHelper.getErrorMessagesAsString("\n")); + jarr.add(fileoutput); - jarr.add(fileoutput); + } else { + String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add"); - }else{ - String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add"); + JsonObject successresult = addFileHelper.getSuccessResultAsJsonObjectBuilder().build(); - JsonObject successresult = addFileHelper.getSuccessResultAsJsonObjectBuilder().build(); + try { + logger.fine("successMsg: " + successMsg); + String duplicateWarning = addFileHelper.getDuplicateFileWarning(); + if (duplicateWarning != null && !duplicateWarning.isEmpty()) { + // return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder()); + JsonObjectBuilder fileoutput = Json.createObjectBuilder() + .add("storageIdentifier ", storageIdentifier) + .add("warning message: ", addFileHelper.getDuplicateFileWarning()) + .add("message ", successresult.getJsonArray("files").getJsonObject(0)); + jarr.add(fileoutput); - try { - logger.fine("successMsg: " + successMsg); - String duplicateWarning = addFileHelper.getDuplicateFileWarning(); - if (duplicateWarning != null && !duplicateWarning.isEmpty()) { - // return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder()); - JsonObjectBuilder fileoutput= Json.createObjectBuilder() - .add("storageIdentifier " , storageIdentifier) - .add("warning message: " ,addFileHelper.getDuplicateFileWarning()) - .add("message " , successresult.getJsonArray("files").getJsonObject(0)); - jarr.add(fileoutput); + } else { + JsonObjectBuilder fileoutput = Json.createObjectBuilder() + .add("storageIdentifier ", storageIdentifier) + .add("message ", successresult.getJsonArray("files").getJsonObject(0)); + jarr.add(fileoutput); + } - } else { - JsonObjectBuilder fileoutput= Json.createObjectBuilder() - .add("storageIdentifier " , storageIdentifier) - .add("message " , successresult.getJsonArray("files").getJsonObject(0)); - jarr.add(fileoutput); + } catch (Exception ex) { + 
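// Note (editorial comment, not part of the patch): this is the fatal path for the
// whole request; any exception thrown while assembling the success entry is logged
// at SEVERE and converted into the HTTP 400 "NoFileException" response below.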
Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); + return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!"); } - - } catch (Exception ex) { - Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); - return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!"); } } + successNumberofFiles = successNumberofFiles + 1; } - } - }// End of adding files - + }// End of adding files + }catch (Exception e ) + { + Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, e); + return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!"); + } + logger.log(Level.INFO, "Total Number of Files " + totalNumberofFiles); + logger.log(Level.INFO, "Success Number of Files " + successNumberofFiles); DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.GlobusUpload); if (dcmLock == null) { logger.log(Level.WARNING, "Dataset not locked for Globus upload"); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java index d11d55ede9f..fa26232f6cf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java @@ -416,11 +416,6 @@ public void deleteAllAuxObjects() throws IOException { } } - - @Override - public List listAuxObjects(String s) throws IOException { - return null; - } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java index 2befee82d0c..90a32d49487 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java @@ -150,11 +150,6 @@ public OutputStream getOutputStream() throws IOException { throw new UnsupportedDataAccessOperationException("InputStreamIO: there is no output stream associated with this object."); } - @Override - public List listAuxObjects(String s) throws IOException { - return null; - } - @Override public InputStream getAuxFileAsInputStream(String auxItemTag) { throw new UnsupportedOperationException("InputStreamIO: this method is not supported in this DataAccess driver."); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 92026aef170..1deda4f49d1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -638,46 +638,6 @@ public List listAuxObjects() throws IOException { return ret; } - @Override - public List listAuxObjects(String s ) throws IOException { - if (!this.canWrite()) { - open(); - } - String prefix = getDestinationKey(""); - - List ret = new ArrayList<>(); - - System.out.println("======= bucketname ===== "+ bucketName); - System.out.println("======= prefix ===== "+ prefix); - - ListObjectsRequest req = new ListObjectsRequest().withBucketName(bucketName).withPrefix(prefix); - ObjectListing storedAuxFilesList = null; - try { - storedAuxFilesList = s3.listObjects(req); - } catch (SdkClientException sce) { - throw new IOException ("S3 listAuxObjects: failed to get a listing for "+prefix); - } - if (storedAuxFilesList == null) { - return ret; - } - List storedAuxFilesSummary = storedAuxFilesList.getObjectSummaries(); - try { - while 
(storedAuxFilesList.isTruncated()) { - logger.fine("S3 listAuxObjects: going to next page of list"); - storedAuxFilesList = s3.listNextBatchOfObjects(storedAuxFilesList); - if (storedAuxFilesList != null) { - storedAuxFilesSummary.addAll(storedAuxFilesList.getObjectSummaries()); - } - } - } catch (AmazonClientException ase) { - //logger.warning("Caught an AmazonServiceException in S3AccessIO.listAuxObjects(): " + ase.getMessage()); - throw new IOException("S3AccessIO: Failed to get aux objects for listing."); - } - - - return storedAuxFilesSummary; - } - @Override public void deleteAuxObject(String auxItemTag) throws IOException { if (!this.canWrite()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 9bfd9154323..6780984eb92 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -544,5 +544,4 @@ public boolean isBelowIngestSizeLimit() { } } - public abstract List<S3ObjectSummary> listAuxObjects(String s) throws IOException; } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java index bee67f85a55..eaebc86e35a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java @@ -875,11 +875,6 @@ public String getSwiftContainerName() { } return null; } - - @Override - public List<S3ObjectSummary> listAuxObjects(String s) throws IOException { - return null; - } //https://gist.github.com/ishikawa/88599 public static String toHexString(byte[] bytes) { diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index a3d86894251..c94b1a81d3a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -1534,17 +1534,10 @@ private boolean step_060_addFilesViaIngestService(){ this.addErrorSevere(getBundleErr("final_file_list_empty")); return false; } - - int nFiles = finalFileList.size(); - - if (!this.isGlobusFileAddOperation()) { - finalFileList = ingestService.saveAndAddFilesToDataset(workingVersion, finalFileList, fileToReplace); - } - else { - finalFileList = ingestService.saveAndAddFilesToDataset(workingVersion, finalFileList, isFileReplaceOperation()); - } + int nFiles = finalFileList.size(); + finalFileList = ingestService.saveAndAddFilesToDataset(workingVersion, finalFileList, fileToReplace); if (nFiles != finalFileList.size()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 035922f0724..b58a34a79ae 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -156,266 +156,6 @@ public class IngestServiceBean { // It must be called before we attempt to permanently save the files in // the database by calling the Save command on the dataset and/or version.
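Note (context, not part of the patch): the saveAndAddFilesToDataset overload removed below, the boolean isReplaceOperation variant that the previous commit introduced for the Globus path, is dropped again, so the caller in AddReplaceFileHelper (see the hunk above) returns to the original signature:

    finalFileList = ingestService.saveAndAddFilesToDataset(workingVersion, finalFileList, fileToReplace);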
- public List saveAndAddFilesToDataset(DatasetVersion version, List newFiles, boolean isReplaceOperation) { - List ret = new ArrayList<>(); - - if (newFiles != null && newFiles.size() > 0) { - // ret = new ArrayList<>(); - // final check for duplicate file names; - // we tried to make the file names unique on upload, but then - // the user may have edited them on the "add files" page, and - // renamed FOOBAR-1.txt back to FOOBAR.txt... - //Don't change the name if we're replacing a file - (the original hasn't yet been deleted but will be in a later step) - if(!isReplaceOperation) { - IngestUtil.checkForDuplicateFileNamesFinal(version, newFiles); - } - Dataset dataset = version.getDataset(); - - for (DataFile dataFile : newFiles) { - boolean unattached = false; - boolean savedSuccess = false; - if (dataFile.getOwner() == null) { - unattached = true; - dataFile.setOwner(dataset); - } - - String[] storageInfo = DataAccess.getDriverIdAndStorageLocation(dataFile.getStorageIdentifier()); - String driverType = DataAccess.getDriverType(storageInfo[0]); - String storageLocation = storageInfo[1]; - String tempFileLocation = null; - Path tempLocationPath = null; - if (driverType.equals("tmp")) { //"tmp" is the default if no prefix or the "tmp://" driver - tempFileLocation = FileUtil.getFilesTempDirectory() + "/" + storageLocation; - - // Try to save the file in its permanent location: - tempLocationPath = Paths.get(tempFileLocation); - WritableByteChannel writeChannel = null; - FileChannel readChannel = null; - - StorageIO dataAccess = null; - - try { - logger.fine("Attempting to create a new storageIO object for " + storageLocation); - dataAccess = DataAccess.createNewStorageIO(dataFile, storageLocation); - - logger.fine("Successfully created a new storageIO object."); - /* - * This commented-out code demonstrates how to copy bytes from a local - * InputStream (or a readChannel) into the writable byte channel of a Dataverse - * DataAccessIO object: - */ - - /* - * storageIO.open(DataAccessOption.WRITE_ACCESS); - * - * writeChannel = storageIO.getWriteChannel(); readChannel = new - * FileInputStream(tempLocationPath.toFile()).getChannel(); - * - * long bytesPerIteration = 16 * 1024; // 16K bytes long start = 0; while ( - * start < readChannel.size() ) { readChannel.transferTo(start, - * bytesPerIteration, writeChannel); start += bytesPerIteration; } - */ - - /* - * But it's easier to use this convenience method from the DataAccessIO: - * - * (if the underlying storage method for this file is local filesystem, the - * DataAccessIO will simply copy the file using Files.copy, like this: - * - * Files.copy(tempLocationPath, storageIO.getFileSystemLocation(), - * StandardCopyOption.REPLACE_EXISTING); - */ - dataAccess.savePath(tempLocationPath); - - // Set filesize in bytes - // - dataFile.setFilesize(dataAccess.getSize()); - savedSuccess = true; - logger.fine("Success: permanently saved file " + dataFile.getFileMetadata().getLabel()); - - } catch (IOException ioex) { - logger.warning("Failed to save the file, storage id " + dataFile.getStorageIdentifier() + " (" + ioex.getMessage() + ")"); - } finally { - if (readChannel != null) { - try { - readChannel.close(); - } catch (IOException e) { - } - } - if (writeChannel != null) { - try { - writeChannel.close(); - } catch (IOException e) { - } - } - } - - // Since we may have already spent some CPU cycles scaling down image thumbnails, - // we may as well save them, by moving these generated images to the permanent - // dataset directory. 
We should also remember to delete any such files in the - // temp directory: - List generatedTempFiles = listGeneratedTempFiles(Paths.get(FileUtil.getFilesTempDirectory()), - storageLocation); - if (generatedTempFiles != null) { - for (Path generated : generatedTempFiles) { - if (savedSuccess) { // no need to try to save this aux file permanently, if we've failed to - // save the main file! - logger.fine("(Will also try to permanently save generated thumbnail file " - + generated.toString() + ")"); - try { - // Files.copy(generated, Paths.get(dataset.getFileSystemDirectory().toString(), - // generated.getFileName().toString())); - int i = generated.toString().lastIndexOf("thumb"); - if (i > 1) { - String extensionTag = generated.toString().substring(i); - dataAccess.savePathAsAux(generated, extensionTag); - logger.fine( - "Saved generated thumbnail as aux object. \"preview available\" status: " - + dataFile.isPreviewImageAvailable()); - } else { - logger.warning( - "Generated thumbnail file name does not match the expected pattern: " - + generated.toString()); - } - - } catch (IOException ioex) { - logger.warning("Failed to save generated file " + generated.toString()); - } - } - - // ... but we definitely want to delete it: - try { - Files.delete(generated); - } catch (IOException ioex) { - logger.warning("Failed to delete generated file " + generated.toString()); - } - } - } - - if (unattached) { - dataFile.setOwner(null); - } - // Any necessary post-processing: - // performPostProcessingTasks(dataFile); - } else { - try { - StorageIO dataAccess = DataAccess.getStorageIO(dataFile); - //Populate metadata - dataAccess.open(DataAccessOption.READ_ACCESS); - //set file size - dataFile.setFilesize(dataAccess.getSize()); - if(dataAccess instanceof S3AccessIO) { - ((S3AccessIO)dataAccess).removeTempTag(); - } - } catch (IOException ioex) { - logger.warning("Failed to get file size, storage id " + dataFile.getStorageIdentifier() + " (" - + ioex.getMessage() + ")"); - } - savedSuccess = true; - dataFile.setOwner(null); - } - - logger.fine("Done! Finished saving new files in permanent storage and adding them to the dataset."); - boolean belowLimit = false; - - try { - belowLimit = dataFile.getStorageIO().isBelowIngestSizeLimit(); - } catch (IOException e) { - logger.warning("Error getting ingest limit for file: " + dataFile.getIdentifier() + " : " + e.getMessage()); - } - - if (savedSuccess && belowLimit) { - // These are all brand new files, so they should all have - // one filemetadata total. -- L.A. - FileMetadata fileMetadata = dataFile.getFileMetadatas().get(0); - String fileName = fileMetadata.getLabel(); - - boolean metadataExtracted = false; - if (FileUtil.canIngestAsTabular(dataFile)) { - /* - * Note that we don't try to ingest the file right away - instead we mark it as - * "scheduled for ingest", then at the end of the save process it will be queued - * for async. ingest in the background. In the meantime, the file will be - * ingested as a regular, non-tabular file, and appear as such to the user, - * until the ingest job is finished with the Ingest Service. - */ - dataFile.SetIngestScheduled(); - } else if (fileMetadataExtractable(dataFile)) { - - try { - // FITS is the only type supported for metadata - // extraction, as of now. -- L.A. 
4.0 - dataFile.setContentType("application/fits"); - metadataExtracted = extractMetadata(tempFileLocation, dataFile, version); - } catch (IOException mex) { - logger.severe("Caught exception trying to extract indexable metadata from file " - + fileName + ", " + mex.getMessage()); - } - if (metadataExtracted) { - logger.fine("Successfully extracted indexable metadata from file " + fileName); - } else { - logger.fine("Failed to extract indexable metadata from file " + fileName); - } - } else if (FileUtil.MIME_TYPE_INGESTED_FILE.equals(dataFile.getContentType())) { - // Make sure no *uningested* tab-delimited files are saved with the type "text/tab-separated-values"! - // "text/tsv" should be used instead: - dataFile.setContentType(FileUtil.MIME_TYPE_TSV); - } - } - // ... and let's delete the main temp file if it exists: - if(tempLocationPath!=null) { - try { - logger.fine("Will attempt to delete the temp file " + tempLocationPath.toString()); - Files.delete(tempLocationPath); - } catch (IOException ex) { - // (non-fatal - it's just a temp file.) - logger.warning("Failed to delete temp file " + tempLocationPath.toString()); - } - } - if (savedSuccess) { - // temp dbug line - // System.out.println("ADDING FILE: " + fileName + "; for dataset: " + - // dataset.getGlobalId()); - // Make sure the file is attached to the dataset and to the version, if this - // hasn't been done yet: - if (dataFile.getOwner() == null) { - dataFile.setOwner(dataset); - - version.getFileMetadatas().add(dataFile.getFileMetadata()); - dataFile.getFileMetadata().setDatasetVersion(version); - dataset.getFiles().add(dataFile); - - if (dataFile.getFileMetadata().getCategories() != null) { - ListIterator dfcIt = dataFile.getFileMetadata().getCategories() - .listIterator(); - - while (dfcIt.hasNext()) { - DataFileCategory dataFileCategory = dfcIt.next(); - - if (dataFileCategory.getDataset() == null) { - DataFileCategory newCategory = dataset - .getCategoryByName(dataFileCategory.getName()); - if (newCategory != null) { - newCategory.addFileMetadata(dataFile.getFileMetadata()); - // dataFileCategory = newCategory; - dfcIt.set(newCategory); - } else { - dfcIt.remove(); - } - } - } - } - } - } - - ret.add(dataFile); - } - } - - return ret; - } - - public List saveAndAddFilesToDataset(DatasetVersion version, List newFiles, DataFile fileToReplace) { List ret = new ArrayList<>(); diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java index fa199bd096c..7363d9d9430 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java @@ -52,21 +52,6 @@ public class IngestUtil { private static final Logger logger = Logger.getLogger(IngestUtil.class.getCanonicalName()); - public static void checkForDuplicateFileNamesFinal(DatasetVersion version, List newFiles) { - - // Step 1: create list of existing path names from all FileMetadata in the DatasetVersion - // unique path name: directoryLabel + file separator + fileLabel - Set pathNamesExisting = existingPathNamesAsSet(version); - - // Step 2: check each new DataFile against the list of path names, if a duplicate create a new unique file name - for (Iterator dfIt = newFiles.iterator(); dfIt.hasNext();) { - - FileMetadata fm = dfIt.next().getFileMetadata(); - - fm.setLabel(duplicateFilenameCheck(fm, pathNamesExisting)); - } - } - /** * Checks a list of new data files for duplicate names, renaming any @@ -274,7 +259,7 @@ public 
static Set existingPathNamesAsSet(DatasetVersion version, FileMet // #6942 added proxy for existing files to a boolean set when dataset version copy is done for (Iterator fmIt = version.getFileMetadatas().iterator(); fmIt.hasNext();) { FileMetadata fm = fmIt.next(); - if((fm.isInPriorVersion() || fm.getId() != null) && (replacedFmd==null) || (!fm.getDataFile().equals(replacedFmd.getDataFile()))) { + if((fm.isInPriorVersion() || fm.getId() != null) && (replacedFmd==null || !fm.getDataFile().equals(replacedFmd.getDataFile()))) { String existingName = fm.getLabel(); String existingDir = fm.getDirectoryLabel(); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java index ca12683de15..a9511c65730 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java @@ -56,7 +56,7 @@ private static String getStringFromBundleNoMissingCheck(String key, List String stringFromBundle = null; stringFromBundle = bundle.getString(key); - logger.fine("string found: " + stringFromBundle); + //logger.fine("string found: " + stringFromBundle); if (arguments != null) { Object[] argArray = new String[arguments.size()]; From f8b7c3e2a630595a2d553e542c32b89b171bb24b Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 16 Feb 2021 09:08:56 -0500 Subject: [PATCH 0085/1551] Removed unwanted statements --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 1 - .../java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java | 1 - .../edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java | 1 - .../java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java | 3 --- .../java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java | 2 -- .../edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java | 1 - .../iq/dataverse/datasetutility/AddReplaceFileHelper.java | 2 -- .../edu/harvard/iq/dataverse/ingest/IngestServiceBean.java | 1 - src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java | 2 -- src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java | 2 +- 10 files changed, 1 insertion(+), 15 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 49dbd9bf257..4382e6ee588 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2386,7 +2386,6 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, do { try { String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); - basicGlobusToken = "ODA0ODBhNzEtODA5ZC00ZTJhLWExNmQtY2JkMzA1NTk0ZDdhOmQvM3NFd1BVUGY0V20ra2hkSkF3NTZMWFJPaFZSTVhnRmR3TU5qM2Q3TjA9"; AccessToken clientTokenUser = globusServiceBean.getClientToken(basicGlobusToken); success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskIdentifier); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java index fa26232f6cf..a92c6a5a5f6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java @@ -35,7 +35,6 @@ // Dataverse imports: -import com.amazonaws.services.s3.model.S3ObjectSummary; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.Dataverse; diff --git 
a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java index 90a32d49487..c9796d24b27 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java @@ -5,7 +5,6 @@ */ package edu.harvard.iq.dataverse.dataaccess; -import com.amazonaws.services.s3.model.S3ObjectSummary; import edu.harvard.iq.dataverse.DataFile; import java.io.IOException; import java.io.InputStream; diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 1deda4f49d1..eaa4de8d705 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -4,8 +4,6 @@ import com.amazonaws.ClientConfiguration; import com.amazonaws.HttpMethod; import com.amazonaws.SdkClientException; -import com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.auth.profile.ProfileCredentialsProvider; import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.services.s3.AmazonS3; @@ -878,7 +876,6 @@ public String generateTemporaryS3Url() throws IOException { return s.toString(); } - //throw new IOException("Failed to generate temporary S3 url for "+key); return null; } else if (dvObject instanceof Dataset) { diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 6780984eb92..2f66eec5f4c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -37,7 +37,6 @@ import java.util.Iterator; import java.util.List; -import com.amazonaws.services.s3.model.S3ObjectSummary; //import org.apache.commons.httpclient.Header; //import org.apache.commons.httpclient.methods.GetMethod; @@ -543,5 +542,4 @@ public boolean isBelowIngestSizeLimit() { return true; } } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java index eaebc86e35a..5bdee44f1e5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java @@ -1,5 +1,4 @@ package edu.harvard.iq.dataverse.dataaccess; -import com.amazonaws.services.s3.model.S3ObjectSummary; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.Dataverse; diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index c94b1a81d3a..afd513b244d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -1536,10 +1536,8 @@ private boolean step_060_addFilesViaIngestService(){ } int nFiles = finalFileList.size(); - finalFileList = ingestService.saveAndAddFilesToDataset(workingVersion, finalFileList, fileToReplace); - if (nFiles != finalFileList.size()) { if (nFiles == 1) { addError("Failed to save the content of the uploaded file."); diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java 
b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index b58a34a79ae..4d69464c91b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -155,7 +155,6 @@ public class IngestServiceBean { // DataFileCategory objects, if any were already assigned to the files). // It must be called before we attempt to permanently save the files in // the database by calling the Save command on the dataset and/or version. - public List saveAndAddFilesToDataset(DatasetVersion version, List newFiles, DataFile fileToReplace) { List ret = new ArrayList<>(); diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java index 7363d9d9430..356ac4f30ae 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java @@ -51,8 +51,6 @@ public class IngestUtil { private static final Logger logger = Logger.getLogger(IngestUtil.class.getCanonicalName()); - - /** * Checks a list of new data files for duplicate names, renaming any * duplicates to ensure that they are unique. diff --git a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java index a9511c65730..ca12683de15 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java @@ -56,7 +56,7 @@ private static String getStringFromBundleNoMissingCheck(String key, List String stringFromBundle = null; stringFromBundle = bundle.getString(key); - //logger.fine("string found: " + stringFromBundle); + logger.fine("string found: " + stringFromBundle); if (arguments != null) { Object[] argArray = new String[arguments.size()]; From d6480aa7cc4f09fa73619af2cc08719b9a84b687 Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 16 Feb 2021 09:25:30 -0500 Subject: [PATCH 0086/1551] mimeType is calculated only from file extension --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 4382e6ee588..9b8c1deb90b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2433,7 +2433,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, for (JsonObject fileJson : filesJson.getValuesAs(JsonObject.class)) { String storageIdentifier = fileJson.getString("storageIdentifier"); //"s3://176ce6992af-208dea3661bb50" - String suppliedContentType = fileJson.getString("contentType"); + //String suppliedContentType = fileJson.getString("contentType"); String fileName = fileJson.getString("fileName"); String fullPath = datasetSIO.getStorageLocation() + "/" + storageIdentifier.replace("s3://", ""); @@ -2455,7 +2455,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, } else { // calculate mimeType - String finalType = StringUtils.isBlank(suppliedContentType) ? 
FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; + String finalType = FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT; String type = FileUtil.determineFileTypeByExtension(fileName); From 22134188bf9f24c931a4b29c5fc4b2603301e956 Mon Sep 17 00:00:00 2001 From: chenganj Date: Thu, 18 Feb 2021 09:07:22 -0500 Subject: [PATCH 0087/1551] corrected compilation errors --- .../edu/harvard/iq/dataverse/DatasetLock.java | 3 - .../harvard/iq/dataverse/api/GlobusApi.java | 7 ++- .../dataverse/dataaccess/InputStreamIO.java | 5 -- .../iq/dataverse/dataaccess/StorageIO.java | 2 +- .../harvard/iq/dataverse/util/FileUtil.java | 15 +---- src/main/webapp/editFilesFragment.xhtml | 63 ++++++++++++++++++- .../file-download-button-fragment.xhtml | 11 ++++ 7 files changed, 80 insertions(+), 26 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java b/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java index f3dc4922f6e..62eec80af17 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java @@ -72,9 +72,6 @@ public enum Reason { /** DCM (rsync) upload in progress */ DcmUpload, - /** Globus upload in progress */ - GlobusUpload, - /** Globus upload in progress */ GlobusUpload, diff --git a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java index 078da050f28..c26b1bec184 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java @@ -203,10 +203,12 @@ public Response globus(@PathParam("id") String datasetId, if (filesJson != null) { for (JsonObject fileJson : filesJson.getValuesAs(JsonObject.class)) { - +/* for (S3ObjectSummary s3ObjectSummary : datasetSIO.listAuxObjects("")) { } + */ + String storageIdentifier = fileJson.getString("storageIdentifier"); String suppliedContentType = fileJson.getString("contentType"); @@ -238,7 +240,8 @@ public Response globus(@PathParam("id") String datasetId, String type = FileUtil.determineFileTypeByExtension(fileName); if (!StringUtils.isBlank(type)) { //Use rules for deciding when to trust browser supplied type - if (FileUtil.useRecognizedType(finalType, type)) { + //if (FileUtil.useRecognizedType(finalType, type)) + { finalType = type; } logger.info("Supplied type: " + suppliedContentType + ", finalType: " + finalType); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java index e244b8a788a..52dff797e33 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java @@ -151,11 +151,6 @@ public OutputStream getOutputStream() throws IOException { throw new UnsupportedDataAccessOperationException("InputStreamIO: there is no output stream associated with this object."); } - @Override - public List listAuxObjects(String s) throws IOException { - return null; - } - @Override public InputStream getAuxFileAsInputStream(String auxItemTag) { throw new UnsupportedOperationException("InputStreamIO: this method is not supported in this DataAccess driver."); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 9bfd9154323..b3877252bd4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -544,5 +544,5 @@ public boolean isBelowIngestSizeLimit() { } } - public abstract ListlistAuxObjects(String s) throws IOException; + } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index e588dd5659f..6d0c88e886d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -20,8 +20,6 @@ package edu.harvard.iq.dataverse.util; -import static edu.harvard.iq.dataverse.dataaccess.S3AccessIO.S3_IDENTIFIER_PREFIX; - import com.amazonaws.services.s3.model.S3ObjectSummary; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFile.ChecksumType; @@ -1146,7 +1144,7 @@ public static List createDataFiles(DatasetVersion version, InputStream } // end createDataFiles - public static boolean useRecognizedType(String suppliedContentType, String recognizedType) { + private static boolean useRecognizedType(String suppliedContentType, String recognizedType) { // is it any better than the type that was supplied to us, // if any? // This is not as trivial a task as one might expect... @@ -1378,17 +1376,6 @@ public static void generateS3PackageStorageIdentifier(DataFile dataFile) { String storageId = driverId + "://" + bucketName + ":" + dataFile.getFileMetadata().getLabel(); dataFile.setStorageIdentifier(storageId); } - - public static void generateS3PackageStorageIdentifierForGlobus(DataFile dataFile) { - String bucketName = System.getProperty("dataverse.files.s3-bucket-name"); - String storageId = null; - if ( dataFile.getFileMetadata().getDirectoryLabel() != null && !dataFile.getFileMetadata().getDirectoryLabel().equals("")) { - storageId = S3_IDENTIFIER_PREFIX + "://" + bucketName + ":" + dataFile.getFileMetadata().getDirectoryLabel() + "/" + dataFile.getFileMetadata().getLabel(); - } else { - storageId = S3_IDENTIFIER_PREFIX + "://" + bucketName + ":" + dataFile.getFileMetadata().getLabel(); - } - dataFile.setStorageIdentifier(storageId); - } public static void generateStorageIdentifier(DataFile dataFile) { //Is it true that this is only used for temp files and we could safely prepend "tmp://" to indicate that? diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index e5e12201fc8..d8d3081afef 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -276,7 +276,54 @@
[Garbled hunk: the XHTML markup of this addition was stripped during extraction and cannot be reconstructed. The hunk adds a "Globus" panel to the file-upload fragment; its recoverable user-facing text is:]
+        Globus
+        #{bundle['file.createGlobusUploadDisabled']}
+        BEFORE YOU START: You will need to set up a free account with Globus and have Globus Connect Personal running on your computer to transfer files to and from the service.
+        Once the Globus transfer has finished, you will get an email notification. Please come back here and press the following button:
+        Click here to view the dataset page: #{EditDatafilesPage.dataset.displayName}.
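A side note on the openGlobus() helper added in the next hunk: JavaScript's encodeURI() takes a single argument, so the "UTF-8" passed there is silently ignored (encodeURI always operates on the UTF-8 form anyway). For illustration only, here is a server-side Java sketch of the same authorize URL; the endpoint, scope and query parameters are taken from that hunk, while the class and method names are assumptions:

    // Hypothetical sketch -- not part of this patch series.
    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;

    public final class GlobusAuthUrlSketch {
        static String build(String clientId, String datasetId, String baseUrl) {
            // URLEncoder encodes spaces as '+', matching the '+'-separated scope string in the JS.
            String scope = URLEncoder.encode(
                    "openid email profile urn:globus:auth:scope:transfer.api.globus.org:all",
                    StandardCharsets.UTF_8);
            return "https://auth.globus.org/v2/oauth2/authorize"
                    + "?client_id=" + clientId
                    + "&response_type=code"
                    + "&scope=" + scope
                    + "&state=" + datasetId
                    + "&redirect_uri=" + URLEncoder.encode(baseUrl + "/globus.xhtml", StandardCharsets.UTF_8);
        }
    }

Building the URL server-side would also avoid the string-concatenation encoding pitfalls the JavaScript version has.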
@@ -985,6 +1032,20 @@ return true; } } + + function openGlobus(datasetId, client_id) { + var res = location.protocol+'//'+location.hostname+(location.port ? ':'+location.port: ''); + + var scope = encodeURI("openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all", "UTF-8"); + + var new_url = "https://auth.globus.org/v2/oauth2/authorize?client_id=" + client_id + "&response_type=code&" + + "scope=" + scope + "&state=" + datasetId; + new_url = new_url + "&redirect_uri=" + res + "%2Fglobus.xhtml" ; + + + var myWindows = window.open(new_url); + } + //]]> diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index 85fe60863b4..cafe1875590 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -58,6 +58,17 @@ #{bundle.download} + + + + + + #{bundle['file.downloadFromGlobus']} + From b6f8f0fad123a67ef6e9d6af5628064110eab9e9 Mon Sep 17 00:00:00 2001 From: chenganj Date: Mon, 1 Mar 2021 08:58:21 -0500 Subject: [PATCH 0088/1551] sql scripts --- .../V4.11.0.1__5565-sanitize-directory-labels.sql | 9 +++++++++ .../V4.11__5513-database-variablemetadata.sql | 5 +++++ .../V4.12.0.1__4.13-re-sanitize-filemetadata.sql | 12 ++++++++++++ .../db/migration/V4.13.0.1__3575-usernames.sql | 1 + .../db/migration/V4.14.0.1__5822-export-var-meta.sql | 2 ++ .../db/migration/V4.15.0.1__2043-split-gbr-table.sql | 10 ++++++++++ .../V4.16.0.1__5303-addColumn-to-settingTable.sql | 10 ++++++++++ .../db/migration/V4.16.0.2__5028-dataset-explore.sql | 3 +++ .../V4.16.0.3__6156-FooterImageforSub-Dataverse.sql | 4 ++++ .../migration/V4.17.0.1__5991-update-scribejava.sql | 1 + .../migration/V4.17.0.2__3578-file-page-preview.sql | 5 +++++ .../V4.18.1.1__6459-contenttype-nullable.sql | 2 ++ .../db/migration/V4.19.0.1__6485_multistore.sql | 3 +++ .../V4.19.0.2__6644-update-editor-role-alias.sql | 2 ++ ...0.1__2734-alter-data-table-add-orig-file-name.sql | 2 ++ .../V4.20.0.2__6748-configure-dropdown-toolname.sql | 2 ++ .../db/migration/V4.20.0.3__6558-file-validation.sql | 4 ++++ .../migration/V4.20.0.4__6936-maildomain-groups.sql | 1 + .../migration/V4.20.0.5__6505-zipdownload-jobs.sql | 2 ++ ....0.1__6872-assign-storage-drivers-to-datasets.sql | 1 + 20 files changed, 81 insertions(+) create mode 100644 src/main/resources/db/migration/V4.11.0.1__5565-sanitize-directory-labels.sql create mode 100644 src/main/resources/db/migration/V4.11__5513-database-variablemetadata.sql create mode 100644 src/main/resources/db/migration/V4.12.0.1__4.13-re-sanitize-filemetadata.sql create mode 100644 src/main/resources/db/migration/V4.13.0.1__3575-usernames.sql create mode 100644 src/main/resources/db/migration/V4.14.0.1__5822-export-var-meta.sql create mode 100644 src/main/resources/db/migration/V4.15.0.1__2043-split-gbr-table.sql create mode 100644 src/main/resources/db/migration/V4.16.0.1__5303-addColumn-to-settingTable.sql create mode 100644 src/main/resources/db/migration/V4.16.0.2__5028-dataset-explore.sql create mode 100644 src/main/resources/db/migration/V4.16.0.3__6156-FooterImageforSub-Dataverse.sql create mode 100644 src/main/resources/db/migration/V4.17.0.1__5991-update-scribejava.sql create mode 100644 src/main/resources/db/migration/V4.17.0.2__3578-file-page-preview.sql create mode 100644 src/main/resources/db/migration/V4.18.1.1__6459-contenttype-nullable.sql create mode 100644 src/main/resources/db/migration/V4.19.0.1__6485_multistore.sql create mode 100644 
src/main/resources/db/migration/V4.19.0.2__6644-update-editor-role-alias.sql create mode 100644 src/main/resources/db/migration/V4.20.0.1__2734-alter-data-table-add-orig-file-name.sql create mode 100644 src/main/resources/db/migration/V4.20.0.2__6748-configure-dropdown-toolname.sql create mode 100644 src/main/resources/db/migration/V4.20.0.3__6558-file-validation.sql create mode 100644 src/main/resources/db/migration/V4.20.0.4__6936-maildomain-groups.sql create mode 100644 src/main/resources/db/migration/V4.20.0.5__6505-zipdownload-jobs.sql create mode 100644 src/main/resources/db/migration/V5.0.0.1__6872-assign-storage-drivers-to-datasets.sql diff --git a/src/main/resources/db/migration/V4.11.0.1__5565-sanitize-directory-labels.sql b/src/main/resources/db/migration/V4.11.0.1__5565-sanitize-directory-labels.sql new file mode 100644 index 00000000000..3d3ed777c9f --- /dev/null +++ b/src/main/resources/db/migration/V4.11.0.1__5565-sanitize-directory-labels.sql @@ -0,0 +1,9 @@ +-- replace any sequences of slashes and backslashes with a single slash: +UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '[/\\][/\\]+', '/', 'g'); +-- strip (and replace with a .) any characters that are no longer allowed in the directory labels: +UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '\.\.+', '.', 'g'); +-- now replace any sequences of .s with a single .: +UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '\.\.+', '.', 'g'); +-- get rid of any leading or trailing slashes, spaces, '-'s and '.'s: +UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '^[/ .\-]+', '', ''); +UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '[/ \.\-]+$', '', ''); diff --git a/src/main/resources/db/migration/V4.11__5513-database-variablemetadata.sql b/src/main/resources/db/migration/V4.11__5513-database-variablemetadata.sql new file mode 100644 index 00000000000..3c29a974bae --- /dev/null +++ b/src/main/resources/db/migration/V4.11__5513-database-variablemetadata.sql @@ -0,0 +1,5 @@ +-- universe is dropped since it is empty in the dataverse +-- this column will be moved to variablemetadata table +-- issue 5513 +ALTER TABLE datavariable +DROP COLUMN if exists universe; diff --git a/src/main/resources/db/migration/V4.12.0.1__4.13-re-sanitize-filemetadata.sql b/src/main/resources/db/migration/V4.12.0.1__4.13-re-sanitize-filemetadata.sql new file mode 100644 index 00000000000..8623ed97b70 --- /dev/null +++ b/src/main/resources/db/migration/V4.12.0.1__4.13-re-sanitize-filemetadata.sql @@ -0,0 +1,12 @@ +-- let's try again and fix the existing directoryLabels: +-- (the script shipped with 4.12 was missing the most important line; bad copy-and-paste) +-- replace any sequences of slashes and backslashes with a single slash: +UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '[/\\][/\\]+', '/', 'g'); +-- strip (and replace with a .) any characters that are no longer allowed in the directory labels: +-- (this line was missing from the script released with 4.12!!) 
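+-- (the character class below keeps letters, digits, '_', ' ', '.', '/' and '-'; everything else is replaced with a '.')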
+UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '[^A-Za-z0-9_ ./-]+', '.', 'g'); +-- now replace any sequences of .s with a single .: +UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '\.\.+', '.', 'g'); +-- get rid of any leading or trailing slashes, spaces, '-'s and '.'s: +UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '^[/ .\-]+', '', ''); +UPDATE filemetadata SET directoryLabel = regexp_replace(directoryLabel, '[/ \.\-]+$', '', ''); diff --git a/src/main/resources/db/migration/V4.13.0.1__3575-usernames.sql b/src/main/resources/db/migration/V4.13.0.1__3575-usernames.sql new file mode 100644 index 00000000000..9e35623c455 --- /dev/null +++ b/src/main/resources/db/migration/V4.13.0.1__3575-usernames.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX IF NOT EXISTS index_authenticateduser_lower_useridentifier ON authenticateduser (lower(useridentifier)); diff --git a/src/main/resources/db/migration/V4.14.0.1__5822-export-var-meta.sql b/src/main/resources/db/migration/V4.14.0.1__5822-export-var-meta.sql new file mode 100644 index 00000000000..e65f52c7c91 --- /dev/null +++ b/src/main/resources/db/migration/V4.14.0.1__5822-export-var-meta.sql @@ -0,0 +1,2 @@ +ALTER TABLE variablemetadata +ADD COLUMN IF NOT EXISTS postquestion text; diff --git a/src/main/resources/db/migration/V4.15.0.1__2043-split-gbr-table.sql b/src/main/resources/db/migration/V4.15.0.1__2043-split-gbr-table.sql new file mode 100644 index 00000000000..adde91ee1b0 --- /dev/null +++ b/src/main/resources/db/migration/V4.15.0.1__2043-split-gbr-table.sql @@ -0,0 +1,10 @@ +DO $$ +BEGIN +IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='guestbookresponse' AND column_name='downloadtype') THEN + INSERT INTO filedownload(guestbookresponse_id, downloadtype, downloadtimestamp, sessionid) SELECT id, downloadtype, responsetime, sessionid FROM guestbookresponse; + ALTER TABLE guestbookresponse DROP COLUMN downloadtype, DROP COLUMN sessionid; +END IF; +END +$$ + + diff --git a/src/main/resources/db/migration/V4.16.0.1__5303-addColumn-to-settingTable.sql b/src/main/resources/db/migration/V4.16.0.1__5303-addColumn-to-settingTable.sql new file mode 100644 index 00000000000..66bcb78601c --- /dev/null +++ b/src/main/resources/db/migration/V4.16.0.1__5303-addColumn-to-settingTable.sql @@ -0,0 +1,10 @@ +ALTER TABLE ONLY setting DROP CONSTRAINT setting_pkey ; + +ALTER TABLE setting ADD COLUMN IF NOT EXISTS ID SERIAL PRIMARY KEY; + +ALTER TABLE setting ADD COLUMN IF NOT EXISTS lang text; + + +CREATE UNIQUE INDEX IF NOT EXISTS unique_settings + ON setting + (name, coalesce(lang, '')); diff --git a/src/main/resources/db/migration/V4.16.0.2__5028-dataset-explore.sql b/src/main/resources/db/migration/V4.16.0.2__5028-dataset-explore.sql new file mode 100644 index 00000000000..d880b1bddb4 --- /dev/null +++ b/src/main/resources/db/migration/V4.16.0.2__5028-dataset-explore.sql @@ -0,0 +1,3 @@ +ALTER TABLE externaltool ADD COLUMN IF NOT EXISTS scope VARCHAR(255); +UPDATE externaltool SET scope = 'FILE'; +ALTER TABLE externaltool ALTER COLUMN scope SET NOT NULL; diff --git a/src/main/resources/db/migration/V4.16.0.3__6156-FooterImageforSub-Dataverse.sql b/src/main/resources/db/migration/V4.16.0.3__6156-FooterImageforSub-Dataverse.sql new file mode 100644 index 00000000000..3951897279e --- /dev/null +++ b/src/main/resources/db/migration/V4.16.0.3__6156-FooterImageforSub-Dataverse.sql @@ -0,0 +1,4 @@ +ALTER TABLE dataversetheme +ADD COLUMN IF NOT EXISTS logofooter VARCHAR, +ADD 
COLUMN IF NOT EXISTS logoFooterBackgroundColor VARCHAR, +ADD COLUMN IF NOT EXISTS logofooteralignment VARCHAR; diff --git a/src/main/resources/db/migration/V4.17.0.1__5991-update-scribejava.sql b/src/main/resources/db/migration/V4.17.0.1__5991-update-scribejava.sql new file mode 100644 index 00000000000..6762e1fc076 --- /dev/null +++ b/src/main/resources/db/migration/V4.17.0.1__5991-update-scribejava.sql @@ -0,0 +1 @@ +ALTER TABLE OAuth2TokenData DROP COLUMN IF EXISTS scope; \ No newline at end of file diff --git a/src/main/resources/db/migration/V4.17.0.2__3578-file-page-preview.sql b/src/main/resources/db/migration/V4.17.0.2__3578-file-page-preview.sql new file mode 100644 index 00000000000..152700ed96c --- /dev/null +++ b/src/main/resources/db/migration/V4.17.0.2__3578-file-page-preview.sql @@ -0,0 +1,5 @@ +ALTER TABLE externalTool +ADD COLUMN IF NOT EXISTS hasPreviewMode BOOLEAN; +UPDATE externaltool SET hasPreviewMode = false; +ALTER TABLE externaltool ALTER COLUMN hasPreviewMode SET NOT NULL; + diff --git a/src/main/resources/db/migration/V4.18.1.1__6459-contenttype-nullable.sql b/src/main/resources/db/migration/V4.18.1.1__6459-contenttype-nullable.sql new file mode 100644 index 00000000000..79eab8583f0 --- /dev/null +++ b/src/main/resources/db/migration/V4.18.1.1__6459-contenttype-nullable.sql @@ -0,0 +1,2 @@ +-- contenttype can be null because dataset tools do not require it +ALTER TABLE externaltool ALTER contenttype DROP NOT NULL; diff --git a/src/main/resources/db/migration/V4.19.0.1__6485_multistore.sql b/src/main/resources/db/migration/V4.19.0.1__6485_multistore.sql new file mode 100644 index 00000000000..84364169614 --- /dev/null +++ b/src/main/resources/db/migration/V4.19.0.1__6485_multistore.sql @@ -0,0 +1,3 @@ +ALTER TABLE dataverse +ADD COLUMN IF NOT EXISTS storagedriver TEXT; +UPDATE dvobject set storageidentifier=CONCAT('file://', storageidentifier) where storageidentifier not like '%://%' and dtype='DataFile'; diff --git a/src/main/resources/db/migration/V4.19.0.2__6644-update-editor-role-alias.sql b/src/main/resources/db/migration/V4.19.0.2__6644-update-editor-role-alias.sql new file mode 100644 index 00000000000..7eccdb5f3c4 --- /dev/null +++ b/src/main/resources/db/migration/V4.19.0.2__6644-update-editor-role-alias.sql @@ -0,0 +1,2 @@ + +update dataverserole set alias = 'contributor' where alias = 'editor'; \ No newline at end of file diff --git a/src/main/resources/db/migration/V4.20.0.1__2734-alter-data-table-add-orig-file-name.sql b/src/main/resources/db/migration/V4.20.0.1__2734-alter-data-table-add-orig-file-name.sql new file mode 100644 index 00000000000..edde8821045 --- /dev/null +++ b/src/main/resources/db/migration/V4.20.0.1__2734-alter-data-table-add-orig-file-name.sql @@ -0,0 +1,2 @@ + +ALTER TABLE datatable ADD COLUMN IF NOT EXISTS originalfilename character varying(255); \ No newline at end of file diff --git a/src/main/resources/db/migration/V4.20.0.2__6748-configure-dropdown-toolname.sql b/src/main/resources/db/migration/V4.20.0.2__6748-configure-dropdown-toolname.sql new file mode 100644 index 00000000000..e360b0adfb6 --- /dev/null +++ b/src/main/resources/db/migration/V4.20.0.2__6748-configure-dropdown-toolname.sql @@ -0,0 +1,2 @@ +ALTER TABLE externaltool +ADD COLUMN IF NOT EXISTS toolname VARCHAR(255); diff --git a/src/main/resources/db/migration/V4.20.0.3__6558-file-validation.sql b/src/main/resources/db/migration/V4.20.0.3__6558-file-validation.sql new file mode 100644 index 00000000000..3e5e742968c --- /dev/null +++ 
b/src/main/resources/db/migration/V4.20.0.3__6558-file-validation.sql
@@ -0,0 +1,4 @@
+-- the lock type "pidRegister" has been removed in 4.20, replaced with "finalizePublication" type
+-- (since this script is run as the application is being deployed, any background pid registration
+-- job is definitely no longer running - so we do want to remove any such locks left behind)
+DELETE FROM DatasetLock WHERE reason='pidRegister';
\ No newline at end of file
diff --git a/src/main/resources/db/migration/V4.20.0.4__6936-maildomain-groups.sql b/src/main/resources/db/migration/V4.20.0.4__6936-maildomain-groups.sql
new file mode 100644
index 00000000000..8c89b66fdec
--- /dev/null
+++ b/src/main/resources/db/migration/V4.20.0.4__6936-maildomain-groups.sql
@@ -0,0 +1 @@
+ALTER TABLE persistedglobalgroup ADD COLUMN IF NOT EXISTS emaildomains text;
\ No newline at end of file
diff --git a/src/main/resources/db/migration/V4.20.0.5__6505-zipdownload-jobs.sql b/src/main/resources/db/migration/V4.20.0.5__6505-zipdownload-jobs.sql
new file mode 100644
index 00000000000..484d5dd0784
--- /dev/null
+++ b/src/main/resources/db/migration/V4.20.0.5__6505-zipdownload-jobs.sql
@@ -0,0 +1,2 @@
+-- maybe temporary? - work in progress
+CREATE TABLE IF NOT EXISTS CUSTOMZIPSERVICEREQUEST (KEY VARCHAR(63), STORAGELOCATION VARCHAR(255), FILENAME VARCHAR(255), ISSUETIME TIMESTAMP);
diff --git a/src/main/resources/db/migration/V5.0.0.1__6872-assign-storage-drivers-to-datasets.sql b/src/main/resources/db/migration/V5.0.0.1__6872-assign-storage-drivers-to-datasets.sql
new file mode 100644
index 00000000000..453b2054c43
--- /dev/null
+++ b/src/main/resources/db/migration/V5.0.0.1__6872-assign-storage-drivers-to-datasets.sql
@@ -0,0 +1 @@
+ALTER TABLE dataset ADD COLUMN IF NOT EXISTS storagedriver VARCHAR(255);
\ No newline at end of file

From 414721188bc591d8c0f0d137bae58847be0b3c69 Mon Sep 17 00:00:00 2001
From: chenganj
Date: Mon, 1 Mar 2021 09:35:12 -0500
Subject: [PATCH 0089/1551] datasetlock for globusupload

---
 .../edu/harvard/iq/dataverse/PermissionServiceBean.java | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
index aaf38af1b36..6f05245bafd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
@@ -733,6 +733,9 @@ else if (dataset.isLockedFor(DatasetLock.Reason.Workflow)) {
         else if (dataset.isLockedFor(DatasetLock.Reason.DcmUpload)) {
             throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.editNotAllowed"), command);
         }
+        else if (dataset.isLockedFor(DatasetLock.Reason.GlobusUpload)) {
+            throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.editNotAllowed"), command);
+        }
         else if (dataset.isLockedFor(DatasetLock.Reason.EditInProgress)) {
             throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.editNotAllowed"), command);
         }
@@ -768,6 +771,9 @@ else if (dataset.isLockedFor(DatasetLock.Reason.Workflow)) {
         else if (dataset.isLockedFor(DatasetLock.Reason.DcmUpload)) {
             throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.publishNotAllowed"), command);
         }
+        else if (dataset.isLockedFor(DatasetLock.Reason.GlobusUpload)) {
+            throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.publishNotAllowed"), command);
+        }
        else if
(dataset.isLockedFor(DatasetLock.Reason.EditInProgress)) {
            throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.publishNotAllowed"), command);
        }

From 07516b29b196a30891e47458df1fdb5ed6bbda45 Mon Sep 17 00:00:00 2001
From: chenganj
Date: Mon, 1 Mar 2021 10:58:07 -0500
Subject: [PATCH 0090/1551] remove the Globus upload UI from
 editFilesFragment.xhtml

---
 src/main/webapp/editFilesFragment.xhtml | 61 -------------------
 1 file changed, 61 deletions(-)

diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml
index d8d3081afef..6deb2a7b33f 100644
--- a/src/main/webapp/editFilesFragment.xhtml
+++ b/src/main/webapp/editFilesFragment.xhtml
@@ -277,53 +277,6 @@
-
[Garbled hunk: the XHTML markup of this removal was stripped during extraction. The hunk deletes the "Globus" panel added in PATCH 0087; the recoverable text mirrors that addition:]
-        Globus
-        #{bundle['file.createGlobusUploadDisabled']}
-        BEFORE YOU START: You will need to set up a free account with Globus and have Globus Connect Personal running on your computer to transfer files to and from the service.
-        Once the Globus transfer has finished, you will get an email notification. Please come back here and press the following button:
-        Click here to view the dataset page: #{EditDatafilesPage.dataset.displayName}.
@@ -1032,20 +985,6 @@ return true; } } - - function openGlobus(datasetId, client_id) { - var res = location.protocol+'//'+location.hostname+(location.port ? ':'+location.port: ''); - - var scope = encodeURI("openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all", "UTF-8"); - - var new_url = "https://auth.globus.org/v2/oauth2/authorize?client_id=" + client_id + "&response_type=code&" + - "scope=" + scope + "&state=" + datasetId; - new_url = new_url + "&redirect_uri=" + res + "%2Fglobus.xhtml" ; - - - var myWindows = window.open(new_url); - } - //]]> From bc34031660316c7afde7b121c7fd32603c0d710f Mon Sep 17 00:00:00 2001 From: jingma Date: Tue, 16 Mar 2021 11:39:06 +0100 Subject: [PATCH 0091/1551] First db table and api. --- .../edu/harvard/iq/dataverse/License.java | 135 ++++++++++++++++++ .../iq/dataverse/LicenseServiceBean.java | 80 +++++++++++ .../edu/harvard/iq/dataverse/api/Admin.java | 56 ++++++++ .../iq/dataverse/util/json/JsonPrinter.java | 11 ++ 4 files changed, 282 insertions(+) create mode 100644 src/main/java/edu/harvard/iq/dataverse/License.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java new file mode 100644 index 00000000000..713ac218222 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -0,0 +1,135 @@ +package edu.harvard.iq.dataverse; + +import java.util.Objects; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; + +/** + * @author Jing Ma + */ +// @NamedQueries({ +// @NamedQuery( name="License.findAll", +// query="SELECT l FROM License l"), +// @NamedQuery( name="Setting.findById", +// query = "SELECT l FROM License l WHERE l.id=:id"), +// @NamedQuery( name="License.deleteById", +// query="DELETE FROM License l WHERE l.id=:id") +// +//}) +//@Entity +public class License { + +// @Id +// @GeneratedValue(strategy = GenerationType.IDENTITY) +// private Long id; +// +// @Column(columnDefinition="TEXT", nullable = false, unique = true) +// private String name; +// +// @Column(columnDefinition="TEXT") +// private String shortDescription; +// +// @Column(columnDefinition="TEXT", nullable = false) +// private String uri; +// +// @Column(columnDefinition="TEXT") +// private String iconUrl; +// +// @Column(nullable = false) +// private boolean active; +// +// public License() { +// } +// +// public License(String name, String shortDescription, String uri, String iconUrl, boolean active) { +// this.name = name; +// this.shortDescription = shortDescription; +// this.uri = uri; +// this.iconUrl = iconUrl; +// this.active = active; +// } +// +// public Long getId() { +// return id; +// } +// +// public void setId(Long id) { +// this.id = id; +// } +// +// public String getName() { +// return name; +// } +// +// public void setName(String name) { +// this.name = name; +// } +// +// public String getShortDescription() { +// return shortDescription; +// } +// +// public void setShortDescription(String shortDescription) { +// this.shortDescription = shortDescription; +// } +// +// public String getUri() { +// return uri; +// } +// +// public void setUri(String uri) { +// this.uri = uri; +// } +// +// public String getIconUrl() { +// return iconUrl; +// } +// +// public void setIconUrl(String 
iconUrl) { +// this.iconUrl = iconUrl; +// } +// +// public boolean isActive() { +// return active; +// } +// +// public void setActive(boolean active) { +// this.active = active; +// } +// +// @Override +// public boolean equals(Object o) { +// if (this == o) return true; +// if (o == null || getClass() != o.getClass()) return false; +// License license = (License) o; +// return active == license.active && +// Objects.equals(id, license.id) && +// Objects.equals(name, license.name) && +// Objects.equals(shortDescription, license.shortDescription) && +// Objects.equals(uri, license.uri) && +// Objects.equals(iconUrl, license.iconUrl); +// } +// +// @Override +// public int hashCode() { +// return Objects.hash(id, name, shortDescription, uri, iconUrl, active); +// } +// +// @Override +// public String toString() { +// return "License{" + +// "id=" + id + +// ", name='" + name + '\'' + +// ", shortDescription='" + shortDescription + '\'' + +// ", uri='" + uri + '\'' + +// ", iconUrl='" + iconUrl + '\'' + +// ", active=" + active + +// '}'; +// } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java new file mode 100644 index 00000000000..7caa5b4bdc8 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -0,0 +1,80 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; +import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; +import edu.harvard.iq.dataverse.authorization.DataverseRole; +import edu.harvard.iq.dataverse.search.IndexResponse; +import edu.harvard.iq.dataverse.settings.Setting; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import javax.ejb.EJB; +import javax.ejb.Stateless; +import javax.inject.Named; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceException; + +/** + * @author Jing Ma + */ +//@Stateless +//@Named +public class LicenseServiceBean { + +// @PersistenceContext +// EntityManager em; +// +// @EJB +// ActionLogServiceBean actionLogSvc; +// +// public List listAll() { +// return em.createNamedQuery("License.findAll", License.class).getResultList(); +// } +// +// public License get( long id ) { +// List tokens = em.createNamedQuery("License.findById", License.class) +// .setParameter("id", id ) +// .getResultList(); +// return tokens.isEmpty() ? 
null : tokens.get(0); +// } +// +// public License save(License l) throws PersistenceException { +// if (l.getId() == null) { +// em.persist(l); +// return l; +// } else { +// return null; +// } +// } +// +// public License set( long id, String name, String shortDescription, String uri, String iconUrl, boolean active ) { +// List tokens = em.createNamedQuery("License.findById", License.class) +// .setParameter("id", Long.toString(id) ) +// .getResultList(); +// +// if(tokens.size() > 0) { +// License l = tokens.get(0); +// l.setName(name); +// l.setShortDescription(shortDescription); +// l.setUri(uri); +// l.setIconUrl(iconUrl); +// l.setActive(active); +// em.merge(l); +// actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") +// .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); +// return l; +// } else { +// return null; +// } +// } +// +// public void delete( long id ) throws PersistenceException { +// actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete") +// .setInfo(Long.toString(id))); +// em.createNamedQuery("License.deleteById") +// .setParameter("id", id) +// .executeUpdate(); +// } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index b52665a7747..ce248d97946 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -16,6 +16,8 @@ import edu.harvard.iq.dataverse.EMailValidator; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.License; +import edu.harvard.iq.dataverse.LicenseServiceBean; import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; @@ -42,9 +44,11 @@ import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand; import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.settings.Setting; +import edu.harvard.iq.dataverse.util.json.JsonPrinter; import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; +import javax.persistence.PersistenceException; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -152,6 +156,8 @@ public class Admin extends AbstractApiBean { ExplicitGroupServiceBean explicitGroupService; @EJB BannerMessageServiceBean bannerMessageService; + @EJB + LicenseServiceBean licenseService; // Make the session available @@ -1920,4 +1926,54 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon } +// @GET +// @Path("/licenses") +// public Response getLicenses() { +// return ok(licenseService.listAll().stream() +// .map(JsonPrinter::json) +// .collect(toJsonArray())); +// } +// +// @GET +// @Path("/licenses/{id}") +// public Response getLicense(@PathParam("id") long id) { +// License l = licenseService.get(id); +// if (l == null) { +// return error(Response.Status.NOT_FOUND, "Not Found."); +// } +// return ok(json(l)); +// } +// +// @POST +// @Path("/licenses") +// public Response addLicense(License l) { +// try { +// licenseService.save(l); +// return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); +// } catch(PersistenceException e) { +// return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); 
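+//             (A duplicate name violates the unique constraint declared on
+//              License.name; the resulting PersistenceException is mapped
+//              to 409 Conflict here.)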
+// } +// } +// +// @PUT +// @Path("/licenses/{id}") +// public Response putLicense(@PathParam("id") long id, License l) { +// License updated = licenseService.set(id, l.getName(), l.getShortDescription(), l.getUri(), l.getIconUrl(), l.isActive()); +// if (updated == null) { +// return error(Response.Status.BAD_REQUEST, "Bad Request. There is no existing LicenseInfo with that ID. To add a license use POST."); +// } +// return ok("License with ID " + id + " was replaced."); +// } +// +// @DELETE +// @Path("/licenses/{id}") +// public Response deleteLicense(@PathParam("id") long id) { +// try { +// licenseService.delete(id); +// return ok("OK. License with ID " + id + " was deleted."); +// } catch (PersistenceException e) { +// return error(Response.Status.BAD_REQUEST, "The license is still in used and cannot be deleted."); +// } +// } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index c37efc3178f..f43860df23f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.DataverseContact; import edu.harvard.iq.dataverse.DataverseFacet; import edu.harvard.iq.dataverse.DataverseTheme; +import edu.harvard.iq.dataverse.License; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.groups.impl.maildomain.MailDomainGroup; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser; @@ -775,6 +776,16 @@ public static JsonObjectBuilder json( DataverseFacet aFacet ) { .add("id", String.valueOf(aFacet.getId())) // TODO should just be id I think .add("name", aFacet.getDatasetFieldType().getDisplayName()); } + +// public static JsonObjectBuilder json(License l) { +// return jsonObjectBuilder() +// .add("id", l.getId()) +// .add("name", l.getName()) +// .add("shortDescription", l.getShortDescription()) +// .add("uri", l.getUri()) +// .add("iconUrl", l.getIconUrl()) +// .add("active", l.isActive()); +// } public static Collector stringsToJsonArray() { return new Collector() { From 2fa243abe63c60b07a714070acd4a62d5c8d6e96 Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 16 Mar 2021 10:16:31 -0400 Subject: [PATCH 0092/1551] Globus API upgrade --- .../iq/dataverse/DatasetServiceBean.java | 277 ++- .../harvard/iq/dataverse/api/Datasets.java | 1542 ++++++++++------- .../dataverse/globus/fileDetailsHolder.java | 31 + .../harvard/iq/dataverse/util/FileUtil.java | 3 +- .../iq/dataverse/util/json/JsonPrinter.java | 10 + 5 files changed, 1215 insertions(+), 648 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/fileDetailsHolder.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index c1efe119fd2..f7e37b3d929 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataaccess.DataAccess; @@ -16,25 +17,28 @@ import 
edu.harvard.iq.dataverse.engine.command.impl.FinalizeDatasetPublicationCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetStorageSizeCommand; import edu.harvard.iq.dataverse.export.ExportService; +import edu.harvard.iq.dataverse.globus.AccessToken; +import edu.harvard.iq.dataverse.globus.GlobusServiceBean; +import edu.harvard.iq.dataverse.globus.fileDetailsHolder; import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean; import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.workflows.WorkflowComment; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; + +import java.io.*; import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executor; +import java.util.concurrent.Executors; import java.util.logging.FileHandler; import java.util.logging.Level; import java.util.logging.Logger; +import java.util.stream.Collectors; +import java.util.stream.IntStream; import javax.ejb.Asynchronous; import javax.ejb.EJB; import javax.ejb.EJBException; @@ -42,6 +46,7 @@ import javax.ejb.TransactionAttribute; import javax.ejb.TransactionAttributeType; import javax.inject.Named; +import javax.json.*; import javax.persistence.EntityManager; import javax.persistence.NoResultException; import javax.persistence.PersistenceContext; @@ -49,8 +54,14 @@ import javax.persistence.StoredProcedureQuery; import javax.persistence.TypedQuery; import org.apache.commons.lang.RandomStringUtils; +import org.apache.commons.lang.StringUtils; import org.ocpsoft.common.util.Strings; +import javax.servlet.http.HttpServletRequest; + +import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; +import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; + /** * * @author skraffmiller @@ -95,6 +106,10 @@ public class DatasetServiceBean implements java.io.Serializable { @EJB SystemConfig systemConfig; + @EJB + GlobusServiceBean globusServiceBean; + + private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss"); @PersistenceContext(unitName = "VDCNet-ejbPU") @@ -1004,6 +1019,246 @@ public void deleteHarvestedDataset(Dataset dataset, DataverseRequest request, Lo hdLogger.info("Successfully destroyed the dataset"); } catch (Exception ex) { hdLogger.warning("Failed to destroy the dataset"); - } + } + } + + @Asynchronous + public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, User authUser, String httpRequestUrl) throws ExecutionException, InterruptedException { + + logger.info(httpRequestUrl + " == globusAsyncCall == step 1 "+ dataset.getId()); + + Thread.sleep(5000); + String lockInfoMessage = "Globus Upload API is running "; + DatasetLock lock = addDatasetLock(dataset.getId(), DatasetLock.Reason.EditInProgress, + ((AuthenticatedUser) authUser).getId(), lockInfoMessage); + if (lock != null) { + dataset.addLock(lock); + } else { + logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId()); + } + + + JsonObject jsonObject = null; + try (StringReader rdr = new StringReader(jsonData)) { + 
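+            // Parse the JSON payload supplied by the Globus upload API call; it
+            // carries the taskIdentifier, datasetId and files[] array used below.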
jsonObject = Json.createReader(rdr).readObject(); + } catch (Exception jpe) { + jpe.printStackTrace(); + logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}"); + } + + String taskIdentifier = jsonObject.getString("taskIdentifier"); + String datasetIdentifier = jsonObject.getString("datasetId").replace("doi:",""); + + // globus task status check + globusStatusCheck(taskIdentifier); + + // calculate checksum, mimetype + try { + List inputList = new ArrayList(); + JsonArray filesJsonArray = jsonObject.getJsonArray("files"); + + if (filesJsonArray != null) { + + for (JsonObject fileJsonObject : filesJsonArray.getValuesAs(JsonObject.class)) { + + // storageIdentifier s3://gcs5-bucket1:1781cfeb8a7-748c270a227c from victoria + String storageIdentifier = fileJsonObject.getString("storageIdentifier"); + String fileName = fileJsonObject.getString("fileName"); + String[] bits = storageIdentifier.split(":"); + String fileId = bits[bits.length-1]; + String bucketName = bits[1].replace("/", ""); + + // fullpath s3://gcs5-bucket1/10.5072/FK2/3S6G2E/1781cfeb8a7-4ad9418a5873 + String fullPath = "s3://" + bucketName + "/" + datasetIdentifier +"/" +fileId ; + + inputList.add(fileId + "IDsplit" + fullPath + "IDsplit" + fileName); + } + + JsonObject newfilesJsonObject= calculateMissingMetadataFields(inputList); + JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files"); + + JsonArrayBuilder jsonSecondAPI = Json.createArrayBuilder() ; + + for (JsonObject fileJsonObject : filesJsonArray.getValuesAs(JsonObject.class)) { + + String storageIdentifier = fileJsonObject.getString("storageIdentifier"); + String[] bits = storageIdentifier.split(":"); + String fileId = bits[bits.length-1]; + + List newfileJsonObject = IntStream.range(0, newfilesJsonArray.size() ) + .mapToObj(index -> ((JsonObject)newfilesJsonArray.get(index)).getJsonObject(fileId)) + .filter(Objects::nonNull).collect(Collectors.toList()); + + if(newfileJsonObject != null) { + JsonPatch path = Json.createPatchBuilder().add("/md5Hash", newfileJsonObject.get(0).getString("hash")).build(); + fileJsonObject = path.apply(fileJsonObject); + path = Json.createPatchBuilder().add("/mimeType", newfileJsonObject.get(0).getString("mime")).build(); + fileJsonObject = path.apply(fileJsonObject); + jsonSecondAPI.add(stringToJsonObjectBuilder(fileJsonObject.toString())); + } + } + + String newjsonData = jsonSecondAPI.build().toString(); + + ProcessBuilder processBuilder = new ProcessBuilder(); + + String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST "+httpRequestUrl.split("/api")[0]+"/api/datasets/:persistentId/addFiles?persistentId=doi:" + datasetIdentifier + " -F jsonData='" + newjsonData + "'"; + System.out.println("*******====command ==== " + command); + + new Thread(new Runnable() { + public void run() { + try { + processBuilder.command("bash", "-c", command); + Process process = processBuilder.start(); + } catch (Exception ex) { + logger.log(Level.SEVERE, "******* Unexpected Exception while executing api/datasets/:persistentId/add call ", ex); + } + } + }).start(); + + } + + } catch (Exception e) { + logger.info("Exception "); + e.printStackTrace(); + } + } + + public static JsonObjectBuilder stringToJsonObjectBuilder(String str) { + JsonReader jsonReader = Json.createReader(new StringReader(str)); + JsonObject jo = jsonReader.readObject(); + jsonReader.close(); + + JsonObjectBuilder job = Json.createObjectBuilder(); + + for (Map.Entry entry : jo.entrySet()) { + job.add(entry.getKey(), entry.getValue()); + } 
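+        // Rebuilds the immutable JsonObject as a JsonObjectBuilder so the patched
+        // file entry can be appended to the jsonSecondAPI array builder above.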
+ + return job; } + + Executor executor = Executors.newFixedThreadPool(10); + + + private Boolean globusStatusCheck(String taskId) + { + boolean success = false; + do { + try { + logger.info(" sleep before globus transfer check"); + Thread.sleep(50000); + + String basicGlobusToken = settingsService.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); + AccessToken clientTokenUser = globusServiceBean.getClientToken(basicGlobusToken); + + success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskId); + + } catch (Exception ex) { + ex.printStackTrace(); + } + + } while (!success); + + logger.info(" globus transfer completed "); + + return success; + } + + + public JsonObject calculateMissingMetadataFields(List inputList) throws InterruptedException, ExecutionException, IOException { + + List> hashvalueCompletableFutures = + inputList.stream().map(iD -> calculateDetailsAsync(iD)).collect(Collectors.toList()); + + CompletableFuture allFutures = CompletableFuture + .allOf(hashvalueCompletableFutures.toArray(new CompletableFuture[hashvalueCompletableFutures.size()])); + + CompletableFuture> allCompletableFuture = allFutures.thenApply(future -> { + return hashvalueCompletableFutures.stream() + .map(completableFuture -> completableFuture.join()) + .collect(Collectors.toList()); + }); + + CompletableFuture completableFuture = allCompletableFuture.thenApply(files -> { + return files.stream().map(d -> json(d)).collect(toJsonArray()); + }); + + JsonArrayBuilder filesObject = (JsonArrayBuilder) completableFuture.get(); + + JsonObject output = Json.createObjectBuilder().add("files", filesObject).build(); + + return output; + + } + + private CompletableFuture calculateDetailsAsync(String id) { + logger.info(" calcualte additional details for these globus id ==== " + id); + return CompletableFuture.supplyAsync( () -> { + try { + Thread.sleep(2000); + } catch (InterruptedException e) { + e.printStackTrace(); + } + try { + return ( calculateDetails(id) ); + } catch (InterruptedException | IOException e) { + e.printStackTrace(); + } + return null; + }, executor).exceptionally(ex -> { + return null; + }); + } + + + private fileDetailsHolder calculateDetails(String id) throws InterruptedException, IOException { + int count = 0; + String checksumVal = ""; + InputStream in = null; + String fileId = id.split("IDsplit")[0]; + String fullPath = id.split("IDsplit")[1]; + String fileName = id.split("IDsplit")[2]; + do { + try { + StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); + in = dataFileStorageIO.getInputStream(); + checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); + count = 3; + } catch (Exception ex) { + count = count + 1; + ex.printStackTrace(); + logger.info(ex.getMessage()); + Thread.sleep(5000); + } + + } while (count < 3); + + + return new fileDetailsHolder(fileId, checksumVal, calculatemime(fileName)); + //getBytes(in)+"" ); + // calculatemime(fileName)); + } + + public long getBytes(InputStream is) throws IOException { + + FileInputStream fileStream = (FileInputStream)is; + return fileStream.getChannel().size(); + } + + public String calculatemime(String fileName) throws InterruptedException { + + String finalType = FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT; + String type = FileUtil.determineFileTypeByExtension(fileName); + + if (!StringUtils.isBlank(type)) { + if (FileUtil.useRecognizedType(finalType, type)) { + finalType = type; + } + } + + return finalType; + } + + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java 
b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 7f50504ebc4..c2854b33e29 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1,6 +1,5 @@ package edu.harvard.iq.dataverse.api; -import com.amazonaws.services.s3.model.S3ObjectSummary; import edu.harvard.iq.dataverse.ControlledVocabularyValue; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; @@ -29,6 +28,7 @@ import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.RoleAssignee; +import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode; @@ -77,10 +77,10 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand; import edu.harvard.iq.dataverse.export.DDIExportServiceBean; import edu.harvard.iq.dataverse.export.ExportService; +import edu.harvard.iq.dataverse.globus.fileDetailsHolder; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.S3PackageImporter; -import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.dataaccess.DataAccess; @@ -117,16 +117,11 @@ import java.io.StringReader; import java.sql.Timestamp; import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; +import java.util.concurrent.*; import java.util.logging.Level; import java.util.logging.Logger; +import javax.ejb.Asynchronous; import javax.ejb.EJB; import javax.ejb.EJBException; import javax.inject.Inject; @@ -158,43 +153,45 @@ import org.glassfish.jersey.media.multipart.FormDataParam; import com.amazonaws.services.s3.model.PartETag; -import edu.harvard.iq.dataverse.FileMetadata; + import java.util.Map.Entry; +import java.util.stream.Collectors; +import java.util.stream.IntStream; @Path("datasets") public class Datasets extends AbstractApiBean { private static final Logger logger = Logger.getLogger(Datasets.class.getCanonicalName()); - - @Inject DataverseSession session; + + @Inject DataverseSession session; @EJB DatasetServiceBean datasetService; @EJB DataverseServiceBean dataverseService; - + @EJB GlobusServiceBean globusServiceBean; @EJB UserNotificationServiceBean userNotificationService; - + @EJB PermissionServiceBean permissionService; - + @EJB AuthenticationServiceBean authenticationServiceBean; - + @EJB DDIExportServiceBean ddiExportService; - + @EJB DatasetFieldServiceBean datasetfieldService; @EJB MetadataBlockServiceBean metadataBlockService; - + @EJB DataFileServiceBean fileService; @@ -203,65 +200,72 @@ public class Datasets extends AbstractApiBean { @EJB EjbDataverseEngine commandEngine; - + @EJB IndexServiceBean indexService; @EJB S3PackageImporter s3PackageImporter; - + @EJB SettingsServiceBean settingsService; // TODO: Move to AbstractApiBean @EJB DatasetMetricsServiceBean datasetMetricsSvc; - + @EJB DatasetExternalCitationsServiceBean 
datasetExternalCitationsService; - + @Inject MakeDataCountLoggingServiceBean mdcLogService; - + @Inject DataverseRequestServiceBean dvRequestService; + @Context + protected HttpServletRequest httpRequest; + + /** * Used to consolidate the way we parse and handle dataset versions. - * @param <T> + * @param <T> */ public interface DsVersionHandler<T> { T handleLatest(); + T handleDraft(); - T handleSpecific( long major, long minor ); + + T handleSpecific(long major, long minor); + T handleLatestPublished(); }
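For context, DsVersionHandler<T> is the small strategy interface the endpoints below use to route a ":latest", ":draft", ":latest-published", or "major.minor" version selector to the matching handler method. A sketch of the kind of dispatcher that typically sits behind it (the handleVersion name and the error handling are illustrative, not taken from this patch):

    // Illustrative dispatcher: selects the handler method matching the version selector.
    private <T> T handleVersion(String versionId, DsVersionHandler<T> hdl) {
        switch (versionId) {
            case ":latest":
                return hdl.handleLatest();
            case ":draft":
                return hdl.handleDraft();
            case ":latest-published":
                return hdl.handleLatestPublished();
            default:
                // assumes a "major.minor" selector such as "1.0"; real code would
                // validate the format and report a proper API error instead
                String[] versions = versionId.split("\\.");
                return hdl.handleSpecific(Long.parseLong(versions[0]), Long.parseLong(versions[1]));
        }
    }

- + @GET @Path("{id}") public Response getDataset(@PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { - return response( req -> { + return response(req -> { final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id))); final DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved)); final JsonObjectBuilder jsonbuilder = json(retrieved); //Report MDC if this is a released version (could be draft if user has access, or user may not have access at all and is not getting metadata beyond the minimum) - if((latest != null) && latest.isReleased()) { + if ((latest != null) && latest.isReleased()) { MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved); mdcLogService.logEntry(entry); } return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest) : null)); }); } - - // TODO: - // This API call should, ideally, call findUserOrDie() and the GetDatasetCommand + + // TODO: + // This API call should, ideally, call findUserOrDie() and the GetDatasetCommand // to obtain the dataset that we are trying to export - which would handle - // Auth in the process... For now, Auth isn't necessary - since export ONLY + // Auth in the process... For now, Auth isn't necessary - since export ONLY // WORKS on published datasets, which are open to the world. -- L.A. 4.5 - + @GET @Path("/export") - @Produces({"application/xml", "application/json", "application/html" }) + @Produces({"application/xml", "application/json", "application/html"}) public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { try { @@ -269,20 +273,20 @@ public Response exportDataset(@QueryParam("persistentId") String persistentId, @ if (dataset == null) { return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found."); } - + ExportService instance = ExportService.getInstance(settingsSvc); - + InputStream is = instance.getExport(dataset, exporter); - + String mediaType = instance.getMediaType(exporter); - //Export is only possible for released (non-draft) dataset versions so we can log without checking to see if this is a request for a draft + //Export is only possible for released (non-draft) dataset versions so we can log without checking to see if this is a request for a draft MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, dataset); mdcLogService.logEntry(entry); - + return Response.ok() .entity(is) .type(mediaType).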
- build(); + build(); } catch (Exception wr) { return error(Response.Status.FORBIDDEN, "Export Failed"); } @@ -290,23 +294,23 @@ public Response exportDataset(@QueryParam("persistentId") String persistentId, @ @DELETE @Path("{id}") - public Response deleteDataset( @PathParam("id") String id) { + public Response deleteDataset(@PathParam("id") String id) { // Internally, "DeleteDatasetCommand" simply redirects to "DeleteDatasetVersionCommand" // (and there's a comment that says "TODO: remove this command") - // do we need an exposed API call for it? - // And DeleteDatasetVersionCommand further redirects to DestroyDatasetCommand, - // if the dataset only has 1 version... In other words, the functionality + // do we need an exposed API call for it? + // And DeleteDatasetVersionCommand further redirects to DestroyDatasetCommand, + // if the dataset only has 1 version... In other words, the functionality // currently provided by this API is covered between the "deleteDraftVersion" and - // "destroyDataset" API calls. - // (The logic below follows the current implementation of the underlying + // "destroyDataset" API calls. + // (The logic below follows the current implementation of the underlying // commands!) - - return response( req -> { + + return response(req -> { Dataset doomed = findDatasetOrDie(id); DatasetVersion doomedVersion = doomed.getLatestVersion(); User u = findUserOrDie(); boolean destroy = false; - + if (doomed.getVersions().size() == 1) { if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) { throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can delete published datasets")); @@ -317,26 +321,26 @@ public Response deleteDataset( @PathParam("id") String id) { throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is a published dataset with multiple versions. 
This API can only delete the latest version if it is a DRAFT")); } } - - // Gather the locations of the physical files that will need to be + + // Gather the locations of the physical files that will need to be // deleted once the destroy command execution has been finalized: Map deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomedVersion, destroy); - - execCommand( new DeleteDatasetCommand(req, findDatasetOrDie(id))); - - // If we have gotten this far, the destroy command has succeeded, + + execCommand(new DeleteDatasetCommand(req, findDatasetOrDie(id))); + + // If we have gotten this far, the destroy command has succeeded, // so we can finalize it by permanently deleting the physical files: - // (DataFileService will double-check that the datafiles no - // longer exist in the database, before attempting to delete + // (DataFileService will double-check that the datafiles no + // longer exist in the database, before attempting to delete // the physical files) if (!deleteStorageLocations.isEmpty()) { fileService.finalizeFileDeletes(deleteStorageLocations); } - + return ok("Dataset " + id + " deleted"); }); } - + @DELETE @Path("{id}/destroy") public Response destroyDataset(@PathParam("id") String id) { @@ -350,16 +354,16 @@ public Response destroyDataset(@PathParam("id") String id) { throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Destroy can only be called by superusers.")); } - // Gather the locations of the physical files that will need to be + // Gather the locations of the physical files that will need to be // deleted once the destroy command execution has been finalized: Map deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed); execCommand(new DestroyDatasetCommand(doomed, req)); - // If we have gotten this far, the destroy command has succeeded, + // If we have gotten this far, the destroy command has succeeded, // so we can finalize permanently deleting the physical files: - // (DataFileService will double-check that the datafiles no - // longer exist in the database, before attempting to delete + // (DataFileService will double-check that the datafiles no + // longer exist in the database, before attempting to delete // the physical files) if (!deleteStorageLocations.isEmpty()) { fileService.finalizeFileDeletes(deleteStorageLocations); @@ -368,59 +372,59 @@ public Response destroyDataset(@PathParam("id") String id) { return ok("Dataset " + id + " destroyed"); }); } - + @DELETE @Path("{id}/versions/{versionId}") - public Response deleteDraftVersion( @PathParam("id") String id, @PathParam("versionId") String versionId ){ - if ( ! 
":draft".equals(versionId) ) { + public Response deleteDraftVersion(@PathParam("id") String id, @PathParam("versionId") String versionId) { + if (!":draft".equals(versionId)) { return badRequest("Only the :draft version can be deleted"); } - return response( req -> { + return response(req -> { Dataset dataset = findDatasetOrDie(id); DatasetVersion doomed = dataset.getLatestVersion(); - + if (!doomed.isDraft()) { throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is NOT a DRAFT version")); } - - // Gather the locations of the physical files that will need to be + + // Gather the locations of the physical files that will need to be // deleted once the destroy command execution has been finalized: - + Map deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed); - - execCommand( new DeleteDatasetVersionCommand(req, dataset)); - - // If we have gotten this far, the delete command has succeeded - - // by either deleting the Draft version of a published dataset, - // or destroying an unpublished one. + + execCommand(new DeleteDatasetVersionCommand(req, dataset)); + + // If we have gotten this far, the delete command has succeeded - + // by either deleting the Draft version of a published dataset, + // or destroying an unpublished one. // This means we can finalize permanently deleting the physical files: - // (DataFileService will double-check that the datafiles no - // longer exist in the database, before attempting to delete + // (DataFileService will double-check that the datafiles no + // longer exist in the database, before attempting to delete // the physical files) if (!deleteStorageLocations.isEmpty()) { fileService.finalizeFileDeletes(deleteStorageLocations); } - + return ok("Draft version of dataset " + id + " deleted"); }); } - + @DELETE @Path("{datasetId}/deleteLink/{linkedDataverseId}") - public Response deleteDatasetLinkingDataverse( @PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) { - boolean index = true; + public Response deleteDatasetLinkingDataverse(@PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) { + boolean index = true; return response(req -> { execCommand(new DeleteDatasetLinkingDataverseCommand(req, findDatasetOrDie(datasetId), findDatasetLinkingDataverseOrDie(datasetId, linkedDataverseId), index)); return ok("Link from Dataset " + datasetId + " to linked Dataverse " + linkedDataverseId + " deleted"); }); } - + @PUT @Path("{id}/citationdate") - public Response setCitationDate( @PathParam("id") String id, String dsfTypeName) { - return response( req -> { - if ( dsfTypeName.trim().isEmpty() ){ + public Response setCitationDate(@PathParam("id") String id, String dsfTypeName) { + return response(req -> { + if (dsfTypeName.trim().isEmpty()) { return badRequest("Please provide a dataset field type in the requst body."); } DatasetFieldType dsfType = null; @@ -434,124 +438,124 @@ public Response setCitationDate( @PathParam("id") String id, String dsfTypeName) execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), dsfType)); return ok("Citation Date for dataset " + id + " set to: " + (dsfType != null ? 
dsfType.getDisplayName() : "default")); }); - } - + } + @DELETE @Path("{id}/citationdate") - public Response useDefaultCitationDate( @PathParam("id") String id) { - return response( req -> { + public Response useDefaultCitationDate(@PathParam("id") String id) { + return response(req -> { execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), null)); return ok("Citation Date for dataset " + id + " set to default"); }); - } - + } + @GET @Path("{id}/versions") - public Response listVersions( @PathParam("id") String id ) { - return response( req -> - ok( execCommand( new ListVersionsCommand(req, findDatasetOrDie(id)) ) - .stream() - .map( d -> json(d) ) - .collect(toJsonArray()))); - } - + public Response listVersions(@PathParam("id") String id) { + return response(req -> + ok(execCommand(new ListVersionsCommand(req, findDatasetOrDie(id))) + .stream() + .map(d -> json(d)) + .collect(toJsonArray()))); + } + @GET @Path("{id}/versions/{versionId}") - public Response getVersion( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> { - DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); + public Response getVersion(@PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response(req -> { + DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found") - : ok(json(dsv)); + : ok(json(dsv)); }); } - + @GET @Path("{id}/versions/{versionId}/files") - public Response getVersionFiles( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> ok( jsonFileMetadatas( - getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getFileMetadatas()))); + public Response getVersionFiles(@PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response(req -> ok(jsonFileMetadatas( + getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getFileMetadatas()))); } - + @GET @Path("{id}/dirindex") @Produces("text/html") public Response getFileAccessFolderView(@PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { folderName = folderName == null ? "" : folderName; - versionId = versionId == null ? ":latest-published" : versionId; - - DatasetVersion version; + versionId = versionId == null ? 
":latest-published" : versionId; + + DatasetVersion version; try { DataverseRequest req = createDataverseRequest(findUserOrDie()); version = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); } catch (WrappedResponse wr) { return wr.getResponse(); } - + String output = FileUtil.formatFolderListingHtml(folderName, version, "", originals != null && originals); - + // return "NOT FOUND" if there is no such folder in the dataset version: - + if ("".equals(output)) { return notFound("Folder " + folderName + " does not exist"); } - - + + String indexFileName = folderName.equals("") ? ".index.html" : ".index-" + folderName.replace('/', '_') + ".html"; response.setHeader("Content-disposition", "attachment; filename=\"" + indexFileName + "\""); - + return Response.ok() .entity(output) //.type("application/html"). .build(); } - + @GET @Path("{id}/versions/{versionId}/metadata") - public Response getVersionMetadata( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> ok( - jsonByBlocks( - getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers ) + public Response getVersionMetadata(@PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response(req -> ok( + jsonByBlocks( + getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers) .getDatasetFields()))); } - + @GET @Path("{id}/versions/{versionNumber}/metadata/{block}") - public Response getVersionMetadataBlock( @PathParam("id") String datasetId, - @PathParam("versionNumber") String versionNumber, - @PathParam("block") String blockName, - @Context UriInfo uriInfo, - @Context HttpHeaders headers ) { - - return response( req -> { - DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers ); - + public Response getVersionMetadataBlock(@PathParam("id") String datasetId, + @PathParam("versionNumber") String versionNumber, + @PathParam("block") String blockName, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { + + return response(req -> { + DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers); + Map> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields()); - for ( Map.Entry> p : fieldsByBlock.entrySet() ) { - if ( p.getKey().getName().equals(blockName) ) { + for (Map.Entry> p : fieldsByBlock.entrySet()) { + if (p.getKey().getName().equals(blockName)) { return ok(json(p.getKey(), p.getValue())); } } return notFound("metadata block named " + blockName + " not found"); }); } - + @GET @Path("{id}/modifyRegistration") - public Response updateDatasetTargetURL(@PathParam("id") String id ) { - return response( req -> { + public Response updateDatasetTargetURL(@PathParam("id") String id) { + return response(req -> { execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(id), req)); return ok("Dataset " + id + " target url updated"); }); } - + @POST @Path("/modifyRegistrationAll") public Response updateDatasetTargetURLAll() { - return response( req -> { - datasetService.findAll().forEach( ds -> { + return response(req -> { + datasetService.findAll().forEach(ds -> { try { execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(ds.getId().toString()), req)); } catch (WrappedResponse ex) { @@ -561,7 +565,7 @@ public Response 
updateDatasetTargetURLAll() { return ok("Update All Dataset target url completed"); }); } - + @POST @Path("{id}/modifyRegistrationMetadata") public Response updateDatasetPIDMetadata(@PathParam("id") String id) { @@ -581,36 +585,36 @@ public Response updateDatasetPIDMetadata(@PathParam("id") String id) { return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args)); }); } - + @GET @Path("/modifyRegistrationPIDMetadataAll") public Response updateDatasetPIDMetadataAll() { - return response( req -> { - datasetService.findAll().forEach( ds -> { + return response(req -> { + datasetService.findAll().forEach(ds -> { try { execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(ds.getId().toString()), req)); } catch (WrappedResponse ex) { Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex); } - }); + }); return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all")); }); } - + @PUT @Path("{id}/versions/{versionId}") - public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId ){ - - if ( ! ":draft".equals(versionId) ) { - return error( Response.Status.BAD_REQUEST, "Only the :draft version can be updated"); + public Response updateDraftVersion(String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) { + + if (!":draft".equals(versionId)) { + return error(Response.Status.BAD_REQUEST, "Only the :draft version can be updated"); } - - try ( StringReader rdr = new StringReader(jsonBody) ) { + + try (StringReader rdr = new StringReader(jsonBody)) { DataverseRequest req = createDataverseRequest(findUserOrDie()); Dataset ds = findDatasetOrDie(id); JsonObject json = Json.createReader(rdr).readObject(); DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json); - + // clear possibly stale fields from the incoming dataset version. // creation and modification dates are updated by the commands. incomingVersion.setId(null); @@ -620,18 +624,18 @@ public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, incomingVersion.setDataset(ds); incomingVersion.setCreateTime(null); incomingVersion.setLastUpdateTime(null); - - if (!incomingVersion.getFileMetadatas().isEmpty()){ - return error( Response.Status.BAD_REQUEST, "You may not add files via this api."); + + if (!incomingVersion.getFileMetadatas().isEmpty()) { + return error(Response.Status.BAD_REQUEST, "You may not add files via this api."); } - + boolean updateDraft = ds.getLatestVersion().isDraft(); - + DatasetVersion managedVersion; - if ( updateDraft ) { + if (updateDraft) { final DatasetVersion editVersion = ds.getEditVersion(); editVersion.setDatasetFields(incomingVersion.getDatasetFields()); - editVersion.setTermsOfUseAndAccess( incomingVersion.getTermsOfUseAndAccess() ); + editVersion.setTermsOfUseAndAccess(incomingVersion.getTermsOfUseAndAccess()); Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); managedVersion = managedDataset.getEditVersion(); } else { @@ -640,18 +644,18 @@ public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, // DatasetVersion managedVersion = execCommand( updateDraft // ? 
new UpdateDatasetVersionCommand(req, incomingVersion) // : new CreateDatasetVersionCommand(req, ds, incomingVersion)); - return ok( json(managedVersion) ); - + return ok(json(managedVersion)); + } catch (JsonParseException ex) { logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex); - return error( Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage() ); - + return error(Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage()); + } catch (WrappedResponse ex) { return ex.getResponse(); - + } } - + @PUT @Path("{id}/deleteMetadata") public Response deleteVersionMetadata(String jsonBody, @PathParam("id") String id) throws WrappedResponse { @@ -689,7 +693,7 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav boolean found = false; for (DatasetField dsf : dsv.getDatasetFields()) { if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) { - if (dsf.getDatasetFieldType().isAllowMultiples()) { + if (dsf.getDatasetFieldType().isAllowMultiples()) { if (updateField.getDatasetFieldType().isControlledVocabulary()) { if (dsf.getDatasetFieldType().isAllowMultiples()) { for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) { @@ -754,7 +758,7 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav datasetFieldCompoundValueItemsToRemove.forEach((remove) -> { dsf.getDatasetFieldCompoundValues().remove(remove); }); - if (!found) { + if (!found) { logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found."); return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found."); } @@ -769,17 +773,16 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav break; } } - if (!found){ + if (!found) { String displayValue = !updateField.getDisplayValue().isEmpty() ? updateField.getDisplayValue() : updateField.getCompoundDisplayValue(); - logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." ); - return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." ); + logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found."); + return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found."); } - } + } - boolean updateDraft = ds.getLatestVersion().isDraft(); - DatasetVersion managedVersion = updateDraft + DatasetVersion managedVersion = updateDraft ? 
execCommand(new UpdateDatasetVersionCommand(ds, req)).getEditVersion() : execCommand(new CreateDatasetVersionCommand(req, ds, dsv)); return ok(json(managedVersion)); @@ -793,24 +796,24 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav return ex.getResponse(); } - + } - - private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){ + + private String getCompoundDisplayValue(DatasetFieldCompoundValue dscv) { String returnString = ""; - for (DatasetField dsf : dscv.getChildDatasetFields()) { - for (String value : dsf.getValues()) { - if (!(value == null)) { - returnString += (returnString.isEmpty() ? "" : "; ") + value.trim(); - } + for (DatasetField dsf : dscv.getChildDatasetFields()) { + for (String value : dsf.getValues()) { + if (!(value == null)) { + returnString += (returnString.isEmpty() ? "" : "; ") + value.trim(); } } + } return returnString; } - + @PUT @Path("{id}/editMetadata") - public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) throws WrappedResponse{ + public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) throws WrappedResponse { Boolean replaceData = replace != null; @@ -818,26 +821,26 @@ public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, return processDatasetUpdate(jsonBody, id, req, replaceData); } - - - private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData){ + + + private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData) { try (StringReader rdr = new StringReader(jsonBody)) { - + Dataset ds = findDatasetOrDie(id); JsonObject json = Json.createReader(rdr).readObject(); DatasetVersion dsv = ds.getEditVersion(); - + List fields = new LinkedList<>(); - DatasetField singleField = null; - + DatasetField singleField = null; + JsonArray fieldsJson = json.getJsonArray("fields"); - if( fieldsJson == null ){ - singleField = jsonParser().parseField(json, Boolean.FALSE); + if (fieldsJson == null) { + singleField = jsonParser().parseField(json, Boolean.FALSE); fields.add(singleField); - } else{ + } else { fields = jsonParser().parseMultipleFields(json); } - + String valdationErrors = validateDatasetFieldValues(fields); @@ -848,8 +851,8 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque dsv.setVersionState(DatasetVersion.VersionState.DRAFT); - //loop through the update fields - // and compare to the version fields + //loop through the update fields + // and compare to the version fields //if exist add/replace values //if not add entire dsf for (DatasetField updateField : fields) { @@ -947,7 +950,7 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque } } - + private String validateDatasetFieldValues(List fields) { StringBuilder error = new StringBuilder(); @@ -965,14 +968,14 @@ private String validateDatasetFieldValues(List fields) { } return ""; } - + /** * @deprecated This was shipped as a GET but should have been a POST, see https://github.com/IQSS/dataverse/issues/2431 */ @GET @Path("{id}/actions/:publish") @Deprecated - public Response publishDataseUsingGetDeprecated( @PathParam("id") String id, @QueryParam("type") String type ) { + public Response publishDataseUsingGetDeprecated(@PathParam("id") String id, @QueryParam("type") String type) { logger.info("publishDataseUsingGetDeprecated called on id " + id + ". 
Encourage use of POST rather than GET, which is deprecated."); return publishDataset(id, type); } @@ -984,10 +987,10 @@ public Response publishDataset(@PathParam("id") String id, @QueryParam("type") S if (type == null) { return error(Response.Status.BAD_REQUEST, "Missing 'type' parameter (either 'major','minor', or 'updatecurrent')."); } - boolean updateCurrent=false; + boolean updateCurrent = false; AuthenticatedUser user = findAuthenticatedUserOrDie(); type = type.toLowerCase(); - boolean isMinor=false; + boolean isMinor = false; switch (type) { case "minor": isMinor = true; @@ -995,15 +998,15 @@ public Response publishDataset(@PathParam("id") String id, @QueryParam("type") S case "major": isMinor = false; break; - case "updatecurrent": - if(user.isSuperuser()) { - updateCurrent=true; - } else { - return error(Response.Status.FORBIDDEN, "Only superusers can update the current version"); - } - break; + case "updatecurrent": + if (user.isSuperuser()) { + updateCurrent = true; + } else { + return error(Response.Status.FORBIDDEN, "Only superusers can update the current version"); + } + break; default: - return error(Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major', 'minor', or 'updatecurrent'."); + return error(Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major', 'minor', or 'updatecurrent'."); } Dataset ds = findDatasetOrDie(id); @@ -1064,21 +1067,21 @@ public Response publishDataset(@PathParam("id") String id, @QueryParam("type") S .build(); } } else { - PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds, + PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds, createDataverseRequest(user), - isMinor)); - return res.isWorkflow() ? accepted(json(res.getDataset())) : ok(json(res.getDataset())); + isMinor)); + return res.isWorkflow() ? 
accepted(json(res.getDataset())) : ok(json(res.getDataset())); } } catch (WrappedResponse ex) { return ex.getResponse(); } } - + @POST @Path("{id}/move/{targetDataverseAlias}") public Response moveDataset(@PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) { try { - User u = findUserOrDie(); + User u = findUserOrDie(); Dataset ds = findDatasetOrDie(id); Dataverse target = dataverseService.findByAlias(targetDataverseAlias); if (target == null) { @@ -1097,32 +1100,32 @@ public Response moveDataset(@PathParam("id") String id, @PathParam("targetDatave } } } - + @PUT - @Path("{linkedDatasetId}/link/{linkingDataverseAlias}") - public Response linkDataset(@PathParam("linkedDatasetId") String linkedDatasetId, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) { - try{ - User u = findUserOrDie(); + @Path("{linkedDatasetId}/link/{linkingDataverseAlias}") + public Response linkDataset(@PathParam("linkedDatasetId") String linkedDatasetId, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) { + try { + User u = findUserOrDie(); Dataset linked = findDatasetOrDie(linkedDatasetId); Dataverse linking = findDataverseOrDie(linkingDataverseAlias); - if (linked == null){ + if (linked == null) { return error(Response.Status.BAD_REQUEST, "Linked Dataset not found."); - } - if (linking == null){ + } + if (linking == null) { return error(Response.Status.BAD_REQUEST, "Linking Dataverse not found."); - } + } execCommand(new LinkDatasetCommand( createDataverseRequest(u), linking, linked - )); + )); return ok("Dataset " + linked.getId() + " linked successfully to " + linking.getAlias()); } catch (WrappedResponse ex) { return ex.getResponse(); } } - + @GET @Path("{id}/links") - public Response getLinks(@PathParam("id") String idSupplied ) { + public Response getLinks(@PathParam("id") String idSupplied) { try { User u = findUserOrDie(); if (!u.isSuperuser()) { @@ -1146,8 +1149,8 @@ public Response getLinks(@PathParam("id") String idSupplied ) { /** * Add a given assignment to a given user or group - * @param ra role assignment DTO - * @param id dataset id + * @param ra role assignment DTO + * @param id dataset id * @param apiKey */ @POST @@ -1155,12 +1158,12 @@ public Response getLinks(@PathParam("id") String idSupplied ) { public Response createAssignment(RoleAssignmentDTO ra, @PathParam("identifier") String id, @QueryParam("key") String apiKey) { try { Dataset dataset = findDatasetOrDie(id); - + RoleAssignee assignee = findAssignee(ra.getAssignee()); if (assignee == null) { return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.not.found.error")); - } - + } + DataverseRole theRole; Dataverse dv = dataset.getOwner(); theRole = null; @@ -1188,7 +1191,7 @@ public Response createAssignment(RoleAssignmentDTO ra, @PathParam("identifier") } } - + @DELETE @Path("{identifier}/assignments/{id}") public Response deleteAssignment(@PathParam("id") long assignmentId, @PathParam("identifier") String dsId) { @@ -1211,26 +1214,26 @@ public Response deleteAssignment(@PathParam("id") long assignmentId, @PathParam( @GET @Path("{identifier}/assignments") public Response getAssignments(@PathParam("identifier") String id) { - return response( req -> - ok( execCommand( - new ListRoleAssignments(req, findDatasetOrDie(id))) - .stream().map(ra->json(ra)).collect(toJsonArray())) ); + return response(req -> + ok(execCommand( + new ListRoleAssignments(req, findDatasetOrDie(id))) + 
.stream().map(ra -> json(ra)).collect(toJsonArray()))); } @GET @Path("{id}/privateUrl") public Response getPrivateUrlData(@PathParam("id") String idSupplied) { - return response( req -> { + return response(req -> { PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied))); - return (privateUrl != null) ? ok(json(privateUrl)) - : error(Response.Status.NOT_FOUND, "Private URL not found."); + return (privateUrl != null) ? ok(json(privateUrl)) + : error(Response.Status.NOT_FOUND, "Private URL not found."); }); } @POST @Path("{id}/privateUrl") public Response createPrivateUrl(@PathParam("id") String idSupplied) { - return response( req -> + return response(req -> ok(json(execCommand( new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied)))))); } @@ -1238,7 +1241,7 @@ public Response createPrivateUrl(@PathParam("id") String idSupplied) { @DELETE @Path("{id}/privateUrl") public Response deletePrivateUrl(@PathParam("id") String idSupplied) { - return response( req -> { + return response(req -> { Dataset dataset = findDatasetOrDie(idSupplied); PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset)); if (privateUrl != null) { @@ -1292,7 +1295,7 @@ public Response getDatasetThumbnail(@PathParam("id") String idSupplied) { try { Dataset dataset = findDatasetOrDie(idSupplied); InputStream is = DatasetUtil.getThumbnailAsInputStream(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); - if(is == null) { + if (is == null) { return notFound("Thumbnail not available"); } return Response.ok(is).build(); @@ -1349,11 +1352,11 @@ public Response getRsync(@PathParam("identifier") String id) { dataset = findDatasetOrDie(id); AuthenticatedUser user = findAuthenticatedUserOrDie(); ScriptRequestResponse scriptRequestResponse = execCommand(new RequestRsyncScriptCommand(createDataverseRequest(user), dataset)); - + DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded"); if (lock == null) { logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId()); - return error(Response.Status.FORBIDDEN, "Failed to lock the dataset (dataset id="+dataset.getId()+")"); + return error(Response.Status.FORBIDDEN, "Failed to lock the dataset (dataset id=" + dataset.getId() + ")"); } return ok(scriptRequestResponse.getScript(), MediaType.valueOf(MediaType.TEXT_PLAIN)); } catch (WrappedResponse wr) { @@ -1362,15 +1365,15 @@ public Response getRsync(@PathParam("identifier") String id) { return error(Response.Status.INTERNAL_SERVER_ERROR, "Something went wrong attempting to download rsync script: " + EjbUtil.ejbExceptionToString(ex)); } } - + /** - * This api endpoint triggers the creation of a "package" file in a dataset - * after that package has been moved onto the same filesystem via the Data Capture Module. + * This api endpoint triggers the creation of a "package" file in a dataset + * after that package has been moved onto the same filesystem via the Data Capture Module. * The package is really just a way that Dataverse interprets a folder created by DCM, seeing it as just one file. * The "package" can be downloaded over RSAL. - * + *

* This endpoint currently supports both posix file storage and AWS s3 storage in Dataverse, and acts accordingly depending on which one is active. - + 

* The initial design of the DCM/Dataverse interaction was not to use packages, but to allow import of all individual files natively into Dataverse. * But due to the possibly immense number of files (millions) the package approach was taken. * This is relevant because the posix ("file") code contains many remnants of that development work.
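receiveChecksumValidationResults() below reads the DCM callback body as JSON and, once the status indicates the checksums validated, pulls uploadFolder and totalSize out of it. A minimal parsing sketch with javax.json; only those two keys and the "validation passed" status string appear in this patch, so the sample body is an assumption:

    import java.io.StringReader;
    import javax.json.Json;
    import javax.json.JsonObject;

    public class DcmPayloadExample {
        public static void main(String[] args) {
            // Assumed shape of a DCM "validation passed" callback body.
            String body = "{\"status\":\"validation passed\",\"uploadFolder\":\"DNXV2H\",\"totalSize\":1234567}";
            JsonObject jsonFromDcm = Json.createReader(new StringReader(body)).readObject();
            if ("validation passed".equals(jsonFromDcm.getString("status"))) {
                String uploadFolder = jsonFromDcm.getString("uploadFolder"); // folder DCM placed the files in
                int totalSize = jsonFromDcm.getInt("totalSize");             // package size in bytes
                System.out.println(uploadFolder + " / " + totalSize);
            }
        }
    }

@@ -1394,13 +1397,13 @@ public Response receiveChecksumValidationResults(@PathParam("identifier") String try { Dataset dataset = findDatasetOrDie(id); if ("validation passed".equals(statusMessageFromDcm)) { - logger.log(Level.INFO, "Checksum Validation passed for DCM."); + logger.log(Level.INFO, "Checksum Validation passed for DCM."); String storageDriver = dataset.getDataverseContext().getEffectiveStorageDriverId(); String uploadFolder = jsonFromDcm.getString("uploadFolder"); int totalSize = jsonFromDcm.getInt("totalSize"); String storageDriverType = System.getProperty("dataverse.file." + storageDriver + ".type"); - + if (storageDriverType.equals("file")) { logger.log(Level.INFO, "File storage driver used for (dataset id={0})", dataset.getId()); @@ -1417,15 +1420,15 @@ public Response receiveChecksumValidationResults(@PathParam("identifier") String String message = wr.getMessage(); return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to put the files into Dataverse. Message was '" + message + "'."); } - } else if(storageDriverType.equals("s3")) { - + } else if (storageDriverType.equals("s3")) { + logger.log(Level.INFO, "S3 storage driver used for DCM (dataset id={0})", dataset.getId()); try { - + //Where the lifting is actually done, moving the s3 files over and having dataverse know of the existence of the package s3PackageImporter.copyFromS3(dataset, uploadFolder); DataFile packageFile = s3PackageImporter.createPackageDataFile(dataset, uploadFolder, new Long(totalSize)); - + if (packageFile == null) { logger.log(Level.SEVERE, "S3 File package import failed."); return error(Response.Status.INTERNAL_SERVER_ERROR, "S3 File package import failed."); @@ -1437,7 +1440,7 @@ public Response receiveChecksumValidationResults(@PathParam("identifier") String datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.DcmUpload); dataset.removeLock(dcmLock); } - + // update version using the command engine to enforce user permissions and constraints if (dataset.getVersions().size() == 1 && dataset.getLatestVersion().getVersionState() == DatasetVersion.VersionState.DRAFT) { try { @@ -1455,11 +1458,11 @@ public Response receiveChecksumValidationResults(@PathParam("identifier") String JsonObjectBuilder job = Json.createObjectBuilder(); return ok(job); - - } catch (IOException e) { + + } catch (IOException e) { String message = e.getMessage(); return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse.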
Message was '" + message + "'."); - } + } } else { return error(Response.Status.INTERNAL_SERVER_ERROR, "Invalid storage driver in Dataverse, not compatible with dcm"); } @@ -1482,7 +1485,7 @@ public Response receiveChecksumValidationResults(@PathParam("identifier") String return ex.getResponse(); } } - + @POST @Path("{id}/submitForReview") @@ -1490,9 +1493,9 @@ public Response submitForReview(@PathParam("id") String idSupplied) { try { Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied))); JsonObjectBuilder result = Json.createObjectBuilder(); - + boolean inReview = updatedDataset.isLockedFor(DatasetLock.Reason.InReview); - + result.add("inReview", inReview); result.add("message", "Dataset id " + updatedDataset.getId() + " has been submitted for review."); return ok(result); @@ -1504,7 +1507,7 @@ public Response submitForReview(@PathParam("id") String idSupplied) { @POST @Path("{id}/returnToAuthor") public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBody) { - + if (jsonBody == null || jsonBody.isEmpty()) { return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn)."); } @@ -1512,14 +1515,14 @@ public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBo JsonObject json = Json.createReader(rdr).readObject(); try { Dataset dataset = findDatasetOrDie(idSupplied); - String reasonForReturn = null; + String reasonForReturn = null; reasonForReturn = json.getString("reasonForReturn"); // TODO: Once we add a box for the curator to type into, pass the reason for return to the ReturnDatasetToAuthorCommand and delete this check and call to setReturnReason on the API side. 
if (reasonForReturn == null || reasonForReturn.isEmpty()) { return error(Response.Status.BAD_REQUEST, "You must enter a reason for returning a dataset to the author(s)."); } AuthenticatedUser authenticatedUser = findAuthenticatedUserOrDie(); - Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn )); + Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn)); JsonObjectBuilder result = Json.createObjectBuilder(); result.add("inReview", false); @@ -1530,237 +1533,237 @@ public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBo } } -@GET -@Path("{id}/uploadsid") -@Deprecated -public Response getUploadUrl(@PathParam("id") String idSupplied) { - try { - Dataset dataset = findDatasetOrDie(idSupplied); - - boolean canUpdateDataset = false; - try { - canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset).canIssue(UpdateDatasetVersionCommand.class); - } catch (WrappedResponse ex) { - logger.info("Exception thrown while trying to figure out permissions while getting upload URL for dataset id " + dataset.getId() + ": " + ex.getLocalizedMessage()); - throw ex; - } - if (!canUpdateDataset) { - return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset."); - } - S3AccessIO s3io = FileUtil.getS3AccessForDirectUpload(dataset); - if(s3io == null) { - return error(Response.Status.NOT_FOUND,"Direct upload not supported for files in this dataset: " + dataset.getId()); - } - String url = null; - String storageIdentifier = null; - try { - url = s3io.generateTemporaryS3UploadUrl(); - storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation()); - } catch (IOException io) { - logger.warning(io.getMessage()); - throw new WrappedResponse(io, error( Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request")); - } - - JsonObjectBuilder response = Json.createObjectBuilder() - .add("url", url) - .add("storageIdentifier", storageIdentifier ); - return ok(response); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } -} + @GET + @Path("{id}/uploadsid") + @Deprecated + public Response getUploadUrl(@PathParam("id") String idSupplied) { + try { + Dataset dataset = findDatasetOrDie(idSupplied); -@GET -@Path("{id}/uploadurls") -public Response getMPUploadUrls(@PathParam("id") String idSupplied, @QueryParam("size") long fileSize) { - try { - Dataset dataset = findDatasetOrDie(idSupplied); - - boolean canUpdateDataset = false; - try { - canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset) - .canIssue(UpdateDatasetVersionCommand.class); - } catch (WrappedResponse ex) { - logger.info( - "Exception thrown while trying to figure out permissions while getting upload URLs for dataset id " - + dataset.getId() + ": " + ex.getLocalizedMessage()); - throw ex; - } - if (!canUpdateDataset) { - return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset."); - } - S3AccessIO s3io = FileUtil.getS3AccessForDirectUpload(dataset); - if (s3io == null) { - return error(Response.Status.NOT_FOUND, - "Direct upload not supported for files in this dataset: " + dataset.getId()); - } - JsonObjectBuilder response = null; - String storageIdentifier = null; - try { - storageIdentifier = 
FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation()); - response = s3io.generateTemporaryS3UploadUrls(dataset.getGlobalId().asString(), storageIdentifier, fileSize); - - } catch (IOException io) { - logger.warning(io.getMessage()); - throw new WrappedResponse(io, - error(Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request")); - } - - response.add("storageIdentifier", storageIdentifier); - return ok(response); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } -} + boolean canUpdateDataset = false; + try { + canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset).canIssue(UpdateDatasetVersionCommand.class); + } catch (WrappedResponse ex) { + logger.info("Exception thrown while trying to figure out permissions while getting upload URL for dataset id " + dataset.getId() + ": " + ex.getLocalizedMessage()); + throw ex; + } + if (!canUpdateDataset) { + return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset."); + } + S3AccessIO s3io = FileUtil.getS3AccessForDirectUpload(dataset); + if (s3io == null) { + return error(Response.Status.NOT_FOUND, "Direct upload not supported for files in this dataset: " + dataset.getId()); + } + String url = null; + String storageIdentifier = null; + try { + url = s3io.generateTemporaryS3UploadUrl(); + storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation()); + } catch (IOException io) { + logger.warning(io.getMessage()); + throw new WrappedResponse(io, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request")); + } -@DELETE -@Path("mpupload") -public Response abortMPUpload(@QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) { - try { - Dataset dataset = datasetSvc.findByGlobalId(idSupplied); - //Allow the API to be used within a session (e.g. for direct upload in the UI) - User user =session.getUser(); - if (!user.isAuthenticated()) { - try { - user = findAuthenticatedUserOrDie(); - } catch (WrappedResponse ex) { - logger.info( - "Exception thrown while trying to figure out permissions while getting aborting upload for dataset id " - + dataset.getId() + ": " + ex.getLocalizedMessage()); - throw ex; - } - } - boolean allowed = false; - if (dataset != null) { - allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset) - .canIssue(UpdateDatasetVersionCommand.class); - } else { - /* - * The only legitimate case where a global id won't correspond to a dataset is - * for uploads during creation. Given that this call will still fail unless all - * three parameters correspond to an active multipart upload, it should be safe - * to allow the attempt for an authenticated user. If there are concerns about - * permissions, one could check with the current design that the user is allowed - * to create datasets in some dataverse that is configured to use the storage - * provider specified in the storageidentifier, but testing for the ability to - * create a dataset in a specific dataverse would requiring changing the design - * somehow (e.g. adding the ownerId to this call). 
- */ - allowed = true; - } - if (!allowed) { - return error(Response.Status.FORBIDDEN, - "You are not permitted to abort file uploads with the supplied parameters."); - } - try { - S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId); - } catch (IOException io) { - logger.warning("Multipart upload abort failed for uploadId: " + uploadId + " storageidentifier=" - + storageidentifier + " dataset Id: " + dataset.getId()); - logger.warning(io.getMessage()); - throw new WrappedResponse(io, - error(Response.Status.INTERNAL_SERVER_ERROR, "Could not abort multipart upload")); - } - return Response.noContent().build(); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } -} + JsonObjectBuilder response = Json.createObjectBuilder() + .add("url", url) + .add("storageIdentifier", storageIdentifier); + return ok(response); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } -@PUT -@Path("mpupload") -public Response completeMPUpload(String partETagBody, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) { - try { - Dataset dataset = datasetSvc.findByGlobalId(idSupplied); - //Allow the API to be used within a session (e.g. for direct upload in the UI) - User user =session.getUser(); - if (!user.isAuthenticated()) { - try { - user=findAuthenticatedUserOrDie(); - } catch (WrappedResponse ex) { - logger.info( - "Exception thrown while trying to figure out permissions to complete mpupload for dataset id " - + dataset.getId() + ": " + ex.getLocalizedMessage()); - throw ex; - } - } - boolean allowed = false; - if (dataset != null) { - allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset) - .canIssue(UpdateDatasetVersionCommand.class); - } else { - /* - * The only legitimate case where a global id won't correspond to a dataset is - * for uploads during creation. Given that this call will still fail unless all - * three parameters correspond to an active multipart upload, it should be safe - * to allow the attempt for an authenticated user. If there are concerns about - * permissions, one could check with the current design that the user is allowed - * to create datasets in some dataverse that is configured to use the storage - * provider specified in the storageidentifier, but testing for the ability to - * create a dataset in a specific dataverse would requiring changing the design - * somehow (e.g. adding the ownerId to this call). 
- */ - allowed = true; - } - if (!allowed) { - return error(Response.Status.FORBIDDEN, - "You are not permitted to complete file uploads with the supplied parameters."); - } - List eTagList = new ArrayList(); - logger.info("Etags: " + partETagBody); - try { - JsonReader jsonReader = Json.createReader(new StringReader(partETagBody)); - JsonObject object = jsonReader.readObject(); - jsonReader.close(); - for(String partNo : object.keySet()) { - eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo))); - } - for(PartETag et: eTagList) { - logger.info("Part: " + et.getPartNumber() + " : " + et.getETag()); - } - } catch (JsonException je) { - logger.info("Unable to parse eTags from: " + partETagBody); - throw new WrappedResponse(je, error( Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload")); - } - try { - S3AccessIO.completeMultipartUpload(idSupplied, storageidentifier, uploadId, eTagList); - } catch (IOException io) { - logger.warning("Multipart upload completion failed for uploadId: " + uploadId +" storageidentifier=" + storageidentifier + " globalId: " + idSupplied); - logger.warning(io.getMessage()); - try { - S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId); - } catch (IOException e) { - logger.severe("Also unable to abort the upload (and release the space on S3 for uploadId: " + uploadId +" storageidentifier=" + storageidentifier + " globalId: " + idSupplied); - logger.severe(io.getMessage()); - } - - throw new WrappedResponse(io, error( Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload")); - } - return ok("Multipart Upload completed"); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } -} + @GET + @Path("{id}/uploadurls") + public Response getMPUploadUrls(@PathParam("id") String idSupplied, @QueryParam("size") long fileSize) { + try { + Dataset dataset = findDatasetOrDie(idSupplied); + + boolean canUpdateDataset = false; + try { + canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset) + .canIssue(UpdateDatasetVersionCommand.class); + } catch (WrappedResponse ex) { + logger.info( + "Exception thrown while trying to figure out permissions while getting upload URLs for dataset id " + + dataset.getId() + ": " + ex.getLocalizedMessage()); + throw ex; + } + if (!canUpdateDataset) { + return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset."); + } + S3AccessIO s3io = FileUtil.getS3AccessForDirectUpload(dataset); + if (s3io == null) { + return error(Response.Status.NOT_FOUND, + "Direct upload not supported for files in this dataset: " + dataset.getId()); + } + JsonObjectBuilder response = null; + String storageIdentifier = null; + try { + storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation()); + response = s3io.generateTemporaryS3UploadUrls(dataset.getGlobalId().asString(), storageIdentifier, fileSize); + + } catch (IOException io) { + logger.warning(io.getMessage()); + throw new WrappedResponse(io, + error(Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request")); + } + + response.add("storageIdentifier", storageIdentifier); + return ok(response); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + + @DELETE + @Path("mpupload") + public Response abortMPUpload(@QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) { + try { + Dataset dataset 
= datasetSvc.findByGlobalId(idSupplied); + //Allow the API to be used within a session (e.g. for direct upload in the UI) + User user = session.getUser(); + if (!user.isAuthenticated()) { + try { + user = findAuthenticatedUserOrDie(); + } catch (WrappedResponse ex) { + logger.info( + "Exception thrown while trying to figure out permissions while aborting upload for dataset global id " + + idSupplied + ": " + ex.getLocalizedMessage()); + throw ex; + } + } + boolean allowed = false; + if (dataset != null) { + allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset) + .canIssue(UpdateDatasetVersionCommand.class); + } else { + /* + * The only legitimate case where a global id won't correspond to a dataset is + * for uploads during creation. Given that this call will still fail unless all + * three parameters correspond to an active multipart upload, it should be safe + * to allow the attempt for an authenticated user. If there are concerns about + * permissions, one could check with the current design that the user is allowed + * to create datasets in some dataverse that is configured to use the storage + * provider specified in the storageidentifier, but testing for the ability to + * create a dataset in a specific dataverse would require changing the design + * somehow (e.g. adding the ownerId to this call). + */ + allowed = true; + } + if (!allowed) { + return error(Response.Status.FORBIDDEN, + "You are not permitted to abort file uploads with the supplied parameters."); + } + try { + S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId); + } catch (IOException io) { + logger.warning("Multipart upload abort failed for uploadId: " + uploadId + " storageidentifier=" + + storageidentifier + " globalId: " + idSupplied); + logger.warning(io.getMessage()); + throw new WrappedResponse(io, + error(Response.Status.INTERNAL_SERVER_ERROR, "Could not abort multipart upload")); + } + return Response.noContent().build(); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + + @PUT + @Path("mpupload") + public Response completeMPUpload(String partETagBody, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) { + try { + Dataset dataset = datasetSvc.findByGlobalId(idSupplied); + //Allow the API to be used within a session (e.g. for direct upload in the UI) + User user = session.getUser(); + if (!user.isAuthenticated()) { + try { + user = findAuthenticatedUserOrDie(); + } catch (WrappedResponse ex) { + logger.info( + "Exception thrown while trying to figure out permissions to complete mpupload for dataset global id " + + idSupplied + ": " + ex.getLocalizedMessage()); + throw ex; + } + } + boolean allowed = false; + if (dataset != null) { + allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset) + .canIssue(UpdateDatasetVersionCommand.class); + } else { + /* + * The only legitimate case where a global id won't correspond to a dataset is + * for uploads during creation. Given that this call will still fail unless all + * three parameters correspond to an active multipart upload, it should be safe + * to allow the attempt for an authenticated user.
If there are concerns about + * permissions, one could check with the current design that the user is allowed + * to create datasets in some dataverse that is configured to use the storage + * provider specified in the storageidentifier, but testing for the ability to + * create a dataset in a specific dataverse would require changing the design + * somehow (e.g. adding the ownerId to this call). + */ + allowed = true; + } + if (!allowed) { + return error(Response.Status.FORBIDDEN, + "You are not permitted to complete file uploads with the supplied parameters."); + } + List eTagList = new ArrayList(); + logger.info("Etags: " + partETagBody); + try { + JsonReader jsonReader = Json.createReader(new StringReader(partETagBody)); + JsonObject object = jsonReader.readObject(); + jsonReader.close(); + for (String partNo : object.keySet()) { + eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo))); + } + for (PartETag et : eTagList) { + logger.info("Part: " + et.getPartNumber() + " : " + et.getETag()); + } + } catch (JsonException je) { + logger.info("Unable to parse eTags from: " + partETagBody); + throw new WrappedResponse(je, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload")); + } + try { + S3AccessIO.completeMultipartUpload(idSupplied, storageidentifier, uploadId, eTagList); + } catch (IOException io) { + logger.warning("Multipart upload completion failed for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied); + logger.warning(io.getMessage()); + try { + S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId); + } catch (IOException e) { + logger.severe("Also unable to abort the upload (and release the space on S3) for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied); + logger.severe(e.getMessage()); + } + + throw new WrappedResponse(io, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload")); + } + return ok("Multipart Upload completed"); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } /** * Add a File to an existing Dataset - * + * * @param idSupplied * @param jsonData * @param fileInputStream * @param contentDispositionHeader * @param formDataBodyPart - * @return + * @return */ @POST @Path("{id}/add") @Consumes(MediaType.MULTIPART_FORM_DATA) public Response addFileToDataset(@PathParam("id") String idSupplied, - @FormDataParam("jsonData") String jsonData, - @FormDataParam("file") InputStream fileInputStream, - @FormDataParam("file") FormDataContentDisposition contentDispositionHeader, - @FormDataParam("file") final FormDataBodyPart formDataBodyPart - ){ + @FormDataParam("jsonData") String jsonData, + @FormDataParam("file") InputStream fileInputStream, + @FormDataParam("file") FormDataContentDisposition contentDispositionHeader, + @FormDataParam("file") final FormDataBodyPart formDataBodyPart + ) { if (!systemConfig.isHTTPUpload()) { return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); @@ -1775,27 +1778,27 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, } catch (WrappedResponse ex) { return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth") - ); + ); } - - + + // ------------------------------------- // (2) Get the Dataset Id - // + // // ------------------------------------- Dataset dataset; - + try { dataset = findDatasetOrDie(idSupplied); } catch
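For reference, the completion body parsed by completeMPUpload above is a flat JSON object mapping S3 part numbers to the ETags returned for each uploaded part. A minimal sketch of how a client might assemble that payload with javax.json; the part numbers and ETag values are invented for illustration:

    import javax.json.Json;
    import javax.json.JsonObjectBuilder;

    public class PartETagBodyExample {
        public static void main(String[] args) {
            // Keys are the S3 part numbers (as strings); values are the ETags
            // S3 returned when each part was uploaded. Both are placeholders.
            JsonObjectBuilder parts = Json.createObjectBuilder()
                    .add("1", "d41d8cd98f00b204e9800998ecf8427e")
                    .add("2", "9e107d9d372bb6826bd81d3542a419d6");
            // This string becomes partETagBody in the PUT to
            // /api/datasets/mpupload?globalid=...&storageidentifier=...&uploadid=...
            System.out.println(parts.build());
        }
    }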
(WrappedResponse wr) { - return wr.getResponse(); + return wr.getResponse(); } - + //------------------------------------ // (2a) Make sure dataset does not have package file // // -------------------------------------- - + for (DatasetVersion dv : dataset.getVersions()) { if (dv.isHasPackageFile()) { return error(Response.Status.FORBIDDEN, @@ -1807,40 +1810,40 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, // (2a) Load up optional params via JSON //--------------------------------------- OptionalFileParams optionalFileParams = null; - msgt("(api) jsonData: " + jsonData); + msgt("(api) jsonData: " + jsonData); try { optionalFileParams = new OptionalFileParams(jsonData); } catch (DataFileTagException ex) { - return error( Response.Status.BAD_REQUEST, ex.getMessage()); + return error(Response.Status.BAD_REQUEST, ex.getMessage()); } - + // ------------------------------------- // (3) Get the file name and content type // ------------------------------------- String newFilename = null; String newFileContentType = null; String newStorageIdentifier = null; - if (null == contentDispositionHeader) { - if (optionalFileParams.hasStorageIdentifier()) { - newStorageIdentifier = optionalFileParams.getStorageIdentifier(); - // ToDo - check that storageIdentifier is valid - if (optionalFileParams.hasFileName()) { - newFilename = optionalFileParams.getFileName(); - if (optionalFileParams.hasMimetype()) { - newFileContentType = optionalFileParams.getMimeType(); - } - } - } else { - return error(BAD_REQUEST, - "You must upload a file or provide a storageidentifier, filename, and mimetype."); - } - } else { - newFilename = contentDispositionHeader.getFileName(); - newFileContentType = formDataBodyPart.getMediaType().toString(); - } - - + if (null == contentDispositionHeader) { + if (optionalFileParams.hasStorageIdentifier()) { + newStorageIdentifier = optionalFileParams.getStorageIdentifier(); + // ToDo - check that storageIdentifier is valid + if (optionalFileParams.hasFileName()) { + newFilename = optionalFileParams.getFileName(); + if (optionalFileParams.hasMimetype()) { + newFileContentType = optionalFileParams.getMimeType(); + } + } + } else { + return error(BAD_REQUEST, + "You must upload a file or provide a storageidentifier, filename, and mimetype."); + } + } else { + newFilename = contentDispositionHeader.getFileName(); + newFileContentType = formDataBodyPart.getMediaType().toString(); + } + + //------------------- // (3) Create the AddReplaceFileHelper object //------------------- @@ -1848,28 +1851,28 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, DataverseRequest dvRequest2 = createDataverseRequest(authUser); AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2, - ingestService, - datasetService, - fileService, - permissionSvc, - commandEngine, - systemConfig); + ingestService, + datasetService, + fileService, + permissionSvc, + commandEngine, + systemConfig); //------------------- // (4) Run "runAddFileByDatasetId" //------------------- addFileHelper.runAddFileByDataset(dataset, - newFilename, - newFileContentType, - newStorageIdentifier, - fileInputStream, - optionalFileParams); + newFilename, + newFileContentType, + newStorageIdentifier, + fileInputStream, + optionalFileParams); - if (addFileHelper.hasError()){ + if (addFileHelper.hasError()) { return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n")); - }else{ + } else { String successMsg = 
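When no multipart file part is sent, the branch above registers a file that was already placed in storage directly, so jsonData must carry the storage identifier, file name, and MIME type itself. A minimal sketch of such a payload; the key names are assumptions based on the OptionalFileParams accessors used above, and all values are placeholders:

    import javax.json.Json;

    public class AddFileJsonDataExample {
        public static void main(String[] args) {
            // The storage identifier would come from an earlier direct-upload
            // request against the same store; this one is invented.
            String jsonData = Json.createObjectBuilder()
                    .add("storageIdentifier", "s3://demo-bucket:176e28068b0-1c3f80357c42")
                    .add("fileName", "observations.csv")
                    .add("mimeType", "text/csv")
                    .add("description", "Registered after a direct S3 upload")
                    .build().toString();
            System.out.println(jsonData);
        }
    }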
BundleUtil.getStringFromBundle("file.addreplace.success.add"); try { //msgt("as String: " + addFileHelper.getSuccessResult()); @@ -1887,7 +1890,7 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, } else { return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder()); } - + //"Look at that! You added a file! (hey hey, it may have worked)"); } catch (NoFilesException ex) { Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); @@ -1895,71 +1898,77 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, } } - + } // end: addFileToDataset - - private void msg(String m){ + private void msg(String m) { //System.out.println(m); logger.fine(m); } - private void dashes(){ + + private void dashes() { msg("----------------"); } - private void msgt(String m){ - dashes(); msg(m); dashes(); + + private void msgt(String m) { + dashes(); + msg(m); + dashes(); } - - - public static T handleVersion( String versionId, DsVersionHandler hdl ) - throws WrappedResponse { + + + public static T handleVersion(String versionId, DsVersionHandler hdl) + throws WrappedResponse { switch (versionId) { - case ":latest": return hdl.handleLatest(); - case ":draft": return hdl.handleDraft(); - case ":latest-published": return hdl.handleLatestPublished(); + case ":latest": + return hdl.handleLatest(); + case ":draft": + return hdl.handleDraft(); + case ":latest-published": + return hdl.handleLatestPublished(); default: try { String[] versions = versionId.split("\\."); switch (versions.length) { case 1: - return hdl.handleSpecific(Long.parseLong(versions[0]), (long)0.0); + return hdl.handleSpecific(Long.parseLong(versions[0]), (long) 0.0); case 2: - return hdl.handleSpecific( Long.parseLong(versions[0]), Long.parseLong(versions[1]) ); + return hdl.handleSpecific(Long.parseLong(versions[0]), Long.parseLong(versions[1])); default: - throw new WrappedResponse(error( Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'")); + throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'")); } - } catch ( NumberFormatException nfe ) { - throw new WrappedResponse( error( Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'") ); + } catch (NumberFormatException nfe) { + throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'")); } } } - - private DatasetVersion getDatasetVersionOrDie( final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers) throws WrappedResponse { - DatasetVersion dsv = execCommand( handleVersion(versionNumber, new DsVersionHandler>(){ - @Override - public Command handleLatest() { - return new GetLatestAccessibleDatasetVersionCommand(req, ds); - } + private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers) throws WrappedResponse { + DatasetVersion dsv = execCommand(handleVersion(versionNumber, new DsVersionHandler>() { - @Override - public Command handleDraft() { - return new GetDraftDatasetVersionCommand(req, ds); - } - - @Override - public Command handleSpecific(long major, long minor) { - return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor); - } + @Override + public Command handleLatest() { + return new GetLatestAccessibleDatasetVersionCommand(req, ds); + } - @Override - public Command handleLatestPublished() { - return new 
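The handleVersion dispatcher above accepts three symbolic identifiers plus one- and two-part numeric forms. A small standalone sketch that mirrors the same parsing rules, for illustration only (it is not the production handler):

    public class VersionIdExample {
        // Mirrors the dispatch in handleVersion, returning labels instead of commands.
        static String describe(String versionId) {
            switch (versionId) {
                case ":latest": return "latest accessible version";
                case ":draft": return "draft version";
                case ":latest-published": return "latest published version";
                default:
                    String[] versions = versionId.split("\\.");
                    if (versions.length == 1) {
                        return "major " + Long.parseLong(versions[0]) + ", minor 0";
                    } else if (versions.length == 2) {
                        return "major " + Long.parseLong(versions[0]) + ", minor " + Long.parseLong(versions[1]);
                    }
                    throw new IllegalArgumentException("Illegal version identifier '" + versionId + "'");
            }
        }

        public static void main(String[] args) {
            System.out.println(describe(":draft")); // draft version
            System.out.println(describe("1"));      // major 1, minor 0
            System.out.println(describe("2.1"));    // major 2, minor 1
        }
    }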
GetLatestPublishedDatasetVersionCommand(req, ds); - } - })); - if ( dsv == null || dsv.getId() == null ) { - throw new WrappedResponse( notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found") ); + @Override + public Command handleDraft() { + return new GetDraftDatasetVersionCommand(req, ds); + } + + @Override + public Command handleSpecific(long major, long minor) { + return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor); + } + + @Override + public Command handleLatestPublished() { + return new GetLatestPublishedDatasetVersionCommand(req, ds); + } + })); + if (dsv == null || dsv.getId() == null) { + throw new WrappedResponse(notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found")); } if (dsv.isReleased()) { MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, ds); @@ -1967,7 +1976,7 @@ public Command handleLatestPublished() { } return dsv; } - + @GET @Path("{identifier}/locks") public Response getLocks(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) { @@ -1975,26 +1984,26 @@ public Response getLocks(@PathParam("identifier") String id, @QueryParam("type") Dataset dataset = null; try { dataset = findDatasetOrDie(id); - Set locks; + Set locks; if (lockType == null) { locks = dataset.getLocks(); } else { // request for a specific type lock: DatasetLock lock = dataset.getLockFor(lockType); - locks = new HashSet<>(); + locks = new HashSet<>(); if (lock != null) { locks.add(lock); } } - + return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray())); } catch (WrappedResponse wr) { return wr.getResponse(); - } - } - + } + } + @DELETE @Path("{identifier}/locks") public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) { @@ -2006,7 +2015,7 @@ public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("typ return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only."); } Dataset dataset = findDatasetOrDie(id); - + if (lockType == null) { Set locks = new HashSet<>(); for (DatasetLock lock : dataset.getLocks()) { @@ -2018,7 +2027,7 @@ public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("typ // refresh the dataset: dataset = findDatasetOrDie(id); } - // kick of dataset reindexing, in case the locks removed + // kick of dataset reindexing, in case the locks removed // affected the search card: try { indexService.indexDataset(dataset, true); @@ -2038,7 +2047,7 @@ public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("typ execCommand(new RemoveLockCommand(req, dataset, lock.getReason())); // refresh the dataset: dataset = findDatasetOrDie(id); - // ... and kick of dataset reindexing, in case the lock removed + // ... 
and kick of dataset reindexing, in case the lock removed // affected the search card: try { indexService.indexDataset(dataset, true); @@ -2058,7 +2067,7 @@ public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("typ }); } - + @POST @Path("{identifier}/lock/{type}") public Response lockDataset(@PathParam("identifier") String id, @PathParam("type") DatasetLock.Reason lockType) { @@ -2067,7 +2076,7 @@ public Response lockDataset(@PathParam("identifier") String id, @PathParam("type AuthenticatedUser user = findAuthenticatedUserOrDie(); if (!user.isSuperuser()) { return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only."); - } + } Dataset dataset = findDatasetOrDie(id); DatasetLock lock = dataset.getLockFor(lockType); if (lock != null) { @@ -2094,16 +2103,16 @@ public Response lockDataset(@PathParam("identifier") String id, @PathParam("type }); } - + @GET @Path("{id}/makeDataCount/citations") public Response getMakeDataCountCitations(@PathParam("id") String idSupplied) { - + try { Dataset dataset = findDatasetOrDie(idSupplied); JsonArrayBuilder datasetsCitations = Json.createArrayBuilder(); List externalCitations = datasetExternalCitationsService.getDatasetExternalCitationsByDataset(dataset); - for (DatasetExternalCitations citation : externalCitations ){ + for (DatasetExternalCitations citation : externalCitations) { JsonObjectBuilder candidateObj = Json.createObjectBuilder(); /** * In the future we can imagine storing and presenting more @@ -2114,9 +2123,9 @@ public Response getMakeDataCountCitations(@PathParam("id") String idSupplied) { */ candidateObj.add("citationUrl", citation.getCitedByUrl()); datasetsCitations.add(candidateObj); - } - return ok(datasetsCitations); - + } + return ok(datasetsCitations); + } catch (WrappedResponse wr) { return wr.getResponse(); } @@ -2129,23 +2138,23 @@ public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSup String nullCurrentMonth = null; return getMakeDataCountMetric(idSupplied, metricSupplied, nullCurrentMonth, country); } - + @GET @Path("{identifier}/storagesize") - public Response getStorageSize(@PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - + public Response getStorageSize(@PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached, + @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { + return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"), - execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached,GetDatasetStorageSizeCommand.Mode.STORAGE, null))))); + execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null))))); } - + @GET @Path("{identifier}/versions/{versionId}/downloadsize") - public Response getDownloadSize(@PathParam("identifier") String dvIdtf, @PathParam("versionId") String version, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - + public Response getDownloadSize(@PathParam("identifier") String dvIdtf, @PathParam("versionId") String version, + @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { + return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), - execCommand(new 
GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, getDatasetVersionOrDie(req, version , findDatasetOrDie(dvIdtf), uriInfo, headers)))))); + execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers)))))); } @GET @@ -2247,29 +2256,29 @@ public Response getMakeDataCountMetric(@PathParam("id") String idSupplied, @Path return wr.getResponse(); } } - + @GET @Path("{identifier}/storageDriver") public Response getFileStore(@PathParam("identifier") String dvIdtf, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - - Dataset dataset; - + @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { + + Dataset dataset; + try { dataset = findDatasetOrDie(dvIdtf); } catch (WrappedResponse ex) { return error(Response.Status.NOT_FOUND, "No such dataset"); } - + return response(req -> ok(dataset.getEffectiveStorageDriverId())); } - + @PUT @Path("{identifier}/storageDriver") public Response setFileStore(@PathParam("identifier") String dvIdtf, - String storageDriverLabel, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - + String storageDriverLabel, + @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { + // Superuser-only: AuthenticatedUser user; try { @@ -2279,17 +2288,17 @@ public Response setFileStore(@PathParam("identifier") String dvIdtf, } if (!user.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Superusers only."); - } - - Dataset dataset; - + } + + Dataset dataset; + try { dataset = findDatasetOrDie(dvIdtf); } catch (WrappedResponse ex) { return error(Response.Status.NOT_FOUND, "No such dataset"); } - - // We don't want to allow setting this to a store id that does not exist: + + // We don't want to allow setting this to a store id that does not exist: for (Entry store : DataAccess.getStorageDriverLabels().entrySet()) { if (store.getKey().equals(storageDriverLabel)) { dataset.setStorageDriverId(store.getValue()); @@ -2297,15 +2306,15 @@ public Response setFileStore(@PathParam("identifier") String dvIdtf, return ok("Storage driver set to: " + store.getKey() + "/" + store.getValue()); } } - return error(Response.Status.BAD_REQUEST, - "No Storage Driver found for : " + storageDriverLabel); + return error(Response.Status.BAD_REQUEST, + "No Storage Driver found for : " + storageDriverLabel); } - + @DELETE @Path("{identifier}/storageDriver") public Response resetFileStore(@PathParam("identifier") String dvIdtf, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - + @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { + // Superuser-only: AuthenticatedUser user; try { @@ -2315,29 +2324,28 @@ public Response resetFileStore(@PathParam("identifier") String dvIdtf, } if (!user.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Superusers only."); - } - - Dataset dataset; - + } + + Dataset dataset; + try { dataset = findDatasetOrDie(dvIdtf); } catch (WrappedResponse ex) { return error(Response.Status.NOT_FOUND, "No such dataset"); } - + dataset.setStorageDriverId(null); datasetService.merge(dataset); - return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); } @POST - 
@Path("{id}/addglobusFiles") + @Path("{id}/addglobusFilesBkup") @Consumes(MediaType.MULTIPART_FORM_DATA) public Response addGlobusFileToDataset(@PathParam("id") String datasetId, @FormDataParam("jsonData") String jsonData - ) - { + ) { JsonArrayBuilder jarr = Json.createArrayBuilder(); if (!systemConfig.isHTTPUpload()) { @@ -2372,7 +2380,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, String lockInfoMessage = "Globus Upload API is running "; DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload, - ((AuthenticatedUser) authUser).getId() , lockInfoMessage); + ((AuthenticatedUser) authUser).getId(), lockInfoMessage); if (lock != null) { dataset.addLock(lock); } else { @@ -2436,8 +2444,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, } while (!success); - try - { + try { StorageIO datasetSIO = DataAccess.getStorageIO(dataset); List cachedObjectsTags = datasetSIO.listAuxObjects(); @@ -2461,7 +2468,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, JsonArray filesJson = jsonObject.getJsonArray("files"); - int totalNumberofFiles = 0 ; + int totalNumberofFiles = 0; int successNumberofFiles = 0; try { // Start to add the files @@ -2549,7 +2556,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, storageIdentifier, null, optionalFileParams, - globustype); + true); if (addFileHelper.hasError()) { @@ -2593,8 +2600,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, successNumberofFiles = successNumberofFiles + 1; } }// End of adding files - }catch (Exception e ) - { + } catch (Exception e) { Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, e); return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! 
See administrator!"); } @@ -2621,7 +2627,7 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, dataset = datasetService.find(dataset.getId()); - List s= dataset.getFiles(); + List s = dataset.getFiles(); for (DataFile dataFile : s) { logger.info(" ******** TEST the datafile id is = " + dataFile.getId() + " = " + dataFile.getDisplayName()); } @@ -2641,5 +2647,269 @@ public Response addGlobusFileToDataset(@PathParam("id") String datasetId, return ok(Json.createObjectBuilder().add("Files", jarr)); } + + + @POST + @Path("{id}/addglobusFiles") + @Consumes(MediaType.MULTIPART_FORM_DATA) + public Response addGlobusFileToDatasetTrial1(@PathParam("id") String datasetId, + @FormDataParam("jsonData") String jsonData + ) throws IOException, ExecutionException, InterruptedException { + + logger.info ( " ==== 1 (api) jsonData 1 ====== " + jsonData ); + + JsonArrayBuilder jarr = Json.createArrayBuilder(); + + if (!systemConfig.isHTTPUpload()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); + } + + // ------------------------------------- + // (1) Get the user from the API key + // ------------------------------------- + User authUser; + try { + authUser = findUserOrDie(); + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth") + ); + } + + ApiToken token = authSvc.findApiTokenByUser((AuthenticatedUser) authUser); + + // ------------------------------------- + // (2) Get the Dataset Id + // ------------------------------------- + Dataset dataset; + + try { + dataset = findDatasetOrDie(datasetId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + String requestUrl = httpRequest.getRequestURL().toString(); + + // Async Call + datasetService.globusAsyncCall( jsonData , token , dataset , authUser, requestUrl); + + return ok("Globus Task successfully completed "); + } + + + /** + * Add a File to an existing Dataset + * + * @param idSupplied + * @param jsonData + * @return + */ + @POST + @Path("{id}/addFiles") + @Consumes(MediaType.MULTIPART_FORM_DATA) + public Response addFilesToDataset(@PathParam("id") String idSupplied, + @FormDataParam("jsonData") String jsonData) { + + JsonArrayBuilder jarr = Json.createArrayBuilder(); + + if (!systemConfig.isHTTPUpload()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); + } + + // ------------------------------------- + // (1) Get the user from the API key + // ------------------------------------- + User authUser; + try { + authUser = findUserOrDie(); + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth") + ); + } + + // ------------------------------------- + // (2) Get the Dataset Id + // ------------------------------------- + Dataset dataset; + + try { + dataset = findDatasetOrDie(idSupplied); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + + //------------------------------------ + // (2b) Make sure dataset does not have package file + // -------------------------------------- + + for (DatasetVersion dv : dataset.getVersions()) { + if (dv.isHasPackageFile()) { + return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") + ); + } + } + + + + msgt("******* (api) jsonData 1: " + jsonData.toString()); + + JsonArray filesJson = null; + try (StringReader rdr = new 
StringReader(jsonData)) { + //jsonObject = Json.createReader(rdr).readObject(); + filesJson = Json.createReader(rdr).readArray(); + } catch (Exception jpe) { + jpe.printStackTrace(); + logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonData); + } + + + try { + DataverseRequest dvRequest = createDataverseRequest(authUser); + AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper( + dvRequest, + ingestService, + datasetService, + fileService, + permissionSvc, + commandEngine, + systemConfig + ); + + // ------------------------------------- + // (6) Parse files information from jsondata + // calculate checksum + // determine mimetype + // ------------------------------------- + + int totalNumberofFiles = 0; + int successNumberofFiles = 0; + try { + // Start to add the files + if (filesJson != null) { + totalNumberofFiles = filesJson.getValuesAs(JsonObject.class).size(); + for (JsonObject fileJson : filesJson.getValuesAs(JsonObject.class)) { + + OptionalFileParams optionalFileParams = null; + + try { + optionalFileParams = new OptionalFileParams(fileJson.toString()); + } catch (DataFileTagException ex) { + return error(Response.Status.BAD_REQUEST, ex.getMessage()); + } + + // ------------------------------------- + // (3) Get the file name and content type + // ------------------------------------- + String newFilename = null; + String newFileContentType = null; + String newStorageIdentifier = null; + if (optionalFileParams.hasStorageIdentifier()) { + newStorageIdentifier = optionalFileParams.getStorageIdentifier(); + // ToDo - check that storageIdentifier is valid + if (optionalFileParams.hasFileName()) { + newFilename = optionalFileParams.getFileName(); + if (optionalFileParams.hasMimetype()) { + newFileContentType = optionalFileParams.getMimeType(); + } + } + } else { + return error(BAD_REQUEST, + "You must upload a file or provide a storageidentifier, filename, and mimetype."); + } + + + msg("ADD!"); + + //------------------- + // Run "runAddFileByDatasetId" + //------------------- + + addFileHelper.runAddFileByDataset(dataset, + newFilename, + newFileContentType, + newStorageIdentifier, + null, + optionalFileParams, true); + + if (addFileHelper.hasError()) { + + JsonObjectBuilder fileoutput = Json.createObjectBuilder() + .add("storageIdentifier", newStorageIdentifier) + .add("errorCode", addFileHelper.getHttpErrorCode().toString()) + .add("message", addFileHelper.getErrorMessagesAsString("\n")); + + jarr.add(fileoutput); + + } else { + String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add"); + + JsonObject successresult = addFileHelper.getSuccessResultAsJsonObjectBuilder().build(); + + try { + logger.fine("successMsg: " + successMsg); + String duplicateWarning = addFileHelper.getDuplicateFileWarning(); + if (duplicateWarning != null && !duplicateWarning.isEmpty()) { + // return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder()); + JsonObjectBuilder fileoutput = Json.createObjectBuilder() + .add("storageIdentifier", newStorageIdentifier) + .add("warningMessage", addFileHelper.getDuplicateFileWarning()) + .add("message", successresult.getJsonArray("files").getJsonObject(0)); + jarr.add(fileoutput); + + } else { + JsonObjectBuilder fileoutput = Json.createObjectBuilder() + .add("storageIdentifier", newStorageIdentifier) + .add("message", successresult.getJsonArray("files").getJsonObject(0)); + jarr.add(fileoutput); + } + + } catch (Exception ex) {
Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); + return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!"); + } + } + + successNumberofFiles = successNumberofFiles + 1; + } + }// End of adding files + } catch (Exception e) { + Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, e); + return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!"); + } + + logger.log(Level.INFO, "Total Number of Files " + totalNumberofFiles); + logger.log(Level.INFO, "Success Number of Files " + successNumberofFiles); + DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.EditInProgress); + if (dcmLock == null) { + logger.log(Level.WARNING, "Dataset not locked for Globus upload"); + } else { + logger.log(Level.INFO, "Removing dataset lock for Globus upload"); + datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.EditInProgress); + //dataset.removeLock(dcmLock); + } + + try { + Command cmd; + cmd = new UpdateDatasetVersionCommand(dataset, dvRequest); + ((UpdateDatasetVersionCommand) cmd).setValidateLenient(true); + commandEngine.submit(cmd); + } catch (CommandException ex) { + logger.log(Level.WARNING, "==== datasetId :" + dataset.getId() + "====== UpdateDatasetVersionCommand Exception : " + ex.getMessage()); + } + + //ingest job + ingestService.startIngestJobsForDataset(dataset, (AuthenticatedUser) authUser); + + } catch (Exception e) { + String message = e.getMessage(); + msgt("******* datasetId :" + dataset.getId() + " ======= GLOBUS CALL Exception ============== " + message); + e.printStackTrace(); + } + + return ok(Json.createObjectBuilder().add("Files", jarr)); + + } // end: addFilesToDataset + } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/fileDetailsHolder.java b/src/main/java/edu/harvard/iq/dataverse/globus/fileDetailsHolder.java new file mode 100644 index 00000000000..fac1192d054 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/fileDetailsHolder.java @@ -0,0 +1,31 @@ +package edu.harvard.iq.dataverse.globus; + + + +public class fileDetailsHolder { + + private String hash; + private String mime; + private String storageID; + + public fileDetailsHolder(String id, String hash, String mime) { + + this.storageID = id; + this.hash = hash; + this.mime = mime; + + } + + public String getStorageID() { + return this.storageID; + } + + public String getHash() { + return hash; + } + + public String getMime() { + return mime; + } + +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 3c7cd22644b..5c898be968c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -683,6 +683,7 @@ public static String calculateChecksum(InputStream in, ChecksumType checksumType return checksumDigestToString(md.digest()); } + public static String calculateChecksum(byte[] dataBytes, ChecksumType checksumType) { MessageDigest md = null; @@ -1156,7 +1157,7 @@ public static List createDataFiles(DatasetVersion version, InputStream } // end createDataFiles - private static boolean useRecognizedType(String suppliedContentType, String recognizedType) { + public static boolean useRecognizedType(String suppliedContentType, String recognizedType) { // is it any better than the type that was supplied to us, // if any? // This is not as trivial a task as one might expect...
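For reference, the /addFiles endpoint above expects jsonData to be a JSON array with one object per file already transferred to storage; each element is fed to OptionalFileParams exactly as in the single-file case. A minimal sketch of such a payload; the key names (including md5Hash) are assumptions based on the fields read above, and all identifiers and checksums are placeholders:

    import javax.json.Json;
    import javax.json.JsonArrayBuilder;

    public class AddFilesJsonDataExample {
        public static void main(String[] args) {
            // Two illustrative entries; a real payload would carry one per
            // file that the Globus (or direct S3) transfer deposited.
            JsonArrayBuilder files = Json.createArrayBuilder()
                    .add(Json.createObjectBuilder()
                            .add("storageIdentifier", "s3://demo-bucket:1781cfeb8a7-4ad9418a5873")
                            .add("fileName", "image1.tif")
                            .add("mimeType", "image/tiff")
                            .add("md5Hash", "d41d8cd98f00b204e9800998ecf8427e"))
                    .add(Json.createObjectBuilder()
                            .add("storageIdentifier", "s3://demo-bucket:1781cfeb8a7-5be64d8e7f12")
                            .add("fileName", "image2.tif")
                            .add("mimeType", "image/tiff")
                            .add("md5Hash", "9e107d9d372bb6826bd81d3542a419d6"));
            System.out.println(files.build());
        }
    }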
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index c37efc3178f..70515ca9b0f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.DataverseContact; import edu.harvard.iq.dataverse.DataverseFacet; import edu.harvard.iq.dataverse.DataverseTheme; +import edu.harvard.iq.dataverse.api.Datasets; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.groups.impl.maildomain.MailDomainGroup; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser; @@ -36,6 +37,7 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataaccess.DataAccess; +import edu.harvard.iq.dataverse.globus.fileDetailsHolder; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.DatasetFieldWalker; @@ -324,6 +326,14 @@ public static JsonObjectBuilder json(Dataset ds) { .add("storageIdentifier", ds.getStorageIdentifier()); } + public static JsonObjectBuilder json(fileDetailsHolder ds) { + return Json.createObjectBuilder().add(ds.getStorageID() , + Json.createObjectBuilder() + .add("id", ds.getStorageID() ) + .add("hash", ds.getHash()) + .add("mime",ds.getMime())); + } + public static JsonObjectBuilder json(DatasetVersion dsv) { JsonObjectBuilder bld = jsonObjectBuilder() .add("id", dsv.getId()) From 5d08b0e0d3b0d5064e94fc9156ad40d8e050407b Mon Sep 17 00:00:00 2001 From: jingma Date: Tue, 16 Mar 2021 17:11:50 +0100 Subject: [PATCH 0093/1551] Final changes for prototype. 
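With the prototype enabled below, each license travels over the wire as a flat JSON object matching the entity's fields. A minimal sketch of that shape; the CC0 values are illustrative examples, not seeded data:

    import javax.json.Json;

    public class LicenseJsonExample {
        public static void main(String[] args) {
            // Field names mirror the License entity and JsonPrinter.json(License).
            String json = Json.createObjectBuilder()
                    .add("id", 1L)
                    .add("name", "CC0")
                    .add("shortDescription", "Creative Commons CC0 1.0 Universal")
                    .add("uri", "https://creativecommons.org/publicdomain/zero/1.0/")
                    .add("iconUrl", "https://licensebuttons.net/p/zero/1.0/88x31.png")
                    .add("active", true)
                    .build().toString();
            System.out.println(json);
        }
    }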
--- .../edu/harvard/iq/dataverse/License.java | 240 +++++++++--------- .../iq/dataverse/LicenseServiceBean.java | 112 ++++---- .../edu/harvard/iq/dataverse/api/Admin.java | 100 ++++---- .../iq/dataverse/util/json/JsonPrinter.java | 18 +- 4 files changed, 239 insertions(+), 231 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java index 713ac218222..56742f76042 100644 --- a/src/main/java/edu/harvard/iq/dataverse/License.java +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -8,128 +8,134 @@ import javax.persistence.Id; import javax.persistence.NamedQueries; import javax.persistence.NamedQuery; +import javax.persistence.Table; +import javax.persistence.UniqueConstraint; /** * @author Jing Ma */ -// @NamedQueries({ -// @NamedQuery( name="License.findAll", -// query="SELECT l FROM License l"), -// @NamedQuery( name="Setting.findById", -// query = "SELECT l FROM License l WHERE l.id=:id"), -// @NamedQuery( name="License.deleteById", -// query="DELETE FROM License l WHERE l.id=:id") -// -//}) -//@Entity + @NamedQueries({ + @NamedQuery( name="License.findAll", + query="SELECT l FROM License l"), + @NamedQuery( name="License.findById", + query = "SELECT l FROM License l WHERE l.id=:id"), + @NamedQuery( name="License.deleteById", + query="DELETE FROM License l WHERE l.id=:id") + +}) +@Entity +@Table(uniqueConstraints = { + @UniqueConstraint(columnNames = "name"), + @UniqueConstraint(columnNames = "uri")} +) public class License { -// @Id -// @GeneratedValue(strategy = GenerationType.IDENTITY) -// private Long id; -// -// @Column(columnDefinition="TEXT", nullable = false, unique = true) -// private String name; -// -// @Column(columnDefinition="TEXT") -// private String shortDescription; -// -// @Column(columnDefinition="TEXT", nullable = false) -// private String uri; -// -// @Column(columnDefinition="TEXT") -// private String iconUrl; -// -// @Column(nullable = false) -// private boolean active; -// -// public License() { -// } -// -// public License(String name, String shortDescription, String uri, String iconUrl, boolean active) { -// this.name = name; -// this.shortDescription = shortDescription; -// this.uri = uri; -// this.iconUrl = iconUrl; -// this.active = active; -// } -// -// public Long getId() { -// return id; -// } -// -// public void setId(Long id) { -// this.id = id; -// } -// -// public String getName() { -// return name; -// } -// -// public void setName(String name) { -// this.name = name; -// } -// -// public String getShortDescription() { -// return shortDescription; -// } -// -// public void setShortDescription(String shortDescription) { -// this.shortDescription = shortDescription; -// } -// -// public String getUri() { -// return uri; -// } -// -// public void setUri(String uri) { -// this.uri = uri; -// } -// -// public String getIconUrl() { -// return iconUrl; -// } -// -// public void setIconUrl(String iconUrl) { -// this.iconUrl = iconUrl; -// } -// -// public boolean isActive() { -// return active; -// } -// -// public void setActive(boolean active) { -// this.active = active; -// } -// -// @Override -// public boolean equals(Object o) { -// if (this == o) return true; -// if (o == null || getClass() != o.getClass()) return false; -// License license = (License) o; -// return active == license.active && -// Objects.equals(id, license.id) && -// Objects.equals(name, license.name) && -// Objects.equals(shortDescription, license.shortDescription) && -// Objects.equals(uri, license.uri) 
&& -// Objects.equals(iconUrl, license.iconUrl); -// } -// -// @Override -// public int hashCode() { -// return Objects.hash(id, name, shortDescription, uri, iconUrl, active); -// } -// -// @Override -// public String toString() { -// return "License{" + -// "id=" + id + -// ", name='" + name + '\'' + -// ", shortDescription='" + shortDescription + '\'' + -// ", uri='" + uri + '\'' + -// ", iconUrl='" + iconUrl + '\'' + -// ", active=" + active + -// '}'; -// } + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @Column(columnDefinition="TEXT", nullable = false) + private String name; + + @Column(columnDefinition="TEXT") + private String shortDescription; + + @Column(columnDefinition="TEXT", nullable = false) + private String uri; + + @Column(columnDefinition="TEXT") + private String iconUrl; + + @Column(nullable = false) + private boolean active; + + public License() { + } + + public License(String name, String shortDescription, String uri, String iconUrl, boolean active) { + this.name = name; + this.shortDescription = shortDescription; + this.uri = uri; + this.iconUrl = iconUrl; + this.active = active; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getShortDescription() { + return shortDescription; + } + + public void setShortDescription(String shortDescription) { + this.shortDescription = shortDescription; + } + + public String getUri() { + return uri; + } + + public void setUri(String uri) { + this.uri = uri; + } + + public String getIconUrl() { + return iconUrl; + } + + public void setIconUrl(String iconUrl) { + this.iconUrl = iconUrl; + } + + public boolean isActive() { + return active; + } + + public void setActive(boolean active) { + this.active = active; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + License license = (License) o; + return active == license.active && + Objects.equals(id, license.id) && + Objects.equals(name, license.name) && + Objects.equals(shortDescription, license.shortDescription) && + Objects.equals(uri, license.uri) && + Objects.equals(iconUrl, license.iconUrl); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, shortDescription, uri, iconUrl, active); + } + + @Override + public String toString() { + return "License{" + + "id=" + id + + ", name='" + name + '\'' + + ", shortDescription='" + shortDescription + '\'' + + ", uri='" + uri + '\'' + + ", iconUrl='" + iconUrl + '\'' + + ", active=" + active + + '}'; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java index 7caa5b4bdc8..0604e51ae3d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -18,63 +18,63 @@ /** * @author Jing Ma */ -//@Stateless -//@Named +@Stateless +@Named public class LicenseServiceBean { -// @PersistenceContext -// EntityManager em; -// -// @EJB -// ActionLogServiceBean actionLogSvc; -// -// public List listAll() { -// return em.createNamedQuery("License.findAll", License.class).getResultList(); -// } -// -// public License get( long id ) { -// List tokens = em.createNamedQuery("License.findById", License.class) -// .setParameter("id", id ) -// .getResultList(); -// 
return tokens.isEmpty() ? null : tokens.get(0); -// } -// -// public License save(License l) throws PersistenceException { -// if (l.getId() == null) { -// em.persist(l); -// return l; -// } else { -// return null; -// } -// } -// -// public License set( long id, String name, String shortDescription, String uri, String iconUrl, boolean active ) { -// List tokens = em.createNamedQuery("License.findById", License.class) -// .setParameter("id", Long.toString(id) ) -// .getResultList(); -// -// if(tokens.size() > 0) { -// License l = tokens.get(0); -// l.setName(name); -// l.setShortDescription(shortDescription); -// l.setUri(uri); -// l.setIconUrl(iconUrl); -// l.setActive(active); -// em.merge(l); -// actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") -// .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); -// return l; -// } else { -// return null; -// } -// } -// -// public void delete( long id ) throws PersistenceException { -// actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete") -// .setInfo(Long.toString(id))); -// em.createNamedQuery("License.deleteById") -// .setParameter("id", id) -// .executeUpdate(); -// } + @PersistenceContext + EntityManager em; + + @EJB + ActionLogServiceBean actionLogSvc; + + public List listAll() { + return em.createNamedQuery("License.findAll", License.class).getResultList(); + } + + public License get( long id ) { + List tokens = em.createNamedQuery("License.findById", License.class) + .setParameter("id", id ) + .getResultList(); + return tokens.isEmpty() ? null : tokens.get(0); + } + + public License save(License l) throws PersistenceException { + if (l.getId() == null) { + em.persist(l); + return l; + } else { + return null; + } + } + + public License set( long id, String name, String shortDescription, String uri, String iconUrl, boolean active ) { + List tokens = em.createNamedQuery("License.findById", License.class) + .setParameter("id", id ) + .getResultList(); + + if(tokens.size() > 0) { + License l = tokens.get(0); + l.setName(name); + l.setShortDescription(shortDescription); + l.setUri(uri); + l.setIconUrl(iconUrl); + l.setActive(active); + em.merge(l); + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") + .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); + return l; + } else { + return null; + } + } + + public int delete( long id ) throws PersistenceException { + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete") + .setInfo(Long.toString(id))); + return em.createNamedQuery("License.deleteById") + .setParameter("id", id) + .executeUpdate(); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index ce248d97946..0e7c8dd32de 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1926,54 +1926,56 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon } -// @GET -// @Path("/licenses") -// public Response getLicenses() { -// return ok(licenseService.listAll().stream() -// .map(JsonPrinter::json) -// .collect(toJsonArray())); -// } -// -// @GET -// @Path("/licenses/{id}") -// public Response getLicense(@PathParam("id") long id) { -// License l = licenseService.get(id); -// if (l == null) { -// return error(Response.Status.NOT_FOUND, "Not Found."); -// } -// return ok(json(l)); -// } -// 
-// @POST -// @Path("/licenses") -// public Response addLicense(License l) { -// try { -// licenseService.save(l); -// return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); -// } catch(PersistenceException e) { -// return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); -// } -// } -// -// @PUT -// @Path("/licenses/{id}") -// public Response putLicense(@PathParam("id") long id, License l) { -// License updated = licenseService.set(id, l.getName(), l.getShortDescription(), l.getUri(), l.getIconUrl(), l.isActive()); -// if (updated == null) { -// return error(Response.Status.BAD_REQUEST, "Bad Request. There is no existing LicenseInfo with that ID. To add a license use POST."); -// } -// return ok("License with ID " + id + " was replaced."); -// } -// -// @DELETE -// @Path("/licenses/{id}") -// public Response deleteLicense(@PathParam("id") long id) { -// try { -// licenseService.delete(id); -// return ok("OK. License with ID " + id + " was deleted."); -// } catch (PersistenceException e) { -// return error(Response.Status.BAD_REQUEST, "The license is still in used and cannot be deleted."); -// } -// } + @GET + @Path("/licenses") + public Response getLicenses() { + return ok(licenseService.listAll().stream() + .map(JsonPrinter::json) + .collect(toJsonArray())); + } + + @GET + @Path("/licenses/{id}") + public Response getLicense(@PathParam("id") long id) { + License l = licenseService.get(id); + if (l == null) { + return error(Response.Status.NOT_FOUND, "Not Found."); + } + return ok(json(l)); + } + + @POST + @Path("/licenses") + public Response addLicense(License l) { + try { + License added = licenseService.save(l); + if (added == null) { + return error(Response.Status.BAD_REQUEST, "Bad Request."); + } + return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); + } catch(PersistenceException e) { + return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); + } + } + + @PUT + @Path("/licenses/{id}") + public Response putLicense(@PathParam("id") long id, License l) { + License updated = licenseService.set(id, l.getName(), l.getShortDescription(), l.getUri(), l.getIconUrl(), l.isActive()); + if (updated == null) { + return error(Response.Status.BAD_REQUEST, "Bad Request. There is no existing LicenseInfo with that ID. To add a license use POST."); + } + return ok("License with ID " + id + " was replaced."); + } + + @DELETE + @Path("/licenses/{id}") + public Response deleteLicense(@PathParam("id") long id) { + int result = licenseService.delete(id); + if (result == 1) { + return ok("OK. 
License with ID " + id + " was deleted."); + } + return error(Response.Status.NOT_FOUND, "A license with ID " + id + " doesn't exist."); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index f43860df23f..3cbe8da8717 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -777,15 +777,15 @@ public static JsonObjectBuilder json( DataverseFacet aFacet ) { .add("name", aFacet.getDatasetFieldType().getDisplayName()); } -// public static JsonObjectBuilder json(License l) { -// return jsonObjectBuilder() -// .add("id", l.getId()) -// .add("name", l.getName()) -// .add("shortDescription", l.getShortDescription()) -// .add("uri", l.getUri()) -// .add("iconUrl", l.getIconUrl()) -// .add("active", l.isActive()); -// } + public static JsonObjectBuilder json(License l) { + return jsonObjectBuilder() + .add("id", l.getId()) + .add("name", l.getName()) + .add("shortDescription", l.getShortDescription()) + .add("uri", l.getUri()) + .add("iconUrl", l.getIconUrl()) + .add("active", l.isActive()); + } public static Collector stringsToJsonArray() { return new Collector() { From 282063ebb7b6615b71d2d4fa5f7ec34b510fe521 Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 16 Mar 2021 16:44:08 -0400 Subject: [PATCH 0094/1551] corrected few variables --- .../harvard/iq/dataverse/DatasetServiceBean.java | 14 +++++++++----- .../edu/harvard/iq/dataverse/api/Datasets.java | 5 +++++ 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index f7e37b3d929..e2f3907e4aa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -1025,7 +1025,11 @@ public void deleteHarvestedDataset(Dataset dataset, DataverseRequest request, Lo @Asynchronous public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, User authUser, String httpRequestUrl) throws ExecutionException, InterruptedException { - logger.info(httpRequestUrl + " == globusAsyncCall == step 1 "+ dataset.getId()); + String datasetIdentifier = dataset.getStorageIdentifier(); + + String storageType = datasetIdentifier.substring(0, datasetIdentifier.indexOf("://") +3); + datasetIdentifier = datasetIdentifier.substring(datasetIdentifier.indexOf("://") +3); + Thread.sleep(5000); String lockInfoMessage = "Globus Upload API is running "; @@ -1047,12 +1051,11 @@ public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, Us } String taskIdentifier = jsonObject.getString("taskIdentifier"); - String datasetIdentifier = jsonObject.getString("datasetId").replace("doi:",""); // globus task status check globusStatusCheck(taskIdentifier); - // calculate checksum, mimetype + try { List inputList = new ArrayList(); JsonArray filesJsonArray = jsonObject.getJsonArray("files"); @@ -1069,12 +1072,13 @@ public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, Us String bucketName = bits[1].replace("/", ""); // fullpath s3://gcs5-bucket1/10.5072/FK2/3S6G2E/1781cfeb8a7-4ad9418a5873 - String fullPath = "s3://" + bucketName + "/" + datasetIdentifier +"/" +fileId ; + String fullPath = storageType + bucketName + "/" + datasetIdentifier +"/" +fileId ; inputList.add(fileId + "IDsplit" + fullPath + "IDsplit" + fileName); } - 
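The substring arithmetic above peels the scheme off the dataset's storage identifier before the per-file fullpath is reassembled. A tiny standalone sketch with a made-up identifier in the same shape as the fullpath comment above:

    public class StorageSchemeSplitExample {
        public static void main(String[] args) {
            // Placeholder; real values come from dataset.getStorageIdentifier().
            String datasetIdentifier = "s3://10.5072/FK2/3S6G2E";
            String storageType = datasetIdentifier.substring(0, datasetIdentifier.indexOf("://") + 3);
            datasetIdentifier = datasetIdentifier.substring(datasetIdentifier.indexOf("://") + 3);
            System.out.println(storageType);       // s3://
            System.out.println(datasetIdentifier); // 10.5072/FK2/3S6G2E
            // With a bucket of "gcs5-bucket1" and a file id of "1781cfeb8a7-4ad9418a5873",
            // this reassembles to s3://gcs5-bucket1/10.5072/FK2/3S6G2E/1781cfeb8a7-4ad9418a5873
        }
    }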
JsonObject newfilesJsonObject= calculateMissingMetadataFields(inputList); + // calculate checksum, mimetype + JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList); JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files"); JsonArrayBuilder jsonSecondAPI = Json.createArrayBuilder() ; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 8836eb62e44..8797f3d26f8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2998,6 +2998,11 @@ public Response addFilesToDataset(@PathParam("id") String idSupplied, logger.log(Level.WARNING, "==== datasetId :" + dataset.getId() + "====== UpdateDatasetVersionCommand Exception : " + ex.getMessage()); } + dataset = datasetService.find(dataset.getId()); + + List s = dataset.getFiles(); + for (DataFile dataFile : s) {} + //ingest job ingestService.startIngestJobsForDataset(dataset, (AuthenticatedUser) authUser); From a5413c85073967798ba099f45fff5f865fc5f19d Mon Sep 17 00:00:00 2001 From: chenganj Date: Thu, 18 Mar 2021 13:28:58 -0400 Subject: [PATCH 0095/1551] hardcoded httpRequestUrl --- .../edu/harvard/iq/dataverse/DatasetServiceBean.java | 2 +- .../java/edu/harvard/iq/dataverse/api/Datasets.java | 12 +++++++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index e2f3907e4aa..e41a440dd93 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -1106,7 +1106,7 @@ public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, Us ProcessBuilder processBuilder = new ProcessBuilder(); - String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST "+httpRequestUrl.split("/api")[0]+"/api/datasets/:persistentId/addFiles?persistentId=doi:" + datasetIdentifier + " -F jsonData='" + newjsonData + "'"; + String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST "+httpRequestUrl+"/api/datasets/:persistentId/addFiles?persistentId=doi:" + datasetIdentifier + " -F jsonData='" + newjsonData + "'"; System.out.println("*******====command ==== " + command); new Thread(new Runnable() { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 8797f3d26f8..0ad96872c94 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2787,8 +2787,18 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, } catch (WrappedResponse wr) { return wr.getResponse(); } + /* + String requestUrl = httpRequest.getProtocol().toLowerCase().split("/")[0]+"://"+httpRequest.getServerName(); - String requestUrl = httpRequest.getRequestURL().toString(); + if( httpRequest.getServerPort() > 0 ) + { + requestUrl = requestUrl + ":"+ httpRequest.getServerPort(); + } + */ + + + String requestUrl = "https://dvdev.scholarsportal.info" ; + //String requestUrl = "http://localhost:8080" ; // Async Call datasetService.globusAsyncCall( jsonData , token , dataset , authUser, requestUrl); From f9e3a3ef212171c0756ed4adf1ef196faf80e702 Mon Sep 17 00:00:00 2001 From: jingma Date: Thu, 18 Mar 2021 20:27:12 +0100 Subject: [PATCH 0096/1551] Add integration 
tests. --- scripts/api/data/license.json | 7 +++ scripts/api/data/licenseError.json | 8 +++ scripts/api/data/licenseUpdate.json | 7 +++ .../edu/harvard/iq/dataverse/api/AdminIT.java | 62 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 41 ++++++++++++ 5 files changed, 125 insertions(+) create mode 100644 scripts/api/data/license.json create mode 100644 scripts/api/data/licenseError.json create mode 100644 scripts/api/data/licenseUpdate.json diff --git a/scripts/api/data/license.json b/scripts/api/data/license.json new file mode 100644 index 00000000000..f891d84dd33 --- /dev/null +++ b/scripts/api/data/license.json @@ -0,0 +1,7 @@ +{ + "name": "Apache License", + "shortDescription": "License description", + "uri": "www.apache.com", + "iconUrl": "www.icon.com", + "active": false +} \ No newline at end of file diff --git a/scripts/api/data/licenseError.json b/scripts/api/data/licenseError.json new file mode 100644 index 00000000000..51eb31ecc0c --- /dev/null +++ b/scripts/api/data/licenseError.json @@ -0,0 +1,8 @@ +{ + "id": 6, + "name": "Apache License", + "shortDescription": "License description", + "uri": "www.apache.com", + "iconUrl": "www.icon.com", + "active": false +} \ No newline at end of file diff --git a/scripts/api/data/licenseUpdate.json b/scripts/api/data/licenseUpdate.json new file mode 100644 index 00000000000..aed1cb0ae26 --- /dev/null +++ b/scripts/api/data/licenseUpdate.json @@ -0,0 +1,7 @@ +{ + "name": "Updated Apache License", + "shortDescription": "Updated license description", + "uri": "www.update-apache.com", + "iconUrl": "www.update-icon.com", + "active": true +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 84da33cd3ee..84ec9defdec 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -785,4 +785,66 @@ public void testBannerMessages(){ assertEquals("OK", status); } + + @Test + public void testLicenses(){ + + String pathToJsonFile = "scripts/api/data/license.json"; + Response addLicenseResponse = UtilIT.addLicense(pathToJsonFile); + addLicenseResponse.prettyPrint(); + String body = addLicenseResponse.getBody().asString(); + String status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + pathToJsonFile = "scripts/api/data/licenseError.json"; + Response addLicenseErrorResponse = UtilIT.addLicense(pathToJsonFile); + addLicenseErrorResponse.prettyPrint(); + body = addLicenseErrorResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + Response getLicensesResponse = UtilIT.getLicenses(); + getLicensesResponse.prettyPrint(); + body = getLicensesResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response getLicenseResponse = UtilIT.getLicense(1L); + getLicenseResponse.prettyPrint(); + body = getLicenseResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response getLicenseErrorResponse = UtilIT.getLicense(10L); + getLicenseErrorResponse.prettyPrint(); + body = getLicenseErrorResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + pathToJsonFile = "scripts/api/data/licenseUpdate.json"; + Response updateLicenseResponse = UtilIT.updateLicense(pathToJsonFile, 1L); + 
updateLicenseResponse.prettyPrint(); + body = updateLicenseResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response updateLicenseErrorResponse = UtilIT.updateLicense(pathToJsonFile, 10L); + updateLicenseErrorResponse.prettyPrint(); + body = updateLicenseErrorResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + Response deleteLicenseResponse = UtilIT.deleteLicense(1L); + deleteLicenseResponse.prettyPrint(); + body = deleteLicenseResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response deleteLicenseErrorResponse = UtilIT.deleteLicense(10L); + deleteLicenseErrorResponse.prettyPrint(); + body = deleteLicenseErrorResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index f3ff8f8fae4..51a0cdae93e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2523,5 +2523,46 @@ static String getBannerMessageIdFromResponse(String getBannerMessagesResponse) { return "0"; } + static Response addLicense(String pathToJsonFile) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response addLicenseResponse = given() + .body(jsonIn) + .contentType("application/json") + .post("/api/admin/licenses"); + return addLicenseResponse; + } + + static Response getLicenses() { + + Response getLicensesResponse = given() + .get("/api/admin/licenses"); + return getLicensesResponse; + } + + static Response getLicense(Long id) { + + Response getLicenseResponse = given() + .get("/api/admin/licenses/"+id.toString()); + return getLicenseResponse; + } + + static Response updateLicense(String pathToJsonFile, Long id) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response updateLicenseResponse = given() + .body(jsonIn) + .contentType("application/json") + .put("/api/admin/licenses/"+id.toString()); + return updateLicenseResponse; + } + + static Response deleteLicense(Long id) { + + Response deleteLicenseResponse = given() + .delete("/api/admin/licenses/"+id.toString()); + return deleteLicenseResponse; + } + } From f1433266987581e9ac3fc684b646a3923bd9288b Mon Sep 17 00:00:00 2001 From: chenganj Date: Fri, 19 Mar 2021 15:12:57 -0400 Subject: [PATCH 0097/1551] - tweak datasetlock, - skip checksum validation using dataset category --- .../iq/dataverse/DatasetServiceBean.java | 11 ++--------- .../harvard/iq/dataverse/api/Datasets.java | 19 ++++++++++++++++--- .../FinalizeDatasetPublicationCommand.java | 8 +++++++- 3 files changed, 25 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index e41a440dd93..a0ec12a5d64 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -1023,7 +1023,7 @@ public void deleteHarvestedDataset(Dataset dataset, DataverseRequest request, Lo } @Asynchronous - public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, User authUser, String httpRequestUrl) throws ExecutionException, InterruptedException { + public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, String 
httpRequestUrl) throws ExecutionException, InterruptedException { String datasetIdentifier = dataset.getStorageIdentifier(); @@ -1032,14 +1032,7 @@ public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, Us } Thread.sleep(5000); - String lockInfoMessage = "Globus Upload API is running "; - DatasetLock lock = addDatasetLock(dataset.getId(), DatasetLock.Reason.EditInProgress, - ((AuthenticatedUser) authUser).getId(), lockInfoMessage); - if (lock != null) { - dataset.addLock(lock); - } else { - logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId()); - } + JsonObject jsonObject = null; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 0ad96872c94..7675d008ec0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2775,8 +2775,6 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, ); } - ApiToken token = authSvc.findApiTokenByUser((AuthenticatedUser) authUser); - // ------------------------------------- // (2) Get the Dataset Id // ------------------------------------- @@ -2787,6 +2785,21 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, } catch (WrappedResponse wr) { return wr.getResponse(); } + + + String lockInfoMessage = "Globus Upload API has started "; + DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.EditInProgress, // NOTE: DatasetLock.Reason.GlobusUpload, added earlier in this series, may be the more specific reason here + ((AuthenticatedUser) authUser).getId(), lockInfoMessage); + if (lock != null) { + dataset.addLock(lock); + } else { + logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId()); + } + + + ApiToken token = authSvc.findApiTokenByUser((AuthenticatedUser) authUser); + + /* String requestUrl = httpRequest.getProtocol().toLowerCase().split("/")[0]+"://"+httpRequest.getServerName(); @@ -2801,7 +2814,7 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, //String requestUrl = "http://localhost:8080" ; // Async Call - datasetService.globusAsyncCall( jsonData , token , dataset , authUser, requestUrl); + datasetService.globusAsyncCall( jsonData , token , dataset , requestUrl); return ok("Globus Task successfully completed "); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index c2f186f1e8c..04e9e09c6d6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -80,7 +80,13 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // some imported datasets may already be released. // validate the physical files (verify checksums): - validateDataFiles(theDataset, ctxt); + if(theDataset.getCategoryByName("GLOBUS") != null) { + logger.info("skipping checksum validation for "+theDataset.getGlobalId().asString()); + } + else { + logger.info("running checksum validation"); + validateDataFiles(theDataset, ctxt); + } // (this will throw a CommandException if it fails) } From 5e3fb88bc2d67bbdf96012f69cb4e0ed307ab914 Mon Sep 17 00:00:00 2001 From: jingma Date: Mon, 22 Mar 2021 12:08:33 +0100 Subject: [PATCH 0098/1551] Fix indentation.
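For anyone exercising the license endpoints this series builds up, here is a minimal REST-assured sketch in the style of the UtilIT helpers added two patches earlier. The /api/admin/licenses paths come from Admin.java; the localhost base URL, the sample field values, and the assumption that /api/admin is reachable (as in the integration-test environment AdminIT runs against) are illustrative only.

import static io.restassured.RestAssured.given;
import io.restassured.response.Response;

// Walks the license CRUD endpoints defined in Admin.java. Assumes a local
// Dataverse on port 8080 whose /api/admin path is open, as it is in the
// environment the AdminIT integration tests run against.
public class LicenseApiSketch {
    public static void main(String[] args) {
        String baseUrl = "http://localhost:8080";
        String json = "{\"name\": \"Apache License\","
                + " \"shortDescription\": \"License description\","
                + " \"uri\": \"https://www.apache.org/licenses/LICENSE-1.0\","
                + " \"iconUrl\": \"https://www.apache.org/icon.png\","
                + " \"active\": false}";

        // POST creates the license; a duplicate name or URI is answered with 409 CONFLICT
        Response created = given().body(json).contentType("application/json")
                .post(baseUrl + "/api/admin/licenses");
        created.prettyPrint();

        // GET lists all licenses; GET with an id fetches one, 404 if absent
        given().get(baseUrl + "/api/admin/licenses").prettyPrint();
        given().get(baseUrl + "/api/admin/licenses/1").prettyPrint();

        // PUT replaces an existing license by id; DELETE removes it
        given().body(json).contentType("application/json")
                .put(baseUrl + "/api/admin/licenses/1").prettyPrint();
        given().delete(baseUrl + "/api/admin/licenses/1").prettyPrint();
    }
}

Note that the id-based paths match the endpoints as they stand at this point in the series; a later patch moves them under /licenses/id/{id} and adds name-based variants.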
--- .../edu/harvard/iq/dataverse/api/Admin.java | 68 +++++++++---------- 1 file changed, 34 insertions(+), 34 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 0e7c8dd32de..e77ac08ef83 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1930,52 +1930,52 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon @Path("/licenses") public Response getLicenses() { return ok(licenseService.listAll().stream() - .map(JsonPrinter::json) - .collect(toJsonArray())); + .map(JsonPrinter::json) + .collect(toJsonArray())); } @GET @Path("/licenses/{id}") public Response getLicense(@PathParam("id") long id) { - License l = licenseService.get(id); - if (l == null) { - return error(Response.Status.NOT_FOUND, "Not Found."); - } - return ok(json(l)); + License l = licenseService.get(id); + if (l == null) { + return error(Response.Status.NOT_FOUND, "Not Found."); + } + return ok(json(l)); } @POST @Path("/licenses") public Response addLicense(License l) { - try { - License added = licenseService.save(l); - if (added == null) { - return error(Response.Status.BAD_REQUEST, "Bad Request."); - } - return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); - } catch(PersistenceException e) { - return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); - } + try { + License added = licenseService.save(l); + if (added == null) { + return error(Response.Status.BAD_REQUEST, "Bad Request."); + } + return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); + } catch(PersistenceException e) { + return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); + } } - @PUT - @Path("/licenses/{id}") - public Response putLicense(@PathParam("id") long id, License l) { - License updated = licenseService.set(id, l.getName(), l.getShortDescription(), l.getUri(), l.getIconUrl(), l.isActive()); - if (updated == null) { - return error(Response.Status.BAD_REQUEST, "Bad Request. There is no existing LicenseInfo with that ID. To add a license use POST."); - } - return ok("License with ID " + id + " was replaced."); - } + @PUT + @Path("/licenses/{id}") + public Response putLicense(@PathParam("id") long id, License l) { + License updated = licenseService.set(id, l.getName(), l.getShortDescription(), l.getUri(), l.getIconUrl(), l.isActive()); + if (updated == null) { + return error(Response.Status.BAD_REQUEST, "Bad Request. There is no existing LicenseInfo with that ID. To add a license use POST."); + } + return ok("License with ID " + id + " was replaced."); + } - @DELETE - @Path("/licenses/{id}") - public Response deleteLicense(@PathParam("id") long id) { - int result = licenseService.delete(id); - if (result == 1) { - return ok("OK. License with ID " + id + " was deleted."); - } - return error(Response.Status.NOT_FOUND, "A license with ID " + id + " doesn't exist."); - } + @DELETE + @Path("/licenses/{id}") + public Response deleteLicense(@PathParam("id") long id) { + int result = licenseService.delete(id); + if (result == 1) { + return ok("OK. 
License with ID " + id + " was deleted."); + } + return error(Response.Status.NOT_FOUND, "A license with ID " + id + " doesn't exist."); + } } From 6cd23a1b327f84fd649a0b802322532df92d345a Mon Sep 17 00:00:00 2001 From: chenganj Date: Wed, 24 Mar 2021 08:55:04 -0400 Subject: [PATCH 0099/1551] - tweak datasetlock, - skip checksum validation using dataset category --- .../iq/dataverse/DatasetServiceBean.java | 71 +++++++++++++++---- .../harvard/iq/dataverse/api/Datasets.java | 9 ++- 2 files changed, 63 insertions(+), 17 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index a0ec12a5d64..48b14f19971 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -1025,6 +1025,31 @@ public void deleteHarvestedDataset(Dataset dataset, DataverseRequest request, Lo @Asynchronous public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, String httpRequestUrl) throws ExecutionException, InterruptedException { + String logTimestamp = logFormatter.format(new Date()); + Logger globusLogger = Logger.getLogger("edu.harvard.iq.dataverse.upload.client.DatasetServiceBean." + "GlobusUpload" + logTimestamp); + + //Logger.getLogger(DatasetServiceBean.class.getCanonicalName()); + //Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.DatasetServiceBean." + "ExportAll" + logTimestamp); + String logFileName = "../logs" + File.separator + "globus_" + logTimestamp + ".log"; + FileHandler fileHandler; + boolean fileHandlerSuceeded; + try { + fileHandler = new FileHandler(logFileName); + globusLogger.setUseParentHandlers(false); + fileHandlerSuceeded = true; + } catch (IOException | SecurityException ex) { + Logger.getLogger(DatasetServiceBean.class.getName()).log(Level.SEVERE, null, ex); + return; + } + + if (fileHandlerSuceeded) { + globusLogger.addHandler(fileHandler); + } else { + globusLogger = logger; + } + + globusLogger.info("Starting an globusAsyncCall"); + String datasetIdentifier = dataset.getStorageIdentifier(); String storageType = datasetIdentifier.substring(0, datasetIdentifier.indexOf("://") +3); @@ -1033,8 +1058,6 @@ public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, St Thread.sleep(5000); - - JsonObject jsonObject = null; try (StringReader rdr = new StringReader(jsonData)) { jsonObject = Json.createReader(rdr).readObject(); @@ -1046,7 +1069,7 @@ public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, St String taskIdentifier = jsonObject.getString("taskIdentifier"); // globus task status check - globusStatusCheck(taskIdentifier); + globusStatusCheck(taskIdentifier,globusLogger); try { @@ -1071,7 +1094,7 @@ public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, St } // calculate checksum, mimetype - JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList); + JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList,globusLogger); JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files"); JsonArrayBuilder jsonSecondAPI = Json.createArrayBuilder() ; @@ -1097,6 +1120,8 @@ public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, St String newjsonData = jsonSecondAPI.build().toString(); + globusLogger.info("Generated new JsonData with calculated values"); + ProcessBuilder processBuilder = new ProcessBuilder(); String command = "curl -H \"X-Dataverse-key:" + 
token.getTokenString() + "\" -X POST "+httpRequestUrl+"/api/datasets/:persistentId/addFiles?persistentId=doi:" + datasetIdentifier + " -F jsonData='" + newjsonData + "'"; @@ -1115,6 +1140,13 @@ public void run() { } + + globusLogger.info("Finished Globus upload job."); + + if (fileHandlerSucceeded) { + fileHandler.close(); + } + } catch (Exception e) { logger.info("Exception "); e.printStackTrace(); @@ -1138,12 +1170,13 @@ public static JsonObjectBuilder stringToJsonObjectBuilder(String str) { Executor executor = Executors.newFixedThreadPool(10); - private Boolean globusStatusCheck(String taskId) + private Boolean globusStatusCheck(String taskId, Logger globusLogger) { boolean success = false; do { try { - logger.info(" sleep before globus transfer check"); + + globusLogger.info("checking globus transfer task " + taskId); Thread.sleep(50000); String basicGlobusToken = settingsService.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); AccessToken clientTokenUser = globusServiceBean.getClientToken(basicGlobusToken); success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskId); @@ -1157,16 +1190,17 @@ private Boolean globusStatusCheck(String taskId) } while (!success); - logger.info(" globus transfer completed "); + + globusLogger.info("globus transfer task completed successfully"); return success; } - public JsonObject calculateMissingMetadataFields(List inputList) throws InterruptedException, ExecutionException, IOException { + public JsonObject calculateMissingMetadataFields(List inputList, Logger globusLogger) throws InterruptedException, ExecutionException, IOException { List> hashvalueCompletableFutures = - inputList.stream().map(iD -> calculateDetailsAsync(iD)).collect(Collectors.toList()); + inputList.stream().map(iD -> calculateDetailsAsync(iD,globusLogger)).collect(Collectors.toList()); CompletableFuture allFutures = CompletableFuture .allOf(hashvalueCompletableFutures.toArray(new CompletableFuture[hashvalueCompletableFutures.size()])); @@ -1189,8 +1223,9 @@ public JsonObject calculateMissingMetadataFields(List inputList) throws } - private CompletableFuture calculateDetailsAsync(String id) { - logger.info(" calcualte additional details for these globus id ==== " + id); + private CompletableFuture calculateDetailsAsync(String id, Logger globusLogger) { + //logger.info(" calcualte additional details for these globus id ==== " + id); + return CompletableFuture.supplyAsync( () -> { try { Thread.sleep(2000); } catch (InterruptedException e) { e.printStackTrace(); } try { - return ( calculateDetails(id) ); + return ( calculateDetails(id,globusLogger) ); } catch (InterruptedException | IOException e) { e.printStackTrace(); } @@ -1209,13 +1244,17 @@ private CompletableFuture calculateDetailsAsync(String id) { } - private fileDetailsHolder calculateDetails(String id) throws InterruptedException, IOException { + private fileDetailsHolder calculateDetails(String id, Logger globusLogger) throws InterruptedException, IOException { int count = 0; String checksumVal = ""; InputStream in = null; String fileId = id.split("IDsplit")[0]; String fullPath = id.split("IDsplit")[1]; String fileName = id.split("IDsplit")[2]; + + // TODO: handle the case where the file does not exist in S3 + // TODO: handle checksum calculation failures + do { try { StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); @@ -1232,8 +1271,10 @@ private fileDetailsHolder calculateDetails(String id) throws InterruptedExceptio } while (count < 3); - return new fileDetailsHolder(fileId, checksumVal, calculatemime(fileName)); - //getBytes(in)+"" ); + String mimeType = calculatemime(fileName); + globusLogger.info("File Details " 
+ fileId + " checksum = "+ checksumVal + " mimeType = " + mimeType); + return new fileDetailsHolder(fileId, checksumVal,mimeType); + //getBytes(in)+"" ); // calculatemime(fileName)); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 7675d008ec0..afeb10e304c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2801,6 +2801,8 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, /* + + x-forwarded-proto String requestUrl = httpRequest.getProtocol().toLowerCase().split("/")[0]+"://"+httpRequest.getServerName(); if( httpRequest.getServerPort() > 0 ) @@ -2810,12 +2812,15 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, */ - String requestUrl = "https://dvdev.scholarsportal.info" ; - //String requestUrl = "http://localhost:8080" ; + //String requestUrl = "https://dvdev.scholarsportal.info" ; + String requestUrl = "http://localhost:8080" ; // Async Call datasetService.globusAsyncCall( jsonData , token , dataset , requestUrl); + userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId()); + + return ok("Globus Task successfully completed "); } From 03946521f684f647f99418e01a98089ae8804983 Mon Sep 17 00:00:00 2001 From: jingma Date: Wed, 24 Mar 2021 15:04:46 +0100 Subject: [PATCH 0100/1551] Add prototype of newest changes. --- scripts/api/data/license.json | 8 +- scripts/api/data/licenseError.json | 8 +- scripts/api/data/licenseUpdate.json | 8 +- .../edu/harvard/iq/dataverse/License.java | 20 +++-- .../iq/dataverse/LicenseServiceBean.java | 87 ++++++++++++++----- .../edu/harvard/iq/dataverse/api/Admin.java | 84 ++++++++++++------ .../iq/dataverse/api/FetchException.java | 17 ++++ .../dataverse/api/RequestBodyException.java | 17 ++++ .../iq/dataverse/api/UpdateException.java | 17 ++++ .../iq/dataverse/util/json/JsonPrinter.java | 4 +- .../edu/harvard/iq/dataverse/api/AdminIT.java | 43 ++++++--- .../edu/harvard/iq/dataverse/api/UtilIT.java | 36 ++++++-- 12 files changed, 257 insertions(+), 92 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/FetchException.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java diff --git a/scripts/api/data/license.json b/scripts/api/data/license.json index f891d84dd33..3b56b7dbc16 100644 --- a/scripts/api/data/license.json +++ b/scripts/api/data/license.json @@ -1,7 +1,7 @@ { - "name": "Apache License", - "shortDescription": "License description", - "uri": "www.apache.com", - "iconUrl": "www.icon.com", + "name": "Apache License 1.0", + "shortDescription": "This is the original Apache License which applies only to very old versions of Apache packages (such as version 1.2 of the Web server).", + "uri": "https://www.apache.org/licenses/LICENSE-1.0", + "iconUrl": "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/rick-astley-1-1552336336.png", "active": false } \ No newline at end of file diff --git a/scripts/api/data/licenseError.json b/scripts/api/data/licenseError.json index 51eb31ecc0c..63f7a0f700a 100644 --- a/scripts/api/data/licenseError.json +++ b/scripts/api/data/licenseError.json @@ -1,8 +1,8 @@ { "id": 6, - "name": "Apache License", - "shortDescription": "License description", - "uri": 
"www.apache.com", - "iconUrl": "www.icon.com", + "name": "Apache License 1.0", + "shortDescription": "This is the original Apache License which applies only to very old versions of Apache packages (such as version 1.2 of the Web server).", + "uri": "https://www.apache.org/licenses/LICENSE-1.0", + "iconUrl": "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/rick-astley-1-1552336336.png", "active": false } \ No newline at end of file diff --git a/scripts/api/data/licenseUpdate.json b/scripts/api/data/licenseUpdate.json index aed1cb0ae26..7fc89d19058 100644 --- a/scripts/api/data/licenseUpdate.json +++ b/scripts/api/data/licenseUpdate.json @@ -1,7 +1,7 @@ { - "name": "Updated Apache License", - "shortDescription": "Updated license description", - "uri": "www.update-apache.com", - "iconUrl": "www.update-icon.com", + "name": "Apache License 2.0", + "shortDescription": "The 2.0 version of the Apache License, approved by the ASF in 2004.", + "uri": "https://www.apache.org/licenses/LICENSE-2.0", + "iconUrl": "https://yt3.ggpht.com/ytc/AAUvwni36SveDisR-vOAmmklBfJxnnjuRG3ihzfrwEfORA=s900-c-k-c0x00ffffff-no-rj", "active": true } \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java index 56742f76042..c046b6b373f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/License.java +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -1,5 +1,7 @@ package edu.harvard.iq.dataverse; +import java.net.URI; +import java.net.URL; import java.util.Objects; import javax.persistence.Column; import javax.persistence.Entity; @@ -41,10 +43,10 @@ public class License { private String shortDescription; @Column(columnDefinition="TEXT", nullable = false) - private String uri; + private URI uri; @Column(columnDefinition="TEXT") - private String iconUrl; + private URL iconUrl; @Column(nullable = false) private boolean active; @@ -52,7 +54,7 @@ public class License { public License() { } - public License(String name, String shortDescription, String uri, String iconUrl, boolean active) { + public License(String name, String shortDescription, URI uri, URL iconUrl, boolean active) { this.name = name; this.shortDescription = shortDescription; this.uri = uri; @@ -84,19 +86,19 @@ public void setShortDescription(String shortDescription) { this.shortDescription = shortDescription; } - public String getUri() { + public URI getUri() { return uri; } - public void setUri(String uri) { + public void setUri(URI uri) { this.uri = uri; } - public String getIconUrl() { + public URL getIconUrl() { return iconUrl; } - public void setIconUrl(String iconUrl) { + public void setIconUrl(URL iconUrl) { this.iconUrl = iconUrl; } @@ -132,8 +134,8 @@ public String toString() { "id=" + id + ", name='" + name + '\'' + ", shortDescription='" + shortDescription + '\'' + - ", uri='" + uri + '\'' + - ", iconUrl='" + iconUrl + '\'' + + ", uri=" + uri + + ", iconUrl=" + iconUrl + ", active=" + active + '}'; } diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java index 0604e51ae3d..af2cfd1328e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -2,12 +2,12 @@ import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; -import edu.harvard.iq.dataverse.authorization.DataverseRole; -import 
edu.harvard.iq.dataverse.search.IndexResponse; -import edu.harvard.iq.dataverse.settings.Setting; -import java.util.HashSet; +import edu.harvard.iq.dataverse.api.FetchException; +import edu.harvard.iq.dataverse.api.RequestBodyException; +import edu.harvard.iq.dataverse.api.UpdateException; +import java.net.URI; +import java.net.URL; import java.util.List; -import java.util.Set; import javax.ejb.EJB; import javax.ejb.Stateless; import javax.inject.Named; @@ -32,44 +32,77 @@ public List listAll() { return em.createNamedQuery("License.findAll", License.class).getResultList(); } - public License get( long id ) { + public License getById(long id) throws FetchException { List tokens = em.createNamedQuery("License.findById", License.class) .setParameter("id", id ) .getResultList(); - return tokens.isEmpty() ? null : tokens.get(0); + if (tokens.isEmpty()) { + throw new FetchException("License with that ID doesn't exist."); + } + return tokens.get(0); + } + + public License getByName(String name) throws FetchException { + List tokens = em.createNamedQuery("License.findByName", License.class) + .setParameter("name", name ) + .getResultList(); + if (tokens.isEmpty()) { + throw new FetchException("License with that name doesn't exist."); + } + return tokens.get(0); } - public License save(License l) throws PersistenceException { - if (l.getId() == null) { - em.persist(l); - return l; + public License save(License license) throws PersistenceException, RequestBodyException { + if (license.getId() == null) { + em.persist(license); + return license; } else { - return null; + throw new RequestBodyException("There shouldn't be an ID in the request body"); } } - public License set( long id, String name, String shortDescription, String uri, String iconUrl, boolean active ) { + public License setById(long id, String name, String shortDescription, URI uri, URL iconUrl, boolean active) throws UpdateException { List tokens = em.createNamedQuery("License.findById", License.class) .setParameter("id", id ) .getResultList(); if(tokens.size() > 0) { - License l = tokens.get(0); - l.setName(name); - l.setShortDescription(shortDescription); - l.setUri(uri); - l.setIconUrl(iconUrl); - l.setActive(active); - em.merge(l); + License license = tokens.get(0); + license.setName(name); + license.setShortDescription(shortDescription); + license.setUri(uri); + license.setIconUrl(iconUrl); + license.setActive(active); + em.merge(license); + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") + .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); + return license; + } else { + throw new UpdateException("There is no existing License with that ID. To add a license use POST."); + } + } + + public License setByName(String name, String shortDescription, URI uri, URL iconUrl, boolean active) throws UpdateException { + List tokens = em.createNamedQuery("License.findByName", License.class) + .setParameter("name", name ) + .getResultList(); + + if(tokens.size() > 0) { + License license = tokens.get(0); + license.setShortDescription(shortDescription); + license.setUri(uri); + license.setIconUrl(iconUrl); + license.setActive(active); + em.merge(license); actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); - return l; + return license; } else { - return null; + throw new UpdateException("There is no existing License with that name. 
To add a license use POST."); } } - public int delete( long id ) throws PersistenceException { + public int deleteById(long id) throws PersistenceException { actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete") .setInfo(Long.toString(id))); return em.createNamedQuery("License.deleteById") @@ -77,4 +110,12 @@ public int delete( long id ) throws PersistenceException { .executeUpdate(); } + public int deleteByName(String name) throws PersistenceException { + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete") + .setInfo(name)); + return em.createNamedQuery("License.deleteByName") + .setParameter("name", name) + .executeUpdate(); + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index e77ac08ef83..396ef05aea8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -18,7 +18,6 @@ import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.License; import edu.harvard.iq.dataverse.LicenseServiceBean; -import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.api.dto.RoleDTO; @@ -75,7 +74,6 @@ import javax.ws.rs.core.Response.Status; import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; import java.util.List; import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean; @@ -89,8 +87,6 @@ import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.impl.MergeInAccountCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ChangeUserIdentifierCommand; import edu.harvard.iq.dataverse.engine.command.impl.RegisterDvObjectCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -105,7 +101,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Date; -import java.util.function.Consumer; import javax.inject.Inject; import javax.json.JsonArray; import javax.persistence.Query; @@ -1935,47 +1930,80 @@ public Response getLicenses() { } @GET - @Path("/licenses/{id}") - public Response getLicense(@PathParam("id") long id) { - License l = licenseService.get(id); - if (l == null) { - return error(Response.Status.NOT_FOUND, "Not Found."); - } - return ok(json(l)); + @Path("/licenses/id/{id}") + public Response getLicenseById(@PathParam("id") long id) { + try { + License license = licenseService.getById(id); + return ok(json(license)); + } catch (FetchException e) { + return error(Response.Status.NOT_FOUND, e.getMessage()); + } + } + + @GET + @Path("/licenses/name/{name}") + public Response getLicenseByName(@PathParam("name") String name) { + try { + License license = licenseService.getByName(name); + return ok(json(license)); + } catch (FetchException e) { + return error(Response.Status.NOT_FOUND, e.getMessage()); + } } @POST @Path("/licenses") - public Response addLicense(License l) { + public Response addLicense(License license) { try { - License added = licenseService.save(l); - if (added == null) { - return error(Response.Status.BAD_REQUEST, "Bad Request."); - } + licenseService.save(license); return created("/api/admin/licenses", 
Json.createObjectBuilder().add("message", "License created")); - } catch(PersistenceException e) { + } catch (RequestBodyException e) { + return error(Response.Status.BAD_REQUEST, e.getMessage()); + } catch(PersistenceException e) { return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); } - } + } @PUT - @Path("/licenses/{id}") - public Response putLicense(@PathParam("id") long id, License l) { - License updated = licenseService.set(id, l.getName(), l.getShortDescription(), l.getUri(), l.getIconUrl(), l.isActive()); - if (updated == null) { - return error(Response.Status.BAD_REQUEST, "Bad Request. There is no existing LicenseInfo with that ID. To add a license use POST."); - } + @Path("/licenses/id/{id}") + public Response putLicenseById(@PathParam("id") long id, License license) { + try { + licenseService.setById(id, license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); + } catch (UpdateException e) { + return error(Response.Status.BAD_REQUEST, e.getMessage()); + } return ok("License with ID " + id + " was replaced."); } + @PUT + @Path("/licenses/name/{name}") + public Response putLicenseByName(@PathParam("name") String name, License license) { + try { + licenseService.setByName(license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); + } catch (UpdateException e) { + return error(Response.Status.BAD_REQUEST, e.getMessage()); + } + return ok("License with name " + name + " was replaced."); + } + @DELETE - @Path("/licenses/{id}") - public Response deleteLicense(@PathParam("id") long id) { - int result = licenseService.delete(id); + @Path("/licenses/id/{id}") + public Response deleteLicenseById(@PathParam("id") long id) { + int result = licenseService.deleteById(id); if (result == 1) { return ok("OK. License with ID " + id + " was deleted."); } return error(Response.Status.NOT_FOUND, "A license with ID " + id + " doesn't exist."); } + + @DELETE + @Path("/licenses/name/{name}") + public Response deleteLicenseByName(@PathParam("name") String name) { + int result = licenseService.deleteByName(name); + if (result == 1) { + return ok("OK. 
License with name " + name + " was deleted."); + } + return error(Response.Status.NOT_FOUND, "A license with name " + name + " doesn't exist."); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FetchException.java b/src/main/java/edu/harvard/iq/dataverse/api/FetchException.java new file mode 100644 index 00000000000..a9c77c7a4c5 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/FetchException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.api; + +/** + * + * @author Jing Ma + */ +public class FetchException extends Exception { + + public FetchException(String message) { + super(message); + } + + public FetchException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java b/src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java new file mode 100644 index 00000000000..e78c87abdfa --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.api; + +/** + * + * @author Jing Ma + */ +public class RequestBodyException extends Exception { + + public RequestBodyException(String message) { + super(message); + } + + public RequestBodyException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java b/src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java new file mode 100644 index 00000000000..4dbd3ab19a3 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.api; + +/** + * + * @author Jing Ma + */ +public class UpdateException extends Exception { + + public UpdateException(String message) { + super(message); + } + + public UpdateException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 3cbe8da8717..1827a5e1d34 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -782,8 +782,8 @@ public static JsonObjectBuilder json(License l) { .add("id", l.getId()) .add("name", l.getName()) .add("shortDescription", l.getShortDescription()) - .add("uri", l.getUri()) - .add("iconUrl", l.getIconUrl()) + .add("uri", l.getUri().toString()) + .add("iconUrl", l.getIconUrl().toString()) .add("active", l.isActive()); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 84ec9defdec..b14ef12d93a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -809,38 +809,57 @@ public void testLicenses(){ status = JsonPath.from(body).getString("status"); assertEquals("OK", status); - Response getLicenseResponse = UtilIT.getLicense(1L); - getLicenseResponse.prettyPrint(); - body = getLicenseResponse.getBody().asString(); + Response getLicenseByIdResponse = UtilIT.getLicenseById(1L); + getLicenseByIdResponse.prettyPrint(); + body = getLicenseByIdResponse.getBody().asString(); status = JsonPath.from(body).getString("status"); assertEquals("OK", status); - Response getLicenseErrorResponse = UtilIT.getLicense(10L); + Response getLicenseByNameResponse = UtilIT.getLicenseByName(""); + 
getLicenseByNameResponse.prettyPrint(); + body = getLicenseByNameResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response getLicenseErrorResponse = UtilIT.getLicenseById(10L); getLicenseErrorResponse.prettyPrint(); body = getLicenseErrorResponse.getBody().asString(); status = JsonPath.from(body).getString("status"); assertEquals("ERROR", status); pathToJsonFile = "scripts/api/data/licenseUpdate.json"; - Response updateLicenseResponse = UtilIT.updateLicense(pathToJsonFile, 1L); - updateLicenseResponse.prettyPrint(); - body = updateLicenseResponse.getBody().asString(); + Response updateLicenseByIdResponse = UtilIT.updateLicenseById(pathToJsonFile, 1L); + updateLicenseByIdResponse.prettyPrint(); + body = updateLicenseByIdResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + pathToJsonFile = "scripts/api/data/licenseUpdate.json"; + Response updateLicenseByNameResponse = UtilIT.updateLicenseByName(pathToJsonFile, ""); + updateLicenseByNameResponse.prettyPrint(); + body = updateLicenseByNameResponse.getBody().asString(); status = JsonPath.from(body).getString("status"); assertEquals("OK", status); - Response updateLicenseErrorResponse = UtilIT.updateLicense(pathToJsonFile, 10L); + Response updateLicenseErrorResponse = UtilIT.updateLicenseById(pathToJsonFile, 10L); updateLicenseErrorResponse.prettyPrint(); body = updateLicenseErrorResponse.getBody().asString(); status = JsonPath.from(body).getString("status"); assertEquals("ERROR", status); - Response deleteLicenseResponse = UtilIT.deleteLicense(1L); - deleteLicenseResponse.prettyPrint(); - body = deleteLicenseResponse.getBody().asString(); + Response deleteLicenseByIdResponse = UtilIT.deleteLicenseById(1L); + deleteLicenseByIdResponse.prettyPrint(); + body = deleteLicenseByIdResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response deleteLicenseByNameResponse = UtilIT.deleteLicenseByName(""); + deleteLicenseByNameResponse.prettyPrint(); + body = deleteLicenseByNameResponse.getBody().asString(); status = JsonPath.from(body).getString("status"); assertEquals("OK", status); - Response deleteLicenseErrorResponse = UtilIT.deleteLicense(10L); + Response deleteLicenseErrorResponse = UtilIT.deleteLicenseById(10L); deleteLicenseErrorResponse.prettyPrint(); body = deleteLicenseErrorResponse.getBody().asString(); status = JsonPath.from(body).getString("status"); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 51a0cdae93e..c5f4da033d1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2540,27 +2540,51 @@ static Response getLicenses() { return getLicensesResponse; } - static Response getLicense(Long id) { + static Response getLicenseById(Long id) { Response getLicenseResponse = given() - .get("/api/admin/licenses/"+id.toString()); + .get("/api/admin/licenses/id/"+id.toString()); return getLicenseResponse; } - static Response updateLicense(String pathToJsonFile, Long id) { + static Response getLicenseByName(String name) { + + Response getLicenseResponse = given() + .get("/api/admin/licenses/name/"+name); + return getLicenseResponse; + } + + static Response updateLicenseById(String pathToJsonFile, Long id) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response updateLicenseResponse = given() 
+ .body(jsonIn) + .contentType("application/json") + .put("/api/admin/licenses/id/"+id.toString()); + return updateLicenseResponse; + } + + static Response updateLicenseByName(String pathToJsonFile, String name) { String jsonIn = getDatasetJson(pathToJsonFile); Response updateLicenseResponse = given() .body(jsonIn) .contentType("application/json") - .put("/api/admin/licenses/"+id.toString()); + .put("/api/admin/licenses/name/"+name); return updateLicenseResponse; } - static Response deleteLicense(Long id) { + static Response deleteLicenseById(Long id) { + + Response deleteLicenseResponse = given() + .delete("/api/admin/licenses/id/"+id.toString()); + return deleteLicenseResponse; + } + + static Response deleteLicenseByName(String name) { Response deleteLicenseResponse = given() - .delete("/api/admin/licenses/"+id.toString()); + .delete("/api/admin/licenses/name/"+name); return deleteLicenseResponse; } From cb5863720b181c688058caa26c9fc010fcbe188e Mon Sep 17 00:00:00 2001 From: jingma Date: Thu, 25 Mar 2021 11:52:23 +0100 Subject: [PATCH 0101/1551] Add URI and URL objects, and new endpoints. --- .../edu/harvard/iq/dataverse/License.java | 33 ++++++++++--------- .../iq/dataverse/LicenseServiceBean.java | 6 ++-- .../edu/harvard/iq/dataverse/api/Admin.java | 29 ++++++++++++---- .../iq/dataverse/util/json/JsonPrinter.java | 15 +++++---- 4 files changed, 50 insertions(+), 33 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java index c046b6b373f..957a0a0529f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/License.java +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -1,7 +1,7 @@ package edu.harvard.iq.dataverse; import java.net.URI; -import java.net.URL; +import java.net.URISyntaxException; import java.util.Objects; import javax.persistence.Column; import javax.persistence.Entity; @@ -21,9 +21,12 @@ query="SELECT l FROM License l"), @NamedQuery( name="License.findById", query = "SELECT l FROM License l WHERE l.id=:id"), + @NamedQuery( name="License.findByName", + query = "SELECT l FROM License l WHERE l.name=:name"), @NamedQuery( name="License.deleteById", - query="DELETE FROM License l WHERE l.id=:id") - + query="DELETE FROM License l WHERE l.id=:id"), + @NamedQuery( name="License.deleteByName", + query="DELETE FROM License l WHERE l.name=:name") }) @Entity @Table(uniqueConstraints = { @@ -43,10 +46,10 @@ public class License { private String shortDescription; @Column(columnDefinition="TEXT", nullable = false) - private URI uri; + private String uri; @Column(columnDefinition="TEXT") - private URL iconUrl; + private String iconUrl; @Column(nullable = false) private boolean active; @@ -54,11 +57,11 @@ public class License { public License() { } - public License(String name, String shortDescription, URI uri, URL iconUrl, boolean active) { + public License(String name, String shortDescription, URI uri, URI iconUrl, boolean active) { this.name = name; this.shortDescription = shortDescription; - this.uri = uri; - this.iconUrl = iconUrl; + this.uri = uri.toASCIIString(); + this.iconUrl = iconUrl.toASCIIString(); this.active = active; } @@ -86,20 +89,20 @@ public void setShortDescription(String shortDescription) { this.shortDescription = shortDescription; } - public URI getUri() { - return uri; + public URI getUri() throws URISyntaxException { + return new URI(uri); } public void setUri(URI uri) { - this.uri = uri; + this.uri = uri.toASCIIString(); } - public URL getIconUrl() { - return iconUrl; + public URI 
getIconUrl() throws URISyntaxException { + return new URI(iconUrl); } - public void setIconUrl(URL iconUrl) { - this.iconUrl = iconUrl; + public void setIconUrl(URI iconUrl) { + this.iconUrl = iconUrl.toASCIIString(); } public boolean isActive() { diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java index af2cfd1328e..0c6828fabd0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -61,7 +61,7 @@ public License save(License license) throws PersistenceException, RequestBodyExc } } - public License setById(long id, String name, String shortDescription, URI uri, URL iconUrl, boolean active) throws UpdateException { + public void setById(long id, String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { List tokens = em.createNamedQuery("License.findById", License.class) .setParameter("id", id ) .getResultList(); @@ -76,13 +76,12 @@ public License setById(long id, String name, String shortDescription, URI uri, U em.merge(license); actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); - return license; } else { throw new UpdateException("There is no existing License with that ID. To add a license use POST."); } } - public License setByName(String name, String shortDescription, URI uri, URL iconUrl, boolean active) throws UpdateException { + public void setByName(String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { List tokens = em.createNamedQuery("License.findByName", License.class) .setParameter("name", name ) .getResultList(); @@ -96,7 +95,6 @@ public License setByName(String name, String shortDescription, URI uri, URL icon em.merge(license); actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); - return license; } else { throw new UpdateException("There is no existing License with that name. 
To add a license use POST."); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 396ef05aea8..74a1e47c1ae 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -44,6 +44,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.settings.Setting; import edu.harvard.iq.dataverse.util.json.JsonPrinter; +import java.net.URISyntaxException; import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; @@ -1924,9 +1925,15 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon @GET @Path("/licenses") public Response getLicenses() { - return ok(licenseService.listAll().stream() - .map(JsonPrinter::json) - .collect(toJsonArray())); + JsonArrayBuilder arrayBuilder = Json.createArrayBuilder(); + for(License license : licenseService.listAll()) { + try { + arrayBuilder.add(JsonPrinter.json(license)); + } catch (URISyntaxException e) { + return error(Status.INTERNAL_SERVER_ERROR, "Incorrect URI in JSON"); + } + } + return ok(arrayBuilder); } @GET @@ -1937,8 +1944,10 @@ public Response getLicenseById(@PathParam("id") long id) { return ok(json(license)); } catch (FetchException e) { return error(Response.Status.NOT_FOUND, e.getMessage()); + } catch (URISyntaxException e) { + return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } - } + } @GET @Path("/licenses/name/{name}") @@ -1948,8 +1957,10 @@ public Response getLicenseByName(@PathParam("name") String name) { return ok(json(license)); } catch (FetchException e) { return error(Response.Status.NOT_FOUND, e.getMessage()); + } catch (URISyntaxException e) { + return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } - } + } @POST @Path("/licenses") @@ -1971,8 +1982,10 @@ public Response putLicenseById(@PathParam("id") long id, License license) { licenseService.setById(id, license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); } catch (UpdateException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); + } catch (URISyntaxException e) { + return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } - return ok("License with ID " + id + " was replaced."); + return ok("License with ID " + id + " was replaced."); } @PUT @@ -1982,8 +1995,10 @@ public Response putLicenseByName(@PathParam("name") String name, License license licenseService.setByName(license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); } catch (UpdateException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); + } catch (URISyntaxException e) { + return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } - return ok("License with name " + name + " was replaced."); + return ok("License with name " + name + " was replaced."); } @DELETE diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 1827a5e1d34..9b243397cfa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -45,6 +45,7 @@ import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData; +import java.net.URISyntaxException; import java.util.*; 
import javax.json.Json; import javax.json.JsonArrayBuilder; @@ -777,15 +778,14 @@ public static JsonObjectBuilder json( DataverseFacet aFacet ) { .add("name", aFacet.getDatasetFieldType().getDisplayName()); } - public static JsonObjectBuilder json(License l) { + public static JsonObjectBuilder json(License license) throws URISyntaxException { return jsonObjectBuilder() - .add("id", l.getId()) - .add("name", l.getName()) - .add("shortDescription", l.getShortDescription()) - .add("uri", l.getUri().toString()) - .add("iconUrl", l.getIconUrl().toString()) - .add("active", l.isActive()); + .add("id", license.getId()) + .add("name", license.getName()) + .add("shortDescription", license.getShortDescription()) + .add("uri", license.getUri().toString()) + .add("iconUrl", license.getIconUrl().toString()) + .add("active", license.isActive()); } public static Collector stringsToJsonArray() { return new Collector() { From 491fe42c07944db5fc4686a4699ffb1399ca9051 Mon Sep 17 00:00:00 2001 From: chenganj Date: Mon, 29 Mar 2021 10:50:33 -0400 Subject: [PATCH 0102/1551] - delete globus permission
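The main addition below is deletePermision(), which revokes an access rule on the managed endpoint after a transfer completes. For reference, here is a self-contained sketch of the Transfer API call it issues through makeRequest(); the endpoint id, rule id, and token values are placeholders, while in the real code the endpoint comes from the :GlobusEndpoint setting and the token from getClientToken().

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

// Standalone sketch of the DELETE that deletePermision() performs. A 200
// response means the access rule was removed from the endpoint's access list.
public class DeleteAccessRuleSketch {
    public static void main(String[] args) throws IOException {
        String endpointId = "YOUR-ENDPOINT-ID";       // :GlobusEndpoint setting
        String ruleId = "YOUR-RULE-ID";               // id assigned when the rule was created
        String transferToken = "YOUR-TRANSFER-TOKEN"; // from the client token's other_tokens

        URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/"
                + endpointId + "/access/" + ruleId);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("DELETE");
        conn.setRequestProperty("Authorization", "Bearer " + transferToken);
        System.out.println("HTTP status: " + conn.getResponseCode());
        conn.disconnect();
    }
}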
+ "GlobusUpload" + logTimestamp); @@ -1048,7 +1052,7 @@ public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, St globusLogger = logger; } - globusLogger.info("Starting an globusAsyncCall"); + globusLogger.info("Starting an globusAsyncCall "); String datasetIdentifier = dataset.getStorageIdentifier(); @@ -1071,6 +1075,8 @@ public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, St // globus task status check globusStatusCheck(taskIdentifier,globusLogger); + globusLogger.info("Start removing Globus permission for the client"); + try { List inputList = new ArrayList(); @@ -1170,8 +1176,7 @@ public static JsonObjectBuilder stringToJsonObjectBuilder(String str) { Executor executor = Executors.newFixedThreadPool(10); - private Boolean globusStatusCheck(String taskId, Logger globusLogger) - { + private Boolean globusStatusCheck(String taskId, Logger globusLogger) throws MalformedURLException { boolean success = false; do { try { @@ -1179,18 +1184,24 @@ private Boolean globusStatusCheck(String taskId, Logger globusLogger) globusLogger.info("checking globus transfer task " + taskId); Thread.sleep(50000); - String basicGlobusToken = settingsService.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); - AccessToken clientTokenUser = globusServiceBean.getClientToken(basicGlobusToken); + AccessToken clientTokenUser = globusServiceBean.getClientToken(); success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskId); + } catch (Exception ex) { ex.printStackTrace(); } } while (!success); +/* + AccessToken clientTokenUser = globusServiceBean.getClientToken(); + String directory = globusServiceBean.getDirectory( dataset.getId()+"" ); + globusServiceBean.updatePermision(clientTokenUser, directory, "identity", "r"); + globusLogger.info("Successfully removed Globus permission for the client"); +*/ globusLogger.info("globus transfer task completed successfully"); return success; @@ -1272,7 +1283,7 @@ private fileDetailsHolder calculateDetails(String id, Logger globusLogger) throw String mimeType = calculatemime(fileName); - globusLogger.info("File Details " + fileId + " checksum = "+ checksumVal + " mimeType = " + mimeType); + globusLogger.info(" File Name " + fileName + " File Details " + fileId + " checksum = "+ checksumVal + " mimeType = " + mimeType); return new fileDetailsHolder(fileId, checksumVal,mimeType); //getBytes(in)+"" ); // calculatemime(fileName)); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index afeb10e304c..be46a5fab31 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2531,7 +2531,7 @@ public Response addGlobusFileToDatasetBkup(@PathParam("id") String datasetId, do { try { String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); - AccessToken clientTokenUser = globusServiceBean.getClientToken(basicGlobusToken); + AccessToken clientTokenUser = globusServiceBean.getClientToken(); success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskIdentifier); @@ -2800,6 +2800,9 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, ApiToken token = authSvc.findApiTokenByUser((AuthenticatedUser) authUser); + //String xfp = httpRequest.getHeader("X-Forwarded-Proto"); + //String requestUrl = xfp +"://"+httpRequest.getServerName(); + /* x-forwarded-proto diff --git 
a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java index c26b1bec184..39c1a13842a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java @@ -166,7 +166,7 @@ public Response globus(@PathParam("id") String datasetId, String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); basicGlobusToken = "ODA0ODBhNzEtODA5ZC00ZTJhLWExNmQtY2JkMzA1NTk0ZDdhOmQvM3NFd1BVUGY0V20ra2hkSkF3NTZMWFJPaFZSTVhnRmR3TU5qM2Q3TjA9"; msgt("******* (api) basicGlobusToken: " + basicGlobusToken); - AccessToken clientTokenUser = globusServiceBean.getClientToken(basicGlobusToken); + AccessToken clientTokenUser = globusServiceBean.getClientToken(); success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskIdentifier); msgt("******* (api) success: " + success); diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 5e314c4f47e..2bb3f6c694d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -145,7 +145,8 @@ public void onLoad() { } logger.info(accessTokenUser.getAccessToken()); logger.info(usr.getEmail()); - AccessToken clientTokenUser = getClientToken(basicGlobusToken); + //AccessToken clientTokenUser = getClientToken(basicGlobusToken); + AccessToken clientTokenUser = getClientToken(); if (clientTokenUser == null) { logger.severe("Cannot get client token "); JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); @@ -219,6 +220,16 @@ public void goGlobusDownload(String datasetId) { String httpString = "window.location.replace('" + "https://app.globus.org/file-manager?origin_id=" + globusEndpoint + "&origin_path=" + directory + "'" +")"; PrimeFaces.current().executeScript(httpString); } +/* + public void removeGlobusPermission() throws MalformedURLException { + //taskId and ruleId + String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); + AccessToken clientTokenUser = getClientToken(basicGlobusToken); + String directory = getDirectory( dataset.getId()+"" ); + updatePermision(clientTokenUser, directory, "identity", "r"); + } + + */ ArrayList checkPermisions( AccessToken clientTokenUser, String directory, String globusEndpoint, String principalType, String principal) throws MalformedURLException { URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access_list"); @@ -234,6 +245,7 @@ ArrayList checkPermisions( AccessToken clientTokenUser, String director ((principal == null) || (principal != null && pr.getPrincipal().equals(principal))) ) { ids.add(pr.getId()); } else { + logger.info(pr.getPath() + " === " + directory + " == " + pr.getPrincipalType()); continue; } } @@ -244,7 +256,7 @@ ArrayList checkPermisions( AccessToken clientTokenUser, String director public void updatePermision(AccessToken clientTokenUser, String directory, String principalType, String perm) throws MalformedURLException { if (directory != null && !directory.equals("")) { - directory = "/" + directory + "/"; + directory = directory + "/"; } logger.info("Start updating permissions." 
+ " Directory is " + directory); String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); @@ -272,6 +284,24 @@ public void updatePermision(AccessToken clientTokenUser, String directory, Strin } } + public void deletePermision(String ruleId) throws MalformedURLException { + + AccessToken clientTokenUser = getClientToken(); + logger.info("Start updating permissions." ); + String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); + + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + ruleId); + logger.info("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + ruleId); + MakeRequestResponse result = makeRequest(url, "Bearer", + clientTokenUser.getOtherTokens().get(0).getAccessToken(),"DELETE", null); + if (result.status != 200) { + logger.warning("Cannot update access rule " + ruleId); + } else { + logger.info("Access rule " + ruleId + " was updated"); + } + + } + public int givePermission(String principalType, String principal, String perm, AccessToken clientTokenUser, String directory, String globusEndpoint) throws MalformedURLException { ArrayList rules = checkPermisions( clientTokenUser, directory, globusEndpoint, principalType, principal); @@ -347,7 +377,8 @@ public String getTaskList(String basicGlobusToken, String identifierForFileStora logger.info("1.getTaskList ====== timeWhenAsyncStarted = " + timeWhenAsyncStarted + " ====== identifierForFileStorage ====== " + identifierForFileStorage); String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); - AccessToken clientTokenUser = getClientToken(basicGlobusToken); + //AccessToken clientTokenUser = getClientToken(basicGlobusToken); + AccessToken clientTokenUser = getClientToken( ); URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task_list?filter_endpoint="+globusEndpoint+"&filter_status=SUCCEEDED&filter_completion_time="+timeWhenAsyncStarted); @@ -453,7 +484,8 @@ public boolean getSuccessfulTransfers(AccessToken clientTokenUser, String taskId - public AccessToken getClientToken(String basicGlobusToken) throws MalformedURLException { + public AccessToken getClientToken() throws MalformedURLException { + String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); URL url = new URL("https://auth.globus.org/v2/oauth2/token?scope=openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all&grant_type=client_credentials"); MakeRequestResponse result = makeRequest(url, "Basic", @@ -590,7 +622,7 @@ private T parseJson(String sb, Class jsonParserClass, boolean namingPolic } } - String getDirectory(String datasetId) { + public String getDirectory(String datasetId) { Dataset dataset = null; String directory = null; try { @@ -642,7 +674,8 @@ public boolean giveGlobusPublicPermissions(String datasetId) throws UnsupportedE if (globusEndpoint.equals("") || basicGlobusToken.equals("")) { return false; } - AccessToken clientTokenUser = getClientToken(basicGlobusToken); + //AccessToken clientTokenUser = getClientToken(basicGlobusToken); + AccessToken clientTokenUser = getClientToken( ); if (clientTokenUser == null) { logger.severe("Cannot get client token "); return false; @@ -714,7 +747,6 @@ public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user) th workingVersion.setCreateTime(new Timestamp(new Date().getTime())); } - directory = 
dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage(); System.out.println("======= directory ==== " + directory + " ==== datasetId :" + dataset.getId()); From bc5edf0ad09ecf627cb936a5de50e19be4df34ba Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 30 Mar 2021 17:38:42 -0400 Subject: [PATCH 0103/1551] - added GLOBUSUPLOADSUCCESS notification type and user notification messages - added deleteRule api - --- .../iq/dataverse/DatasetServiceBean.java | 104 +- .../harvard/iq/dataverse/MailServiceBean.java | 11 + .../iq/dataverse/UserNotification.java | 2 +- .../harvard/iq/dataverse/api/Datasets.java | 1289 +++++++++-------- .../providers/builtin/DataverseUserPage.java | 4 + .../dataverse/globus/GlobusServiceBean.java | 10 +- .../harvard/iq/dataverse/util/MailUtil.java | 8 + src/main/java/propertyFiles/Bundle.properties | 3 + src/main/webapp/dataverseuser.xhtml | 7 + 9 files changed, 785 insertions(+), 653 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 007b1060aae..8f53aafc110 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -29,6 +29,7 @@ import java.io.*; import java.net.MalformedURLException; +import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.*; import java.util.concurrent.CompletableFuture; @@ -110,6 +111,8 @@ public class DatasetServiceBean implements java.io.Serializable { @EJB GlobusServiceBean globusServiceBean; + @EJB + UserNotificationServiceBean userNotificationService; private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss"); @@ -1027,7 +1030,7 @@ public void deleteHarvestedDataset(Dataset dataset, DataverseRequest request, Lo @Asynchronous - public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, String httpRequestUrl) throws ExecutionException, InterruptedException, MalformedURLException { + public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, String httpRequestUrl, User authUser) throws ExecutionException, InterruptedException, MalformedURLException { String logTimestamp = logFormatter.format(new Date()); Logger globusLogger = Logger.getLogger("edu.harvard.iq.dataverse.upload.client.DatasetServiceBean." 
+ "GlobusUpload" + logTimestamp); @@ -1071,12 +1074,12 @@ public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, St } String taskIdentifier = jsonObject.getString("taskIdentifier"); + String ruleId = jsonObject.getString("ruleId"); // globus task status check globusStatusCheck(taskIdentifier,globusLogger); - globusLogger.info("Start removing Globus permission for the client"); - + globusServiceBean.deletePermision(ruleId,globusLogger); try { List inputList = new ArrayList(); @@ -1128,27 +1131,23 @@ public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, St globusLogger.info("Generated new JsonData with calculated values"); - ProcessBuilder processBuilder = new ProcessBuilder(); String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST "+httpRequestUrl+"/api/datasets/:persistentId/addFiles?persistentId=doi:" + datasetIdentifier + " -F jsonData='" + newjsonData + "'"; System.out.println("*******====command ==== " + command); - new Thread(new Runnable() { - public void run() { - try { - processBuilder.command("bash", "-c", command); - Process process = processBuilder.start(); - } catch (Exception ex) { - logger.log(Level.SEVERE, "******* Unexpected Exception while executing api/datasets/:persistentId/add call ", ex); - } - } - }).start(); + String output = addFilesAsync(command , globusLogger ) ; + if(output.equalsIgnoreCase("ok")) + { + userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.GLOBUSUPLOADSUCCESS, dataset.getId()); + globusLogger.info("Successfully completed api/datasets/:persistentId/addFiles call "); + } + else + { + globusLogger.log(Level.SEVERE, "******* Error while executing api/datasets/:persistentId/add call ", command); + } } - - globusLogger.info("Finished export-all job."); - if (fileHandlerSuceeded) { fileHandler.close(); } @@ -1180,28 +1179,16 @@ private Boolean globusStatusCheck(String taskId, Logger globusLogger) throws Mal boolean success = false; do { try { - globusLogger.info("checking globus transfer task " + taskId); Thread.sleep(50000); - AccessToken clientTokenUser = globusServiceBean.getClientToken(); - success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskId); - - } catch (Exception ex) { ex.printStackTrace(); } } while (!success); -/* - AccessToken clientTokenUser = globusServiceBean.getClientToken(); - String directory = globusServiceBean.getDirectory( dataset.getId()+"" ); - globusServiceBean.updatePermision(clientTokenUser, directory, "identity", "r"); - - globusLogger.info("Successfully removed Globus permission for the client"); -*/ globusLogger.info("globus transfer task completed successfully"); return success; @@ -1309,5 +1296,64 @@ public String calculatemime(String fileName) throws InterruptedException { return finalType; } + public String addFilesAsync(String curlCommand, Logger globusLogger) throws ExecutionException, InterruptedException { + CompletableFuture addFilesFuture = CompletableFuture.supplyAsync(() -> { + try { + Thread.sleep(2000); + } catch (InterruptedException e) { + e.printStackTrace(); + } + return (addFiles(curlCommand, globusLogger)); + }, executor).exceptionally(ex -> { + globusLogger.fine("Something went wrong : " + ex.getLocalizedMessage()); + ex.printStackTrace(); + return null; + }); + + String result = addFilesFuture.get(); + + return result ; + } + + + + + private String addFiles(String curlCommand, Logger globusLogger) + { + boolean success = false; + 
+        ProcessBuilder processBuilder = new ProcessBuilder(); + Process process = null; + String line; + String status = ""; + + try { + globusLogger.info("Call to: " + curlCommand); + processBuilder.command("bash", "-c", curlCommand); + process = processBuilder.start(); + process.waitFor(); + + BufferedReader br = new BufferedReader(new InputStreamReader(process.getInputStream())); + + StringBuilder sb = new StringBuilder(); + while ((line = br.readLine()) != null) sb.append(line); + globusLogger.info("API Output: " + sb.toString()); + JsonObject jsonObject = null; + try (StringReader rdr = new StringReader(sb.toString())) { + jsonObject = Json.createReader(rdr).readObject(); + } catch (Exception jpe) { + jpe.printStackTrace(); + globusLogger.log(Level.SEVERE, "Error parsing addFiles API response json."); + } + + if (jsonObject != null) { + status = jsonObject.getString("status"); + } + } catch (Exception ex) { + globusLogger.log(Level.SEVERE, "******* Unexpected Exception while executing api/datasets/:persistentId/addFiles call ", ex); + } + + return status; + } + + } diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index 13a92c9cd27..415e3ea1d89 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -529,6 +529,15 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio logger.fine("fileImportMsg: " + fileImportMsg); return messageText += fileImportMsg; + case GLOBUSUPLOADSUCCESS: + dataset = (Dataset) targetObject; + String fileMsg = BundleUtil.getStringFromBundle("notification.mail.import.globus", Arrays.asList( + systemConfig.getDataverseSiteUrl(), + dataset.getGlobalIdString(), + dataset.getDisplayName() + )); + return messageText += fileMsg; + case CHECKSUMIMPORT: version = (DatasetVersion) targetObject; String checksumImportMsg = BundleUtil.getStringFromBundle("notification.import.checksum", Arrays.asList( @@ -601,6 +610,8 @@ private Object getObjectOfNotification (UserNotification userNotification){ return datasetService.find(userNotification.getObjectId()); case FILESYSTEMIMPORT: return versionService.find(userNotification.getObjectId()); + case GLOBUSUPLOADSUCCESS: + return datasetService.find(userNotification.getObjectId()); case CHECKSUMIMPORT: return versionService.find(userNotification.getObjectId()); case APIGENERATED: diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java index e44c5f6333e..82bf6393f86 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java @@ -30,7 +30,7 @@ public enum Type { ASSIGNROLE, REVOKEROLE, CREATEDV, CREATEDS, CREATEACC, SUBMITTEDDS, RETURNEDDS, PUBLISHEDDS, REQUESTFILEACCESS, GRANTFILEACCESS, REJECTFILEACCESS, FILESYSTEMIMPORT, CHECKSUMIMPORT, CHECKSUMFAIL, CONFIRMEMAIL, APIGENERATED, INGESTCOMPLETED, INGESTCOMPLETEDWITHERRORS, - PUBLISHFAILED_PIDREG + PUBLISHFAILED_PIDREG, GLOBUSUPLOADSUCCESS; }; private static final long serialVersionUID = 1L; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index be46a5fab31..b328877e145 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -149,14 +149,11 @@ import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; +import javax.ws.rs.core.*; import javax.ws.rs.core.Response.Status; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; -import javax.ws.rs.core.UriInfo; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import org.apache.solr.client.solrj.SolrServerException; import org.glassfish.jersey.media.multipart.FormDataBodyPart; @@ -173,36 +170,37 @@ public class Datasets extends AbstractApiBean { private static final Logger logger = Logger.getLogger(Datasets.class.getCanonicalName()); - - @Inject DataverseSession session; + + @Inject + DataverseSession session; @EJB DatasetServiceBean datasetService; @EJB DataverseServiceBean dataverseService; - + @EJB GlobusServiceBean globusServiceBean; @EJB UserNotificationServiceBean userNotificationService; - + @EJB PermissionServiceBean permissionService; - + @EJB AuthenticationServiceBean authenticationServiceBean; - + @EJB DDIExportServiceBean ddiExportService; - + @EJB DatasetFieldServiceBean datasetfieldService; @EJB MetadataBlockServiceBean metadataBlockService; - + @EJB DataFileServiceBean fileService; @@ -211,26 +209,26 @@ public class Datasets extends AbstractApiBean { @EJB EjbDataverseEngine commandEngine; - + @EJB IndexServiceBean indexService; @EJB S3PackageImporter s3PackageImporter; - + @EJB SettingsServiceBean settingsService; // TODO: Move to AbstractApiBean @EJB DatasetMetricsServiceBean datasetMetricsSvc; - + @EJB DatasetExternalCitationsServiceBean datasetExternalCitationsService; - + @Inject MakeDataCountLoggingServiceBean mdcLogService; - + @Inject DataverseRequestServiceBean dvRequestService; @@ -240,40 +238,43 @@ public class Datasets extends AbstractApiBean { /** * Used to consolidate the way we parse and handle dataset versions. - * @param + * @param */ public interface DsVersionHandler { T handleLatest(); + T handleDraft(); - T handleSpecific( long major, long minor ); + + T handleSpecific(long major, long minor); + T handleLatestPublished(); } - + @GET @Path("{id}") public Response getDataset(@PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { - return response( req -> { + return response(req -> { final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id))); final DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved)); final JsonObjectBuilder jsonbuilder = json(retrieved); //Report MDC if this is a released version (could be draft if user has access, or user may not have access at all and is not getting metadata beyond the minimum) - if((latest != null) && latest.isReleased()) { + if ((latest != null) && latest.isReleased()) { MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved); mdcLogService.logEntry(entry); } return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest) : null)); }); } - + // TODO: // This API call should, ideally, call findUserOrDie() and the GetDatasetCommand // to obtain the dataset that we are trying to export - which would handle // Auth in the process... For now, Auth isn't necessary - since export ONLY // WORKS on published datasets, which are open to the world. -- L.A. 
4.5 - + @GET @Path("/export") - @Produces({"application/xml", "application/json", "application/html" }) + @Produces({"application/xml", "application/json", "application/html"}) public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { try { @@ -281,20 +282,20 @@ public Response exportDataset(@QueryParam("persistentId") String persistentId, @ if (dataset == null) { return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found."); } - + ExportService instance = ExportService.getInstance(settingsSvc); - + InputStream is = instance.getExport(dataset, exporter); - + String mediaType = instance.getMediaType(exporter); //Export is only possible for released (non-draft) dataset versions so we can log without checking to see if this is a request for a draft MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, dataset); mdcLogService.logEntry(entry); - + return Response.ok() .entity(is) .type(mediaType). - build(); + build(); } catch (Exception wr) { return error(Response.Status.FORBIDDEN, "Export Failed"); } @@ -302,7 +303,7 @@ public Response exportDataset(@QueryParam("persistentId") String persistentId, @ @DELETE @Path("{id}") - public Response deleteDataset( @PathParam("id") String id) { + public Response deleteDataset(@PathParam("id") String id) { // Internally, "DeleteDatasetCommand" simply redirects to "DeleteDatasetVersionCommand" // (and there's a comment that says "TODO: remove this command") // do we need an exposed API call for it? @@ -312,13 +313,13 @@ public Response deleteDataset( @PathParam("id") String id) { // "destroyDataset" API calls. // (The logic below follows the current implementation of the underlying // commands!) - - return response( req -> { + + return response(req -> { Dataset doomed = findDatasetOrDie(id); DatasetVersion doomedVersion = doomed.getLatestVersion(); User u = findUserOrDie(); boolean destroy = false; - + if (doomed.getVersions().size() == 1) { if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) { throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can delete published datasets")); @@ -329,13 +330,13 @@ public Response deleteDataset( @PathParam("id") String id) { throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is a published dataset with multiple versions. 
This API can only delete the latest version if it is a DRAFT")); } } - + // Gather the locations of the physical files that will need to be // deleted once the destroy command execution has been finalized: Map deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomedVersion, destroy); - - execCommand( new DeleteDatasetCommand(req, findDatasetOrDie(id))); - + + execCommand(new DeleteDatasetCommand(req, findDatasetOrDie(id))); + // If we have gotten this far, the destroy command has succeeded, // so we can finalize it by permanently deleting the physical files: // (DataFileService will double-check that the datafiles no @@ -344,11 +345,11 @@ public Response deleteDataset( @PathParam("id") String id) { if (!deleteStorageLocations.isEmpty()) { fileService.finalizeFileDeletes(deleteStorageLocations); } - + return ok("Dataset " + id + " deleted"); }); } - + @DELETE @Path("{id}/destroy") public Response destroyDataset(@PathParam("id") String id) { @@ -380,29 +381,29 @@ public Response destroyDataset(@PathParam("id") String id) { return ok("Dataset " + id + " destroyed"); }); } - + @DELETE @Path("{id}/versions/{versionId}") - public Response deleteDraftVersion( @PathParam("id") String id, @PathParam("versionId") String versionId ){ - if ( ! ":draft".equals(versionId) ) { + public Response deleteDraftVersion(@PathParam("id") String id, @PathParam("versionId") String versionId) { + if (!":draft".equals(versionId)) { return badRequest("Only the :draft version can be deleted"); } - return response( req -> { + return response(req -> { Dataset dataset = findDatasetOrDie(id); DatasetVersion doomed = dataset.getLatestVersion(); - + if (!doomed.isDraft()) { throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is NOT a DRAFT version")); } - + // Gather the locations of the physical files that will need to be // deleted once the destroy command execution has been finalized: - + Map deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed); - - execCommand( new DeleteDatasetVersionCommand(req, dataset)); - + + execCommand(new DeleteDatasetVersionCommand(req, dataset)); + // If we have gotten this far, the delete command has succeeded - // by either deleting the Draft version of a published dataset, // or destroying an unpublished one. 
@@ -413,26 +414,26 @@ public Response deleteDraftVersion( @PathParam("id") String id, @PathParam("ver if (!deleteStorageLocations.isEmpty()) { fileService.finalizeFileDeletes(deleteStorageLocations); } - + return ok("Draft version of dataset " + id + " deleted"); }); } - + @DELETE @Path("{datasetId}/deleteLink/{linkedDataverseId}") - public Response deleteDatasetLinkingDataverse( @PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) { - boolean index = true; + public Response deleteDatasetLinkingDataverse(@PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) { + boolean index = true; return response(req -> { execCommand(new DeleteDatasetLinkingDataverseCommand(req, findDatasetOrDie(datasetId), findDatasetLinkingDataverseOrDie(datasetId, linkedDataverseId), index)); return ok("Link from Dataset " + datasetId + " to linked Dataverse " + linkedDataverseId + " deleted"); }); } - + @PUT @Path("{id}/citationdate") - public Response setCitationDate( @PathParam("id") String id, String dsfTypeName) { - return response( req -> { - if ( dsfTypeName.trim().isEmpty() ){ + public Response setCitationDate(@PathParam("id") String id, String dsfTypeName) { + return response(req -> { + if (dsfTypeName.trim().isEmpty()) { return badRequest("Please provide a dataset field type in the requst body."); } DatasetFieldType dsfType = null; @@ -446,124 +447,124 @@ public Response setCitationDate( @PathParam("id") String id, String dsfTypeName) execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), dsfType)); return ok("Citation Date for dataset " + id + " set to: " + (dsfType != null ? dsfType.getDisplayName() : "default")); }); - } - + } + @DELETE @Path("{id}/citationdate") - public Response useDefaultCitationDate( @PathParam("id") String id) { - return response( req -> { + public Response useDefaultCitationDate(@PathParam("id") String id) { + return response(req -> { execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), null)); return ok("Citation Date for dataset " + id + " set to default"); }); - } - + } + @GET @Path("{id}/versions") - public Response listVersions( @PathParam("id") String id ) { - return response( req -> - ok( execCommand( new ListVersionsCommand(req, findDatasetOrDie(id)) ) - .stream() - .map( d -> json(d) ) - .collect(toJsonArray()))); - } - + public Response listVersions(@PathParam("id") String id) { + return response(req -> + ok(execCommand(new ListVersionsCommand(req, findDatasetOrDie(id))) + .stream() + .map(d -> json(d)) + .collect(toJsonArray()))); + } + @GET @Path("{id}/versions/{versionId}") - public Response getVersion( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> { - DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); + public Response getVersion(@PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response(req -> { + DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); return (dsv == null || dsv.getId() == null) ? 
notFound("Dataset version not found") - : ok(json(dsv)); + : ok(json(dsv)); }); } - + @GET @Path("{id}/versions/{versionId}/files") - public Response getVersionFiles( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> ok( jsonFileMetadatas( - getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getFileMetadatas()))); + public Response getVersionFiles(@PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response(req -> ok(jsonFileMetadatas( + getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getFileMetadatas()))); } - + @GET @Path("{id}/dirindex") @Produces("text/html") public Response getFileAccessFolderView(@PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { folderName = folderName == null ? "" : folderName; - versionId = versionId == null ? ":latest-published" : versionId; - - DatasetVersion version; + versionId = versionId == null ? ":latest-published" : versionId; + + DatasetVersion version; try { DataverseRequest req = createDataverseRequest(findUserOrDie()); version = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); } catch (WrappedResponse wr) { return wr.getResponse(); } - + String output = FileUtil.formatFolderListingHtml(folderName, version, "", originals != null && originals); - + // return "NOT FOUND" if there is no such folder in the dataset version: - + if ("".equals(output)) { return notFound("Folder " + folderName + " does not exist"); } - - + + String indexFileName = folderName.equals("") ? ".index.html" : ".index-" + folderName.replace('/', '_') + ".html"; response.setHeader("Content-disposition", "attachment; filename=\"" + indexFileName + "\""); - + return Response.ok() .entity(output) //.type("application/html"). 
.build(); } - + @GET @Path("{id}/versions/{versionId}/metadata") - public Response getVersionMetadata( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> ok( - jsonByBlocks( - getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers ) + public Response getVersionMetadata(@PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response(req -> ok( + jsonByBlocks( + getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers) .getDatasetFields()))); } - + @GET @Path("{id}/versions/{versionNumber}/metadata/{block}") - public Response getVersionMetadataBlock( @PathParam("id") String datasetId, - @PathParam("versionNumber") String versionNumber, - @PathParam("block") String blockName, - @Context UriInfo uriInfo, - @Context HttpHeaders headers ) { - - return response( req -> { - DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers ); - + public Response getVersionMetadataBlock(@PathParam("id") String datasetId, + @PathParam("versionNumber") String versionNumber, + @PathParam("block") String blockName, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { + + return response(req -> { + DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers); + Map> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields()); - for ( Map.Entry> p : fieldsByBlock.entrySet() ) { - if ( p.getKey().getName().equals(blockName) ) { + for (Map.Entry> p : fieldsByBlock.entrySet()) { + if (p.getKey().getName().equals(blockName)) { return ok(json(p.getKey(), p.getValue())); } } return notFound("metadata block named " + blockName + " not found"); }); } - + @GET @Path("{id}/modifyRegistration") - public Response updateDatasetTargetURL(@PathParam("id") String id ) { - return response( req -> { + public Response updateDatasetTargetURL(@PathParam("id") String id) { + return response(req -> { execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(id), req)); return ok("Dataset " + id + " target url updated"); }); } - + @POST @Path("/modifyRegistrationAll") public Response updateDatasetTargetURLAll() { - return response( req -> { - datasetService.findAll().forEach( ds -> { + return response(req -> { + datasetService.findAll().forEach(ds -> { try { execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(ds.getId().toString()), req)); } catch (WrappedResponse ex) { @@ -573,7 +574,7 @@ public Response updateDatasetTargetURLAll() { return ok("Update All Dataset target url completed"); }); } - + @POST @Path("{id}/modifyRegistrationMetadata") public Response updateDatasetPIDMetadata(@PathParam("id") String id) { @@ -593,36 +594,36 @@ public Response updateDatasetPIDMetadata(@PathParam("id") String id) { return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args)); }); } - + @GET @Path("/modifyRegistrationPIDMetadataAll") public Response updateDatasetPIDMetadataAll() { - return response( req -> { - datasetService.findAll().forEach( ds -> { + return response(req -> { + datasetService.findAll().forEach(ds -> { try { execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(ds.getId().toString()), req)); } catch (WrappedResponse ex) { Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, 
ex); } - }); + }); return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all")); }); } - + @PUT @Path("{id}/versions/{versionId}") - public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId ){ - - if ( ! ":draft".equals(versionId) ) { - return error( Response.Status.BAD_REQUEST, "Only the :draft version can be updated"); + public Response updateDraftVersion(String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) { + + if (!":draft".equals(versionId)) { + return error(Response.Status.BAD_REQUEST, "Only the :draft version can be updated"); } - - try ( StringReader rdr = new StringReader(jsonBody) ) { + + try (StringReader rdr = new StringReader(jsonBody)) { DataverseRequest req = createDataverseRequest(findUserOrDie()); Dataset ds = findDatasetOrDie(id); JsonObject json = Json.createReader(rdr).readObject(); DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json); - + // clear possibly stale fields from the incoming dataset version. // creation and modification dates are updated by the commands. incomingVersion.setId(null); @@ -632,18 +633,18 @@ public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, incomingVersion.setDataset(ds); incomingVersion.setCreateTime(null); incomingVersion.setLastUpdateTime(null); - - if (!incomingVersion.getFileMetadatas().isEmpty()){ - return error( Response.Status.BAD_REQUEST, "You may not add files via this api."); + + if (!incomingVersion.getFileMetadatas().isEmpty()) { + return error(Response.Status.BAD_REQUEST, "You may not add files via this api."); } - + boolean updateDraft = ds.getLatestVersion().isDraft(); - + DatasetVersion managedVersion; - if ( updateDraft ) { + if (updateDraft) { final DatasetVersion editVersion = ds.getEditVersion(); editVersion.setDatasetFields(incomingVersion.getDatasetFields()); - editVersion.setTermsOfUseAndAccess( incomingVersion.getTermsOfUseAndAccess() ); + editVersion.setTermsOfUseAndAccess(incomingVersion.getTermsOfUseAndAccess()); Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); managedVersion = managedDataset.getEditVersion(); } else { @@ -652,18 +653,18 @@ public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, // DatasetVersion managedVersion = execCommand( updateDraft // ? 
new UpdateDatasetVersionCommand(req, incomingVersion) // : new CreateDatasetVersionCommand(req, ds, incomingVersion)); - return ok( json(managedVersion) ); - + return ok(json(managedVersion)); + } catch (JsonParseException ex) { logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex); - return error( Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage() ); - + return error(Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage()); + } catch (WrappedResponse ex) { return ex.getResponse(); - + } } - + @PUT @Path("{id}/deleteMetadata") public Response deleteVersionMetadata(String jsonBody, @PathParam("id") String id) throws WrappedResponse { @@ -701,7 +702,7 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav boolean found = false; for (DatasetField dsf : dsv.getDatasetFields()) { if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) { - if (dsf.getDatasetFieldType().isAllowMultiples()) { + if (dsf.getDatasetFieldType().isAllowMultiples()) { if (updateField.getDatasetFieldType().isControlledVocabulary()) { if (dsf.getDatasetFieldType().isAllowMultiples()) { for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) { @@ -766,7 +767,7 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav datasetFieldCompoundValueItemsToRemove.forEach((remove) -> { dsf.getDatasetFieldCompoundValues().remove(remove); }); - if (!found) { + if (!found) { logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found."); return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found."); } @@ -781,17 +782,16 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav break; } } - if (!found){ + if (!found) { String displayValue = !updateField.getDisplayValue().isEmpty() ? updateField.getDisplayValue() : updateField.getCompoundDisplayValue(); - logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." ); - return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." ); + logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found."); + return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found."); } - } + } - boolean updateDraft = ds.getLatestVersion().isDraft(); - DatasetVersion managedVersion = updateDraft + DatasetVersion managedVersion = updateDraft ? 
execCommand(new UpdateDatasetVersionCommand(ds, req)).getEditVersion() : execCommand(new CreateDatasetVersionCommand(req, ds, dsv)); return ok(json(managedVersion)); @@ -805,24 +805,24 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav return ex.getResponse(); } - + } - - private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){ + + private String getCompoundDisplayValue(DatasetFieldCompoundValue dscv) { String returnString = ""; - for (DatasetField dsf : dscv.getChildDatasetFields()) { - for (String value : dsf.getValues()) { - if (!(value == null)) { - returnString += (returnString.isEmpty() ? "" : "; ") + value.trim(); - } + for (DatasetField dsf : dscv.getChildDatasetFields()) { + for (String value : dsf.getValues()) { + if (!(value == null)) { + returnString += (returnString.isEmpty() ? "" : "; ") + value.trim(); } } + } return returnString; } - + @PUT @Path("{id}/editMetadata") - public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) throws WrappedResponse{ + public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) throws WrappedResponse { Boolean replaceData = replace != null; @@ -830,26 +830,26 @@ public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, return processDatasetUpdate(jsonBody, id, req, replaceData); } - - - private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData){ + + + private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData) { try (StringReader rdr = new StringReader(jsonBody)) { - + Dataset ds = findDatasetOrDie(id); JsonObject json = Json.createReader(rdr).readObject(); DatasetVersion dsv = ds.getEditVersion(); - + List fields = new LinkedList<>(); - DatasetField singleField = null; - + DatasetField singleField = null; + JsonArray fieldsJson = json.getJsonArray("fields"); - if( fieldsJson == null ){ - singleField = jsonParser().parseField(json, Boolean.FALSE); + if (fieldsJson == null) { + singleField = jsonParser().parseField(json, Boolean.FALSE); fields.add(singleField); - } else{ + } else { fields = jsonParser().parseMultipleFields(json); } - + String valdationErrors = validateDatasetFieldValues(fields); @@ -959,7 +959,7 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque } } - + private String validateDatasetFieldValues(List fields) { StringBuilder error = new StringBuilder(); @@ -977,14 +977,14 @@ private String validateDatasetFieldValues(List fields) { } return ""; } - + /** * @deprecated This was shipped as a GET but should have been a POST, see https://github.com/IQSS/dataverse/issues/2431 */ @GET @Path("{id}/actions/:publish") @Deprecated - public Response publishDataseUsingGetDeprecated( @PathParam("id") String id, @QueryParam("type") String type ) { + public Response publishDataseUsingGetDeprecated(@PathParam("id") String id, @QueryParam("type") String type) { logger.info("publishDataseUsingGetDeprecated called on id " + id + ". 
Encourage use of POST rather than GET, which is deprecated."); return publishDataset(id, type, false); } @@ -996,10 +996,10 @@ public Response publishDataset(@PathParam("id") String id, @QueryParam("type") S if (type == null) { return error(Response.Status.BAD_REQUEST, "Missing 'type' parameter (either 'major','minor', or 'updatecurrent')."); } - boolean updateCurrent=false; + boolean updateCurrent = false; AuthenticatedUser user = findAuthenticatedUserOrDie(); type = type.toLowerCase(); - boolean isMinor=false; + boolean isMinor = false; switch (type) { case "minor": isMinor = true; @@ -1007,15 +1007,15 @@ public Response publishDataset(@PathParam("id") String id, @QueryParam("type") S case "major": isMinor = false; break; - case "updatecurrent": - if(user.isSuperuser()) { - updateCurrent=true; - } else { - return error(Response.Status.FORBIDDEN, "Only superusers can update the current version"); - } - break; + case "updatecurrent": + if (user.isSuperuser()) { + updateCurrent = true; + } else { + return error(Response.Status.FORBIDDEN, "Only superusers can update the current version"); + } + break; default: - return error(Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major', 'minor', or 'updatecurrent'."); + return error(Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major', 'minor', or 'updatecurrent'."); } Dataset ds = findDatasetOrDie(id); @@ -1037,8 +1037,8 @@ public Response publishDataset(@PathParam("id") String id, @QueryParam("type") S * error is returned. * */ - if ((ds.getModificationTime()!=null && (ds.getIndexTime() == null || (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0))) || - (ds.getPermissionModificationTime()!=null && (ds.getPermissionIndexTime() == null || (ds.getPermissionIndexTime().compareTo(ds.getPermissionModificationTime()) <= 0)))) { + if ((ds.getModificationTime() != null && (ds.getIndexTime() == null || (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0))) || + (ds.getPermissionModificationTime() != null && (ds.getPermissionIndexTime() == null || (ds.getPermissionIndexTime().compareTo(ds.getPermissionModificationTime()) <= 0)))) { return error(Response.Status.CONFLICT, "Dataset is awaiting indexing"); } } @@ -1099,21 +1099,21 @@ public Response publishDataset(@PathParam("id") String id, @QueryParam("type") S .build(); } } else { - PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds, + PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds, createDataverseRequest(user), - isMinor)); - return res.isWorkflow() ? accepted(json(res.getDataset())) : ok(json(res.getDataset())); + isMinor)); + return res.isWorkflow() ? 
accepted(json(res.getDataset())) : ok(json(res.getDataset())); } } catch (WrappedResponse ex) { return ex.getResponse(); } } - + @POST @Path("{id}/move/{targetDataverseAlias}") public Response moveDataset(@PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) { try { - User u = findUserOrDie(); + User u = findUserOrDie(); Dataset ds = findDatasetOrDie(id); Dataverse target = dataverseService.findByAlias(targetDataverseAlias); if (target == null) { @@ -1132,32 +1132,32 @@ public Response moveDataset(@PathParam("id") String id, @PathParam("targetDatave } } } - + @PUT - @Path("{linkedDatasetId}/link/{linkingDataverseAlias}") - public Response linkDataset(@PathParam("linkedDatasetId") String linkedDatasetId, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) { - try{ - User u = findUserOrDie(); + @Path("{linkedDatasetId}/link/{linkingDataverseAlias}") + public Response linkDataset(@PathParam("linkedDatasetId") String linkedDatasetId, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) { + try { + User u = findUserOrDie(); Dataset linked = findDatasetOrDie(linkedDatasetId); Dataverse linking = findDataverseOrDie(linkingDataverseAlias); - if (linked == null){ + if (linked == null) { return error(Response.Status.BAD_REQUEST, "Linked Dataset not found."); - } - if (linking == null){ + } + if (linking == null) { return error(Response.Status.BAD_REQUEST, "Linking Dataverse not found."); - } + } execCommand(new LinkDatasetCommand( createDataverseRequest(u), linking, linked - )); + )); return ok("Dataset " + linked.getId() + " linked successfully to " + linking.getAlias()); } catch (WrappedResponse ex) { return ex.getResponse(); } } - + @GET @Path("{id}/links") - public Response getLinks(@PathParam("id") String idSupplied ) { + public Response getLinks(@PathParam("id") String idSupplied) { try { User u = findUserOrDie(); if (!u.isSuperuser()) { @@ -1181,8 +1181,8 @@ public Response getLinks(@PathParam("id") String idSupplied ) { /** * Add a given assignment to a given user or group - * @param ra role assignment DTO - * @param id dataset id + * @param ra role assignment DTO + * @param id dataset id * @param apiKey */ @POST @@ -1190,12 +1190,12 @@ public Response getLinks(@PathParam("id") String idSupplied ) { public Response createAssignment(RoleAssignmentDTO ra, @PathParam("identifier") String id, @QueryParam("key") String apiKey) { try { Dataset dataset = findDatasetOrDie(id); - + RoleAssignee assignee = findAssignee(ra.getAssignee()); if (assignee == null) { return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.not.found.error")); - } - + } + DataverseRole theRole; Dataverse dv = dataset.getOwner(); theRole = null; @@ -1223,7 +1223,7 @@ public Response createAssignment(RoleAssignmentDTO ra, @PathParam("identifier") } } - + @DELETE @Path("{identifier}/assignments/{id}") public Response deleteAssignment(@PathParam("id") long assignmentId, @PathParam("identifier") String dsId) { @@ -1246,26 +1246,26 @@ public Response deleteAssignment(@PathParam("id") long assignmentId, @PathParam( @GET @Path("{identifier}/assignments") public Response getAssignments(@PathParam("identifier") String id) { - return response( req -> - ok( execCommand( - new ListRoleAssignments(req, findDatasetOrDie(id))) - .stream().map(ra->json(ra)).collect(toJsonArray())) ); + return response(req -> + ok(execCommand( + new ListRoleAssignments(req, findDatasetOrDie(id))) + 
.stream().map(ra -> json(ra)).collect(toJsonArray()))); } @GET @Path("{id}/privateUrl") public Response getPrivateUrlData(@PathParam("id") String idSupplied) { - return response( req -> { + return response(req -> { PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied))); - return (privateUrl != null) ? ok(json(privateUrl)) - : error(Response.Status.NOT_FOUND, "Private URL not found."); + return (privateUrl != null) ? ok(json(privateUrl)) + : error(Response.Status.NOT_FOUND, "Private URL not found."); }); } @POST @Path("{id}/privateUrl") public Response createPrivateUrl(@PathParam("id") String idSupplied) { - return response( req -> + return response(req -> ok(json(execCommand( new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied)))))); } @@ -1273,7 +1273,7 @@ public Response createPrivateUrl(@PathParam("id") String idSupplied) { @DELETE @Path("{id}/privateUrl") public Response deletePrivateUrl(@PathParam("id") String idSupplied) { - return response( req -> { + return response(req -> { Dataset dataset = findDatasetOrDie(idSupplied); PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset)); if (privateUrl != null) { @@ -1327,7 +1327,7 @@ public Response getDatasetThumbnail(@PathParam("id") String idSupplied) { try { Dataset dataset = findDatasetOrDie(idSupplied); InputStream is = DatasetUtil.getThumbnailAsInputStream(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); - if(is == null) { + if (is == null) { return notFound("Thumbnail not available"); } return Response.ok(is).build(); @@ -1384,11 +1384,11 @@ public Response getRsync(@PathParam("identifier") String id) { dataset = findDatasetOrDie(id); AuthenticatedUser user = findAuthenticatedUserOrDie(); ScriptRequestResponse scriptRequestResponse = execCommand(new RequestRsyncScriptCommand(createDataverseRequest(user), dataset)); - + DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded"); if (lock == null) { logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId()); - return error(Response.Status.FORBIDDEN, "Failed to lock the dataset (dataset id="+dataset.getId()+")"); + return error(Response.Status.FORBIDDEN, "Failed to lock the dataset (dataset id=" + dataset.getId() + ")"); } return ok(scriptRequestResponse.getScript(), MediaType.valueOf(MediaType.TEXT_PLAIN)); } catch (WrappedResponse wr) { @@ -1397,15 +1397,15 @@ public Response getRsync(@PathParam("identifier") String id) { return error(Response.Status.INTERNAL_SERVER_ERROR, "Something went wrong attempting to download rsync script: " + EjbUtil.ejbExceptionToString(ex)); } } - + /** - * This api endpoint triggers the creation of a "package" file in a dataset - * after that package has been moved onto the same filesystem via the Data Capture Module. + * This api endpoint triggers the creation of a "package" file in a dataset + * after that package has been moved onto the same filesystem via the Data Capture Module. * The package is really just a way that Dataverse interprets a folder created by DCM, seeing it as just one file. * The "package" can be downloaded over RSAL. - * + * * This endpoint currently supports both posix file storage and AWS s3 storage in Dataverse, and depending on which one is active acts accordingly. - * + * * The initial design of the DCM/Dataverse interaction was not to use packages, but to allow import of all individual files natively into Dataverse. 
* But due to the possibly immense number of files (millions) the package approach was taken. * This is relevant because the posix ("file") code contains many remnants of that development work. @@ -1429,13 +1429,13 @@ public Response receiveChecksumValidationResults(@PathParam("identifier") String try { Dataset dataset = findDatasetOrDie(id); if ("validation passed".equals(statusMessageFromDcm)) { - logger.log(Level.INFO, "Checksum Validation passed for DCM."); + logger.log(Level.INFO, "Checksum Validation passed for DCM."); String storageDriver = dataset.getDataverseContext().getEffectiveStorageDriverId(); String uploadFolder = jsonFromDcm.getString("uploadFolder"); int totalSize = jsonFromDcm.getInt("totalSize"); String storageDriverType = System.getProperty("dataverse.file." + storageDriver + ".type"); - + if (storageDriverType.equals("file")) { logger.log(Level.INFO, "File storage driver used for (dataset id={0})", dataset.getId()); @@ -1452,15 +1452,15 @@ public Response receiveChecksumValidationResults(@PathParam("identifier") String String message = wr.getMessage(); return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to put the files into Dataverse. Message was '" + message + "'."); } - } else if(storageDriverType.equals("s3")) { - + } else if (storageDriverType.equals("s3")) { + logger.log(Level.INFO, "S3 storage driver used for DCM (dataset id={0})", dataset.getId()); try { - + //Where the lifting is actually done, moving the s3 files over and having dataverse know of the existance of the package s3PackageImporter.copyFromS3(dataset, uploadFolder); DataFile packageFile = s3PackageImporter.createPackageDataFile(dataset, uploadFolder, new Long(totalSize)); - + if (packageFile == null) { logger.log(Level.SEVERE, "S3 File package import failed."); return error(Response.Status.INTERNAL_SERVER_ERROR, "S3 File package import failed."); @@ -1472,7 +1472,7 @@ public Response receiveChecksumValidationResults(@PathParam("identifier") String datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.DcmUpload); dataset.removeLock(dcmLock); } - + // update version using the command engine to enforce user permissions and constraints if (dataset.getVersions().size() == 1 && dataset.getLatestVersion().getVersionState() == DatasetVersion.VersionState.DRAFT) { try { @@ -1490,11 +1490,11 @@ public Response receiveChecksumValidationResults(@PathParam("identifier") String JsonObjectBuilder job = Json.createObjectBuilder(); return ok(job); - - } catch (IOException e) { + + } catch (IOException e) { String message = e.getMessage(); return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. 
Message was '" + message + "'."); - } + } } else { return error(Response.Status.INTERNAL_SERVER_ERROR, "Invalid storage driver in Dataverse, not compatible with dcm"); } @@ -1517,7 +1517,7 @@ public Response receiveChecksumValidationResults(@PathParam("identifier") String return ex.getResponse(); } } - + @POST @Path("{id}/submitForReview") @@ -1525,9 +1525,9 @@ public Response submitForReview(@PathParam("id") String idSupplied) { try { Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied))); JsonObjectBuilder result = Json.createObjectBuilder(); - + boolean inReview = updatedDataset.isLockedFor(DatasetLock.Reason.InReview); - + result.add("inReview", inReview); result.add("message", "Dataset id " + updatedDataset.getId() + " has been submitted for review."); return ok(result); @@ -1539,7 +1539,7 @@ public Response submitForReview(@PathParam("id") String idSupplied) { @POST @Path("{id}/returnToAuthor") public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBody) { - + if (jsonBody == null || jsonBody.isEmpty()) { return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn)."); } @@ -1547,14 +1547,14 @@ public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBo JsonObject json = Json.createReader(rdr).readObject(); try { Dataset dataset = findDatasetOrDie(idSupplied); - String reasonForReturn = null; + String reasonForReturn = null; reasonForReturn = json.getString("reasonForReturn"); // TODO: Once we add a box for the curator to type into, pass the reason for return to the ReturnDatasetToAuthorCommand and delete this check and call to setReturnReason on the API side. 
if (reasonForReturn == null || reasonForReturn.isEmpty()) { return error(Response.Status.BAD_REQUEST, "You must enter a reason for returning a dataset to the author(s)."); } AuthenticatedUser authenticatedUser = findAuthenticatedUserOrDie(); - Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn )); + Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn)); JsonObjectBuilder result = Json.createObjectBuilder(); result.add("inReview", false); @@ -1565,237 +1565,237 @@ public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBo } } -@GET -@Path("{id}/uploadsid") -@Deprecated -public Response getUploadUrl(@PathParam("id") String idSupplied) { - try { - Dataset dataset = findDatasetOrDie(idSupplied); - - boolean canUpdateDataset = false; - try { - canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset).canIssue(UpdateDatasetVersionCommand.class); - } catch (WrappedResponse ex) { - logger.info("Exception thrown while trying to figure out permissions while getting upload URL for dataset id " + dataset.getId() + ": " + ex.getLocalizedMessage()); - throw ex; - } - if (!canUpdateDataset) { - return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset."); - } - S3AccessIO s3io = FileUtil.getS3AccessForDirectUpload(dataset); - if(s3io == null) { - return error(Response.Status.NOT_FOUND,"Direct upload not supported for files in this dataset: " + dataset.getId()); - } - String url = null; - String storageIdentifier = null; - try { - url = s3io.generateTemporaryS3UploadUrl(); - storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation()); - } catch (IOException io) { - logger.warning(io.getMessage()); - throw new WrappedResponse(io, error( Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request")); - } - - JsonObjectBuilder response = Json.createObjectBuilder() - .add("url", url) - .add("storageIdentifier", storageIdentifier ); - return ok(response); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } -} + @GET + @Path("{id}/uploadsid") + @Deprecated + public Response getUploadUrl(@PathParam("id") String idSupplied) { + try { + Dataset dataset = findDatasetOrDie(idSupplied); -@GET -@Path("{id}/uploadurls") -public Response getMPUploadUrls(@PathParam("id") String idSupplied, @QueryParam("size") long fileSize) { - try { - Dataset dataset = findDatasetOrDie(idSupplied); - - boolean canUpdateDataset = false; - try { - canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset) - .canIssue(UpdateDatasetVersionCommand.class); - } catch (WrappedResponse ex) { - logger.info( - "Exception thrown while trying to figure out permissions while getting upload URLs for dataset id " - + dataset.getId() + ": " + ex.getLocalizedMessage()); - throw ex; - } - if (!canUpdateDataset) { - return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset."); - } - S3AccessIO s3io = FileUtil.getS3AccessForDirectUpload(dataset); - if (s3io == null) { - return error(Response.Status.NOT_FOUND, - "Direct upload not supported for files in this dataset: " + dataset.getId()); - } - JsonObjectBuilder response = null; - String storageIdentifier = null; - try { - storageIdentifier = 
FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation()); - response = s3io.generateTemporaryS3UploadUrls(dataset.getGlobalId().asString(), storageIdentifier, fileSize); - - } catch (IOException io) { - logger.warning(io.getMessage()); - throw new WrappedResponse(io, - error(Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request")); - } - - response.add("storageIdentifier", storageIdentifier); - return ok(response); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } -} + boolean canUpdateDataset = false; + try { + canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset).canIssue(UpdateDatasetVersionCommand.class); + } catch (WrappedResponse ex) { + logger.info("Exception thrown while trying to figure out permissions while getting upload URL for dataset id " + dataset.getId() + ": " + ex.getLocalizedMessage()); + throw ex; + } + if (!canUpdateDataset) { + return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset."); + } + S3AccessIO s3io = FileUtil.getS3AccessForDirectUpload(dataset); + if (s3io == null) { + return error(Response.Status.NOT_FOUND, "Direct upload not supported for files in this dataset: " + dataset.getId()); + } + String url = null; + String storageIdentifier = null; + try { + url = s3io.generateTemporaryS3UploadUrl(); + storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation()); + } catch (IOException io) { + logger.warning(io.getMessage()); + throw new WrappedResponse(io, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request")); + } -@DELETE -@Path("mpupload") -public Response abortMPUpload(@QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) { - try { - Dataset dataset = datasetSvc.findByGlobalId(idSupplied); - //Allow the API to be used within a session (e.g. for direct upload in the UI) - User user =session.getUser(); - if (!user.isAuthenticated()) { - try { - user = findAuthenticatedUserOrDie(); - } catch (WrappedResponse ex) { - logger.info( - "Exception thrown while trying to figure out permissions while getting aborting upload for dataset id " - + dataset.getId() + ": " + ex.getLocalizedMessage()); - throw ex; - } - } - boolean allowed = false; - if (dataset != null) { - allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset) - .canIssue(UpdateDatasetVersionCommand.class); - } else { - /* - * The only legitimate case where a global id won't correspond to a dataset is - * for uploads during creation. Given that this call will still fail unless all - * three parameters correspond to an active multipart upload, it should be safe - * to allow the attempt for an authenticated user. If there are concerns about - * permissions, one could check with the current design that the user is allowed - * to create datasets in some dataverse that is configured to use the storage - * provider specified in the storageidentifier, but testing for the ability to - * create a dataset in a specific dataverse would requiring changing the design - * somehow (e.g. adding the ownerId to this call). 
- */ - allowed = true; - } - if (!allowed) { - return error(Response.Status.FORBIDDEN, - "You are not permitted to abort file uploads with the supplied parameters."); - } - try { - S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId); - } catch (IOException io) { - logger.warning("Multipart upload abort failed for uploadId: " + uploadId + " storageidentifier=" - + storageidentifier + " dataset Id: " + dataset.getId()); - logger.warning(io.getMessage()); - throw new WrappedResponse(io, - error(Response.Status.INTERNAL_SERVER_ERROR, "Could not abort multipart upload")); - } - return Response.noContent().build(); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } -} + JsonObjectBuilder response = Json.createObjectBuilder() + .add("url", url) + .add("storageIdentifier", storageIdentifier); + return ok(response); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } -@PUT -@Path("mpupload") -public Response completeMPUpload(String partETagBody, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) { - try { - Dataset dataset = datasetSvc.findByGlobalId(idSupplied); - //Allow the API to be used within a session (e.g. for direct upload in the UI) - User user =session.getUser(); - if (!user.isAuthenticated()) { - try { - user=findAuthenticatedUserOrDie(); - } catch (WrappedResponse ex) { - logger.info( - "Exception thrown while trying to figure out permissions to complete mpupload for dataset id " - + dataset.getId() + ": " + ex.getLocalizedMessage()); - throw ex; - } - } - boolean allowed = false; - if (dataset != null) { - allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset) - .canIssue(UpdateDatasetVersionCommand.class); - } else { - /* - * The only legitimate case where a global id won't correspond to a dataset is - * for uploads during creation. Given that this call will still fail unless all - * three parameters correspond to an active multipart upload, it should be safe - * to allow the attempt for an authenticated user. If there are concerns about - * permissions, one could check with the current design that the user is allowed - * to create datasets in some dataverse that is configured to use the storage - * provider specified in the storageidentifier, but testing for the ability to - * create a dataset in a specific dataverse would requiring changing the design - * somehow (e.g. adding the ownerId to this call). 
- */ - allowed = true; - } - if (!allowed) { - return error(Response.Status.FORBIDDEN, - "You are not permitted to complete file uploads with the supplied parameters."); - } - List eTagList = new ArrayList(); - logger.info("Etags: " + partETagBody); - try { - JsonReader jsonReader = Json.createReader(new StringReader(partETagBody)); - JsonObject object = jsonReader.readObject(); - jsonReader.close(); - for(String partNo : object.keySet()) { - eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo))); - } - for(PartETag et: eTagList) { - logger.info("Part: " + et.getPartNumber() + " : " + et.getETag()); - } - } catch (JsonException je) { - logger.info("Unable to parse eTags from: " + partETagBody); - throw new WrappedResponse(je, error( Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload")); - } - try { - S3AccessIO.completeMultipartUpload(idSupplied, storageidentifier, uploadId, eTagList); - } catch (IOException io) { - logger.warning("Multipart upload completion failed for uploadId: " + uploadId +" storageidentifier=" + storageidentifier + " globalId: " + idSupplied); - logger.warning(io.getMessage()); - try { - S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId); - } catch (IOException e) { - logger.severe("Also unable to abort the upload (and release the space on S3 for uploadId: " + uploadId +" storageidentifier=" + storageidentifier + " globalId: " + idSupplied); - logger.severe(io.getMessage()); - } - - throw new WrappedResponse(io, error( Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload")); - } - return ok("Multipart Upload completed"); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } -} + @GET + @Path("{id}/uploadurls") + public Response getMPUploadUrls(@PathParam("id") String idSupplied, @QueryParam("size") long fileSize) { + try { + Dataset dataset = findDatasetOrDie(idSupplied); + + boolean canUpdateDataset = false; + try { + canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset) + .canIssue(UpdateDatasetVersionCommand.class); + } catch (WrappedResponse ex) { + logger.info( + "Exception thrown while trying to figure out permissions while getting upload URLs for dataset id " + + dataset.getId() + ": " + ex.getLocalizedMessage()); + throw ex; + } + if (!canUpdateDataset) { + return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset."); + } + S3AccessIO s3io = FileUtil.getS3AccessForDirectUpload(dataset); + if (s3io == null) { + return error(Response.Status.NOT_FOUND, + "Direct upload not supported for files in this dataset: " + dataset.getId()); + } + JsonObjectBuilder response = null; + String storageIdentifier = null; + try { + storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation()); + response = s3io.generateTemporaryS3UploadUrls(dataset.getGlobalId().asString(), storageIdentifier, fileSize); + + } catch (IOException io) { + logger.warning(io.getMessage()); + throw new WrappedResponse(io, + error(Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request")); + } + + response.add("storageIdentifier", storageIdentifier); + return ok(response); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + + @DELETE + @Path("mpupload") + public Response abortMPUpload(@QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) { + try { + Dataset dataset 
= datasetSvc.findByGlobalId(idSupplied);
+            //Allow the API to be used within a session (e.g. for direct upload in the UI)
+            User user = session.getUser();
+            if (!user.isAuthenticated()) {
+                try {
+                    user = findAuthenticatedUserOrDie();
+                } catch (WrappedResponse ex) {
+                    logger.info(
+                            "Exception thrown while trying to figure out permissions while aborting upload for dataset id "
+                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
+                    throw ex;
+                }
+            }
+            boolean allowed = false;
+            if (dataset != null) {
+                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
+                        .canIssue(UpdateDatasetVersionCommand.class);
+            } else {
+                /*
+                 * The only legitimate case where a global id won't correspond to a dataset is
+                 * for uploads during creation. Given that this call will still fail unless all
+                 * three parameters correspond to an active multipart upload, it should be safe
+                 * to allow the attempt for an authenticated user. If there are concerns about
+                 * permissions, one could check with the current design that the user is allowed
+                 * to create datasets in some dataverse that is configured to use the storage
+                 * provider specified in the storageidentifier, but testing for the ability to
+                 * create a dataset in a specific dataverse would require changing the design
+                 * somehow (e.g. adding the ownerId to this call).
+                 */
+                allowed = true;
+            }
+            if (!allowed) {
+                return error(Response.Status.FORBIDDEN,
+                        "You are not permitted to abort file uploads with the supplied parameters.");
+            }
+            try {
+                S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
+            } catch (IOException io) {
+                logger.warning("Multipart upload abort failed for uploadId: " + uploadId + " storageidentifier="
+                        + storageidentifier + " dataset Id: " + dataset.getId());
+                logger.warning(io.getMessage());
+                throw new WrappedResponse(io,
+                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not abort multipart upload"));
+            }
+            return Response.noContent().build();
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+    }
+
+    @PUT
+    @Path("mpupload")
+    public Response completeMPUpload(String partETagBody, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
+        try {
+            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
+            //Allow the API to be used within a session (e.g. for direct upload in the UI)
+            User user = session.getUser();
+            if (!user.isAuthenticated()) {
+                try {
+                    user = findAuthenticatedUserOrDie();
+                } catch (WrappedResponse ex) {
+                    logger.info(
+                            "Exception thrown while trying to figure out permissions to complete mpupload for dataset id "
+                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
+                    throw ex;
+                }
+            }
+            boolean allowed = false;
+            if (dataset != null) {
+                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
+                        .canIssue(UpdateDatasetVersionCommand.class);
+            } else {
+                /*
+                 * The only legitimate case where a global id won't correspond to a dataset is
+                 * for uploads during creation. Given that this call will still fail unless all
+                 * three parameters correspond to an active multipart upload, it should be safe
+                 * to allow the attempt for an authenticated user.
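+                 * (Illustration only: a PUT to
+                 * /api/datasets/mpupload?globalid=...&storageidentifier=...&uploadid=...
+                 * should succeed only when all three values name the same open multipart
+                 * upload, so a request with guessed or stale values is rejected on the
+                 * S3 side rather than by Dataverse.)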
+                 * If there are concerns about permissions, one could check with the current
+                 * design that the user is allowed to create datasets in some dataverse that
+                 * is configured to use the storage provider specified in the
+                 * storageidentifier, but testing for the ability to create a dataset in a
+                 * specific dataverse would require changing the design somehow (e.g. adding
+                 * the ownerId to this call).
+                 */
+                allowed = true;
+            }
+            if (!allowed) {
+                return error(Response.Status.FORBIDDEN,
+                        "You are not permitted to complete file uploads with the supplied parameters.");
+            }
+            List<PartETag> eTagList = new ArrayList<>();
+            logger.info("Etags: " + partETagBody);
+            try {
+                JsonReader jsonReader = Json.createReader(new StringReader(partETagBody));
+                JsonObject object = jsonReader.readObject();
+                jsonReader.close();
+                for (String partNo : object.keySet()) {
+                    eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo)));
+                }
+                for (PartETag et : eTagList) {
+                    logger.info("Part: " + et.getPartNumber() + " : " + et.getETag());
+                }
+            } catch (JsonException je) {
+                logger.info("Unable to parse eTags from: " + partETagBody);
+                throw new WrappedResponse(je, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
+            }
+            try {
+                S3AccessIO.completeMultipartUpload(idSupplied, storageidentifier, uploadId, eTagList);
+            } catch (IOException io) {
+                logger.warning("Multipart upload completion failed for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
+                logger.warning(io.getMessage());
+                try {
+                    S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
+                } catch (IOException e) {
+                    logger.severe("Also unable to abort the upload (and release the space on S3) for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
+                    logger.severe(e.getMessage());
+                }
+
+                throw new WrappedResponse(io, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
+            }
+            return ok("Multipart Upload completed");
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+    }

     /**
      * Add a File to an existing Dataset
-     * 
+     *
      * @param idSupplied
      * @param jsonData
      * @param fileInputStream
      * @param contentDispositionHeader
      * @param formDataBodyPart
-     * @return 
+     * @return
      */
     @POST
     @Path("{id}/add")
     @Consumes(MediaType.MULTIPART_FORM_DATA)
     public Response addFileToDataset(@PathParam("id") String idSupplied,
-                    @FormDataParam("jsonData") String jsonData,
-                    @FormDataParam("file") InputStream fileInputStream,
-                    @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
-                    @FormDataParam("file") final FormDataBodyPart formDataBodyPart
-                    ){
+                                     @FormDataParam("jsonData") String jsonData,
+                                     @FormDataParam("file") InputStream fileInputStream,
+                                     @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
+                                     @FormDataParam("file") final FormDataBodyPart formDataBodyPart
+    ) {

         if (!systemConfig.isHTTPUpload()) {
             return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
@@ -1810,27 +1810,27 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
         } catch (WrappedResponse ex) {
             return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth")
-            );
+            );
         }
-        
-        
+
+
         // -------------------------------------
         // (2) Get the Dataset Id
         //
         // -------------------------------------
         Dataset dataset;
-        
+
         try {
             dataset = findDatasetOrDie(idSupplied);
         } catch
(WrappedResponse wr) { - return wr.getResponse(); + return wr.getResponse(); } - + //------------------------------------ // (2a) Make sure dataset does not have package file // // -------------------------------------- - + for (DatasetVersion dv : dataset.getVersions()) { if (dv.isHasPackageFile()) { return error(Response.Status.FORBIDDEN, @@ -1842,40 +1842,40 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, // (2a) Load up optional params via JSON //--------------------------------------- OptionalFileParams optionalFileParams = null; - msgt("(api) jsonData: " + jsonData); + msgt("(api) jsonData: " + jsonData); try { optionalFileParams = new OptionalFileParams(jsonData); } catch (DataFileTagException ex) { - return error( Response.Status.BAD_REQUEST, ex.getMessage()); + return error(Response.Status.BAD_REQUEST, ex.getMessage()); } - + // ------------------------------------- // (3) Get the file name and content type // ------------------------------------- String newFilename = null; String newFileContentType = null; String newStorageIdentifier = null; - if (null == contentDispositionHeader) { - if (optionalFileParams.hasStorageIdentifier()) { - newStorageIdentifier = optionalFileParams.getStorageIdentifier(); - // ToDo - check that storageIdentifier is valid - if (optionalFileParams.hasFileName()) { - newFilename = optionalFileParams.getFileName(); - if (optionalFileParams.hasMimetype()) { - newFileContentType = optionalFileParams.getMimeType(); - } - } - } else { - return error(BAD_REQUEST, - "You must upload a file or provide a storageidentifier, filename, and mimetype."); - } - } else { - newFilename = contentDispositionHeader.getFileName(); - newFileContentType = formDataBodyPart.getMediaType().toString(); - } - - + if (null == contentDispositionHeader) { + if (optionalFileParams.hasStorageIdentifier()) { + newStorageIdentifier = optionalFileParams.getStorageIdentifier(); + // ToDo - check that storageIdentifier is valid + if (optionalFileParams.hasFileName()) { + newFilename = optionalFileParams.getFileName(); + if (optionalFileParams.hasMimetype()) { + newFileContentType = optionalFileParams.getMimeType(); + } + } + } else { + return error(BAD_REQUEST, + "You must upload a file or provide a storageidentifier, filename, and mimetype."); + } + } else { + newFilename = contentDispositionHeader.getFileName(); + newFileContentType = formDataBodyPart.getMediaType().toString(); + } + + //------------------- // (3) Create the AddReplaceFileHelper object //------------------- @@ -1883,28 +1883,28 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, DataverseRequest dvRequest2 = createDataverseRequest(authUser); AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2, - ingestService, - datasetService, - fileService, - permissionSvc, - commandEngine, - systemConfig); + ingestService, + datasetService, + fileService, + permissionSvc, + commandEngine, + systemConfig); //------------------- // (4) Run "runAddFileByDatasetId" //------------------- addFileHelper.runAddFileByDataset(dataset, - newFilename, - newFileContentType, - newStorageIdentifier, - fileInputStream, - optionalFileParams); + newFilename, + newFileContentType, + newStorageIdentifier, + fileInputStream, + optionalFileParams); - if (addFileHelper.hasError()){ + if (addFileHelper.hasError()) { return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n")); - }else{ + } else { String successMsg = 
BundleUtil.getStringFromBundle("file.addreplace.success.add"); try { //msgt("as String: " + addFileHelper.getSuccessResult()); @@ -1922,7 +1922,7 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, } else { return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder()); } - + //"Look at that! You added a file! (hey hey, it may have worked)"); } catch (NoFilesException ex) { Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); @@ -1930,71 +1930,77 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, } } - + } // end: addFileToDataset - - private void msg(String m){ + private void msg(String m) { //System.out.println(m); logger.fine(m); } - private void dashes(){ + + private void dashes() { msg("----------------"); } - private void msgt(String m){ - dashes(); msg(m); dashes(); + + private void msgt(String m) { + dashes(); + msg(m); + dashes(); } - - - public static T handleVersion( String versionId, DsVersionHandler hdl ) - throws WrappedResponse { + + + public static T handleVersion(String versionId, DsVersionHandler hdl) + throws WrappedResponse { switch (versionId) { - case ":latest": return hdl.handleLatest(); - case ":draft": return hdl.handleDraft(); - case ":latest-published": return hdl.handleLatestPublished(); + case ":latest": + return hdl.handleLatest(); + case ":draft": + return hdl.handleDraft(); + case ":latest-published": + return hdl.handleLatestPublished(); default: try { String[] versions = versionId.split("\\."); switch (versions.length) { case 1: - return hdl.handleSpecific(Long.parseLong(versions[0]), (long)0.0); + return hdl.handleSpecific(Long.parseLong(versions[0]), (long) 0.0); case 2: - return hdl.handleSpecific( Long.parseLong(versions[0]), Long.parseLong(versions[1]) ); + return hdl.handleSpecific(Long.parseLong(versions[0]), Long.parseLong(versions[1])); default: - throw new WrappedResponse(error( Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'")); + throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'")); } - } catch ( NumberFormatException nfe ) { - throw new WrappedResponse( error( Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'") ); + } catch (NumberFormatException nfe) { + throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'")); } } } - - private DatasetVersion getDatasetVersionOrDie( final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers) throws WrappedResponse { - DatasetVersion dsv = execCommand( handleVersion(versionNumber, new DsVersionHandler>(){ - @Override - public Command handleLatest() { - return new GetLatestAccessibleDatasetVersionCommand(req, ds); - } + private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers) throws WrappedResponse { + DatasetVersion dsv = execCommand(handleVersion(versionNumber, new DsVersionHandler>() { - @Override - public Command handleDraft() { - return new GetDraftDatasetVersionCommand(req, ds); - } - - @Override - public Command handleSpecific(long major, long minor) { - return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor); - } + @Override + public Command handleLatest() { + return new GetLatestAccessibleDatasetVersionCommand(req, ds); + } - @Override - public Command handleLatestPublished() { - return new 
GetLatestPublishedDatasetVersionCommand(req, ds); - } - })); - if ( dsv == null || dsv.getId() == null ) { - throw new WrappedResponse( notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found") ); + @Override + public Command handleDraft() { + return new GetDraftDatasetVersionCommand(req, ds); + } + + @Override + public Command handleSpecific(long major, long minor) { + return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor); + } + + @Override + public Command handleLatestPublished() { + return new GetLatestPublishedDatasetVersionCommand(req, ds); + } + })); + if (dsv == null || dsv.getId() == null) { + throw new WrappedResponse(notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found")); } if (dsv.isReleased()) { MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, ds); @@ -2002,7 +2008,7 @@ public Command handleLatestPublished() { } return dsv; } - + @GET @Path("{identifier}/locks") public Response getLocks(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) { @@ -2010,26 +2016,26 @@ public Response getLocks(@PathParam("identifier") String id, @QueryParam("type") Dataset dataset = null; try { dataset = findDatasetOrDie(id); - Set locks; + Set locks; if (lockType == null) { locks = dataset.getLocks(); } else { // request for a specific type lock: DatasetLock lock = dataset.getLockFor(lockType); - locks = new HashSet<>(); + locks = new HashSet<>(); if (lock != null) { locks.add(lock); } } - + return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray())); } catch (WrappedResponse wr) { return wr.getResponse(); - } - } - + } + } + @DELETE @Path("{identifier}/locks") public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) { @@ -2041,7 +2047,7 @@ public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("typ return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only."); } Dataset dataset = findDatasetOrDie(id); - + if (lockType == null) { Set locks = new HashSet<>(); for (DatasetLock lock : dataset.getLocks()) { @@ -2093,7 +2099,7 @@ public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("typ }); } - + @POST @Path("{identifier}/lock/{type}") public Response lockDataset(@PathParam("identifier") String id, @PathParam("type") DatasetLock.Reason lockType) { @@ -2102,7 +2108,7 @@ public Response lockDataset(@PathParam("identifier") String id, @PathParam("type AuthenticatedUser user = findAuthenticatedUserOrDie(); if (!user.isSuperuser()) { return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only."); - } + } Dataset dataset = findDatasetOrDie(id); DatasetLock lock = dataset.getLockFor(lockType); if (lock != null) { @@ -2129,16 +2135,16 @@ public Response lockDataset(@PathParam("identifier") String id, @PathParam("type }); } - + @GET @Path("{id}/makeDataCount/citations") public Response getMakeDataCountCitations(@PathParam("id") String idSupplied) { - + try { Dataset dataset = findDatasetOrDie(idSupplied); JsonArrayBuilder datasetsCitations = Json.createArrayBuilder(); List externalCitations = datasetExternalCitationsService.getDatasetExternalCitationsByDataset(dataset); - for (DatasetExternalCitations citation : externalCitations ){ + for (DatasetExternalCitations citation : externalCitations) { JsonObjectBuilder candidateObj = 
Json.createObjectBuilder(); /** * In the future we can imagine storing and presenting more @@ -2149,9 +2155,9 @@ public Response getMakeDataCountCitations(@PathParam("id") String idSupplied) { */ candidateObj.add("citationUrl", citation.getCitedByUrl()); datasetsCitations.add(candidateObj); - } - return ok(datasetsCitations); - + } + return ok(datasetsCitations); + } catch (WrappedResponse wr) { return wr.getResponse(); } @@ -2164,23 +2170,23 @@ public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSup String nullCurrentMonth = null; return getMakeDataCountMetric(idSupplied, metricSupplied, nullCurrentMonth, country); } - + @GET @Path("{identifier}/storagesize") - public Response getStorageSize(@PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - + public Response getStorageSize(@PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached, + @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { + return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"), - execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached,GetDatasetStorageSizeCommand.Mode.STORAGE, null))))); + execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null))))); } - + @GET @Path("{identifier}/versions/{versionId}/downloadsize") - public Response getDownloadSize(@PathParam("identifier") String dvIdtf, @PathParam("versionId") String version, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - + public Response getDownloadSize(@PathParam("identifier") String dvIdtf, @PathParam("versionId") String version, + @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { + return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), - execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, getDatasetVersionOrDie(req, version , findDatasetOrDie(dvIdtf), uriInfo, headers)))))); + execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers)))))); } @GET @@ -2282,29 +2288,29 @@ public Response getMakeDataCountMetric(@PathParam("id") String idSupplied, @Path return wr.getResponse(); } } - + @GET @Path("{identifier}/storageDriver") public Response getFileStore(@PathParam("identifier") String dvIdtf, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - - Dataset dataset; - + @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { + + Dataset dataset; + try { dataset = findDatasetOrDie(dvIdtf); } catch (WrappedResponse ex) { return error(Response.Status.NOT_FOUND, "No such dataset"); } - + return response(req -> ok(dataset.getEffectiveStorageDriverId())); } - + @PUT @Path("{identifier}/storageDriver") public Response setFileStore(@PathParam("identifier") String dvIdtf, - String storageDriverLabel, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - + String storageDriverLabel, + @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { + // 
Superuser-only: AuthenticatedUser user; try { @@ -2314,16 +2320,16 @@ public Response setFileStore(@PathParam("identifier") String dvIdtf, } if (!user.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Superusers only."); - } - - Dataset dataset; - + } + + Dataset dataset; + try { dataset = findDatasetOrDie(dvIdtf); } catch (WrappedResponse ex) { return error(Response.Status.NOT_FOUND, "No such dataset"); } - + // We don't want to allow setting this to a store id that does not exist: for (Entry store : DataAccess.getStorageDriverLabels().entrySet()) { if (store.getKey().equals(storageDriverLabel)) { @@ -2332,15 +2338,15 @@ public Response setFileStore(@PathParam("identifier") String dvIdtf, return ok("Storage driver set to: " + store.getKey() + "/" + store.getValue()); } } - return error(Response.Status.BAD_REQUEST, - "No Storage Driver found for : " + storageDriverLabel); + return error(Response.Status.BAD_REQUEST, + "No Storage Driver found for : " + storageDriverLabel); } - + @DELETE @Path("{identifier}/storageDriver") public Response resetFileStore(@PathParam("identifier") String dvIdtf, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - + @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { + // Superuser-only: AuthenticatedUser user; try { @@ -2350,19 +2356,19 @@ public Response resetFileStore(@PathParam("identifier") String dvIdtf, } if (!user.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Superusers only."); - } - - Dataset dataset; - + } + + Dataset dataset; + try { dataset = findDatasetOrDie(dvIdtf); } catch (WrappedResponse ex) { return error(Response.Status.NOT_FOUND, "No such dataset"); } - + dataset.setStorageDriverId(null); datasetService.merge(dataset); - return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); } @GET @@ -2406,11 +2412,11 @@ public Response getTimestamps(@PathParam("identifier") String id) { timestamps.add("hasStaleIndex", (dataset.getModificationTime() != null && (dataset.getIndexTime() == null || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? true - : false); + : false); timestamps.add("hasStalePermissionIndex", (dataset.getPermissionModificationTime() != null && (dataset.getIndexTime() == null || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? 
true - : false); + : false); } // More detail if you can see a draft if (canSeeDraft) { @@ -2439,12 +2445,11 @@ public Response getTimestamps(@PathParam("identifier") String id) { } - @POST @Path("{id}/addglobusFilesBkup") @Consumes(MediaType.MULTIPART_FORM_DATA) public Response addGlobusFileToDatasetBkup(@PathParam("id") String datasetId, - @FormDataParam("jsonData") String jsonData + @FormDataParam("jsonData") String jsonData ) { JsonArrayBuilder jarr = Json.createArrayBuilder(); @@ -2753,12 +2758,32 @@ public Response addGlobusFileToDatasetBkup(@PathParam("id") String datasetId, @Path("{id}/addglobusFiles") @Consumes(MediaType.MULTIPART_FORM_DATA) public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, - @FormDataParam("jsonData") String jsonData + @FormDataParam("jsonData") String jsonData, + @Context UriInfo uriInfo, + @Context HttpHeaders headers ) throws IOException, ExecutionException, InterruptedException { - logger.info ( " ==== 1 (api) jsonData 1 ====== " + jsonData ); + logger.info(" ==== (api addGlobusFilesToDataset) jsonData ====== " + jsonData); + + if(uriInfo != null) { + logger.info(" ==== (api uriInfo.getRequestUri()) jsonData ====== " + uriInfo.getRequestUri().toString()); + } + + //logger.info(" ==== (api uriInfo.getRequestUri()) jsonData ====== " + headers.getRequestHeaders() + + MultivaluedMap multivaluedMap = headers.getRequestHeaders(); + + Map result = new HashMap<>(); + multivaluedMap.forEach((name, values) -> { + if (!CollectionUtils.isEmpty(values)) { + result.put(name, (values.size() != 1) ? values : values.get(0)); + logger.info(" headers ==== " + name + " ==== "+ values ); + } + }); + + logger.info(" ==== headers.getRequestHeader(origin) ====== " + headers.getRequestHeader("origin") ); + logger.info(" ==== headers.getRequestHeader(referer) ====== " + headers.getRequestHeader("referer") ); - JsonArrayBuilder jarr = Json.createArrayBuilder(); if (!systemConfig.isHTTPUpload()) { return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); @@ -2786,8 +2811,19 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, return wr.getResponse(); } + //------------------------------------ + // (2b) Make sure dataset does not have package file + // -------------------------------------- + + for (DatasetVersion dv : dataset.getVersions()) { + if (dv.isHasPackageFile()) { + return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") + ); + } + } + - String lockInfoMessage = "Globus Upload API is started "; + String lockInfoMessage = "Globus Upload API started "; DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.EditInProgress, ((AuthenticatedUser) authUser).getId(), lockInfoMessage); if (lock != null) { @@ -2800,11 +2836,12 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, ApiToken token = authSvc.findApiTokenByUser((AuthenticatedUser) authUser); - //String xfp = httpRequest.getHeader("X-Forwarded-Proto"); - //String requestUrl = xfp +"://"+httpRequest.getServerName(); /* + String xfp = httpRequest.getHeader("X-Forwarded-Proto"); + //String requestUrl = xfp +"://"+httpRequest.getServerName(); + x-forwarded-proto String requestUrl = httpRequest.getProtocol().toLowerCase().split("/")[0]+"://"+httpRequest.getServerName(); @@ -2812,16 +2849,14 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, { requestUrl = requestUrl + ":"+ httpRequest.getServerPort(); } - 
*/
+        */
-        //String requestUrl = "https://dvdev.scholarsportal.info" ;
-        String requestUrl = "http://localhost:8080" ;
+        //String requestUrl = "http://localhost:8080";
+        String requestUrl = "https://dvdev.scholarsportal.info";

         // Async Call
-        datasetService.globusAsyncCall( jsonData , token , dataset , requestUrl);
-
-        userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
+        datasetService.globusAsyncCall(jsonData, token, dataset, requestUrl, authUser);

         return ok("Globus Task successfully completed ");

@@ -2881,9 +2916,7 @@ public Response addFilesToDataset(@PathParam("id") String idSupplied,
             }
         }

-
-
-        msgt("******* (api) jsonData 1: " + jsonData.toString());
+        msgt("******* (addFilesToDataset api) jsonData 1: " + jsonData);

         JsonArray filesJson = null;
         try (StringReader rdr = new StringReader(jsonData)) {
@@ -2909,8 +2942,6 @@ public Response addFilesToDataset(@PathParam("id") String idSupplied,

         // -------------------------------------
         // (6) Parse files information from jsondata
-        // calculate checksum
-        // determine mimetype
         // -------------------------------------

         int totalNumberofFiles = 0;
@@ -2949,8 +2980,7 @@ public Response addFilesToDataset(@PathParam("id") String idSupplied,
                         "You must upload a file or provide a storageidentifier, filename, and mimetype.");
             }

-
-            msg("ADD!");
+            msg("ADD! = " + newFilename);

             //-------------------
             // Run "runAddFileByDatasetId"
@@ -2961,7 +2991,7 @@ public Response addFilesToDataset(@PathParam("id") String idSupplied,
                     newFileContentType,
                     newStorageIdentifier,
                     null,
-                    optionalFileParams,true);
+                    optionalFileParams, true);

             if (addFileHelper.hasError()) {

@@ -3032,8 +3062,8 @@ public Response addFilesToDataset(@PathParam("id") String idSupplied,
         dataset = datasetService.find(dataset.getId());

         List<DataFile> s = dataset.getFiles();
-        for (DataFile dataFile : s) {}
-
+        for (DataFile dataFile : s) {
+        }

         //ingest job
         ingestService.startIngestJobsForDataset(dataset, (AuthenticatedUser) authUser);

@@ -3046,4 +3076,27 @@ public Response addFilesToDataset(@PathParam("id") String idSupplied,
         return ok(Json.createObjectBuilder().add("Files", jarr));
     } // end: addFileToDataset
+
+
+    @POST
+    @Path("/deleteglobusRule")
+    @Consumes(MediaType.MULTIPART_FORM_DATA)
+    public Response deleteglobusRule(@FormDataParam("jsonData") String jsonData
+    ) throws IOException, ExecutionException, InterruptedException {
+
+        msgt("******* (api deleteglobusRule) jsonData : " + jsonData);
+
+        JsonObject jsonObject = null;
+        try (StringReader rdr = new StringReader(jsonData)) {
+            jsonObject = Json.createReader(rdr).readObject();
+        } catch (Exception jpe) {
+            jpe.printStackTrace();
+            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}");
+        }
+
+        String ruleId = jsonObject.getString("ruleId");
+
+        globusServiceBean.deletePermision(ruleId, logger);
+        return ok("Globus Rule deleted successfully");
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java
index 204d93b5b8f..1be16f97045 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java
@@ -500,6 +500,10 @@ public void displayNotification() {
                 userNotification.setTheObject(datasetVersionService.find(userNotification.getObjectId()));
                 break;

+            case GLOBUSUPLOADSUCCESS:
+                userNotification.setTheObject(datasetService.find(userNotification.getObjectId()));
+                break;
+
             case CHECKSUMIMPORT:
                 userNotification.setTheObject(datasetVersionService.find(userNotification.getObjectId()));
                 break;
diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
index 2bb3f6c694d..b2f6f424722 100644
--- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
@@ -284,20 +284,20 @@ public void updatePermision(AccessToken clientTokenUser, String directory, Strin
         }
     }

-    public void deletePermision(String ruleId) throws MalformedURLException {
+    public void deletePermision(String ruleId, Logger globusLogger) throws MalformedURLException {
         AccessToken clientTokenUser = getClientToken();
-        logger.info("Start updating permissions." );
+        globusLogger.info("Start deleting permissions.");

         String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, "");

         URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + ruleId);
-        logger.info("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + ruleId);
+        //logger.info("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + ruleId);

         MakeRequestResponse result = makeRequest(url, "Bearer", clientTokenUser.getOtherTokens().get(0).getAccessToken(),"DELETE", null);

         if (result.status != 200) {
-            logger.warning("Cannot update access rule " + ruleId);
+            globusLogger.warning("Cannot delete access rule " + ruleId);
         } else {
-            logger.info("Access rule " + ruleId + " was updated");
+            globusLogger.info("Access rule " + ruleId + " was deleted successfully");
         }
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java
index 37667d16b55..c4645409f87 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java
@@ -66,6 +66,14 @@ public static String getSubjectTextBasedOnNotification(UserNotification userNoti
             } catch (Exception e) {
                 return BundleUtil.getStringFromBundle("notification.email.import.filesystem.subject", rootDvNameAsList);
             }
+        case GLOBUSUPLOADSUCCESS:
+            try {
+                DatasetVersion version = (DatasetVersion) objectOfNotification;
+                List<String> dsNameAsList = Arrays.asList(version.getDataset().getDisplayName());
+                return BundleUtil.getStringFromBundle("notification.email.import.globus.subject", dsNameAsList);
+            } catch (Exception e) {
+                return BundleUtil.getStringFromBundle("notification.email.import.globus.subject", rootDvNameAsList);
+            }
         case CHECKSUMIMPORT:
             return BundleUtil.getStringFromBundle("notification.email.import.checksum.subject", rootDvNameAsList);
diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties
index 0927117ff86..f7c4def1943 100644
--- a/src/main/java/propertyFiles/Bundle.properties
+++ b/src/main/java/propertyFiles/Bundle.properties
@@ -217,7 +217,9 @@ notification.checksumfail=One or more files in your upload failed checksum valid
 notification.ingest.completed=Dataset {2} ingest process has successfully finished.

Ingested files:{3}
notification.ingest.completedwitherrors=Dataset {2} ingest process has finished with errors.

Ingested files:{3}
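# Illustrative rendering with example values: in the notification.mail.import.* templates
# below, {0} is the installation URL, {1} the persistent identifier and {2} the dataset
# name, so notification.mail.import.globus could produce, e.g.:
#   Dataset My Study (https://demo.dataverse.org/dataset.xhtml?persistentId=doi:10.5072/FK2/ABC123) has been successfully uploaded via Globus and verified.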
notification.mail.import.filesystem=Dataset {2} ({0}/dataset.xhtml?persistentId={1}) has been successfully uploaded and verified. +notification.mail.import.globus=Dataset {2} ({0}/dataset.xhtml?persistentId={1}) has been successfully uploaded via Globus and verified. notification.import.filesystem=Dataset {1} has been successfully uploaded and verified. +notification.import.globus=Dataset {1} has been successfully uploaded via Globus and verified. notification.import.checksum={1}, dataset had file checksums added via a batch job. removeNotification=Remove Notification groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned. @@ -696,6 +698,7 @@ contact.delegation={0} on behalf of {1} notification.email.info.unavailable=Unavailable notification.email.apiTokenGenerated=Hello {0} {1},\n\nAPI Token has been generated. Please keep it secure as you would do with a password. notification.email.apiTokenGenerated.subject=API Token was generated +notification.email.import.globus.subject=Dataset {0} has been successfully uploaded via Globus and verified # dataverse.xhtml dataverse.name=Dataverse Name diff --git a/src/main/webapp/dataverseuser.xhtml b/src/main/webapp/dataverseuser.xhtml index 5de0154f49c..8d8baceb6d2 100644 --- a/src/main/webapp/dataverseuser.xhtml +++ b/src/main/webapp/dataverseuser.xhtml @@ -286,6 +286,13 @@ + + + + + + + From dda1335bedc45f4e2cdef93d2d36e15879c9dfe7 Mon Sep 17 00:00:00 2001 From: jingma Date: Wed, 31 Mar 2021 00:47:13 +0200 Subject: [PATCH 0104/1551] Add Apache icons. --- scripts/api/data/license.json | 2 +- scripts/api/data/licenseError.json | 2 +- scripts/api/data/licenseUpdate.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/api/data/license.json b/scripts/api/data/license.json index 3b56b7dbc16..11e0d44c14b 100644 --- a/scripts/api/data/license.json +++ b/scripts/api/data/license.json @@ -2,6 +2,6 @@ "name": "Apache License 1.0", "shortDescription": "This is the original Apache License which applies only to very old versions of Apache packages (such as version 1.2 of the Web server).", "uri": "https://www.apache.org/licenses/LICENSE-1.0", - "iconUrl": "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/rick-astley-1-1552336336.png", + "iconUrl": "https://itgala.xyz/wp-content/uploads/2017/10/Apache-HTTP-Server.png", "active": false } \ No newline at end of file diff --git a/scripts/api/data/licenseError.json b/scripts/api/data/licenseError.json index 63f7a0f700a..d6b1dbbd01b 100644 --- a/scripts/api/data/licenseError.json +++ b/scripts/api/data/licenseError.json @@ -3,6 +3,6 @@ "name": "Apache License 1.0", "shortDescription": "This is the original Apache License which applies only to very old versions of Apache packages (such as version 1.2 of the Web server).", "uri": "https://www.apache.org/licenses/LICENSE-1.0", - "iconUrl": "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/rick-astley-1-1552336336.png", + "iconUrl": "https://itgala.xyz/wp-content/uploads/2017/10/Apache-HTTP-Server.png", "active": false } \ No newline at end of file diff --git a/scripts/api/data/licenseUpdate.json b/scripts/api/data/licenseUpdate.json index 7fc89d19058..eefc4e6f16f 100644 --- a/scripts/api/data/licenseUpdate.json +++ b/scripts/api/data/licenseUpdate.json @@ -2,6 +2,6 @@ "name": "Apache License 2.0", "shortDescription": "The 2.0 version of the Apache License, approved by the ASF in 2004.", "uri": "https://www.apache.org/licenses/LICENSE-2.0", - 
"iconUrl": "https://yt3.ggpht.com/ytc/AAUvwni36SveDisR-vOAmmklBfJxnnjuRG3ihzfrwEfORA=s900-c-k-c0x00ffffff-no-rj", + "iconUrl": "https://itgala.xyz/wp-content/uploads/2017/10/Apache-HTTP-Server.png", "active": true } \ No newline at end of file From c87c3675bed413621a6af0e9b7e05212520beedd Mon Sep 17 00:00:00 2001 From: jingma Date: Wed, 31 Mar 2021 14:31:57 +0200 Subject: [PATCH 0105/1551] Change tokens to licenses. --- .../iq/dataverse/LicenseServiceBean.java | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java index 0c6828fabd0..c49ebd9659e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -33,23 +33,23 @@ public List listAll() { } public License getById(long id) throws FetchException { - List tokens = em.createNamedQuery("License.findById", License.class) + List licenses = em.createNamedQuery("License.findById", License.class) .setParameter("id", id ) .getResultList(); - if (tokens.isEmpty()) { + if (licenses.isEmpty()) { throw new FetchException("License with that ID doesn't exist."); } - return tokens.get(0); + return licenses.get(0); } public License getByName(String name) throws FetchException { - List tokens = em.createNamedQuery("License.findByName", License.class) + List licenses = em.createNamedQuery("License.findByName", License.class) .setParameter("name", name ) .getResultList(); - if (tokens.isEmpty()) { + if (licenses.isEmpty()) { throw new FetchException("License with that name doesn't exist."); } - return tokens.get(0); + return licenses.get(0); } public License save(License license) throws PersistenceException, RequestBodyException { @@ -62,12 +62,12 @@ public License save(License license) throws PersistenceException, RequestBodyExc } public void setById(long id, String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { - List tokens = em.createNamedQuery("License.findById", License.class) + List licenses = em.createNamedQuery("License.findById", License.class) .setParameter("id", id ) .getResultList(); - if(tokens.size() > 0) { - License license = tokens.get(0); + if(licenses.size() > 0) { + License license = licenses.get(0); license.setName(name); license.setShortDescription(shortDescription); license.setUri(uri); @@ -82,12 +82,12 @@ public void setById(long id, String name, String shortDescription, URI uri, URI } public void setByName(String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { - List tokens = em.createNamedQuery("License.findByName", License.class) + List licenses = em.createNamedQuery("License.findByName", License.class) .setParameter("name", name ) .getResultList(); - if(tokens.size() > 0) { - License license = tokens.get(0); + if(licenses.size() > 0) { + License license = licenses.get(0); license.setShortDescription(shortDescription); license.setUri(uri); license.setIconUrl(iconUrl); From 67b7471e0ea127d66ba660f4631461650d2f139e Mon Sep 17 00:00:00 2001 From: jingma Date: Wed, 31 Mar 2021 14:38:25 +0200 Subject: [PATCH 0106/1551] Change URIException to IllegalStateException. 
--- .../java/edu/harvard/iq/dataverse/License.java | 16 ++++++++++++---- .../java/edu/harvard/iq/dataverse/api/Admin.java | 15 +-------------- .../iq/dataverse/util/json/JsonPrinter.java | 2 +- 3 files changed, 14 insertions(+), 19 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java index 957a0a0529f..29653271e01 100644 --- a/src/main/java/edu/harvard/iq/dataverse/License.java +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -89,16 +89,24 @@ public void setShortDescription(String shortDescription) { this.shortDescription = shortDescription; } - public URI getUri() throws URISyntaxException { - return new URI(uri); + public URI getUri() { + try { + return new URI(uri); + } catch (URISyntaxException e) { + throw new IllegalStateException("Incorrect URI in JSON"); + } } public void setUri(URI uri) { this.uri = uri.toASCIIString(); } - public URI getIconUrl() throws URISyntaxException { - return new URI(iconUrl); + public URI getIconUrl() { + try { + return new URI(iconUrl); + } catch (URISyntaxException e) { + throw new IllegalStateException("Incorrect URI in JSON"); + } } public void setIconUrl(URI iconUrl) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 74a1e47c1ae..fc7cf73d505 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -44,7 +44,6 @@ import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.settings.Setting; import edu.harvard.iq.dataverse.util.json.JsonPrinter; -import java.net.URISyntaxException; import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; @@ -1927,11 +1926,7 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon public Response getLicenses() { JsonArrayBuilder arrayBuilder = Json.createArrayBuilder(); for(License license : licenseService.listAll()) { - try { - arrayBuilder.add(JsonPrinter.json(license)); - } catch (URISyntaxException e) { - return error(Status.INTERNAL_SERVER_ERROR, "Incorrect URI in JSON"); - } + arrayBuilder.add(JsonPrinter.json(license)); } return ok(arrayBuilder); } @@ -1944,8 +1939,6 @@ public Response getLicenseById(@PathParam("id") long id) { return ok(json(license)); } catch (FetchException e) { return error(Response.Status.NOT_FOUND, e.getMessage()); - } catch (URISyntaxException e) { - return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } } @@ -1957,8 +1950,6 @@ public Response getLicenseByName(@PathParam("name") String name) { return ok(json(license)); } catch (FetchException e) { return error(Response.Status.NOT_FOUND, e.getMessage()); - } catch (URISyntaxException e) { - return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } } @@ -1982,8 +1973,6 @@ public Response putLicenseById(@PathParam("id") long id, License license) { licenseService.setById(id, license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); } catch (UpdateException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); - } catch (URISyntaxException e) { - return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } return ok("License with ID " + id + " was replaced."); } @@ -1995,8 +1984,6 @@ public Response putLicenseByName(@PathParam("name") String name, License license 
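         // Illustrative request body only (example mirrors scripts/api/data/licenseUpdate.json):
         //   { "name": "Apache License 2.0",
         //     "shortDescription": "The 2.0 version of the Apache License, approved by the ASF in 2004.",
         //     "uri": "https://www.apache.org/licenses/LICENSE-2.0",
         //     "iconUrl": "https://itgala.xyz/wp-content/uploads/2017/10/Apache-HTTP-Server.png",
         //     "active": true }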
licenseService.setByName(license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); } catch (UpdateException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); - } catch (URISyntaxException e) { - return error(Response.Status.BAD_REQUEST, "Incorrect URI in JSON"); } return ok("License with name " + name + " was replaced."); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 9b243397cfa..7a5334114e7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -778,7 +778,7 @@ public static JsonObjectBuilder json( DataverseFacet aFacet ) { .add("name", aFacet.getDatasetFieldType().getDisplayName()); } - public static JsonObjectBuilder json(License license) throws URISyntaxException { + public static JsonObjectBuilder json(License license) { return jsonObjectBuilder() .add("id", license.getId()) .add("name", license.getName()) From 4b6c367857e7afa4270281bf48652f0160dc079a Mon Sep 17 00:00:00 2001 From: Jing Ma Date: Wed, 31 Mar 2021 14:41:59 +0200 Subject: [PATCH 0107/1551] DD-387 Add license entity and api (#57) * First db table and api. * Final changes for prototype. * Add integration tests. * Fix indentation. * Add prototype of newest changes. * Add URI and URL objects, and new endpoints. * Add Apache icons. * Change tokens to licenses. * Change URIException to IllegalStateException. --- scripts/api/data/license.json | 7 + scripts/api/data/licenseError.json | 8 + scripts/api/data/licenseUpdate.json | 7 + .../edu/harvard/iq/dataverse/License.java | 154 ++++++++++++++++++ .../iq/dataverse/LicenseServiceBean.java | 119 ++++++++++++++ .../edu/harvard/iq/dataverse/api/Admin.java | 98 ++++++++++- .../iq/dataverse/api/FetchException.java | 17 ++ .../dataverse/api/RequestBodyException.java | 17 ++ .../iq/dataverse/api/UpdateException.java | 17 ++ .../iq/dataverse/util/json/JsonPrinter.java | 12 ++ .../edu/harvard/iq/dataverse/api/AdminIT.java | 81 +++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 65 ++++++++ 12 files changed, 597 insertions(+), 5 deletions(-) create mode 100644 scripts/api/data/license.json create mode 100644 scripts/api/data/licenseError.json create mode 100644 scripts/api/data/licenseUpdate.json create mode 100644 src/main/java/edu/harvard/iq/dataverse/License.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/FetchException.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java diff --git a/scripts/api/data/license.json b/scripts/api/data/license.json new file mode 100644 index 00000000000..11e0d44c14b --- /dev/null +++ b/scripts/api/data/license.json @@ -0,0 +1,7 @@ +{ + "name": "Apache License 1.0", + "shortDescription": "This is the original Apache License which applies only to very old versions of Apache packages (such as version 1.2 of the Web server).", + "uri": "https://www.apache.org/licenses/LICENSE-1.0", + "iconUrl": "https://itgala.xyz/wp-content/uploads/2017/10/Apache-HTTP-Server.png", + "active": false +} \ No newline at end of file diff --git a/scripts/api/data/licenseError.json b/scripts/api/data/licenseError.json new file mode 100644 index 00000000000..d6b1dbbd01b --- /dev/null +++ 
b/scripts/api/data/licenseError.json @@ -0,0 +1,8 @@ +{ + "id": 6, + "name": "Apache License 1.0", + "shortDescription": "This is the original Apache License which applies only to very old versions of Apache packages (such as version 1.2 of the Web server).", + "uri": "https://www.apache.org/licenses/LICENSE-1.0", + "iconUrl": "https://itgala.xyz/wp-content/uploads/2017/10/Apache-HTTP-Server.png", + "active": false +} \ No newline at end of file diff --git a/scripts/api/data/licenseUpdate.json b/scripts/api/data/licenseUpdate.json new file mode 100644 index 00000000000..eefc4e6f16f --- /dev/null +++ b/scripts/api/data/licenseUpdate.json @@ -0,0 +1,7 @@ +{ + "name": "Apache License 2.0", + "shortDescription": "The 2.0 version of the Apache License, approved by the ASF in 2004.", + "uri": "https://www.apache.org/licenses/LICENSE-2.0", + "iconUrl": "https://itgala.xyz/wp-content/uploads/2017/10/Apache-HTTP-Server.png", + "active": true +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java new file mode 100644 index 00000000000..29653271e01 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -0,0 +1,154 @@ +package edu.harvard.iq.dataverse; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Objects; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; +import javax.persistence.Table; +import javax.persistence.UniqueConstraint; + +/** + * @author Jing Ma + */ + @NamedQueries({ + @NamedQuery( name="License.findAll", + query="SELECT l FROM License l"), + @NamedQuery( name="License.findById", + query = "SELECT l FROM License l WHERE l.id=:id"), + @NamedQuery( name="License.findByName", + query = "SELECT l FROM License l WHERE l.name=:name"), + @NamedQuery( name="License.deleteById", + query="DELETE FROM License l WHERE l.id=:id"), + @NamedQuery( name="License.deleteByName", + query="DELETE FROM License l WHERE l.name=:name") +}) +@Entity +@Table(uniqueConstraints = { + @UniqueConstraint(columnNames = "name"), + @UniqueConstraint(columnNames = "uri")} +) +public class License { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @Column(columnDefinition="TEXT", nullable = false) + private String name; + + @Column(columnDefinition="TEXT") + private String shortDescription; + + @Column(columnDefinition="TEXT", nullable = false) + private String uri; + + @Column(columnDefinition="TEXT") + private String iconUrl; + + @Column(nullable = false) + private boolean active; + + public License() { + } + + public License(String name, String shortDescription, URI uri, URI iconUrl, boolean active) { + this.name = name; + this.shortDescription = shortDescription; + this.uri = uri.toASCIIString(); + this.iconUrl = iconUrl.toASCIIString(); + this.active = active; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getShortDescription() { + return shortDescription; + } + + public void setShortDescription(String shortDescription) { + this.shortDescription = shortDescription; + } + + public URI getUri() { + try { + return new URI(uri); + } catch 
(URISyntaxException e) { + throw new IllegalStateException("Incorrect URI in JSON"); + } + } + + public void setUri(URI uri) { + this.uri = uri.toASCIIString(); + } + + public URI getIconUrl() { + try { + return new URI(iconUrl); + } catch (URISyntaxException e) { + throw new IllegalStateException("Incorrect URI in JSON"); + } + } + + public void setIconUrl(URI iconUrl) { + this.iconUrl = iconUrl.toASCIIString(); + } + + public boolean isActive() { + return active; + } + + public void setActive(boolean active) { + this.active = active; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + License license = (License) o; + return active == license.active && + Objects.equals(id, license.id) && + Objects.equals(name, license.name) && + Objects.equals(shortDescription, license.shortDescription) && + Objects.equals(uri, license.uri) && + Objects.equals(iconUrl, license.iconUrl); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, shortDescription, uri, iconUrl, active); + } + + @Override + public String toString() { + return "License{" + + "id=" + id + + ", name='" + name + '\'' + + ", shortDescription='" + shortDescription + '\'' + + ", uri=" + uri + + ", iconUrl=" + iconUrl + + ", active=" + active + + '}'; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java new file mode 100644 index 00000000000..c49ebd9659e --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -0,0 +1,119 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; +import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; +import edu.harvard.iq.dataverse.api.FetchException; +import edu.harvard.iq.dataverse.api.RequestBodyException; +import edu.harvard.iq.dataverse.api.UpdateException; +import java.net.URI; +import java.net.URL; +import java.util.List; +import javax.ejb.EJB; +import javax.ejb.Stateless; +import javax.inject.Named; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceException; + +/** + * @author Jing Ma + */ +@Stateless +@Named +public class LicenseServiceBean { + + @PersistenceContext + EntityManager em; + + @EJB + ActionLogServiceBean actionLogSvc; + + public List listAll() { + return em.createNamedQuery("License.findAll", License.class).getResultList(); + } + + public License getById(long id) throws FetchException { + List licenses = em.createNamedQuery("License.findById", License.class) + .setParameter("id", id ) + .getResultList(); + if (licenses.isEmpty()) { + throw new FetchException("License with that ID doesn't exist."); + } + return licenses.get(0); + } + + public License getByName(String name) throws FetchException { + List licenses = em.createNamedQuery("License.findByName", License.class) + .setParameter("name", name ) + .getResultList(); + if (licenses.isEmpty()) { + throw new FetchException("License with that name doesn't exist."); + } + return licenses.get(0); + } + + public License save(License license) throws PersistenceException, RequestBodyException { + if (license.getId() == null) { + em.persist(license); + return license; + } else { + throw new RequestBodyException("There shouldn't be an ID in the request body"); + } + } + + public void setById(long id, String name, String shortDescription, URI uri, URI iconUrl, boolean 
active) throws UpdateException { + List licenses = em.createNamedQuery("License.findById", License.class) + .setParameter("id", id ) + .getResultList(); + + if(licenses.size() > 0) { + License license = licenses.get(0); + license.setName(name); + license.setShortDescription(shortDescription); + license.setUri(uri); + license.setIconUrl(iconUrl); + license.setActive(active); + em.merge(license); + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") + .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); + } else { + throw new UpdateException("There is no existing License with that ID. To add a license use POST."); + } + } + + public void setByName(String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { + List licenses = em.createNamedQuery("License.findByName", License.class) + .setParameter("name", name ) + .getResultList(); + + if(licenses.size() > 0) { + License license = licenses.get(0); + license.setShortDescription(shortDescription); + license.setUri(uri); + license.setIconUrl(iconUrl); + license.setActive(active); + em.merge(license); + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "set") + .setInfo(name + ": " + shortDescription + ": " + uri + ": " + iconUrl + ": " + active)); + } else { + throw new UpdateException("There is no existing License with that name. To add a license use POST."); + } + } + + public int deleteById(long id) throws PersistenceException { + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete") + .setInfo(Long.toString(id))); + return em.createNamedQuery("License.deleteById") + .setParameter("id", id) + .executeUpdate(); + } + + public int deleteByName(String name) throws PersistenceException { + actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Admin, "delete") + .setInfo(name)); + return em.createNamedQuery("License.deleteByName") + .setParameter("name", name) + .executeUpdate(); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index b52665a7747..fc7cf73d505 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -16,7 +16,8 @@ import edu.harvard.iq.dataverse.EMailValidator; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.GlobalId; -import edu.harvard.iq.dataverse.RoleAssignment; +import edu.harvard.iq.dataverse.License; +import edu.harvard.iq.dataverse.LicenseServiceBean; import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.api.dto.RoleDTO; @@ -42,9 +43,11 @@ import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand; import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.settings.Setting; +import edu.harvard.iq.dataverse.util.json.JsonPrinter; import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; +import javax.persistence.PersistenceException; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -71,7 +74,6 @@ import javax.ws.rs.core.Response.Status; import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; import java.util.List; import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean; @@ -85,8 +87,6 @@ import 
edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.impl.MergeInAccountCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ChangeUserIdentifierCommand; import edu.harvard.iq.dataverse.engine.command.impl.RegisterDvObjectCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -101,7 +101,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Date; -import java.util.function.Consumer; import javax.inject.Inject; import javax.json.JsonArray; import javax.persistence.Query; @@ -152,6 +151,8 @@ public class Admin extends AbstractApiBean { ExplicitGroupServiceBean explicitGroupService; @EJB BannerMessageServiceBean bannerMessageService; + @EJB + LicenseServiceBean licenseService; // Make the session available @@ -1920,4 +1921,91 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon } + @GET + @Path("/licenses") + public Response getLicenses() { + JsonArrayBuilder arrayBuilder = Json.createArrayBuilder(); + for(License license : licenseService.listAll()) { + arrayBuilder.add(JsonPrinter.json(license)); + } + return ok(arrayBuilder); + } + + @GET + @Path("/licenses/id/{id}") + public Response getLicenseById(@PathParam("id") long id) { + try { + License license = licenseService.getById(id); + return ok(json(license)); + } catch (FetchException e) { + return error(Response.Status.NOT_FOUND, e.getMessage()); + } + } + + @GET + @Path("/licenses/name/{name}") + public Response getLicenseByName(@PathParam("name") String name) { + try { + License license = licenseService.getByName(name); + return ok(json(license)); + } catch (FetchException e) { + return error(Response.Status.NOT_FOUND, e.getMessage()); + } + } + + @POST + @Path("/licenses") + public Response addLicense(License license) { + try { + licenseService.save(license); + return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); + } catch (RequestBodyException e) { + return error(Response.Status.BAD_REQUEST, e.getMessage()); + } catch(PersistenceException e) { + return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); + } + } + + @PUT + @Path("/licenses/id/{id}") + public Response putLicenseById(@PathParam("id") long id, License license) { + try { + licenseService.setById(id, license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); + } catch (UpdateException e) { + return error(Response.Status.BAD_REQUEST, e.getMessage()); + } + return ok("License with ID " + id + " was replaced."); + } + + @PUT + @Path("/licenses/name/{name}") + public Response putLicenseByName(@PathParam("name") String name, License license) { + try { + licenseService.setByName(license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); + } catch (UpdateException e) { + return error(Response.Status.BAD_REQUEST, e.getMessage()); + } + return ok("License with name " + name + " was replaced."); + } + + @DELETE + @Path("/licenses/id/{id}") + public Response deleteLicenseById(@PathParam("id") long id) { + int result = licenseService.deleteById(id); + if (result == 1) { + return ok("OK. 
License with ID " + id + " was deleted."); + } + return error(Response.Status.NOT_FOUND, "A license with ID " + id + " doesn't exist."); + } + + @DELETE + @Path("/licenses/name/{name}") + public Response deleteLicenseByName(@PathParam("name") String name) { + int result = licenseService.deleteByName(name); + if (result == 1) { + return ok("OK. License with name " + name + " was deleted."); + } + return error(Response.Status.NOT_FOUND, "A license with name " + name + " doesn't exist."); + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FetchException.java b/src/main/java/edu/harvard/iq/dataverse/api/FetchException.java new file mode 100644 index 00000000000..a9c77c7a4c5 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/FetchException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.api; + +/** + * + * @author Jing Ma + */ +public class FetchException extends Exception { + + public FetchException(String message) { + super(message); + } + + public FetchException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java b/src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java new file mode 100644 index 00000000000..e78c87abdfa --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/RequestBodyException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.api; + +/** + * + * @author Jing Ma + */ +public class RequestBodyException extends Exception { + + public RequestBodyException(String message) { + super(message); + } + + public RequestBodyException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java b/src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java new file mode 100644 index 00000000000..4dbd3ab19a3 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/UpdateException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.api; + +/** + * + * @author Jing Ma + */ +public class UpdateException extends Exception { + + public UpdateException(String message) { + super(message); + } + + public UpdateException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index c37efc3178f..7a5334114e7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.DataverseContact; import edu.harvard.iq.dataverse.DataverseFacet; import edu.harvard.iq.dataverse.DataverseTheme; +import edu.harvard.iq.dataverse.License; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.groups.impl.maildomain.MailDomainGroup; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser; @@ -44,6 +45,7 @@ import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData; +import java.net.URISyntaxException; import java.util.*; import javax.json.Json; import javax.json.JsonArrayBuilder; @@ -775,6 +777,16 @@ public static JsonObjectBuilder json( DataverseFacet aFacet ) { .add("id", String.valueOf(aFacet.getId())) // TODO should just be id I think .add("name", aFacet.getDatasetFieldType().getDisplayName()); } + + public static JsonObjectBuilder 
json(License license) { + return jsonObjectBuilder() + .add("id", license.getId()) + .add("name", license.getName()) + .add("shortDescription", license.getShortDescription()) + .add("uri", license.getUri().toString()) + .add("iconUrl", license.getIconUrl().toString()) + .add("active", license.isActive()); + } public static Collector stringsToJsonArray() { return new Collector() { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index f83e9d9c839..a1bcc0b08fd 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -786,4 +786,85 @@ public void testBannerMessages(){ assertEquals("OK", status); } + + @Test + public void testLicenses(){ + + String pathToJsonFile = "scripts/api/data/license.json"; + Response addLicenseResponse = UtilIT.addLicense(pathToJsonFile); + addLicenseResponse.prettyPrint(); + String body = addLicenseResponse.getBody().asString(); + String status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + pathToJsonFile = "scripts/api/data/licenseError.json"; + Response addLicenseErrorResponse = UtilIT.addLicense(pathToJsonFile); + addLicenseErrorResponse.prettyPrint(); + body = addLicenseErrorResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + Response getLicensesResponse = UtilIT.getLicenses(); + getLicensesResponse.prettyPrint(); + body = getLicensesResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response getLicenseByIdResponse = UtilIT.getLicenseById(1L); + getLicenseByIdResponse.prettyPrint(); + body = getLicenseByIdResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response getLicenseByNameResponse = UtilIT.getLicenseByName(""); + getLicenseByNameResponse.prettyPrint(); + body = getLicenseByNameResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response getLicenseErrorResponse = UtilIT.getLicenseById(10L); + getLicenseErrorResponse.prettyPrint(); + body = getLicenseErrorResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + pathToJsonFile = "scripts/api/data/licenseUpdate.json"; + Response updateLicenseByIdResponse = UtilIT.updateLicenseById(pathToJsonFile, 1L); + updateLicenseByIdResponse.prettyPrint(); + body = updateLicenseByIdResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + pathToJsonFile = "scripts/api/data/licenseUpdate.json"; + Response updateLicenseByNameResponse = UtilIT.updateLicenseByName(pathToJsonFile, ""); + updateLicenseByNameResponse.prettyPrint(); + body = updateLicenseByNameResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response updateLicenseErrorResponse = UtilIT.updateLicenseById(pathToJsonFile, 10L); + updateLicenseErrorResponse.prettyPrint(); + body = updateLicenseErrorResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + Response deleteLicenseByIdResponse = UtilIT.deleteLicenseById(1L); + deleteLicenseByIdResponse.prettyPrint(); + body = deleteLicenseByIdResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + 
assertEquals("OK", status); + + Response deleteLicenseByNameResponse = UtilIT.deleteLicenseByName(""); + deleteLicenseByNameResponse.prettyPrint(); + body = deleteLicenseByNameResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response deleteLicenseErrorResponse = UtilIT.deleteLicenseById(10L); + deleteLicenseErrorResponse.prettyPrint(); + body = deleteLicenseErrorResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index f3ff8f8fae4..c5f4da033d1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2523,5 +2523,70 @@ static String getBannerMessageIdFromResponse(String getBannerMessagesResponse) { return "0"; } + static Response addLicense(String pathToJsonFile) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response addLicenseResponse = given() + .body(jsonIn) + .contentType("application/json") + .post("/api/admin/licenses"); + return addLicenseResponse; + } + + static Response getLicenses() { + + Response getLicensesResponse = given() + .get("/api/admin/licenses"); + return getLicensesResponse; + } + + static Response getLicenseById(Long id) { + + Response getLicenseResponse = given() + .get("/api/admin/licenses/id/"+id.toString()); + return getLicenseResponse; + } + + static Response getLicenseByName(String name) { + + Response getLicenseResponse = given() + .get("/api/admin/licenses/name/"+name); + return getLicenseResponse; + } + + static Response updateLicenseById(String pathToJsonFile, Long id) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response updateLicenseResponse = given() + .body(jsonIn) + .contentType("application/json") + .put("/api/admin/licenses/id/"+id.toString()); + return updateLicenseResponse; + } + + static Response updateLicenseByName(String pathToJsonFile, String name) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response updateLicenseResponse = given() + .body(jsonIn) + .contentType("application/json") + .put("/api/admin/licenses/name/"+name); + return updateLicenseResponse; + } + + static Response deleteLicenseById(Long id) { + + Response deleteLicenseResponse = given() + .delete("/api/admin/licenses/id/"+id.toString()); + return deleteLicenseResponse; + } + + static Response deleteLicenseByName(String name) { + + Response deleteLicenseResponse = given() + .delete("/api/admin/licenses/name/"+name); + return deleteLicenseResponse; + } + } From 14352024df292342d644af182543e6bcb3d0690d Mon Sep 17 00:00:00 2001 From: chenganj Date: Wed, 31 Mar 2021 11:11:05 -0400 Subject: [PATCH 0108/1551] corrected error --- .../java/edu/harvard/iq/dataverse/api/Datasets.java | 9 --------- src/main/webapp/file-download-button-fragment.xhtml | 11 ----------- 2 files changed, 20 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index a63a86a2586..78d7627a657 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -77,11 +77,9 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand; import edu.harvard.iq.dataverse.export.DDIExportServiceBean; import edu.harvard.iq.dataverse.export.ExportService; -import 
edu.harvard.iq.dataverse.globus.fileDetailsHolder; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.S3PackageImporter; -import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.dataaccess.DataAccess; @@ -132,7 +130,6 @@ import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; -import javax.ejb.Asynchronous; import javax.ejb.EJB; import javax.ejb.EJBException; import javax.inject.Inject; @@ -161,10 +158,7 @@ import org.glassfish.jersey.media.multipart.FormDataParam; import com.amazonaws.services.s3.model.PartETag; -import edu.harvard.iq.dataverse.FileMetadata; import java.util.Map.Entry; -import java.util.stream.Collectors; -import java.util.stream.IntStream; @Path("datasets") public class Datasets extends AbstractApiBean { @@ -232,9 +226,6 @@ public class Datasets extends AbstractApiBean { @Inject DataverseRequestServiceBean dvRequestService; - @Context - protected HttpServletRequest httpRequest; - /** * Used to consolidate the way we parse and handle dataset versions. diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index cafe1875590..85fe60863b4 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -58,17 +58,6 @@ #{bundle.download} - - - - - - #{bundle['file.downloadFromGlobus']} - From 784fdb0879b167bd3c209dea6447a256357db64d Mon Sep 17 00:00:00 2001 From: jingma Date: Wed, 31 Mar 2021 22:25:25 +0200 Subject: [PATCH 0109/1551] Handle POST request HTTP 409 conflict error and introduce bug fixes. 
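A minimal sketch (not part of the patch) of the behavior this commit introduces, written against the UtilIT.addLicense() helper from PATCH 0107; the Response type and fluent assertions assume the RestAssured setup AdminIT already uses, and the expected status codes follow the created()/ConflictException mappings in the diff below:

    // POSTing the same license JSON twice: the first request creates the
    // license (HTTP 201 Created), the repeat now fails cleanly with HTTP 409.
    Response first = UtilIT.addLicense("scripts/api/data/license.json");
    first.then().assertThat().statusCode(201);
    Response duplicate = UtilIT.addLicense("scripts/api/data/license.json");
    duplicate.then().assertThat().statusCode(409);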
--- .../edu/harvard/iq/dataverse/License.java | 2 ++ .../iq/dataverse/LicenseServiceBean.java | 23 ++++++++++++------- .../edu/harvard/iq/dataverse/api/Admin.java | 11 ++++----- .../iq/dataverse/api/ConflictException.java | 17 ++++++++++++++ 4 files changed, 39 insertions(+), 14 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/ConflictException.java diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java index 29653271e01..60f2805e072 100644 --- a/src/main/java/edu/harvard/iq/dataverse/License.java +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -23,6 +23,8 @@ query = "SELECT l FROM License l WHERE l.id=:id"), @NamedQuery( name="License.findByName", query = "SELECT l FROM License l WHERE l.name=:name"), + @NamedQuery( name="License.findByNameOrUri", + query = "SELECT l FROM License l WHERE l.name=:name OR l.uri=:uri"), @NamedQuery( name="License.deleteById", query="DELETE FROM License l WHERE l.id=:id"), @NamedQuery( name="License.deleteByName", diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java index c49ebd9659e..7e760bb9bf7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -2,11 +2,11 @@ import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; +import edu.harvard.iq.dataverse.api.ConflictException; import edu.harvard.iq.dataverse.api.FetchException; import edu.harvard.iq.dataverse.api.RequestBodyException; import edu.harvard.iq.dataverse.api.UpdateException; import java.net.URI; -import java.net.URL; import java.util.List; import javax.ejb.EJB; import javax.ejb.Stateless; @@ -52,13 +52,19 @@ public License getByName(String name) throws FetchException { return licenses.get(0); } - public License save(License license) throws PersistenceException, RequestBodyException { - if (license.getId() == null) { - em.persist(license); - return license; - } else { + public License save(License license) throws RequestBodyException, ConflictException { + if (license.getId() != null) { throw new RequestBodyException("There shouldn't be an ID in the request body"); } + List licenses = em.createNamedQuery("License.findByNameOrUri", License.class) + .setParameter("name", license.getName() ) + .setParameter("uri", license.getUri().toASCIIString() ) + .getResultList(); + if (!licenses.isEmpty()) { + throw new ConflictException("A license with the same URI or name is already present."); + } + em.persist(license); + return license; } public void setById(long id, String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { @@ -81,13 +87,14 @@ public void setById(long id, String name, String shortDescription, URI uri, URI } } - public void setByName(String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { + public void setByName(String nameArg, String name, String shortDescription, URI uri, URI iconUrl, boolean active) throws UpdateException { List licenses = em.createNamedQuery("License.findByName", License.class) - .setParameter("name", name ) + .setParameter("name", nameArg ) .getResultList(); if(licenses.size() > 0) { License license = licenses.get(0); + license.setName(name); license.setShortDescription(shortDescription); license.setUri(uri); 
license.setIconUrl(iconUrl); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index fc7cf73d505..f57fa1e9746 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -47,7 +47,6 @@ import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; -import javax.persistence.PersistenceException; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -1961,8 +1960,8 @@ public Response addLicense(License license) { return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); } catch (RequestBodyException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); - } catch(PersistenceException e) { - return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); + } catch(ConflictException e) { + return error(Response.Status.CONFLICT, e.getMessage()); } } @@ -1979,13 +1978,13 @@ public Response putLicenseById(@PathParam("id") long id, License license) { @PUT @Path("/licenses/name/{name}") - public Response putLicenseByName(@PathParam("name") String name, License license) { + public Response putLicenseByName(@PathParam("name") String nameArg, License license) { try { - licenseService.setByName(license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); + licenseService.setByName(nameArg, license.getName(), license.getShortDescription(), license.getUri(), license.getIconUrl(), license.isActive()); } catch (UpdateException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); } - return ok("License with name " + name + " was replaced."); + return ok("License with name " + nameArg + " was replaced."); } @DELETE diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ConflictException.java b/src/main/java/edu/harvard/iq/dataverse/api/ConflictException.java new file mode 100644 index 00000000000..60e60ed41a4 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/ConflictException.java @@ -0,0 +1,17 @@ +package edu.harvard.iq.dataverse.api; + +/** + * + * @author Jing Ma + */ +public class ConflictException extends Exception { + + public ConflictException(String message) { + super(message); + } + + public ConflictException(String message, Throwable cause) { + super(message, cause); + } + +} From 8a9974d6ac2f3158fa3b8b7d86a3de4a8f94484c Mon Sep 17 00:00:00 2001 From: jingma Date: Thu, 1 Apr 2021 13:22:55 +0200 Subject: [PATCH 0110/1551] Change location to correct URL. 
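A short illustration (not part of the patch) of the Location header the new code builds; UrlEscapers here is Guava's escaper utility, as imported in the diff below, and it percent-encodes characters such as spaces so the created license remains addressable through the name endpoint:

    String name = "Apache License 1.0";
    String location = "/api/admin/licenses/name/" + UrlEscapers.urlFragmentEscaper().escape(name);
    // location -> "/api/admin/licenses/name/Apache%20License%201.0"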
--- .../java/edu/harvard/iq/dataverse/LicenseServiceBean.java | 3 +-- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 4 +++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java index c49ebd9659e..26689d1c085 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -52,10 +52,9 @@ public License getByName(String name) throws FetchException { return licenses.get(0); } - public License save(License license) throws PersistenceException, RequestBodyException { + public void save(License license) throws PersistenceException, RequestBodyException { if (license.getId() == null) { em.persist(license); - return license; } else { throw new RequestBodyException("There shouldn't be an ID in the request body"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index fc7cf73d505..dea054579a3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.api; +import com.google.common.net.UrlEscapers; import edu.harvard.iq.dataverse.BannerMessage; import edu.harvard.iq.dataverse.BannerMessageServiceBean; import edu.harvard.iq.dataverse.BannerMessageText; @@ -1958,7 +1959,8 @@ public Response getLicenseByName(@PathParam("name") String name) { public Response addLicense(License license) { try { licenseService.save(license); - return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); + String location = "/api/admin/licenses/name/" + UrlEscapers.urlFragmentEscaper().escape(license.getName()); + return created(location, Json.createObjectBuilder().add("message", "License created")); } catch (RequestBodyException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); } catch(PersistenceException e) { From c3ff22927bf27e7716b9cd5f43fb0640752303ba Mon Sep 17 00:00:00 2001 From: chenganj Date: Thu, 1 Apr 2021 11:40:15 -0400 Subject: [PATCH 0111/1551] api to delete globus rule and added notification --- .../iq/dataverse/DatasetServiceBean.java | 53 ++++++++++- .../harvard/iq/dataverse/MailServiceBean.java | 11 +++ .../iq/dataverse/UserNotification.java | 2 +- .../harvard/iq/dataverse/api/Datasets.java | 89 ++++++++----------- .../providers/builtin/DataverseUserPage.java | 4 + src/main/java/propertyFiles/Bundle.properties | 3 + src/main/webapp/dataverseuser.xhtml | 7 ++ 7 files changed, 112 insertions(+), 57 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 51bef2f6f49..6a51e68ddbb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -1044,14 +1044,14 @@ public void deleteHarvestedDataset(Dataset dataset, DataverseRequest request, Lo @Asynchronous - public void globusAsyncCall(String jsonData, ApiToken token, Dataset dataset, String httpRequestUrl, User authUser) throws ExecutionException, InterruptedException, MalformedURLException { + public void globusUpload(String jsonData, ApiToken token, Dataset dataset, String httpRequestUrl, User authUser) throws ExecutionException, InterruptedException, MalformedURLException { String logTimestamp = 
logFormatter.format(new Date()); Logger globusLogger = Logger.getLogger("edu.harvard.iq.dataverse.upload.client.DatasetServiceBean." + "GlobusUpload" + logTimestamp); //Logger.getLogger(DatasetServiceBean.class.getCanonicalName()); //Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.DatasetServiceBean." + "ExportAll" + logTimestamp); - String logFileName = "../logs" + File.separator + "globus_" + logTimestamp + ".log"; + String logFileName = "../logs" + File.separator + "globusUpload_" + dataset.getId()+"_"+authUser.getIdentifier()+"_"+ logTimestamp + ".log"; FileHandler fileHandler; boolean fileHandlerSuceeded; try { @@ -1069,7 +1069,7 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, St globusLogger = logger; } - globusLogger.info("Starting an globusAsyncCall "); + globusLogger.info("Starting a globusUpload "); String datasetIdentifier = dataset.getStorageIdentifier(); @@ -1368,6 +1368,53 @@ private String addFiles(String curlCommand, Logger globusLogger) return status; } + @Asynchronous + public void globusDownload(String jsonData, Dataset dataset, User authUser) throws MalformedURLException { + + String logTimestamp = logFormatter.format(new Date()); + Logger globusLogger = Logger.getLogger("edu.harvard.iq.dataverse.upload.client.DatasetServiceBean." + "GlobusDownload" + logTimestamp); + + String logFileName = "../logs" + File.separator + "globusDownload_" + dataset.getId()+"_"+authUser.getIdentifier()+"_"+logTimestamp + ".log"; + FileHandler fileHandler; + boolean fileHandlerSuceeded; + try { + fileHandler = new FileHandler(logFileName); + globusLogger.setUseParentHandlers(false); + fileHandlerSuceeded = true; + } catch (IOException | SecurityException ex) { + Logger.getLogger(DatasetServiceBean.class.getName()).log(Level.SEVERE, null, ex); + return; + } + + if (fileHandlerSuceeded) { + globusLogger.addHandler(fileHandler); + } else { + globusLogger = logger; + } + + globusLogger.info("Starting a globusDownload "); + JsonObject jsonObject = null; + try (StringReader rdr = new StringReader(jsonData)) { + jsonObject = Json.createReader(rdr).readObject(); + } catch (Exception jpe) { + jpe.printStackTrace(); + globusLogger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}"); + } + String taskIdentifier = jsonObject.getString("taskIdentifier"); + String ruleId = jsonObject.getString("ruleId"); + + // globus task status check + globusStatusCheck(taskIdentifier,globusLogger); + + // what if some files failed during download? 
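+ // Note: globusStatusCheck() above only reports the overall Globus task outcome; + // per-file transfer failures are not inspected here, so the notification sent + // below may report success even if individual files did not transfer.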
+ + if(ruleId.length() > 0) { + globusServiceBean.deletePermision(ruleId, globusLogger); + } + + userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.GLOBUSDOWNLOADSUCCESS, dataset.getId()); + + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index bfe88ac50fd..e476a4e55b0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -555,6 +555,15 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio )); return messageText += fileMsg; + case GLOBUSDOWNLOADSUCCESS: + dataset = (Dataset) targetObject; + String fileDownloadMsg = BundleUtil.getStringFromBundle("notification.mail.download.globus", Arrays.asList( + systemConfig.getDataverseSiteUrl(), + dataset.getGlobalIdString(), + dataset.getDisplayName() + )); + return messageText += fileDownloadMsg; + case CHECKSUMIMPORT: version = (DatasetVersion) targetObject; String checksumImportMsg = BundleUtil.getStringFromBundle("notification.import.checksum", Arrays.asList( @@ -631,6 +640,8 @@ private Object getObjectOfNotification (UserNotification userNotification){ return versionService.find(userNotification.getObjectId()); case GLOBUSUPLOADSUCCESS: return datasetService.find(userNotification.getObjectId()); + case GLOBUSDOWNLOADSUCCESS: + return datasetService.find(userNotification.getObjectId()); case CHECKSUMIMPORT: return versionService.find(userNotification.getObjectId()); case APIGENERATED: diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java index e23c2a72b6c..78ef2bb6783 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java @@ -30,7 +30,7 @@ public enum Type { ASSIGNROLE, REVOKEROLE, CREATEDV, CREATEDS, CREATEACC, SUBMITTEDDS, RETURNEDDS, PUBLISHEDDS, REQUESTFILEACCESS, GRANTFILEACCESS, REJECTFILEACCESS, FILESYSTEMIMPORT, CHECKSUMIMPORT, CHECKSUMFAIL, CONFIRMEMAIL, APIGENERATED, INGESTCOMPLETED, INGESTCOMPLETEDWITHERRORS, - PUBLISHFAILED_PIDREG, WORKFLOW_SUCCESS, WORKFLOW_FAILURE, GLOBUSUPLOADSUCCESS; + PUBLISHFAILED_PIDREG, WORKFLOW_SUCCESS, WORKFLOW_FAILURE, GLOBUSUPLOADSUCCESS,GLOBUSDOWNLOADSUCCESS; }; private static final long serialVersionUID = 1L; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 78d7627a657..e0477c49aee 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2764,26 +2764,6 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, logger.info(" ==== (api addGlobusFilesToDataset) jsonData ====== " + jsonData); - if(uriInfo != null) { - logger.info(" ==== (api uriInfo.getRequestUri()) jsonData ====== " + uriInfo.getRequestUri().toString()); - } - - //logger.info(" ==== (api uriInfo.getRequestUri()) jsonData ====== " + headers.getRequestHeaders() - - MultivaluedMap multivaluedMap = headers.getRequestHeaders(); - - Map result = new HashMap<>(); - multivaluedMap.forEach((name, values) -> { - if (!CollectionUtils.isEmpty(values)) { - result.put(name, (values.size() != 1) ? 
values : values.get(0)); - logger.info(" headers ==== " + name + " ==== "+ values ); - } - }); - - logger.info(" ==== headers.getRequestHeader(origin) ====== " + headers.getRequestHeader("origin") ); - logger.info(" ==== headers.getRequestHeader(referer) ====== " + headers.getRequestHeader("referer") ); - - if (!systemConfig.isHTTPUpload()) { return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); } @@ -2834,31 +2814,13 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, ApiToken token = authSvc.findApiTokenByUser((AuthenticatedUser) authUser); - - - /* - - String xfp = httpRequest.getHeader("X-Forwarded-Proto"); - //String requestUrl = xfp +"://"+httpRequest.getServerName(); - - x-forwarded-proto - String requestUrl = httpRequest.getProtocol().toLowerCase().split("/")[0]+"://"+httpRequest.getServerName(); - - if( httpRequest.getServerPort() > 0 ) - { - requestUrl = requestUrl + ":"+ httpRequest.getServerPort(); - } - - */ - - //String requestUrl = "http://localhost:8080"; - String requestUrl = "https://dvdev.scholarsportal.info" ; + String requestUrl = headers.getRequestHeader("origin").get(0); // Async Call - datasetService.globusAsyncCall(jsonData, token, dataset, requestUrl, authUser); + datasetService.globusUpload(jsonData, token, dataset, requestUrl, authUser); + return ok("Async call to Globus Upload started "); - return ok("Globus Task successfully completed "); } @@ -3078,24 +3040,45 @@ public Response addFilesToDataset(@PathParam("id") String idSupplied, @POST - @Path("/deleteglobusRule") + @Path("{id}/deleteglobusRule") @Consumes(MediaType.MULTIPART_FORM_DATA) - public Response deleteglobusRule(@FormDataParam("jsonData") String jsonData + public Response deleteglobusRule(@PathParam("id") String datasetId,@FormDataParam("jsonData") String jsonData ) throws IOException, ExecutionException, InterruptedException { - msgt("******* (api deleteglobusRule) jsonData : " + jsonData.toString()); - JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(jsonData)) { - jsonObject = Json.createReader(rdr).readObject(); - } catch (Exception jpe) { - jpe.printStackTrace(); - logger.log(Level.SEVERE, "Error parsing dataset json. 
Json: {0}"); + logger.info(" ==== (api deleteglobusRule) jsonData ====== " + jsonData); + + + if (!systemConfig.isHTTPUpload()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); } - String ruleId = jsonObject.getString("ruleId"); + // ------------------------------------- + // (1) Get the user from the API key + // ------------------------------------- + User authUser; + try { + authUser = findUserOrDie(); + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth") + ); + } + + // ------------------------------------- + // (2) Get the Dataset Id + // ------------------------------------- + Dataset dataset; + + try { + dataset = findDatasetOrDie(datasetId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + // Async Call + datasetService.globusDownload(jsonData, dataset, authUser); + + return ok("Async call to Globus Download started"); - globusServiceBean.deletePermision(ruleId,logger); - return ok("Globus Rule deleted successfully "); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java index bf1713ec1d4..4596ac8b3cc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java @@ -511,6 +511,10 @@ public void displayNotification() { userNotification.setTheObject(datasetService.find(userNotification.getObjectId())); break; + case GLOBUSDOWNLOADSUCCESS: + userNotification.setTheObject(datasetService.find(userNotification.getObjectId())); + break; + case CHECKSUMIMPORT: userNotification.setTheObject(datasetVersionService.find(userNotification.getObjectId())); break; diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 3af54b84ce3..0908ae7ecd0 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -220,8 +220,10 @@ notification.ingest.completed=Dataset {2} ingest process has finished with errors.

Ingested files:{3}
notification.mail.import.filesystem=Dataset {2} ({0}/dataset.xhtml?persistentId={1}) has been successfully uploaded and verified. notification.mail.import.globus=Dataset {2} ({0}/dataset.xhtml?persistentId={1}) has been successfully uploaded via Globus and verified. +notification.mail.download.globus=Files from the dataset {2} ({0}/dataset.xhtml?persistentId={1}) have been successfully downloaded via Globus and verified. notification.import.filesystem=Dataset {1} has been successfully uploaded and verified. notification.import.globus=Dataset {1} has been successfully uploaded via Globus and verified. +notification.download.globus=Files from the dataset {1} have been successfully downloaded via Globus and verified. notification.import.checksum={1}, dataset had file checksums added via a batch job. removeNotification=Remove Notification groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned. @@ -711,6 +713,7 @@ notification.email.info.unavailable=Unavailable notification.email.apiTokenGenerated=Hello {0} {1},\n\nAPI Token has been generated. Please keep it secure as you would do with a password. notification.email.apiTokenGenerated.subject=API Token was generated notification.email.import.globus.subject=Dataset {0} has been successfully uploaded via Globus and verified +notification.email.download.globus.subject=Files from the dataset {0} have been successfully downloaded via Globus and verified # dataverse.xhtml dataverse.name=Dataverse Name diff --git a/src/main/webapp/dataverseuser.xhtml b/src/main/webapp/dataverseuser.xhtml index abaeba46ee3..05ebf5f3b7a 100644 --- a/src/main/webapp/dataverseuser.xhtml +++ b/src/main/webapp/dataverseuser.xhtml @@ -293,6 +293,13 @@
+ + + + + + + From 12e2e6eb1de0e2223c895b1a7fbfb6b29b3d5f14 Mon Sep 17 00:00:00 2001 From: chenganj Date: Thu, 8 Apr 2021 11:29:14 -0400 Subject: [PATCH 0112/1551] correction to verify ruleID existence, added ChecksumDatasetSizeLimit and ChecksumFileSizeLimit settings --- .../iq/dataverse/DatasetServiceBean.java | 18 +++++- .../harvard/iq/dataverse/api/Datasets.java | 24 ++++++++ .../iq/dataverse/dataset/DatasetUtil.java | 12 +++- .../FinalizeDatasetPublicationCommand.java | 55 ++++++++++++------- .../dataverse/globus/GlobusServiceBean.java | 23 ++++---- .../settings/SettingsServiceBean.java | 4 ++ .../iq/dataverse/util/SystemConfig.java | 32 ++++++++++- 7 files changed, 130 insertions(+), 38 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 6a51e68ddbb..ec59972efe1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -59,7 +59,6 @@ import org.apache.commons.lang.StringUtils; import org.ocpsoft.common.util.Strings; -import javax.servlet.http.HttpServletRequest; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; @@ -1049,6 +1048,7 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin String logTimestamp = logFormatter.format(new Date()); Logger globusLogger = Logger.getLogger("edu.harvard.iq.dataverse.upload.client.DatasetServiceBean." + "GlobusUpload" + logTimestamp); + //Logger.getLogger(DatasetServiceBean.class.getCanonicalName()); //Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.DatasetServiceBean." + "ExportAll" + logTimestamp); String logFileName = "../logs" + File.separator + "globusUpload_" + dataset.getId()+"_"+authUser.getIdentifier()+"_"+ logTimestamp + ".log"; @@ -1088,7 +1088,13 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin } String taskIdentifier = jsonObject.getString("taskIdentifier"); - String ruleId = jsonObject.getString("ruleId"); + + String ruleId = ""; + try { + ruleId = jsonObject.getString("ruleId"); + } catch (NullPointerException npe){ + + } // globus task status check globusStatusCheck(taskIdentifier,globusLogger); @@ -1403,7 +1409,13 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro } String taskIdentifier = jsonObject.getString("taskIdentifier"); + String ruleId = ""; + + try { + ruleId = jsonObject.getString("ruleId"); + } catch (NullPointerException npe){ + + } // globus task status check globusStatusCheck(taskIdentifier,globusLogger); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index e0477c49aee..ca6425fc732 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2814,8 +2814,32 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, ApiToken token = authSvc.findApiTokenByUser((AuthenticatedUser) authUser); + if(uriInfo != null) { + logger.info(" ==== (api uriInfo.getRequestUri()) jsonData ====== " + uriInfo.getRequestUri().toString()); + } + + //logger.info(" ==== (api uriInfo.getRequestUri()) jsonData ====== " + headers.getRequestHeaders() + + MultivaluedMap multivaluedMap = headers.getRequestHeaders(); + + Map result = new HashMap<>(); + 
multivaluedMap.forEach((name, values) -> { + if (!CollectionUtils.isEmpty(values)) { + result.put(name, (values.size() != 1) ? values : values.get(0)); + logger.info(" headers ==== " + name + " ==== "+ values ); + } + }); + + logger.info(" ==== headers.getRequestHeader(origin) ====== " + headers.getRequestHeader("origin") ); + logger.info(" ==== headers.getRequestHeader(referer) ====== " + headers.getRequestHeader("referer") ); + + String requestUrl = headers.getRequestHeader("origin").get(0); + if(requestUrl.contains("localhost")){ + requestUrl = "http://localhost:8080"; + } + // Async Call datasetService.globusUpload(jsonData, token, dataset, requestUrl, authUser); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index 12a2cf58feb..d7f0d412d9f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -466,15 +466,23 @@ public static boolean isAppropriateStorageDriver(Dataset dataset){ * size for tabular files. */ public static String getDownloadSize(DatasetVersion dsv, boolean original) { + long bytes = 0l; + bytes = getDatasetDownloadSize( dsv, original); + return FileSizeChecker.bytesToHumanReadable(bytes); + } + + public static long getDatasetDownloadSize(DatasetVersion dsv, boolean original) { long bytes = 0l; for (FileMetadata fileMetadata : dsv.getFileMetadatas()) { DataFile dataFile = fileMetadata.getDataFile(); - if (original && dataFile.isTabularData()) { + if (original && dataFile.isTabularData()) { bytes += dataFile.getOriginalFileSize() == null ? 0 : dataFile.getOriginalFileSize(); } else { bytes += dataFile.getFilesize(); } } - return FileSizeChecker.bytesToHumanReadable(bytes); + return (bytes); } + + } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index bab4a719aa0..066813978d2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -1,18 +1,12 @@ package edu.harvard.iq.dataverse.engine.command.impl; -import edu.harvard.iq.dataverse.ControlledVocabularyValue; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetField; -import edu.harvard.iq.dataverse.DatasetFieldConstant; -import edu.harvard.iq.dataverse.DatasetLock; +import edu.harvard.iq.dataverse.*; + import static edu.harvard.iq.dataverse.DatasetVersion.VersionState.*; -import edu.harvard.iq.dataverse.DatasetVersionUser; -import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DvObject; -import edu.harvard.iq.dataverse.UserNotification; + import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; @@ -28,7 +22,7 @@ import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; -import edu.harvard.iq.dataverse.GlobalIdServiceBean; + import edu.harvard.iq.dataverse.batch.util.LoggingUtil; 
import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.util.FileUtil; @@ -36,6 +30,9 @@ import java.util.concurrent.Future; import org.apache.solr.client.solrj.SolrServerException; +import javax.ejb.EJB; +import javax.inject.Inject; + /** * @@ -47,7 +44,9 @@ public class FinalizeDatasetPublicationCommand extends AbstractPublishDatasetCommand { private static final Logger logger = Logger.getLogger(FinalizeDatasetPublicationCommand.class.getName()); - + + + /** * mirror field from {@link PublishDatasetCommand} of same name */ @@ -70,7 +69,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { Dataset theDataset = getDataset(); logger.info("Finalizing publication of the dataset "+theDataset.getGlobalId().asString()); - + // validate the physical files before we do anything else: // (unless specifically disabled; or a minor version) if (theDataset.getLatestVersion().getVersionState() != RELEASED @@ -309,14 +308,28 @@ private void updateParentDataversesSubjectsField(Dataset savedDataset, CommandCo private void validateDataFiles(Dataset dataset, CommandContext ctxt) throws CommandException { try { - for (DataFile dataFile : dataset.getFiles()) { - // TODO: Should we validate all the files in the dataset, or only - // the files that haven't been published previously? - // (the decision was made to validate all the files on every - // major release; we can revisit the decision if there's any - // indication that this makes publishing take significantly longer. - logger.log(Level.FINE, "validating DataFile {0}", dataFile.getId()); - FileUtil.validateDataFileChecksum(dataFile); + long maxDatasetSize = 0l; + long maxFileSize = 0l; + maxDatasetSize = ctxt.systemConfig().getChecksumDatasetSizeLimit(); + maxFileSize = ctxt.systemConfig().getChecksumFileSizeLimit(); + + long datasetSize = DatasetUtil.getDatasetDownloadSize(dataset.getLatestVersion(), false); + if (maxDatasetSize == -1 || datasetSize < maxDatasetSize) { + for (DataFile dataFile : dataset.getFiles()) { + // TODO: Should we validate all the files in the dataset, or only + // the files that haven't been published previously? + // (the decision was made to validate all the files on every + // major release; we can revisit the decision if there's any + // indication that this makes publishing take significantly longer. + logger.log(Level.FINE, "validating DataFile {0}", dataFile.getId()); + if (maxFileSize == -1 || dataFile.getOriginalFileSize() < maxFileSize) { + FileUtil.validateDataFileChecksum(dataFile); + } + } + } + else { + String message = "Skipping to validate File Checksum of the dataset " + dataset.getDisplayName() + ", because of the size of the dataset limit (set to " + maxDatasetSize + " ); "; + logger.info(message); } } catch (Throwable e) { diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index b2f6f424722..2230d5bfcaf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -286,18 +286,19 @@ public void updatePermision(AccessToken clientTokenUser, String directory, Strin public void deletePermision(String ruleId, Logger globusLogger) throws MalformedURLException { - AccessToken clientTokenUser = getClientToken(); - globusLogger.info("Start deleting permissions." 
-        String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, "");
+        if (ruleId.length() > 0) {
+            AccessToken clientTokenUser = getClientToken();
+            globusLogger.info("Start deleting permissions.");
+            String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, "");

-        URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + ruleId);
-        //logger.info("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + ruleId);
-        MakeRequestResponse result = makeRequest(url, "Bearer",
-                clientTokenUser.getOtherTokens().get(0).getAccessToken(),"DELETE", null);
-        if (result.status != 200) {
-            globusLogger.warning("Cannot delete access rule " + ruleId);
-        } else {
-            globusLogger.info("Access rule " + ruleId + " was deleted successfully");
+            URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + ruleId);
+            MakeRequestResponse result = makeRequest(url, "Bearer",
+                    clientTokenUser.getOtherTokens().get(0).getAccessToken(), "DELETE", null);
+            if (result.status != 200) {
+                globusLogger.warning("Cannot delete access rule " + ruleId);
+            } else {
+                globusLogger.info("Access rule " + ruleId + " was deleted successfully");
+            }
         }
     }

diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
index 7b1d7355649..dcd5b09149a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
@@ -228,6 +228,10 @@ public enum Key {
        SPSS/sav format, "RData" for R, etc.
        for example: :TabularIngestSizeLimit:RData */
     TabularIngestSizeLimit,
+    /* dataset size limit for checksum validation */
+    ChecksumDatasetSizeLimit,
+    /* file size limit for checksum validation */
+    ChecksumFileSizeLimit,

     /** The message added to a popup upon dataset publish
      *
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
index e9364669c7f..af7cf091c51 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
@@ -555,7 +555,37 @@ public Integer getSearchHighlightFragmentSize() {
         }
         return null;
     }
-
+
+    public long getChecksumDatasetSizeLimit() {
+        String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.ChecksumDatasetSizeLimit);
+
+        if (limitEntry != null) {
+            try {
+                return Long.parseLong(limitEntry);
+            } catch (NumberFormatException nfe) {
+                logger.warning("Invalid value for ChecksumDatasetSizeLimit option? - " + limitEntry);
+            }
+        }
+        // -1 means no limit is set;
+        return -1;
+    }
+
+    public long getChecksumFileSizeLimit() {
+        String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.ChecksumFileSizeLimit);
+
+        if (limitEntry != null) {
+            try {
+                return Long.parseLong(limitEntry);
+            } catch (NumberFormatException nfe) {
+                logger.warning("Invalid value for ChecksumFileSizeLimit option? - " + limitEntry);
+            }
+        }
+        // -1 means no limit is set;
+        return -1;
+    }
+
     public long getTabularIngestSizeLimit() {
         // This method will return the blanket ingestable size limit, if
         // set on the system.
I.e., the universal limit that applies to all From 67178c964b698d35074ec32fb8906f0347aae3cc Mon Sep 17 00:00:00 2001 From: jingma Date: Fri, 9 Apr 2021 13:29:59 +0200 Subject: [PATCH 0113/1551] Handle wrong JSON. --- .../java/edu/harvard/iq/dataverse/License.java | 7 +------ .../java/edu/harvard/iq/dataverse/api/Admin.java | 14 ++++++++++---- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/License.java b/src/main/java/edu/harvard/iq/dataverse/License.java index 29653271e01..2b78ab46278 100644 --- a/src/main/java/edu/harvard/iq/dataverse/License.java +++ b/src/main/java/edu/harvard/iq/dataverse/License.java @@ -126,12 +126,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; License license = (License) o; - return active == license.active && - Objects.equals(id, license.id) && - Objects.equals(name, license.name) && - Objects.equals(shortDescription, license.shortDescription) && - Objects.equals(uri, license.uri) && - Objects.equals(iconUrl, license.iconUrl); + return active == license.active && id.equals(license.id) && name.equals(license.name) && shortDescription.equals(license.shortDescription) && uri.equals(license.uri) && iconUrl.equals(license.iconUrl); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index fc7cf73d505..3940a35d35c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -44,10 +44,10 @@ import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.settings.Setting; import edu.harvard.iq.dataverse.util.json.JsonPrinter; +import java.net.URI; import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; -import javax.persistence.PersistenceException; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -1955,14 +1955,20 @@ public Response getLicenseByName(@PathParam("name") String name) { @POST @Path("/licenses") - public Response addLicense(License license) { + public Response addLicense(JsonObject jsonObject) { try { + License license = new License(); + license.setName(jsonObject.getString("name")); + license.setShortDescription(jsonObject.getString("shortDescription")); + license.setUri(new URI(jsonObject.getString("uri"))); + license.setIconUrl(new URI(jsonObject.getString("iconUrl"))); + license.setActive(jsonObject.getBoolean("active")); licenseService.save(license); return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); } catch (RequestBodyException e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); - } catch(PersistenceException e) { - return error(Response.Status.CONFLICT, "A license with the same URI or name is already present."); + } catch (Exception e) { + return error(Response.Status.BAD_REQUEST, "Something went wrong."); } } From c9d33e263d0cc8c82b3d009dabdde18ca110ccf8 Mon Sep 17 00:00:00 2001 From: jingma Date: Fri, 9 Apr 2021 17:43:15 +0200 Subject: [PATCH 0114/1551] Handle posting same JSON twice. 
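As a point of reference, a request matching the five fields parsed by the new addLicense method above might look like the following sketch (the endpoint path comes from the diff; the license values are hypothetical):

```
curl -X POST -H "Content-Type: application/json" http://localhost:8080/api/admin/licenses -d '{
  "name": "CC BY 4.0",
  "shortDescription": "Creative Commons Attribution 4.0 International License.",
  "uri": "https://creativecommons.org/licenses/by/4.0",
  "iconUrl": "https://licensebuttons.net/l/by/4.0/88x31.png",
  "active": true
}'
```

With this change, a body missing one of these keys falls through to the generic catch block and is rejected with a 400 response instead of surfacing as a server-side persistence error.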
--- src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java index c49ebd9659e..37fae3501be 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/LicenseServiceBean.java @@ -55,6 +55,7 @@ public License getByName(String name) throws FetchException { public License save(License license) throws PersistenceException, RequestBodyException { if (license.getId() == null) { em.persist(license); + em.flush(); return license; } else { throw new RequestBodyException("There shouldn't be an ID in the request body"); From f6c99ebd9514aa6d2099cba19c1cc8eea4678ff6 Mon Sep 17 00:00:00 2001 From: jingma Date: Fri, 9 Apr 2021 18:01:44 +0200 Subject: [PATCH 0115/1551] Reformat tabs. --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 3940a35d35c..e9a54c7cb72 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1957,8 +1957,8 @@ public Response getLicenseByName(@PathParam("name") String name) { @Path("/licenses") public Response addLicense(JsonObject jsonObject) { try { - License license = new License(); - license.setName(jsonObject.getString("name")); + License license = new License(); + license.setName(jsonObject.getString("name")); license.setShortDescription(jsonObject.getString("shortDescription")); license.setUri(new URI(jsonObject.getString("uri"))); license.setIconUrl(new URI(jsonObject.getString("iconUrl"))); @@ -1966,8 +1966,8 @@ public Response addLicense(JsonObject jsonObject) { licenseService.save(license); return created("/api/admin/licenses", Json.createObjectBuilder().add("message", "License created")); } catch (RequestBodyException e) { - return error(Response.Status.BAD_REQUEST, e.getMessage()); - } catch (Exception e) { + return error(Response.Status.BAD_REQUEST, e.getMessage()); + } catch (Exception e) { return error(Response.Status.BAD_REQUEST, "Something went wrong."); } } From 878ef6fff6a27510b39cffb058b26afca31301c3 Mon Sep 17 00:00:00 2001 From: Jan van Mansum Date: Wed, 14 Apr 2021 12:09:46 +0200 Subject: [PATCH 0116/1551] Merged back develop --- .gitignore | 4 + conf/docker-aio/run-test-suite.sh | 2 +- doc/release-notes/4259-java-11.md | 111 --- doc/release-notes/5.4-release-notes.md | 337 +++++++++ doc/release-notes/5.4.1-release-notes.md | 46 ++ .../7084-crawlable-file-access.md | 29 - doc/release-notes/7188-utf-8-filenames.md | 9 - doc/release-notes/7205-orig-file-size.md | 11 - doc/release-notes/7373-solr-upgrade.md | 22 - doc/release-notes/7374-postgresql-update.md | 9 - .../7398-saved-search-performance.md | 30 - doc/release-notes/7409-remove-worldmap.md | 9 - doc/release-notes/7501-guides-updates.md | 5 - doc/release-notes/7502-more-mime-types.md | 7 - ...panded-compound-datasetfield-validation.md | 36 - .../7619-restricted-summary-starts.md | 14 - .../820-non-ascii-chars-in-search.md | 10 - .../source/admin/harvestserver.rst | 8 + .../source/admin/make-data-count.rst | 2 +- .../source/admin/solr-search-index.rst | 4 +- .../source/admin/user-administration.rst | 10 + .../source/api/client-libraries.rst | 7 + doc/sphinx-guides/source/api/dataaccess.rst | 2 +- 
doc/sphinx-guides/source/api/native-api.rst | 221 +++++- doc/sphinx-guides/source/conf.py | 4 +- doc/sphinx-guides/source/developers/index.rst | 1 + .../source/developers/remote-users.rst | 2 +- .../developers/s3-direct-upload-api.rst | 146 ++++ .../source/developers/workflows.rst | 38 +- .../source/installation/config.rst | 28 +- doc/sphinx-guides/source/user/account.rst | 4 + doc/sphinx-guides/source/versions.rst | 4 +- pom.xml | 2 +- scripts/api/data/metadatablocks/citation.tsv | 280 +++---- scripts/api/data/role-test-addRole.json | 10 + scripts/dev/dev-rebuild.sh | 3 - .../7687/file_access_flag_update_bug.txt | 11 + .../AbstractGlobalIdServiceBean.java | 4 +- .../dataverse/ControlledVocabularyValue.java | 2 +- .../dataverse/DOIDataCiteRegisterService.java | 4 +- .../harvard/iq/dataverse/DataCitation.java | 3 +- .../edu/harvard/iq/dataverse/DataFile.java | 10 +- .../iq/dataverse/DataFileServiceBean.java | 59 +- .../edu/harvard/iq/dataverse/Dataset.java | 4 + .../edu/harvard/iq/dataverse/DatasetPage.java | 70 +- .../iq/dataverse/DatasetServiceBean.java | 38 +- .../harvard/iq/dataverse/DatasetVersion.java | 19 +- .../edu/harvard/iq/dataverse/Dataverse.java | 2 + .../iq/dataverse/DataverseServiceBean.java | 18 + .../iq/dataverse/DataverseSession.java | 66 +- .../edu/harvard/iq/dataverse/DvObject.java | 2 +- .../iq/dataverse/EditDatafilesPage.java | 88 ++- .../iq/dataverse/EjbDataverseEngine.java | 19 +- .../edu/harvard/iq/dataverse/FilePage.java | 56 +- .../iq/dataverse/HarvestingClientsPage.java | 7 + .../edu/harvard/iq/dataverse/LoginPage.java | 1 - .../harvard/iq/dataverse/MailServiceBean.java | 35 +- .../iq/dataverse/ManagePermissionsPage.java | 2 +- .../iq/dataverse/RoleAssigneeServiceBean.java | 4 +- .../harvard/iq/dataverse/RoleAssignment.java | 2 +- .../iq/dataverse/SendFeedbackDialog.java | 10 +- .../harvard/iq/dataverse/SettingsWrapper.java | 4 +- .../java/edu/harvard/iq/dataverse/Shib.java | 6 +- .../iq/dataverse/UserNotification.java | 2 +- .../harvard/iq/dataverse/UserServiceBean.java | 8 +- .../iq/dataverse/api/AbstractApiBean.java | 35 + .../edu/harvard/iq/dataverse/api/Access.java | 7 +- .../edu/harvard/iq/dataverse/api/Admin.java | 69 +- .../dataverse/api/DatasetFieldServiceApi.java | 26 +- .../harvard/iq/dataverse/api/Datasets.java | 16 +- .../harvard/iq/dataverse/api/FeedbackApi.java | 5 +- .../edu/harvard/iq/dataverse/api/Files.java | 36 +- .../edu/harvard/iq/dataverse/api/Roles.java | 33 +- .../edu/harvard/iq/dataverse/api/Users.java | 30 + .../ServiceDocumentManagerImpl.java | 11 +- .../dataverse/api/datadeposit/SwordAuth.java | 1 + .../AuthenticationServiceBean.java | 27 +- .../authorization/DataverseRole.java | 24 +- .../providers/builtin/BuiltinUser.java | 7 + .../providers/builtin/DataverseUserPage.java | 11 +- .../oauth2/OAuth2FirstLoginPage.java | 6 +- .../oauth2/OAuth2LoginBackingBean.java | 2 +- .../users/AuthenticatedUser.java | 32 +- .../authorization/users/GuestUser.java | 2 +- .../dataverse/authorization/users/User.java | 4 + .../iq/dataverse/branding/BrandingUtil.java | 36 +- .../branding/BrandingUtilHelper.java | 28 + .../confirmemail/ConfirmEmailPage.java | 1 - .../confirmemail/ConfirmEmailServiceBean.java | 26 +- .../datasetutility/AddReplaceFileHelper.java | 207 +++--- .../datasetutility/FileReplacePageHelper.java | 9 +- .../datasetutility/OptionalFileParams.java | 26 +- ...ddRoleAssigneesToExplicitGroupCommand.java | 7 + .../command/impl/AssignRoleCommand.java | 8 + .../command/impl/CreateRoleCommand.java | 17 +- 
.../CuratePublishedDatasetVersionCommand.java | 9 +- .../DeaccessionDatasetVersionCommand.java | 2 +- .../command/impl/DeactivateUserCommand.java | 44 ++ .../engine/command/impl/DeletePidCommand.java | 8 +- .../FinalizeDatasetPublicationCommand.java | 8 +- .../command/impl/GetUserTracesCommand.java | 228 ++++++ .../impl/GrantSuperuserStatusCommand.java | 4 + .../command/impl/MergeInAccountCommand.java | 15 +- .../command/impl/PublishDatasetCommand.java | 2 +- .../command/impl/RedetectFileTypeCommand.java | 2 +- .../impl/UpdateDatasetVersionCommand.java | 89 ++- .../iq/dataverse/export/ExportService.java | 18 +- .../export/HtmlCodeBookExporter.java | 2 +- .../iq/dataverse/export/OAI_OREExporter.java | 2 +- .../dataverse/export/ddi/DdiExportUtil.java | 28 +- .../export/ddi/DdiExportUtilHelper.java | 25 + .../harvest/server/OAIRecordServiceBean.java | 4 +- .../server/web/servlet/OAIServlet.java | 6 +- .../passwordreset/PasswordResetData.java | 4 +- .../passwordreset/PasswordResetPage.java | 15 +- .../PasswordResetServiceBean.java | 18 +- .../dataverse/privateurl/PrivateUrlPage.java | 1 - .../iq/dataverse/search/IndexServiceBean.java | 32 +- .../settings/SettingsServiceBean.java | 13 +- .../source/DbSettingConfigSource.java | 11 +- .../iq/dataverse/util/FileMetadataUtil.java | 95 +++ .../harvard/iq/dataverse/util/MailUtil.java | 8 +- .../iq/dataverse/util/SystemConfig.java | 2 +- .../iq/dataverse/util/bagit/BagGenerator.java | 4 +- .../iq/dataverse/util/bagit/OREMap.java | 29 +- .../iq/dataverse/util/bagit/OREMapHelper.java | 25 + .../iq/dataverse/util/json/JsonPrinter.java | 28 +- .../util/json/JsonPrinterHelper.java | 25 + .../workflow/WorkflowServiceBean.java | 115 ++- .../internalspi/AuthorizedExternalStep.java | 26 +- .../internalspi/InternalWorkflowStepSP.java | 2 + .../internalspi/PauseWithMessageStep.java | 48 ++ .../iq/dataverse/workflow/step/Failure.java | 2 +- .../iq/dataverse/workflow/step/Success.java | 28 +- .../dataverse/workflows/WorkflowComment.java | 20 +- .../iq/dataverse/workflows/WorkflowUtil.java | 60 ++ src/main/java/propertyFiles/Bundle.properties | 53 +- .../META-INF/microprofile-aliases.properties | 3 +- .../db/migration/V5.3.0.5__7564-workflow.sql | 2 + .../V5.3.0.6__2419-deactivate-users.sql | 6 + src/main/webapp/dashboard-users.xhtml | 4 +- src/main/webapp/dataverseuser.xhtml | 22 + src/main/webapp/editdatafiles.xhtml | 16 +- .../webapp/file-edit-button-fragment.xhtml | 26 +- .../webapp/file-edit-popup-fragment.xhtml | 20 +- src/main/webapp/file.xhtml | 43 +- src/main/webapp/filesFragment.xhtml | 13 +- src/main/webapp/passwordreset.xhtml | 1 - src/main/webapp/roles-edit.xhtml | 28 +- .../edu/harvard/iq/dataverse/api/AdminIT.java | 57 ++ .../iq/dataverse/api/DeactivateUsersIT.java | 282 +++++++ .../iq/dataverse/api/DeleteUsersIT.java | 701 ++++++++++++++++++ .../edu/harvard/iq/dataverse/api/FilesIT.java | 52 +- .../edu/harvard/iq/dataverse/api/RolesIT.java | 101 +++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 89 ++- .../dataverse/branding/BrandingUtilTest.java | 57 +- .../command/impl/CreateRoleCommandTest.java | 41 + .../iq/dataverse/export/DDIExporterTest.java | 8 +- .../export/ddi/DdiExportUtilTest.java | 22 +- .../source/DbSettingConfigSourceTest.java | 2 +- .../iq/dataverse/util/MailUtilTest.java | 59 +- .../dataverse/util/json/JsonPrinterTest.java | 4 +- tests/jenkins/ec2/Jenkinsfile | 4 +- 163 files changed, 4302 insertions(+), 1293 deletions(-) delete mode 100644 doc/release-notes/4259-java-11.md create mode 100644 doc/release-notes/5.4-release-notes.md 
create mode 100644 doc/release-notes/5.4.1-release-notes.md delete mode 100644 doc/release-notes/7084-crawlable-file-access.md delete mode 100644 doc/release-notes/7188-utf-8-filenames.md delete mode 100644 doc/release-notes/7205-orig-file-size.md delete mode 100644 doc/release-notes/7373-solr-upgrade.md delete mode 100644 doc/release-notes/7374-postgresql-update.md delete mode 100644 doc/release-notes/7398-saved-search-performance.md delete mode 100644 doc/release-notes/7409-remove-worldmap.md delete mode 100644 doc/release-notes/7501-guides-updates.md delete mode 100644 doc/release-notes/7502-more-mime-types.md delete mode 100644 doc/release-notes/7551-expanded-compound-datasetfield-validation.md delete mode 100644 doc/release-notes/7619-restricted-summary-starts.md delete mode 100644 doc/release-notes/820-non-ascii-chars-in-search.md create mode 100644 doc/sphinx-guides/source/developers/s3-direct-upload-api.rst create mode 100644 scripts/api/data/role-test-addRole.json create mode 100644 scripts/issues/7687/file_access_flag_update_bug.txt create mode 100644 src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeactivateUserCommand.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/util/FileMetadataUtil.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/PauseWithMessageStep.java create mode 100644 src/main/resources/db/migration/V5.3.0.5__7564-workflow.sql create mode 100644 src/main/resources/db/migration/V5.3.0.6__2419-deactivate-users.sql create mode 100644 src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java diff --git a/.gitignore b/.gitignore index 4d08cfb2257..7be8263f483 100644 --- a/.gitignore +++ b/.gitignore @@ -46,6 +46,10 @@ conf/docker-aio/dv/deps/ conf/docker-aio/dv/install/dvinstall.zip # or copy of test data conf/docker-aio/testdata/ + +# docker-aio creates maven/ which reports 86 new files. ignore this wd. +maven/ + scripts/installer/default.config *.pem diff --git a/conf/docker-aio/run-test-suite.sh b/conf/docker-aio/run-test-suite.sh index 2b24f6c90b2..47a4c3b9576 100755 --- a/conf/docker-aio/run-test-suite.sh +++ b/conf/docker-aio/run-test-suite.sh @@ -8,4 +8,4 @@ fi # Please note the "dataverse.test.baseurl" is set to run for "all-in-one" Docker environment. # TODO: Rather than hard-coding the list of "IT" classes here, add a profile to pom.xml. 
-source maven/maven.sh && mvn test -Dtest=DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT -Ddataverse.test.baseurl=$dvurl +source maven/maven.sh && mvn test -Dtest=DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT -Ddataverse.test.baseurl=$dvurl diff --git a/doc/release-notes/4259-java-11.md b/doc/release-notes/4259-java-11.md deleted file mode 100644 index 1e5ee19230a..00000000000 --- a/doc/release-notes/4259-java-11.md +++ /dev/null @@ -1,111 +0,0 @@ -## Release Highlights - -### Java 11 - -The Dataverse software now runs and requires Java 11. This will provide performance and security enhancements, allows developers to take advantage of new and updated Java features, and moves the project to a platform with better longer term support. This upgrade requires a few extra steps in the release process, outlined below. - -## Notes to Admins - -### Java 11 Upgrade - -There are some things to note and keep in mind regarding the move to Java 11: - -- You should install the JDK/JRE following your usual methods, depending on your operating system. An example of this on a RHEL/CentOS 7 or RHEL/CentOS 8 system is: - - `$ sudo yum remove java-1.8.0-openjdk java-1.8.0-openjdk-devel java-1.8.0-openjdk-headless` - - `$ sudo yum install java-11-openjdk-devel` - - The `remove` command may provide an error message if -headless isn't installed. - -- We targeted and tested Java 11, but 11+ will likely work. Java 11 was targeted because of its long term support. -- If you're moving from a Dataverse installation that was previously running Glassfish 4.x (typically this would be Dataverse Software 4.x), you will need to adjust some JVM options in domain.xml as part of the upgrade process. We've provided these optional steps below. These steps are not required if your first installed Dataverse version was running Payara 5.x (typically Dataverse Software 5.x). - -### New JVM and DB Options - -#### New JVM Options - -For installations that were previously running on Dataverse Software 4.x, the following JVM Options have been added in support of Java 11: - -- `[9|]--add-opens=java.base/jdk.internal.loader=ALL-UNNAMED` -- `[9|]--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED` -- `[9|]--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED` -- `[9|]--add-opens=java.base/java.lang=ALL-UNNAMED` -- `[9|]--add-opens=java.base/java.net=ALL-UNNAMED` -- `[9|]--add-opens=java.base/java.nio=ALL-UNNAMED` -- `[9|]--add-opens=java.base/java.util=ALL-UNNAMED` -- `[9|]--add-opens=java.base/sun.nio.ch=ALL-UNNAMED` -- `[9|]--add-opens=java.management/sun.management=ALL-UNNAMED` -- `[9|]--add-opens=java.base/sun.net.www.protocol.jrt=ALL-UNNAMED` -- `[9|]--add-opens=java.base/sun.net.www.protocol.jar=ALL-UNNAMED` -- `[9|]--add-opens=java.naming/javax.naming.spi=ALL-UNNAMED` -- `[9|]--add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED` -- `[9|]--add-opens=java.logging/java.util.logging=ALL-UNNAMED` - -## Upgrade Instructions - -1\. Undeploy the previous version. 
-
-- `<payara>/bin/asadmin list-applications`
-- `<payara>/bin/asadmin undeploy dataverse<-version>`
-
-(where `<payara>` is where Payara 5 is installed, for example: `/usr/local/payara5`)
-
-2\. Stop Payara and remove the generated directory
-
-- `service payara stop`
-- remove the generated directory:
-`rm -rf /payara/domains/domain1/generated`
-
-3\. (only required for installations previously running on Dataverse Software 4.x)
-
-In domain.xml:
-
-Remove the following JVM options from the `<java-config>` element:
-
-`-Djava.endorsed.dirs=/usr/local/payara5/glassfish/modules/endorsed:/usr/local/payara5/glassfish/lib/endorsed`
-
-`-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext`
-
-Add the following JVM options to the `<java-config>` element:
-
-`[9|]--add-opens=java.base/jdk.internal.loader=ALL-UNNAMED`
-
-`[9|]--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED`
-
-`[9|]--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED`
-
-`[9|]--add-opens=java.base/java.lang=ALL-UNNAMED`
-
-`[9|]--add-opens=java.base/java.net=ALL-UNNAMED`
-
-`[9|]--add-opens=java.base/java.nio=ALL-UNNAMED`
-
-`[9|]--add-opens=java.base/java.util=ALL-UNNAMED`
-
-`[9|]--add-opens=java.base/sun.nio.ch=ALL-UNNAMED`
-
-`[9|]--add-opens=java.management/sun.management=ALL-UNNAMED`
-
-`[9|]--add-opens=java.base/sun.net.www.protocol.jrt=ALL-UNNAMED`
-
-`[9|]--add-opens=java.base/sun.net.www.protocol.jar=ALL-UNNAMED`
-
-`[9|]--add-opens=java.naming/javax.naming.spi=ALL-UNNAMED`
-
-`[9|]--add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED`
-
-`[9|]--add-opens=java.logging/java.util.logging=ALL-UNNAMED`
-
-4\. Start Payara
-
-- `service payara start`
-
-5\. Deploy this version.
-
-- `<payara>/bin/asadmin deploy dataverse-5.4.war`
-
-6\. Restart payara
-
-- `service payara stop`
-- `service payara start`
diff --git a/doc/release-notes/5.4-release-notes.md b/doc/release-notes/5.4-release-notes.md
new file mode 100644
index 00000000000..0f34db254fa
--- /dev/null
+++ b/doc/release-notes/5.4-release-notes.md
@@ -0,0 +1,337 @@
+# Dataverse Software 5.4
+
+This release brings new features, enhancements, and bug fixes to the Dataverse Software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project.
+
+## Release Highlights
+
+### Deactivate Users API, Get User Traces API, Revoke Roles API
+
+A new API has been added to deactivate users to prevent them from logging in, receiving communications, or otherwise being active in the system. Deactivating a user is an alternative to deleting a user, especially when the latter is not possible due to the amount of interaction the user has had with the Dataverse installation. In order to learn more about a user before deleting, deactivating, or merging, a new "get user traces" API is available that will show objects created, roles, group memberships, and more. Finally, the "remove all roles" button available in the superuser dashboard is now also available via API.
+
+### New File Access API
+
+A new API offers *crawlable* access view of the folders and files within a dataset:
+
+`/api/datasets/<dataset id>/dirindex/`
+
+will output a simple html listing, based on the standard Apache directory index, with Access API download links for individual files, and recursive calls to the API above for sub-folders. Please see the [Native API Guide](https://guides.dataverse.org/en/5.4/api/native-api.html) for more information.
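+A minimal sketch of a crawl against this API, assuming a hypothetical dataset with database id 24 on a local installation (the id, host, and wget flags are illustrative, not mandated by this release):
+
+```
+# list the dataset's files and folders as a crawlable HTML index
+curl "http://localhost:8080/api/datasets/24/dirindex/"
+
+# or mirror the whole dataset, resuming later with --continue if interrupted
+wget --recursive --no-parent --content-disposition "http://localhost:8080/api/datasets/24/dirindex/"
+```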
+ +Using this API, ``wget --recursive`` (or similar crawling client) can be used to download all the files in a dataset, preserving the file names and folder structure; without having to use the download-as-zip API. In addition to being faster (zipping is a relatively resource-intensive operation on the server side), this process can be restarted if interrupted (with ``wget --continue`` or equivalent) - unlike zipped multi-file downloads that always have to start from the beginning. + +On a system that uses S3 with download redirects, the individual file downloads will be handled by S3 directly (with the exception of tabular files), without having to be proxied through the Dataverse application. + +### Restricted Files and DDI "dataDscr" Information (Summary Statistics, Variable Names, Variable Labels) + +In previous releases, DDI "dataDscr" information (summary statistics, variable names, and variable labels, sometimes known as "variable metadata") for tabular files that were ingested successfully were available even if files were restricted. This has been changed in the following ways: + +- At the dataset level, DDI exports no longer show "dataDscr" information for restricted files. There is only one version of this export and it is the version that's suitable for public consumption with the "dataDscr" information hidden for restricted files. +- Similarly, at the dataset level, the DDI HTML Codebook no longer shows "dataDscr" information for restricted files. +- At the file level, "dataDscr" information is no longer publicly available for restricted files. In practice, it was only possible to get this publicly via API (the download/access button was hidden). +- At the file level, "dataDscr" (variable metadata) information can still be downloaded for restricted files if you have access to download the file. + +### Search with Accented Characters + +Many languages include characters that have close analogs in ascii, e.g. (á, à, â, ç, é, è, ê, ë, í, ó, ö, ú, ù, û, ü…). This release changes the default Solr configuration to allow search to match words based on these associations, e.g. a search for Mercè would match the word Merce in a Dataset, and vice versa. This should generally be helpful, but can result in false positives, e.g. "canon" will be found searching for "cañon". + +### Java 11, PostgreSQL 13, and Solr 8 Support/Upgrades + +Several of the core components of the Dataverse Software have been upgraded. Specifically: + +- The Dataverse Software now runs on and requires Java 11. This will provide performance and security enhancements, allows developers to take advantage of new and updated Java features, and moves the project to a platform with better longer term support. This upgrade requires a few extra steps in the release process, outlined below. +- The Dataverse Software has now been tested with PostgreSQL versions up to 13. Versions 9.6+ will still work, but this update is necessary to support the software beyond PostgreSQL EOL later in 2021. +- The Dataverse Software now runs on Solr 8.8.1, the latest available stable release in the Solr 8.x series. + +### Saved Search Performance Improvements + +A refactoring has greatly improved Saved Search performance in the application. If your installation has multiple, potentially long-running Saved Searches in place, this greatly improves the probability that those search jobs will complete without timing out. 
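+Returning to the accented-character search change above, a quick way to see the new matching behavior is the Search API (the host and query are illustrative only):
+
+```
+# "Mercè" and "Merce" now match each other in search results
+curl "http://localhost:8080/api/search?q=Merc%C3%A8"
+```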
+
+### Worldmap/Geoconnect Integration Now Obsolete
+
+As of this release, the Geoconnect/Worldmap integration is no longer available. The Harvard University Worldmap is going through a migration process, and instead of updating this code to work with the new infrastructure, the decision was made to pursue future Geospatial exploration/analysis through other tools, following the External Tools Framework in the Dataverse Software.
+
+### Guides Updates
+
+The Dataverse Software Guides have been updated to follow recent changes to how different terms are used across the Dataverse Project. For more information, see Mercè's note to the community:
+
+
+
+### Conditionally Required Metadata Fields
+
+Prior to this release, when defining metadata for compound fields (via their dataset field types), fields could either be optional or required, i.e. if required you must always have (at least one) value for that field. For example, Author Name being required means you must have at least one Author with a nonempty Author name.
+
+In order to support more robust metadata (and specifically to resolve #7551), we need to allow a third case: Conditionally Required, that is, the field is required if and only if any of its "sibling" fields are entered. For example, Producer Name is now conditionally required in the citation metadata block. A user does not have to enter a Producer, but if they do, they have to enter a Producer Name.
+
+## Major Use Cases
+
+Newly-supported major use cases in this release include:
+
+- Dataverse Installation Administrators can now deactivate users using a new API. (Issue #2419, PR #7629)
+- Superusers can remove all of a user's assigned roles using a new API. (Issue #2419, PR #7629)
+- Superusers can use an API to gather more information about actions a user has taken in the system in order to make an informed decision about whether or not to deactivate or delete a user. (Issue #2419, PR #7629)
+- Superusers will now be able to harvest from installations using ISO-639-3 language codes. (Issue #7638, PR #7690)
+- Users interacting with the workflow system will receive status messages (Issue #7564, PR #7635)
+- Users interacting with prepublication workflows will see speed improvements (Issue #7681, PR #7682)
+- API Users will receive Dataverse collection API responses in a deterministic order. (Issue #7634, PR #7708)
+- API Users will be able to access a list of crawlable URLs for file download, allowing for faster and easily resumable transfers. (Issue #7084, PR #7579)
+- Users will no longer be able to access summary stats for restricted files. (Issue #7619, PR #7642)
+- Users will now see truncated versions of long strings (primarily checksums) throughout the application (Issue #6685, PR #7312)
+- Users will now be able to easily copy checksums, API tokens, and private URLs with a single click (Issue #6039, Issue #6685, PR #7539, PR #7312)
+- Users uploading data through the Direct Upload API will now be able to use additional checksums (Issue #7600, PR #7602)
+- Users searching for content will now be able to search using non-ascii characters. (Issue #820, PR #7378)
+- Users can now replace files in draft datasets, a functionality previously only available on published datasets. (Issue #7149, PR #7337)
+- Dataverse Installation Administrators can now set subfields of compound fields as **conditionally required**, that is, the field is required if and only if any of its "sibling" fields are entered. For example, Producer Name is now conditionally required in the citation metadata block. A user does not have to enter a Producer, but if they do, they have to enter a Producer Name. (Issue #7606, PR #7608)
+
+## Notes for Dataverse Installation Administrators
+
+### Java 11 Upgrade
+
+There are some things to note and keep in mind regarding the move to Java 11:
+
+- You should install the JDK/JRE following your usual methods, depending on your operating system. An example of this on a RHEL/CentOS 7 or RHEL/CentOS 8 system is:
+
+   `$ sudo yum remove java-1.8.0-openjdk java-1.8.0-openjdk-devel java-1.8.0-openjdk-headless`
+
+   `$ sudo yum install java-11-openjdk-devel`
+
+   The `remove` command may provide an error message if -headless isn't installed.
+
+- We targeted and tested Java 11, but 11+ will likely work. Java 11 was targeted because of its long term support.
+- If you're moving from a Dataverse installation that was previously running Glassfish 4.x (typically this would be Dataverse Software 4.x), you will need to adjust some JVM options in domain.xml as part of the upgrade process. We've provided these optional steps below. These steps are not required if your first installed Dataverse Software version was running Payara 5.x (typically Dataverse Software 5.x).
+
+### PostgreSQL Versions Up To 13 Supported
+
+Up until this release our installation guide "strongly recommended" to install PostgreSQL v. 9.6. While that version is known to be very stable, it is nearing its end-of-life (in Nov. 2021). The Dataverse Software has now been tested with versions up to 13. If you decide to upgrade PostgreSQL, the tested and recommended way of doing that is as follows:
+
+- Export your current database with ``pg_dumpall``;
+- Install the new version of PostgreSQL; (make sure it's running on the same port, etc. so that no changes are needed in the Payara configuration)
+- Re-import the database with ``psql``, as the postgres user.
+
+Consult the PostgreSQL upgrade documentation for more information, for example <https://www.postgresql.org/docs/13/upgrading.html#UPGRADING-VIA-PGDUMPALL>.
+
+### Solr Upgrade
+
+With this release we upgrade to the latest available stable release in the Solr 8.x branch. We recommend a fresh installation of Solr 8.8.1 (the index will be empty) followed by an "index all".
+
+Before you start the "index all", the Dataverse installation will appear to be empty because the search results come from Solr. As indexing progresses, partial results will appear until indexing is complete.
+
+See <https://guides.dataverse.org/en/5.4/installation/prerequisites.html#installing-solr> for more information.
+
+### Managing Conditionally Required Metadata Fields
+
+Prior to this release, when defining metadata for compound fields (via their dataset field types), fields could either be optional or required, i.e. if required you must always have (at least one) value for that field. For example, Author Name being required means you must have at least one Author with a nonempty Author name.
+
+In order to support more robust metadata (and specifically to resolve #7551), we need to allow a third case: Conditionally Required, that is, the field is required if and only if any of its "sibling" fields are entered. For example, Producer Name is now conditionally required in the citation metadata block. A user does not have to enter a Producer, but if they do, they have to enter a Producer Name.
+
+This change required some modifications to how "required" is defined in the metadata .tsv files (for compound fields).
+
+Prior to this release, the value of required for the parent compound field did not matter and so was set to false.
+
+Going forward:
+
+- For optional, the parent compound field would be required = false and all children would be required = false.
+- For required, the parent compound field would be required = true and at least one child would be required = true.
+- For conditionally required, the parent compound field would be required = false and at least one child would be required = true.
+
+This release updates the citation .tsv file that is distributed with the software for the required parent compound fields (e.g. author), as well as sets Producer Name to be conditionally required. No other distributed .tsv files were updated, as they did not have any required compound values.
+
+**If you have created any custom metadata .tsv files**, you will need to make the same (type of) changes there.
+
+### Citation Metadata Block Update
+
+Due to the changes for Conditionally Required Metadata Fields, and a minor update in the citation metadata block to support extra ISO-639-3 language codes, a block upgrade is required. Instructions are provided below.
+
+### Retroactively Store Original File Size
+
+Beginning in Dataverse Software 4.10, the size of the saved original file (for an ingested tabular datafile) was stored in the database. For files added before this change, we provide an API that retrieves and permanently stores the sizes for any already existing saved originals. See [Datafile Integrity API](https://guides.dataverse.org/en/5.4/api/native-api.html#datafile-integrity) for more information.
+
+This was documented as a step in previous release notes, but we are noting it in these release notes to give it more visibility.
+
+### DB Cleanup for Saved Searches
+
+A previous version of the Dataverse Software changed the indexing logic so that when a user links a Dataverse collection, its children are also indexed as linked. This means that the children do not need to be separately linked, and in this version we removed the logic that creates a saved search to create those links when a Dataverse collection is linked.
+
+We recommend cleaning up the db to a) remove these saved searches and b) remove the links for the objects. We can do this via a few queries, which are available in the folder here:
+
+<https://github.com/IQSS/dataverse/raw/develop/scripts/issues/7398/>
+
+There are four sets of queries available, and they should be run in this order:
+
+- ss_for_deletion.txt to identify the Saved Searches to be deleted
+- delete_ss.txt to delete the Saved Searches identified in the previous query
+- dld_for_deletion.txt to identify the linked datasets and Dataverse collections to be deleted
+- delete_dld.txt to delete the linked datasets and Dataverse collections identified in the previous queries
+
+Note: removing these saved searches and links should not affect what users will see as linked due to the aforementioned indexing change. Similarly, not removing these saved searches and links should not affect anything, but is a cleanup of unnecessary rows in the database.
+
+### DB Cleanup for Superusers Releasing without Version Updates
+
+In datasets where a superuser has run the Curate command and the update included a change to the fileaccessrequest flag, those changes would not be reflected appropriately in the published version. This should be a rare occurrence.
+
+Instead of an automated solution, we recommend inspecting the affected datasets and correcting the fileaccessrequest flag as appropriate.
+You can identify the affected datasets via a query, which is available in the folder here:
+
+<https://github.com/IQSS/dataverse/raw/develop/scripts/issues/7687/>
+
+## New JVM Options and Database Settings
+
+For installations that were previously running on Dataverse Software 4.x, a number of new JVM options need to be added as part of the upgrade. The JVM Options are enumerated in the detailed upgrade instructions below.
+
+Two new Database settings were added:
+
+- :InstallationName
+- :ExportInstallationAsDistributorOnlyWhenNotSet
+
+For an overview of these new options, please see the
+[Installation Guide](https://guides.dataverse.org/en/5.4/installation/config.html#database-settings)
+
+## Notes for Tool Developers and Integrators
+
+### UTF-8 Characters and Spaces in File Names
+
+UTF-8 characters in filenames are now preserved when downloaded.
+
+Dataverse installations will no longer replace spaces in file names of downloaded files with the + character. If your tool or integration has any special handling around this, you may need to make further adjustments to maintain backwards compatibility while also supporting Dataverse installations on 5.4+.
+
+Note that this follows a change from 5.1 that only corrected this for installations running with S3 storage. This makes the behavior consistent across installations running all types of file storage.
+
+## Complete List of Changes
+
+For the complete list of code changes in this release, see the [5.4 Milestone](https://github.com/IQSS/dataverse/milestone/94?closed=1) in Github.
+
+For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org.
+
+## Installation
+
+If this is a new installation, please see our [Installation Guide](https://guides.dataverse.org/en/5.4/installation/).
+
+## Upgrade Instructions
+
+0\. These instructions assume that you've already successfully upgraded from Dataverse Software 4.x to Dataverse Software 5 following the instructions in the [Dataverse Software 5 Release Notes](https://github.com/IQSS/dataverse/releases/tag/v5.0). After upgrading from the 4.x series to 5.0, you should progress through the other 5.x releases before attempting the upgrade to 5.4.
+
+1\. Upgrade to Java 11.
+
+2\. Upgrade to Solr 8.8.1.
+
+If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user.
+
+In the following commands we assume that Payara 5 is installed in `/usr/local/payara5`. If not, adjust as needed.
+
+`export PAYARA=/usr/local/payara5`
+
+(or `setenv PAYARA /usr/local/payara5` if you are using a `csh`-like shell)
+
+3\. Undeploy the previous version.
+
+- `$PAYARA/bin/asadmin list-applications`
+- `$PAYARA/bin/asadmin undeploy dataverse<-version>`
+
+4\. Stop Payara and remove the generated directory
+
+- `service payara stop`
+- `rm -rf $PAYARA/glassfish/domains/domain1/generated`
+
+5\. **(only required for installations previously running Dataverse Software 4.x!)** In other words, if you have a domain.xml that **originated under Glassfish 4**, the below JVM Options need to be added. If your Dataverse installation was first installed on the 5.x series, these JVM options should already be present.
+
+In domain.xml:
+
+Remove the following JVM options from the `<java-config>` section:
+
+`-Djava.endorsed.dirs=/usr/local/payara5/glassfish/modules/endorsed:/usr/local/payara5/glassfish/lib/endorsed`
+
+`-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext`
+
+Add the following JVM options to the `<java-config>` section:
+
+`[9|]--add-opens=java.base/jdk.internal.loader=ALL-UNNAMED`
+
+`[9|]--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED`
+
+`[9|]--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED`
+
+`[9|]--add-opens=java.base/java.lang=ALL-UNNAMED`
+
+`[9|]--add-opens=java.base/java.net=ALL-UNNAMED`
+
+`[9|]--add-opens=java.base/java.nio=ALL-UNNAMED`
+
+`[9|]--add-opens=java.base/java.util=ALL-UNNAMED`
+
+`[9|]--add-opens=java.base/sun.nio.ch=ALL-UNNAMED`
+
+`[9|]--add-opens=java.management/sun.management=ALL-UNNAMED`
+
+`[9|]--add-opens=java.base/sun.net.www.protocol.jrt=ALL-UNNAMED`
+
+`[9|]--add-opens=java.base/sun.net.www.protocol.jar=ALL-UNNAMED`
+
+`[9|]--add-opens=java.naming/javax.naming.spi=ALL-UNNAMED`
+
+`[9|]--add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED`
+
+`[9|]--add-opens=java.logging/java.util.logging=ALL-UNNAMED`
+
+6\. Start Payara
+
+- `service payara start`
+
+7\. Deploy this version.
+
+- `$PAYARA/bin/asadmin deploy dataverse-5.4.war`
+
+8\. Restart payara
+
+- `service payara stop`
+- `service payara start`
+
+9\. Reload Citation Metadata Block:
+
+   `wget https://github.com/IQSS/dataverse/releases/download/v5.4/citation.tsv`
+   `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"`
+
+## Additional Release Steps
+
+1\. Confirm that the schema.xml was updated with the new v5.4 version when you updated Solr.
+
+2\. Run the script updateSchemaMDB.sh to generate updated solr schema files and preserve any other custom fields in your Solr configuration.
+
+For example: (modify the path names as needed)
+
+cd /usr/local/solr-8.8.1/server/solr/collection1/conf
+wget https://github.com/IQSS/dataverse/releases/download/v5.4/updateSchemaMDB.sh
+chmod +x updateSchemaMDB.sh
+./updateSchemaMDB.sh -t .
+
+See <http://guides.dataverse.org/en/5.4/admin/metadatacustomization.html?highlight=updateschemamdb> for more information.
+
+3\. Do a clean reindex by first clearing then indexing. Re-indexing is required to get full-functionality from this change. Please refer to the guides on how to clear and index if needed.
+
+4\. Upgrade Postgres.
+
+- Export your current database with ``pg_dumpall``;
+- Install the new version of PostgreSQL; (make sure it's running on the same port, etc. so that no changes are needed in the Payara configuration)
+- Re-import the database with ``psql``, as the postgres user.
+
+Consult the PostgreSQL upgrade documentation for more information, for example <https://www.postgresql.org/docs/13/upgrading.html#UPGRADING-VIA-PGDUMPALL>.
+
+5\. Retroactively store original file size
+
+Use the [Datafile Integrity API](https://guides.dataverse.org/en/5.4/api/native-api.html#datafile-integrity) to ensure that the sizes of all original files are stored in the database.
+
+6\. DB Cleanup for Superusers Releasing without Version Updates
+
+In datasets where a superuser has run the Curate command and the update included a change to the fileaccessrequest flag, those changes would not be reflected appropriately in the published version. This should be a rare occurrence.
+
+Instead of an automated solution, we recommend inspecting the affected datasets and correcting the fileaccessrequest flag as appropriate.
+You can identify the affected datasets via a query, which is available in the folder here:
+
+<https://github.com/IQSS/dataverse/raw/develop/scripts/issues/7687/>
+
+7\. (Optional, but recommended) DB Cleanup for Saved Searches and Linked Objects
+
+Perform the DB Cleanup for Saved Searches and Linked Objects, summarized in the "Notes for Dataverse Installation Administrators" section above.
+
+8\. Take a backup of the Worldmap links, if any.
+
+9\. (Only required if custom metadata blocks are used in your Dataverse installation) Update any custom metadata blocks:
+
+In the .tsv for any custom metadata blocks, for any subfield that has a required value of TRUE, find the corresponding parent field and change its required value to TRUE.
+
+Note: As there is an accompanying Flyway script that updates the values directly in the database, you do not need to reload these metadata .tsv files via API, unless you make additional changes, e.g. set some compound fields to be conditionally required.
diff --git a/doc/release-notes/5.4.1-release-notes.md b/doc/release-notes/5.4.1-release-notes.md
new file mode 100644
index 00000000000..13433ac12d2
--- /dev/null
+++ b/doc/release-notes/5.4.1-release-notes.md
@@ -0,0 +1,46 @@
+# Dataverse Software 5.4.1
+
+This release provides a fix for a regression introduced in 5.4 and implements a few other small changes. Please use 5.4.1 for production deployments instead of 5.4.
+
+## Release Highlights
+
+### API Backwards Compatibility Maintained
+
+The syntax in the example in the [Basic File Access](https://guides.dataverse.org/en/5.4.1/api/dataaccess.html#basic-file-access) section of the Dataverse Software Guides will continue to work.
+
+## Complete List of Changes
+
+For the complete list of code changes in this release, see the [5.4.1 Milestone](https://github.com/IQSS/dataverse/milestone/95?closed=1) in Github.
+
+For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org.
+
+## Installation
+
+If this is a new installation, please see our [Installation Guide](https://guides.dataverse.org/en/5.4.1/installation/).
+
+## Upgrade Instructions
+
+0\. These instructions assume that you've already successfully upgraded from Dataverse Software 4.x to Dataverse Software 5 following the instructions in the [Dataverse Software 5 Release Notes](https://github.com/IQSS/dataverse/releases/tag/v5.0). After upgrading from the 4.x series to 5.0, you should progress through the other 5.x releases before attempting the upgrade to 5.4.1.
+
+1\. Undeploy the previous version.
+
+- `$PAYARA/bin/asadmin list-applications`
+- `$PAYARA/bin/asadmin undeploy dataverse<-version>`
+
+2\. Stop Payara and remove the generated directory
+
+- `service payara stop`
+- `rm -rf $PAYARA/glassfish/domains/domain1/generated`
+
+3\. Start Payara
+
+- `service payara start`
+
+4\. Deploy this version.
+
+- `$PAYARA/bin/asadmin deploy dataverse-5.4.1.war`
+
+5\. Restart payara
+
+- `service payara stop`
+- `service payara start`
diff --git a/doc/release-notes/7084-crawlable-file-access.md b/doc/release-notes/7084-crawlable-file-access.md
deleted file mode 100644
index 0e721728e28..00000000000
--- a/doc/release-notes/7084-crawlable-file-access.md
+++ /dev/null
@@ -1,29 +0,0 @@
- ## Release Highlights
-
-### A new file access API
-
-A new api offers *crawlable* access view of the folders and files within a datset:
-
-```
- /api/datasets/<dataset id>/dirindex/
-```
-
-will output a simple html listing, based on the standard Apache
-directory index, with Access API download links for individual files,
-and recursive calls to the API above for sub-folders. (See the
-documentation entry in the guides for more information).
-
-Using this API, ``wget --recursive`` (or similar crawling client) can
-be used to download all the files in a dataset, preserving the file
-names and folder structure; without having to use the download-as-zip
-API. In addition to being faster (zipping is a relatively
-resource-intensive operation on the server side), this process can be
-restarted if interrupted (with ``wget --continue`` or equivalent) -
-unlike zipped multi-file downloads that always have to start from the
-beginning.
-
-On a system that uses S3 with download redirects, the individual file
-downloads will be handled by S3 directly, without having to be proxied
-through the Dataverse application.
-
-
diff --git a/doc/release-notes/7188-utf-8-filenames.md b/doc/release-notes/7188-utf-8-filenames.md
deleted file mode 100644
index 014ea4b4566..00000000000
--- a/doc/release-notes/7188-utf-8-filenames.md
+++ /dev/null
@@ -1,9 +0,0 @@
-## Notes for Tool Developers and Integrators
-
-### UTF-8 Characters and Spaces in File Names
-
-UTF-8 characters in filenames are now preserved when downloaded.
-
-Dataverse Installations will no longer replace spaces in file names of downloaded files with the + character. If your tool or integration has any special handling around this, you may need to make further adjustments to maintain backwards compatibility while also supporting Dataverse installations on 5.4+.
-
-Note that this follows a change from 5.1 that only corrected this for installations running with S3 storage. This makes the behavior consistent across installations running all types of file storage.
diff --git a/doc/release-notes/7205-orig-file-size.md b/doc/release-notes/7205-orig-file-size.md
deleted file mode 100644
index 2f7af23b6f3..00000000000
--- a/doc/release-notes/7205-orig-file-size.md
+++ /dev/null
@@ -1,11 +0,0 @@
-## Notes to Admins
-
-Beginning in Dataverse Software 4.10, the size of the saved original file (for an ingested tabular datafile) was stored in the database. For files added before this change, we provide an API that retrieves and permanently stores the sizes for any already existing saved originals. See [Datafile Integrity API](https://guides.dataverse.org/en/5.4/api/native-api.html#datafile-integrity) for more information.
-
-This was documented as a step in previous release notes, but we are noting it in these release notes to give it more visibility.
-
-## Upgrade Instructions
-
-X./ Retroactively store original file size
-
-Use the [Datafile Integrity API](https://guides.dataverse.org/en/5.4/api/native-api.html#datafile-integrity) to ensure that the sizes of all original files are stored in the database.
\ No newline at end of file diff --git a/doc/release-notes/7373-solr-upgrade.md b/doc/release-notes/7373-solr-upgrade.md deleted file mode 100644 index 06e7bc62e5a..00000000000 --- a/doc/release-notes/7373-solr-upgrade.md +++ /dev/null @@ -1,22 +0,0 @@ -### Solr Update - -With this release we upgrade to the latest available stable release in the Solr 8.x branch. We recommend a fresh installation of Solr 8.8.1 (the index will be empty) -followed by an "index all". - -Before you start the "index all", Dataverse will appear to be empty because -the search results come from Solr. As indexing progresses, partial results will -appear until indexing is complete. - - -See http://guides.dataverse.org/installation/prerequisites.html#installing-solr - -[for the additional upgrade steps section] - -Run the script updateSchemaMDB.sh to generate updated solr schema files and preserve any other custom fields in your Solr configuration. -For example: (modify the path names as needed) -cd /usr/local/solr-8.8.1/server/solr/collection1/conf -wget https://github.com/IQSS/dataverse/releases/download/v5.4/updateSchemaMDB.sh -chmod +x updateSchemaMDB.sh -./updateSchemaMDB.sh -t . - -See http://guides.dataverse.org/en/5.4/admin/metadatacustomization.html?highlight=updateschemamdb for more information. diff --git a/doc/release-notes/7374-postgresql-update.md b/doc/release-notes/7374-postgresql-update.md deleted file mode 100644 index 3ac74ad70a1..00000000000 --- a/doc/release-notes/7374-postgresql-update.md +++ /dev/null @@ -1,9 +0,0 @@ -## Notes for Dataverse Administrators - -Up until this release our installation guide "strongly recommended" to install PostgreSQL v. 9.6. While that version is known to be very stable, it is nearing its end-of-life (in Nov. 2021). Dataverse Software has now been tested with versions up to 13. If you decide to upgrade PostgreSQL, the tested and recommended way of doing that is as follows: - -- Export your current database with ``pg_dumpall``; -- Install the new version of PostgreSQL; (make sure it's running on the same port, etc. so that no changes are needed in the Payara configuration) -- Re-import the database with ``psql``, as the postgres user. - -Consult the PostgreSQL upgrade documentation for more information, for example https://www.postgresql.org/docs/13/upgrading.html#UPGRADING-VIA-PGDUMPALL. diff --git a/doc/release-notes/7398-saved-search-performance.md b/doc/release-notes/7398-saved-search-performance.md deleted file mode 100644 index 4986524ed4f..00000000000 --- a/doc/release-notes/7398-saved-search-performance.md +++ /dev/null @@ -1,30 +0,0 @@ -## Release Highlights - -### Saved Search Performance Improvements - -A refactoring has greatly improved Saved Search performance in the application. If your installation has multiple, potentially long-running Saved Searches in place, this greatly improves the probability that those search jobs will complete without timing out. - -## Notes for Dataverse Installation Administrators - -### DB Cleanup for Saved Searches - -A previous version of dataverse changed the indexing logic so that when a user links a dataverse, its children are also indexed as linked. This means that the children do not need to be separately linked, and in this version we removed the logic that creates a saved search to create those links when a dataverse is linked. - -We recommend cleaning up the db to a) remove these saved searches and b) remove the links for the objects. 
We can do this via a few queries, which are available in the folder here: - -https://github.com/IQSS/dataverse/raw/develop/scripts/issues/7398/ - -There are four sets of queries available, and they should be run in this order: - -- ss_for_deletion.txt to identify the Saved Searches to be deleted -- delete_ss.txt to delete the Saved Searches identified in the previous query -- dld_for_deletion.txt to identify the linked datasets and dataverses to be deleted -- delete_dld.txt to delete the linked datasets and dataverses identified in the previous queries - -Note: removing these saved searches and links should not affect what users will see as linked due to the aforementioned indexing change. Similarly, not removing these saved searches and links should not affect anything, but is a cleanup of unnecessary rows in the database. - -## Additional Upgrade Instructions - -X\. (Optional, but recommended) DB Cleanup - -Perform the DB Cleanup for Saved Searches and Linked Objects, summarized in the "Notes for Dataverse Installation Administrators" section above. diff --git a/doc/release-notes/7409-remove-worldmap.md b/doc/release-notes/7409-remove-worldmap.md deleted file mode 100644 index 3e6ea3ed06e..00000000000 --- a/doc/release-notes/7409-remove-worldmap.md +++ /dev/null @@ -1,9 +0,0 @@ -## Release Highlights - -### Worldmap/Geoconnect Integration Now Obsolete - -As of this release, the Geoconnect/Worldmap integration is no longer available. The Harvard University Worldmap is going through a migration process, and instead of updating this code to work with the new infrastructure, the decision was made to pursue future Geospatial exploration/analysis through other tools, following the External Tools Framework in the Dataverse Software. - -## Notes to Admins - -Take a backup of the Worldmap links, if any. \ No newline at end of file diff --git a/doc/release-notes/7501-guides-updates.md b/doc/release-notes/7501-guides-updates.md deleted file mode 100644 index 29bc7557f05..00000000000 --- a/doc/release-notes/7501-guides-updates.md +++ /dev/null @@ -1,5 +0,0 @@ -## Guides Updates - -The Dataverse Guides have been updated to follow recent changes to how different terms are used across the Dataverse Project. For more information, see Mercè's note to the community: - - diff --git a/doc/release-notes/7502-more-mime-types.md b/doc/release-notes/7502-more-mime-types.md deleted file mode 100644 index 0c57dc3e389..00000000000 --- a/doc/release-notes/7502-more-mime-types.md +++ /dev/null @@ -1,7 +0,0 @@ -## Upgrade Steps - -In addition to mapping friendly names to these file types, the types are further mapped to aggregate file types facets on the homepage. A full reindex is required for the facets to be refreshed. - -Kick off full reindex - -http://guides.dataverse.org/en/4.20/admin/solr-search-index.html diff --git a/doc/release-notes/7551-expanded-compound-datasetfield-validation.md b/doc/release-notes/7551-expanded-compound-datasetfield-validation.md deleted file mode 100644 index 19d6d573699..00000000000 --- a/doc/release-notes/7551-expanded-compound-datasetfield-validation.md +++ /dev/null @@ -1,36 +0,0 @@ -## Notes for Dataverse Administrators - -Prior to this release, when defining metadata for compound fields (via their dataset field types), fields could be either be optional or required, i.e. if required you must always have (at least one) value for that field. For example, Author Name being required means you must have at least one Author with an nonempty Author name. 
- -In order to support more robust metadata (and specifically to resolve #7551), we need to allow a third case: Conditionally Required, that is, the field is required if and only if any of its "sibling" fields are entered. For example, Producer Name is now conditionally required in the citation metadata block. A user does not have to enter a Producer, but if they do, they have to enter a Producer Name. - -This change required some modifications to how "required" is defined in the metadata .tsv files (for compound fields). - -Prior to this release, the value of required for the parent compound field did not matter and so was set to false. - -Going forward: - -- For optional, the parent compound field would be required = false and all children would be required = false. -- For required, the parent compound field would be required = true and at least one child would be required = true. -- For conditionally required, the parent compound field would be required = false and at least one child would be required = true. - -This release updates the citation .tsv file that is distributed with the software for the required parent compound fields (e.g. author), as well as sets Producer Name to be conditionally required. No other distributed .tsv files were updated, as they did not have any required compound values. - -**If you have created any custom metadata .tsv files**, you will need to make the same (type of) changes there. - -### Additional Upgrade Steps - -1. Reload Citation Metadata Block: - - `wget https://github.com/IQSS/dataverse/releases/download/v5.4/citation.tsv` - `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"` - -2. Update any custom metadata blocks (if used): - - For any subfield that has a required value of TRUE, find the corresponding parent field and change its required value to TRUE. - - Note: As there is an accompanying Flyway script that updates the values directly in the database, you do not need to reload these metadata .tsv files via API, unless you make additional changes, e.g set some compound fields to be conditionally required. - -### Use Case - -Metadata designers can now set subfields of compound fields as **conditionally required**, that is, the field is required if and only if any of its "sibling" fields are entered. For example, Producer Name is now conditionally required in the citation metadata block. A user does not have to enter a Producer, but if they do, they have to enter a Producer Name. diff --git a/doc/release-notes/7619-restricted-summary-starts.md b/doc/release-notes/7619-restricted-summary-starts.md deleted file mode 100644 index e1c20f3bde2..00000000000 --- a/doc/release-notes/7619-restricted-summary-starts.md +++ /dev/null @@ -1,14 +0,0 @@ -Restricted Files and DDI "dataDscr" Information (Summary Statistics, Variable Names, Variable Labels) - -In previous releases, DDI "dataDscr" information (summary statistics, variable names, and variable labels, sometimes known as "variable metadata") for tabular files that were ingested successfully were available even if files were restricted. This has been changed in the following ways: - -- At the dataset level, DDI exports no longer show "dataDscr" information for restricted files. There is only one version of this export and it is the version that's suitable for public consumption with the "dataDscr" information hidden for restricted files. 
-- Similarly, at the dataset level, the DDI HTML Codebook no longer shows "dataDscr" information for restricted files. -- At the file level, "dataDscr" information is no longer publicly available for restricted files. In practice, it was only possible to get this publicly via API (the download/access button was hidden). -- At the file level, "dataDscr" (variable metadata) information can still be downloaded for restricted files if you have access to download the file. - -After upgrading, you should re-export to replace cached DDI exports with restricted summary stats with DDI exports fit for public consumption: - - curl http://localhost:8080/api/admin/metadata/reExportAll - -For details on this operation, see https://guides.dataverse.org/en/5.4/admin/metadataexport.html diff --git a/doc/release-notes/820-non-ascii-chars-in-search.md b/doc/release-notes/820-non-ascii-chars-in-search.md deleted file mode 100644 index 9e21dd83694..00000000000 --- a/doc/release-notes/820-non-ascii-chars-in-search.md +++ /dev/null @@ -1,10 +0,0 @@ -(review these notes if this gets into the same release as #7645 as the steps are included there - we expect to include this in the same release) - -### Search with non-ascii characters - -Many languages include characters that have close analogs in ascii, e.g. (á, à, â, ç, é, è, ê, ë, í, ó, ö, ú, ù, û, ü…). This release changes the default Solr configuration to allow search to match words based on these associations, e.g. a search for Mercè would match the word Merce in a Dataset, and vice versa. This should generally be helpful, but can result in false positives.,e.g. "canon" will be found searching for "cañon". - -## Upgrade Instructions - -1. You will need to replace or modify your `schema.xml` and restart solr. Re-indexing is required to get full-functionality from this change - the standard instructions for an incremental reindex could be added here. - \ No newline at end of file diff --git a/doc/sphinx-guides/source/admin/harvestserver.rst b/doc/sphinx-guides/source/admin/harvestserver.rst index 73b458ee84b..95e68168893 100644 --- a/doc/sphinx-guides/source/admin/harvestserver.rst +++ b/doc/sphinx-guides/source/admin/harvestserver.rst @@ -73,6 +73,14 @@ Some useful examples of search queries to define OAI sets: ``parentId:NNN`` + where NNN is the database id of the Dataverse collection object (consult the Dataverse table of the SQL database used by the application to verify the database id). + + Note that this query does **not** provide datasets that are linked into the specified Dataverse collection. + +- A query to create a set to include the datasets from a specific Dataverse collection including datasets that have been deposited into other Dataverse collections but linked into the specified Dataverse collection: + + ``subtreePaths:"/NNN"`` + where NNN is the database id of the Dataverse collection object (consult the Dataverse table of the SQL database used by the application to verify the database id). - A query to find all the dataset by a certain author: diff --git a/doc/sphinx-guides/source/admin/make-data-count.rst b/doc/sphinx-guides/source/admin/make-data-count.rst index 7a0840c2216..8d3dbba5127 100644 --- a/doc/sphinx-guides/source/admin/make-data-count.rst +++ b/doc/sphinx-guides/source/admin/make-data-count.rst @@ -129,7 +129,7 @@ Populate Views and Downloads Nightly Running ``main.py`` to create the SUSHI JSON file and the subsequent calling of the Dataverse Software API to process it should be added as a cron job. 
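+For orientation, a minimal crontab sketch is shown below; the installation paths, schedule, and log locations are illustrative assumptions, not part of the official setup:
+
+.. code-block:: bash
+
+   # Process new accesses and uploads nightly, and retrieve citations weekly
+   # (example paths; adjust to wherever the scripts live on your system):
+   0 1 * * * /usr/local/dataverse/counter_daily.sh >> /var/log/counter_daily.log 2>&1
+   0 2 * * 0 /usr/local/dataverse/counter_weekly.sh >> /var/log/counter_weekly.log 2>&1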
-The Dataverse Software provides example scripts that run the steps to process new accesses and uploads and update your Dataverse installation's database (`counter_daily.sh`) and to retrieve citations for all Datasets from DataCite (`counter_weekly.sh`). These scripts should be configured for your environment and can be run manually or as cron jobs. +The Dataverse Software provides example scripts that run the steps to process new accesses and uploads and update your Dataverse installation's database :download:`counter_daily.sh <../_static/util/counter_daily.sh>` and to retrieve citations for all Datasets from DataCite :download:`counter_weekly.sh <../_static/util/counter_weekly.sh>`. These scripts should be configured for your environment and can be run manually or as cron jobs. Sending Usage Metrics to the DataCite Hub ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/admin/solr-search-index.rst b/doc/sphinx-guides/source/admin/solr-search-index.rst index adf30b90425..5685672eceb 100644 --- a/doc/sphinx-guides/source/admin/solr-search-index.rst +++ b/doc/sphinx-guides/source/admin/solr-search-index.rst @@ -67,13 +67,13 @@ Reindexing Dataverse Collections Dataverse collections must be referenced by database object ID. If you have direct database access an SQL query such as -``select id from dataverse where alias='datavarsealias';`` +``select id from dataverse where alias='dataversealias';`` should work, or you may click the Dataverse Software's "Edit" menu and look for dataverseId= in the URLs produced by the drop-down. Then, to re-index: ``curl http://localhost:8080/api/admin/index/dataverses/135`` -which should return: _{"status":"OK","data":{"message":"starting reindex of dataverse 135"}}_ +which should return: *{"status":"OK","data":{"message":"starting reindex of dataverse 135"}}* Reindexing Datasets ++++++++++++++++++++ diff --git a/doc/sphinx-guides/source/admin/user-administration.rst b/doc/sphinx-guides/source/admin/user-administration.rst index bc9be64775f..867f06bde8e 100644 --- a/doc/sphinx-guides/source/admin/user-administration.rst +++ b/doc/sphinx-guides/source/admin/user-administration.rst @@ -44,6 +44,16 @@ Change User Identifier See :ref:`change-identifier-label` +Delete a User +------------- + +See :ref:`delete-a-user` + +Deactivate a User +----------------- + +See :ref:`deactivate-a-user` + Confirm Email ------------- diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst index 62c8046ad3f..af8b2f19015 100755 --- a/doc/sphinx-guides/source/api/client-libraries.rst +++ b/doc/sphinx-guides/source/api/client-libraries.rst @@ -37,3 +37,10 @@ Java https://github.com/IQSS/dataverse-client-java is the official Java library for Dataverse Software APIs. `Richard Adams `_ from `ResearchSpace `_ created and maintains this library. + +Ruby +---- + +https://github.com/libis/dataverse_api is a Ruby gem for Dataverse Software APIs. It is registered as a library on Rubygems (https://rubygems.org/search?query=dataverse). + +The gem is created and maintained by the LIBIS team (https://www.libis.be) at the University of Leuven (https://www.kuleuven.be). 
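+As an illustrative sketch only (the gem name here is assumed to match the repository name; check the Rubygems listing above for the published name), installation would be:
+
+.. code-block:: bash
+
+   # Install the Ruby client library from Rubygems (gem name is an assumption):
+   gem install dataverse_api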
diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index 9bafde1d819..5ee086382c0 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -87,7 +87,7 @@ Basic access URI: Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB* :: - GET http://$SERVER/api/access/datafile/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB + GET http://$SERVER/api/access/datafile/:persistentId?persistentId=doi:10.5072/FK2/J8SJZB Parameters: diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 94cb6c720c6..9f422f83c72 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -130,7 +130,7 @@ The fully expanded example above (without environment variables) looks like this Show Contents of a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -|CORS| Lists all the Dataverse collections and datasets directly under a Dataverse collection (direct children only, not recursive) specified by database id or alias. If you pass your API token and have access, unpublished Dataverse collections and datasets will be included in the response. +|CORS| Lists all the Dataverse collections and datasets directly under a Dataverse collection (direct children only, not recursive) specified by database id or alias. If you pass your API token and have access, unpublished Dataverse collections and datasets will be included in the response. The list will be ordered by database id within type of object. That is, all Dataverse collections will be listed first and ordered by database id, then all datasets will be listed ordered by database id. .. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. @@ -246,7 +246,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/roles --upload-file roles.json + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" https://demo.dataverse.org/api/dataverses/root/roles --upload-file roles.json Where ``roles.json`` looks like this:: @@ -259,6 +259,8 @@ Where ``roles.json`` looks like this:: ] } +.. note:: Only a Dataverse installation account with superuser permissions is allowed to create roles in a Dataverse Collection. + .. _list-role-assignments-on-a-dataverse-api: List Role Assignments in a Dataverse Collection @@ -1244,6 +1246,8 @@ When adding a file to a dataset, you can optionally specify the following: - The "File Path" of the file, indicating which folder the file should be uploaded to within the dataset. - Whether or not the file is restricted. +Note that when a Dataverse instance is configured to use S3 storage with direct upload enabled, there is API support to send a file directly to S3. This is more complex and is described in the :doc:`/developers/s3-direct-upload-api` guide. + In the curl example below, all of the above are specified but they are optional. .. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. @@ -1520,7 +1524,7 @@ The API will output the list of locks, for example:: If the dataset is not locked (or if there is no lock of the requested type), the API will return an empty list. 
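+Because scripted clients often need to wait for locks (e.g. an Ingest lock) to clear before making further changes, a polling loop like the following can be useful. This is an illustrative sketch only; it assumes ``jq`` is installed and uses an example dataset id:
+
+.. code-block:: bash
+
+   export SERVER_URL=https://demo.dataverse.org
+   export ID=24
+
+   # Poll the locks endpoint until the "data" array comes back empty:
+   until [ "$(curl -s "$SERVER_URL/api/datasets/$ID/locks" | jq '.data | length')" -eq 0 ]; do
+     sleep 5
+   done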
-The following API end point will lock a Dataset with a lock of specified type:
+The following API end point will lock a Dataset with a lock of specified type. Note that this requires "superuser" credentials:
.. code-block:: bash
@@ -1537,7 +1541,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/24/lock/Ingest
-Use the following API to unlock the dataset, by deleting all the locks currently on the dataset:
+Use the following API to unlock the dataset, by deleting all the locks currently on the dataset. Note that this requires "superuser" credentials:
.. code-block:: bash
@@ -1553,7 +1557,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/locks
-Or, to delete a lock of the type specified only:
+Or, to delete a lock of the type specified only. Note that this requires "superuser" credentials:
.. code-block:: bash
@@ -1957,6 +1961,8 @@ Replacing Files Replace an existing file where ``ID`` is the database id of the file to replace or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires the ``file`` to be passed as well as a ``jsonString`` expressing the new metadata. Note that metadata such as description, directoryLabel (File Path) and tags are not carried over from the file being replaced.
+Note that when a Dataverse instance is configured to use S3 storage with direct upload enabled, there is API support to send a replacement file directly to S3. This is more complex and is described in the :doc:`/developers/s3-direct-upload-api` guide.
+
A curl example using an ``ID`` .. code-block:: bash
@@ -2418,9 +2424,35 @@ Roles Create a New Role in a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Creates a new role in Dataverse collection object whose Id is ``dataverseIdtf`` (that's an id/alias)::
+Creates a new role under Dataverse collection ``id``. Needs a json file with the role description:
+
+.. code-block:: bash
+
+   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+   export SERVER_URL=https://demo.dataverse.org
+   export ID=root
+
+   curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-type:application/json" $SERVER_URL/api/dataverses/$ID/roles --upload-file roles.json
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+   curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" https://demo.dataverse.org/api/dataverses/root/roles --upload-file roles.json
+
+Where ``roles.json`` looks like this::
+
+   {
+      "alias": "sys1",
+      "name": "Restricted System Role",
+      "description": "A person who may only add datasets.",
+      "permissions": [
+         "AddDataset"
+      ]
+   }
+
+.. note:: Only a Dataverse installation account with superuser permissions is allowed to create roles in a Dataverse Collection.
- POST http://$SERVER/api/roles?dvo=$dataverseIdtf&key=$apiKey
Show Role ~~~~~~~~~ Shows the role with ``id``:: Delete Role ~~~~~~~~~~~
-Deletes the role with ``id``::
+A curl example using an ``ID``
+
+..
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/roles/$ID + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/roles/24 + +A curl example using a Role alias ``ALIAS`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ALIAS=roleAlias + + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/roles/:alias?alias=$ALIAS" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/roles/:alias?alias=roleAlias - DELETE http://$SERVER/api/roles/$id Explicit Groups --------------- @@ -2874,6 +2935,41 @@ Create Global Role Creates a global role in the Dataverse installation. The data POSTed are assumed to be a role JSON. :: POST http://$SERVER/api/admin/roles + +Delete Global Role +~~~~~~~~~~~~~~~~~~ + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/admin/roles/$ID + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/admin/roles/24 + +A curl example using a Role alias ``ALIAS`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ALIAS=roleAlias + + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/admin/roles/:alias?alias=$ALIAS" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/admin/roles/:alias?alias=roleAlias List Users ~~~~~~~~~~ @@ -3055,6 +3151,8 @@ Example: ``curl -H "X-Dataverse-key: $API_TOKEN" -X POST http://demo.dataverse.o This action moves account data from jsmith2 into the account jsmith and deletes the account of jsmith2. +Note: User accounts can only be merged if they are either both active or both deactivated. See :ref:`deactivate a user`. + .. _change-identifier-label: Change User Identifier @@ -3074,7 +3172,9 @@ Make User a SuperUser Toggles superuser mode on the ``AuthenticatedUser`` whose ``identifier`` (without the ``@`` sign) is passed. :: POST http://$SERVER/api/admin/superuser/$identifier - + +.. _delete-a-user: + Delete a User ~~~~~~~~~~~~~ @@ -3086,9 +3186,104 @@ Deletes an ``AuthenticatedUser`` whose ``id`` is passed. :: DELETE http://$SERVER/api/admin/authenticatedUsers/id/$id -Note: If the user has performed certain actions such as creating or contributing to a Dataset or downloading a file they cannot be deleted. - - +Note: If the user has performed certain actions such as creating or contributing to a Dataset or downloading a file they cannot be deleted. To see where in the database these actions are stored you can use the :ref:`show-user-traces-api` API. 
If a user cannot be deleted for this reason, you can choose to :ref:`deactivate a user`.
+
+.. _deactivate-a-user:
+
+Deactivate a User
+~~~~~~~~~~~~~~~~~
+
+Deactivates a user. A superuser API token is not required but the command will operate using the first superuser it finds.
+
+.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below.
+
+.. code-block:: bash
+
+   export SERVER_URL=http://localhost:8080
+   export USERNAME=jdoe
+
+   curl -X POST $SERVER_URL/api/admin/authenticatedUsers/$USERNAME/deactivate
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+   curl -X POST http://localhost:8080/api/admin/authenticatedUsers/jdoe/deactivate
+
+The database ID of the user can be passed instead of the username.
+
+.. code-block:: bash
+
+   export SERVER_URL=http://localhost:8080
+   export USERID=42
+
+   curl -X POST $SERVER_URL/api/admin/authenticatedUsers/id/$USERID/deactivate
+
+Note: A primary purpose of most Dataverse installations is to serve an archive. In the archival space, there are best practices around the tracking of data access and the tracking of modifications to data and metadata. In support of these key workflows, a simple mechanism to delete users that have performed edit or access actions in the system is not provided. Providing a Deactivate User endpoint for users who have taken certain actions in the system alongside a Delete User endpoint to remove users that haven't taken certain actions in the system is by design.
+
+This is an irreversible action. There is no option to undeactivate a user.
+
+Deactivating a user with this endpoint will:
+
+- Deactivate the user's ability to log in to the Dataverse installation. A message will be shown, stating that the account has been deactivated. The user will not be able to create a new account with the same email address, ORCID, Shibboleth, or other login type.
+- Deactivate the user's ability to use the API
+- Remove the user's access from all Dataverse collections, datasets and files
+- Prevent a user from being assigned any roles
+- Cancel any pending file access requests generated by the user
+- Remove the user from all groups
+- Stop notifications from being generated or sent by the Dataverse installation
+- Prevent the account from being converted into an OAuth or Shibboleth account.
+- Prevent the user from becoming a superuser.
+
+Deactivating a user with this endpoint will keep:
+
+- The user's contributions to datasets, including dataset creation, file uploads, and publishing.
+- The user's access history to datafiles in the Dataverse installation, including guestbook records.
+- The user's account information (specifically name, email, affiliation, and position)
+
+.. _show-user-traces-api:
+
+Show User Traces
+~~~~~~~~~~~~~~~~
+
+Show the traces that the user has left in the system, such as datasets created, guestbooks filled out, etc. This can be useful for understanding why a user cannot be deleted. A superuser API token is required.
+
+.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below.
+
+.. code-block:: bash
+
+   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+   export SERVER_URL=https://demo.dataverse.org
+   export USERNAME=jdoe
+
+   curl -H "X-Dataverse-key:$API_TOKEN" -X GET $SERVER_URL/api/users/$USERNAME/traces
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+   curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X GET https://demo.dataverse.org/api/users/jdoe/traces
+
+Remove All Roles from a User
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Removes all roles from the user. This is the equivalent of clicking the "Remove All Roles" button in the superuser dashboard. Note that you can preview the roles that will be removed with the :ref:`show-user-traces-api` API. A superuser API token is required.
+
+.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below.
+
+.. code-block:: bash
+
+   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+   export SERVER_URL=https://demo.dataverse.org
+   export USERNAME=jdoe
+
+   curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/users/$USERNAME/removeRoles
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+   curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/users/jdoe/removeRoles
List Role Assignments of a Role Assignee ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index dfae614bf14..661285dff73 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py
@@ -65,9 +65,9 @@ # built documents. # # The short X.Y version.
-version = '5.3'
+version = '5.4.1'
# The full version, including alpha/beta/rc tags.
-release = '5.3'
+release = '5.4.1'
# The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages.
diff --git a/doc/sphinx-guides/source/developers/index.rst b/doc/sphinx-guides/source/developers/index.rst index 184d8aff85a..eebfd50ba35 100755 --- a/doc/sphinx-guides/source/developers/index.rst +++ b/doc/sphinx-guides/source/developers/index.rst
@@ -34,4 +34,5 @@ Developer Guide selinux big-data-support aux-file-support
+  s3-direct-upload-api
workflows
diff --git a/doc/sphinx-guides/source/developers/remote-users.rst b/doc/sphinx-guides/source/developers/remote-users.rst index c85571a55c0..3f8dd836661 100755 --- a/doc/sphinx-guides/source/developers/remote-users.rst +++ b/doc/sphinx-guides/source/developers/remote-users.rst
@@ -10,7 +10,7 @@ Shibboleth and OAuth If you are working on anything related to users, please keep in mind that your changes will likely affect Shibboleth and OAuth users. For some background on user accounts in the Dataverse Software, see the :ref:`auth-modes` section of Configuration in the Installation Guide.
-Rather than setting up Shibboleth on your laptop, developers are advised to simply add a value to their database to enable Shibboleth "dev mode" like this:
+Rather than setting up Shibboleth on your laptop, developers are advised to add the Shibboleth auth provider (see "Add the Shibboleth Authentication Provider to Your Dataverse Installation" at :doc:`/installation/shibboleth`) and add a value to their database to enable Shibboleth "dev mode" like this:
``curl http://localhost:8080/api/admin/settings/:DebugShibAccountType -X PUT -d RANDOM``
diff --git a/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst b/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst new file mode 100644 index 00000000000..9f2386facb1 --- /dev/null +++ b/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst
@@ -0,0 +1,146 @@
+Direct DataFile Upload/Replace API
+==================================
+
+The direct Datafile Upload API is used internally to support direct upload of files to S3 storage and by tools such as the DVUploader.
+
+Direct upload involves a series of three activities, each involving interaction with the server for a Dataverse installation:
+
+* Requesting initiation of a transfer from the server
+* Use of the pre-signed URL(s) returned in that call to perform an upload/multipart-upload of the file to S3
+* A call to the server to register the file as part of the dataset/replace a file in the dataset or to cancel the transfer
+
+This API is only enabled when a Dataset is configured with a data store supporting direct S3 upload.
+Administrators should be aware that partial transfers, where a client starts uploading the file/parts of the file and does not contact the server to complete/cancel the transfer, will result in data stored in S3 that is not referenced in the Dataverse installation (i.e. it should be considered temporary and may be deleted).
+
+
+Requesting Direct Upload of a DataFile
+--------------------------------------
+To initiate a transfer of a file to S3, make a call to the Dataverse installation indicating the size of the file to upload. The response will include one or more pre-signed URLs that allow the client to transfer the file. Pre-signed URLs include a short-lived token authorizing the action represented by the URL.
+
+.. code-block:: bash
+
+   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+   export SERVER_URL=https://demo.dataverse.org
+   export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV
+   export SIZE=1000000000
+
+   curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/uploadurls?persistentId=$PERSISTENT_IDENTIFIER&size=$SIZE"
+
+The response to this call, assuming direct uploads are enabled, will be one of two forms:
+
+Single URL: when the file is smaller than the size at which uploads must be broken into multiple parts
+
+.. code-block:: bash
+
+   {
+      "status":"OK",
+      "data":{
+         "url":"...",
+         "partSize":1073741824,
+         "storageIdentifier":"s3://demo-dataverse-bucket:177883619b8-892ca9f7112e"
+      }
+   }
+
+Multiple URLs: when the file must be uploaded in multiple parts. The part size is set by the Dataverse installation and, for AWS-based storage, ranges from 5 MB to 5 GB
+
+.. code-block:: bash
+
+   {
+      "status":"OK",
+      "data":{
+         "urls":{
+            "1":"...",
+            "2":"...",
+            "3":"...",
+            "4":"...",
+            "5":"..."
+         },
+         "abort":"/api/datasets/mpupload?...",
+         "complete":"/api/datasets/mpupload?...",
+ "partSize":1073741824, + "storageIdentifier":"s3://demo-dataverse-bucket:177883b000e-49cedef268ac" + } + +In the example responses above, the URLs, which are very long, have been omitted. These URLs reference the S3 server and the specific object identifier that will be used, starting with, for example, https://demo-dataverse-bucket.s3.amazonaws.com/10.5072/FK2FOQPJS/177883b000e-49cedef268ac?... + +The client must then use the URL(s) to PUT the file, or if the file is larger than the specified partSize, parts of the file. + +In the single part case, only one call to the supplied URL is required: + +.. code-block:: bash + + curl -H 'x-amz-tagging:dv-state=temp' -X PUT -T "" + + +In the multipart case, the client must send each part and collect the 'eTag' responses from the server. The calls for this are the same as the one for the single part case except that each call should send a slice of the total file, with the last part containing the remaining bytes. +The responses from the S3 server for these calls will include the 'eTag' for the uploaded part. + +To successfully conclude the multipart upload, the client must call the 'complete' URI, sending a json object including the part eTags: + +.. code-block:: bash + + curl -X PUT "$SERVER_URL/api/datasets/mpload?..." -d '{"1":"","2":"","3":"","4":"","5":""}' + +If the client is unable to complete the multipart upload, it should call the abort URL: + +.. code-block:: bash + + curl -X DELETE "$SERVER_URL/api/datasets/mpload?..." + + +Adding the Uploaded file to the Dataset +--------------------------------------- + +Once the file exists in the s3 bucket, a final API call is needed to add it to the Dataset. This call is the same call used to upload a file to a Dataverse installation but, rather than sending the file bytes, additional metadata is added using the "jsonData" parameter. +jsonData normally includes information such as a file description, tags, provenance, whether the file is restricted, etc. For direct uploads, the jsonData object must also include values for: + +* "storageIdentifier" - String, as specified in prior calls +* "fileName" - String +* "mimeType" - String +* fixity/checksum: either: + + * "md5Hash" - String with MD5 hash value, or + * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings + +The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.DataFile.CheckSumType class and currently include MD5, SHA-1, SHA-256, and SHA-512 + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV + export JSON_DATA="{'description':'My description.','directoryLabel':'data/subdir1','categories':['Data'], 'restrict':'false', 'storageIdentifier':'s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42', 'fileName':'file1.txt', 'mimeType':'text/plain', 'checksum': {'@type': 'SHA-1', '@value': '123456'}}" + + curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA" + +Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. 
+
+Replacing an existing file in the Dataset
+-----------------------------------------
+
+Once the file exists in the S3 bucket, a final API call is needed to register it as a replacement of an existing file. This call is the same call used to replace a file in a Dataverse installation but, rather than sending the file bytes, additional metadata is added using the "jsonData" parameter.
+jsonData normally includes information such as a file description, tags, provenance, whether the file is restricted, whether to allow the mimetype to change (forceReplace=true), etc. For direct uploads, the jsonData object must also include values for:
+
+* "storageIdentifier" - String, as specified in prior calls
+* "fileName" - String
+* "mimeType" - String
+* fixity/checksum: either:
+
+  * "md5Hash" - String with MD5 hash value, or
+  * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings
+
+The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.DataFile.CheckSumType class and currently include MD5, SHA-1, SHA-256, and SHA-512.
+Note that the API call does not validate that the file matches the hash value supplied. If a Dataverse instance is configured to validate file fixity hashes at publication time, a mismatch would be caught at that time and cause publication to fail.
+
+.. code-block:: bash
+
+   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+   export SERVER_URL=https://demo.dataverse.org
+   export FILE_IDENTIFIER=5072
+   export JSON_DATA="{'description':'My description.','directoryLabel':'data/subdir1','categories':['Data'], 'restrict':'false', 'forceReplace':'true', 'storageIdentifier':'s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42', 'fileName':'file1.txt', 'mimeType':'text/plain', 'checksum': {'@type': 'SHA-1', '@value': '123456'}}"
+
+   curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/files/$FILE_IDENTIFIER/replace" -F "jsonData=$JSON_DATA"
+
+Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method.
+With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
diff --git a/doc/sphinx-guides/source/developers/workflows.rst b/doc/sphinx-guides/source/developers/workflows.rst index 0550aa5653d..c982edc08bb 100644 --- a/doc/sphinx-guides/source/developers/workflows.rst +++ b/doc/sphinx-guides/source/developers/workflows.rst
@@ -18,12 +18,14 @@ Steps can be internal (say, writing some data to the log) or external. External The external system reports the step result back to the Dataverse installation, by sending an HTTP ``POST`` command to ``api/workflows/{invocation-id}`` with Content-Type: text/plain. The body of the request is passed to the paused step for further processing.
+Steps can define messages to send to the log and to users. If defined, the message to users is sent as a user notification (creating an email and showing in the user notification tab) and will show once for the given user if/when they view the relevant dataset page. The latter provides a means for the asynchronous workflow execution to report success or failure analogous to the way the publication and other processes report on the page.
+
If a step in a workflow fails, the Dataverse installation makes an effort to roll back all the steps that preceded it. Some actions, such as writing to the log, cannot be rolled back. If such an action has a public external effect (e.g. sending an email to a mailing list) it is advisable to put it in the post-release workflow.
.. tip:: For invoking external systems using a REST API, the Dataverse Software's internal step provider offers two steps for sending and receiving customizable HTTP requests.
-   *http/sr* and *http/authExt*, detailed below, with the latter able to use the API to make changes to the dataset being processed. Both lock the dataset to prevent other processes from changing the dataset between the time the step is launched to when the external process responds to the Dataverse instance.
+   *http/sr* and *http/authExt*, detailed below, with the latter able to use the API to make changes to the dataset being processed. (Both lock the dataset to prevent other processes from changing the dataset from the time the step is launched until the external process responds to the Dataverse instance.)
Administration ~~~~~~~~~~~~~~
@@ -70,6 +72,23 @@ The pause step is intended for testing - the invocationId required to end the pa "stepType":"pause" }
+pause/message
++++++++++++++
+
+A variant of the pause step that pauses the workflow and allows the external process to send a success/failure message. The workflow is paused until a POST request is sent to ``/api/workflows/{invocation-id}``.
+The response in the POST body (Content-type:application/json) should be a json object (the same as for the http/extauth step) containing:
+- "status" - can be "success" or "failure"
+- "reason" - a message that will be logged
+- "message" - a message to send to the user that will be sent as a notification and as a banner on the relevant dataset page.
+An unparsable response will be considered a failure that will be logged with no user message. (See the http/authext step for an example POST call.)
+
+.. code:: json
+
+   {
+      "provider":":internal",
+      "stepType":"pause/message"
+   }
+
http/sr +++++++
@@ -113,11 +132,20 @@ The invocationId must be sent as an 'X-Dataverse-invocationId' HTTP Header or as Once this step completes and responds, the invocationId is invalidated and will not allow further access. The url, content type, and message body can use data from the workflow context, using a simple markup language. This step has specific parameters for rollback.
-The workflow is restarted when the external system replies with a POST request to ``/api/workflows/{invocation-id}``.
+The workflow is restarted when the external system replies with a POST request to ``/api/workflows/{invocation-id}`` (Content-Type: application/json).
+
The response is expected to be a json object with three keys:
-- "Status" - can be "Success" or "Failure"
-- "Reason" - a message that will be logged
-- "Message" - a message to send to the user (message sending is not yet implemented).
+- "status" - can be "success" or "failure" +- "reason" - a message that will be logged +- "message" - a message to send to the user that will be sent as a notification and as a banner on the relevant dataset page. + +.. code-block:: bash + + export INVOCATION_ID=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export MESSAGE={"status":"success", "reason":"Workflow completed in 10 seconds", "message":"An external workflow to virus check your data was successfully run prior to publication of your data"} + + curl -H 'Content-Type:application/json' -X POST -d $MESSAGE "$SERVER_URL/api/workflows/$INVOCATION_ID" .. code:: json diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index bc038489c42..b8571cd06c5 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -594,7 +594,7 @@ You may also want to look at samples at https://github.com/shlake/LibraDataHomep A simpler option to brand and customize your installation is to utilize the Dataverse collection theme, which each Dataverse collection has, that allows you to change colors, add a logo, tagline or website link to the Dataverse collection header section of the page. Those options are outlined in the :doc:`/user/dataverse-management` section of the User Guide. Custom Homepage -++++++++++++++++ ++++++++++++++++ The Dataverse Software allows you to use a custom homepage or welcome page in place of the default root Dataverse collection page. This allows for complete control over the look and feel of your installation's homepage. @@ -613,7 +613,7 @@ For more background on what this curl command above is doing, see the "Database ``curl -X DELETE http://localhost:8080/api/admin/settings/:HomePageCustomizationFile`` Custom Navbar Logo -+++++++++++++++++++ +++++++++++++++++++ The Dataverse Software allows you to replace the default Dataverse Project icon and name branding in the navbar with your own custom logo. Note that this logo is separate from the *root dataverse theme* logo. @@ -667,7 +667,7 @@ Internationalization The Dataverse Software is being translated into multiple languages by the Dataverse Project Community! Please see below for how to help with this effort! Adding Multiple Languages to the Dropdown in the Header -++++++++++++++++++++++++++++++++++++++++++++++++++++++++ ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ The presence of the :ref:`:Languages` database setting adds a dropdown in the header for multiple languages. For example to add English and French to the dropdown: @@ -1465,7 +1465,7 @@ Please note that the authority cannot have a slash ("/") in it. .. _:Shoulder: :Shoulder -++++++++++++ ++++++++++ Out of the box, the DOI shoulder is set to "FK2/" but this is for testing only! When you apply for your DOI namespace, you may have requested a shoulder. The following is only an example and a trailing slash is optional. 
@@ -1509,9 +1509,9 @@ The chart below shows examples from each possible combination of parameters from | | randomString | sequentialNumber | | | | | +=================+===============+==================+ -| **DEPENDENT** | TJCLKP/MLGWJO | 100001/1 | +| **DEPENDENT** | TJCLKP/MLGWJO | 100001/1 | +-----------------+---------------+------------------+ -| **INDEPENDENT** | MLGWJO | 100002 | +| **INDEPENDENT** | MLGWJO | 100002 | +-----------------+---------------+------------------+ As seen above, in cases where ``:IdentifierGenerationStyle`` is set to *sequentialNumber* and ``:DataFilePIDFormat`` is set to *DEPENDENT*, each file within a dataset will be assigned a number *within* that dataset starting with "1". @@ -1536,7 +1536,7 @@ Note: File-level PID registration was added in Dataverse Software 4.9; it could .. _:IndependentHandleService: :IndependentHandleService -+++++++++++++++++++++++++++ ++++++++++++++++++++++++++ Specific for Handle PIDs. Set this setting to true if you want to use a Handle service which is setup to work 'independently' (No communication with the Global Handle Registry). By default this setting is absent and the Dataverse Software assumes it to be false. @@ -2035,7 +2035,7 @@ Set the name of the cloud environment you've integrated with your Dataverse inst .. _:PublicInstall: :PublicInstall -+++++++++++++++++++++ +++++++++++++++ Setting an installation to public will remove the ability to restrict data files or datasets. This functionality of the Dataverse Software will be disabled from your installation. @@ -2224,3 +2224,15 @@ This is the local file system path to be used with the LocalSubmitToArchiveComma +++++++++++++++++++ These are the bucket and project names to be used with the GoogleCloudSubmitToArchiveCommand class. Further information is in the :ref:`Google Cloud Configuration` section above. + +.. _:InstallationName: + +:InstallationName ++++++++++++++++++ + +By default, the name of the root Dataverse collection is used as the 'brandname' of the repository, i.e. in emails and metadata exports. If set, :InstallationName overrides this default, allowing the root collection name and brandname to be set independently. (Note that, since metadata export files are cached, they will have to be reexported (see :doc:`/admin/metadataexport`) before they incorporate a change in this setting.) + +:ExportInstallationAsDistributorOnlyWhenNotSet +++++++++++++++++++++++++++++++++++++++++++++++ + +In the DDI metadata exports, the default behavior is to always add the repository (using its brandname - the root collection name or the value of :ref:`:InstallationName <:InstallationName>`) to the stdyDscr/distStmt/distrbtr element. If this setting is true, this will only be done when a Distributor is not already defined in the Dataset metadata. (Note that, since metadata export files are cached, they will have to be reexported (see :doc:`/admin/metadataexport`) before they incorporate a change in this setting.) \ No newline at end of file diff --git a/doc/sphinx-guides/source/user/account.rst b/doc/sphinx-guides/source/user/account.rst index 18a44bcb85d..4c343ff85d4 100755 --- a/doc/sphinx-guides/source/user/account.rst +++ b/doc/sphinx-guides/source/user/account.rst @@ -99,6 +99,8 @@ If you already have a Dataverse installation account associated with the Usernam #. Enter your current password for your Dataverse installation account and click "Convert Account". #. Now you have finished converting your Dataverse installation account to use your institutional log in. 
+Note that you cannot go through this conversion process if your Dataverse installation account associated with the Username/Email log in option has been deactivated. + Convert your Dataverse installation account away from your Institutional Log In ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -132,6 +134,8 @@ If you already have a Dataverse installation account associated with the Usernam #. Enter your username and password for your Dataverse installation account and click "Convert Account". #. Now you have finished converting your Dataverse installation account to use ORCID for log in. +Note that you cannot go through this conversion process if your Dataverse installation account associated with the Username/Email log in option has been deactivated. + Convert your Dataverse installation account away from ORCID for log in ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst index f64ed23e776..a74192cd02e 100755 --- a/doc/sphinx-guides/source/versions.rst +++ b/doc/sphinx-guides/source/versions.rst @@ -6,8 +6,10 @@ Dataverse Software Documentation Versions This list provides a way to refer to the documentation for previous versions of the Dataverse Software. In order to learn more about the updates delivered from one version to another, visit the `Releases `__ page in our GitHub repo. -- 5.3 +- 5.4.1 +- `5.4 `__ +- `5.3 `__ - `5.2 `__ - `5.1.1 `__ - `5.1 `__ diff --git a/pom.xml b/pom.xml index ed7e800f797..f33ba575009 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ --> edu.harvard.iq dataverse - 5.3 + 5.4.1 war dataverse diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv index d63390e9866..375a8c67cec 100644 --- a/scripts/api/data/metadatablocks/citation.tsv +++ b/scripts/api/data/metadatablocks/citation.tsv @@ -137,188 +137,188 @@ authorIdentifierScheme ResearcherID 6 authorIdentifierScheme ScopusID 7 language Abkhaz 0 - language Afar 1 - language Afrikaans 2 - language Akan 3 - language Albanian 4 - language Amharic 5 - language Arabic 6 - language Aragonese 7 - language Armenian 8 - language Assamese 9 - language Avaric 10 - language Avestan 11 - language Aymara 12 - language Azerbaijani 13 - language Bambara 14 - language Bashkir 15 - language Basque 16 - language Belarusian 17 + language Afar 1 aar + language Afrikaans 2 afr + language Akan 3 aka + language Albanian 4 sqi + language Amharic 5 amh + language Arabic 6 ara + language Aragonese 7 arg + language Armenian 8 hye + language Assamese 9 asm + language Avaric 10 ava + language Avestan 11 ave + language Aymara 12 aym + language Azerbaijani 13 aze + language Bambara 14 bam + language Bashkir 15 bak + language Basque 16 eus + language Belarusian 17 bel language Bengali, Bangla 18 language Bihari 19 - language Bislama 20 - language Bosnian 21 - language Breton 22 - language Bulgarian 23 - language Burmese 24 + language Bislama 20 bis + language Bosnian 21 bos + language Breton 22 bre + language Bulgarian 23 bul + language Burmese 24 mya language Catalan,Valencian 25 - language Chamorro 26 - language Chechen 27 + language Chamorro 26 cha + language Chechen 27 che language Chichewa, Chewa, Nyanja 28 - language Chinese 29 - language Chuvash 30 - language Cornish 31 - language Corsican 32 - language Cree 33 - language Croatian 34 - language Czech 35 - language Danish 36 + language Chinese 29 zho + language Chuvash 30 chv + language 
Cornish 31 cor + language Corsican 32 cos + language Cree 33 cre + language Croatian 34 hrv + language Czech 35 ces + language Danish 36 dan language Divehi, Dhivehi, Maldivian 37 - language Dutch 38 - language Dzongkha 39 - language English 40 - language Esperanto 41 - language Estonian 42 - language Ewe 43 - language Faroese 44 - language Fijian 45 - language Finnish 46 - language French 47 + language Dutch 38 nld + language Dzongkha 39 dzo + language English 40 eng + language Esperanto 41 epo + language Estonian 42 est + language Ewe 43 ewe + language Faroese 44 fao + language Fijian 45 fij + language Finnish 46 fin + language French 47 fra language Fula, Fulah, Pulaar, Pular 48 - language Galician 49 - language Georgian 50 - language German 51 + language Galician 49 glg + language Georgian 50 kat + language German 51 deu language Greek (modern) 52 language Guaraní 53 - language Gujarati 54 + language Gujarati 54 guj language Haitian, Haitian Creole 55 - language Hausa 56 + language Hausa 56 hau language Hebrew (modern) 57 - language Herero 58 - language Hindi 59 - language Hiri Motu 60 - language Hungarian 61 + language Herero 58 her + language Hindi 59 hin + language Hiri Motu 60 hmo + language Hungarian 61 hun language Interlingua 62 - language Indonesian 63 - language Interlingue 64 - language Irish 65 - language Igbo 66 - language Inupiaq 67 - language Ido 68 - language Icelandic 69 - language Italian 70 - language Inuktitut 71 - language Japanese 72 - language Javanese 73 + language Indonesian 63 ind + language Interlingue 64 ile + language Irish 65 gle + language Igbo 66 ibo + language Inupiaq 67 ipk + language Ido 68 ido + language Icelandic 69 isl + language Italian 70 ita + language Inuktitut 71 iku + language Japanese 72 jpn + language Javanese 73 jav language Kalaallisut, Greenlandic 74 - language Kannada 75 - language Kanuri 76 - language Kashmiri 77 - language Kazakh 78 - language Khmer 79 + language Kannada 75 kan + language Kanuri 76 kau + language Kashmiri 77 kas + language Kazakh 78 kaz + language Khmer 79 khm language Kikuyu, Gikuyu 80 - language Kinyarwanda 81 + language Kinyarwanda 81 kin language Kyrgyz 82 - language Komi 83 - language Kongo 84 - language Korean 85 - language Kurdish 86 + language Komi 83 kom + language Kongo 84 kon + language Korean 85 kor + language Kurdish 86 kur language Kwanyama, Kuanyama 87 - language Latin 88 + language Latin 88 lat language Luxembourgish, Letzeburgesch 89 - language Ganda 90 + language Ganda 90 lug language Limburgish, Limburgan, Limburger 91 - language Lingala 92 - language Lao 93 - language Lithuanian 94 - language Luba-Katanga 95 - language Latvian 96 - language Manx 97 - language Macedonian 98 - language Malagasy 99 + language Lingala 92 lin + language Lao 93 lao + language Lithuanian 94 lit + language Luba-Katanga 95 lub + language Latvian 96 lav + language Manx 97 glv + language Macedonian 98 mkd + language Malagasy 99 mlg language Malay 100 - language Malayalam 101 - language Maltese 102 + language Malayalam 101 mal + language Maltese 102 mlt language MÄori 103 language Marathi (MarÄá¹­hÄ«) 104 - language Marshallese 105 - language Mixtepec Mixtec 106 - language Mongolian 107 - language Nauru 108 + language Marshallese 105 mah + language Mixtepec Mixtec 106 mix + language Mongolian 107 mon + language Nauru 108 nau language Navajo, Navaho 109 language Northern Ndebele 110 language Nepali 111 - language Ndonga 112 - language Norwegian BokmÃ¥l 113 - language Norwegian Nynorsk 114 - language Norwegian 115 + language 
Ndonga 112 ndo + language Norwegian Bokmål 113 nob + language Norwegian Nynorsk 114 nno + language Norwegian 115 nor language Nuosu 116 language Southern Ndebele 117 language Occitan 118 language Ojibwe, Ojibwa 119 language Old Church Slavonic,Church Slavonic,Old Bulgarian 120 - language Oromo 121 + language Oromo 121 orm language Oriya 122 language Ossetian, Ossetic 123 language Panjabi, Punjabi 124 language Pāli 125 language Persian (Farsi) 126 - language Polish 127 + language Polish 127 pol language Pashto, Pushto 128 - language Portuguese 129 - language Quechua 130 - language Romansh 131 + language Portuguese 129 por + language Quechua 130 que + language Romansh 131 roh language Kirundi 132 - language Romanian 133 - language Russian 134 + language Romanian 133 ron + language Russian 134 rus language Sanskrit (Saṃskṛta) 135 - language Sardinian 136 - language Sindhi 137 - language Northern Sami 138 - language Samoan 139 - language Sango 140 - language Serbian 141 + language Sardinian 136 srd + language Sindhi 137 snd + language Northern Sami 138 sme + language Samoan 139 smo + language Sango 140 sag + language Serbian 141 srp language Scottish Gaelic, Gaelic 142 - language Shona 143 + language Shona 143 sna language Sinhala, Sinhalese 144 - language Slovak 145 + language Slovak 145 slk language Slovene 146 - language Somali 147 - language Southern Sotho 148 + language Somali 147 som + language Southern Sotho 148 sot language Spanish, Castilian 149 - language Sundanese 150 + language Sundanese 150 sun language Swahili 151 - language Swati 152 - language Swedish 153 - language Tamil 154 - language Telugu 155 - language Tajik 156 - language Thai 157 - language Tigrinya 158 + language Swati 152 ssw + language Swedish 153 swe + language Tamil 154 tam + language Telugu 155 tel + language Tajik 156 tgk + language Thai 157 tha + language Tigrinya 158 tir language Tibetan Standard, Tibetan, Central 159 - language Turkmen 160 - language Tagalog 161 - language Tswana 162 - language Tonga (Tonga Islands) 163 - language Turkish 164 - language Tsonga 165 - language Tatar 166 - language Twi 167 - language Tahitian 168 + language Turkmen 160 tuk + language Tagalog 161 tgl + language Tswana 162 tsn + language Tonga (Tonga Islands) 163 ton + language Turkish 164 tur + language Tsonga 165 tso + language Tatar 166 tat + language Twi 167 twi + language Tahitian 168 tah language Uyghur, Uighur 169 - language Ukrainian 170 - language Urdu 171 - language Uzbek 172 - language Venda 173 - language Vietnamese 174 - language Volapük 175 - language Walloon 176 - language Welsh 177 - language Wolof 178 - language Western Frisian 179 - language Xhosa 180 - language Yiddish 181 - language Yoruba 182 + language Ukrainian 170 ukr + language Urdu 171 urd + language Uzbek 172 uzb + language Venda 173 ven + language Vietnamese 174 vie + language Volapük 175 vol + language Walloon 176 wln + language Welsh 177 cym + language Wolof 178 wol + language Western Frisian 179 fry + language Xhosa 180 xho + language Yiddish 181 yid + language Yoruba 182 yor language Zhuang, Chuang 183 - language Zulu 184 + language Zulu 184 zul language Not applicable 185 diff --git a/scripts/api/data/role-test-addRole.json b/scripts/api/data/role-test-addRole.json new file mode 100644 index 00000000000..7923eed916b --- /dev/null +++ b/scripts/api/data/role-test-addRole.json @@ -0,0 +1,10 @@ +{ + "alias":"testRole", + "name":"Test Role", + "description":"Test Role for adding/deleting.", + "permissions":[ + "ViewUnpublishedDataset", + 
"ViewUnpublishedDataverse", + "DownloadFile" + ] +} diff --git a/scripts/dev/dev-rebuild.sh b/scripts/dev/dev-rebuild.sh index 0a053c1d91d..f98edc8d938 100755 --- a/scripts/dev/dev-rebuild.sh +++ b/scripts/dev/dev-rebuild.sh @@ -52,9 +52,6 @@ cd scripts/api ./setup-all.sh --insecure -p=admin1 | tee /tmp/setup-all.sh.out cd ../.. -echo "Loading SQL reference data..." -psql -U $DB_USER $DB_NAME -f scripts/database/reference_data.sql - echo "Creating SQL sequence..." psql -U $DB_USER $DB_NAME -f doc/sphinx-guides/source/_static/util/createsequence.sql diff --git a/scripts/issues/7687/file_access_flag_update_bug.txt b/scripts/issues/7687/file_access_flag_update_bug.txt new file mode 100644 index 00000000000..a96b9dfab1b --- /dev/null +++ b/scripts/issues/7687/file_access_flag_update_bug.txt @@ -0,0 +1,11 @@ +-- this query will identify datasets where a superuser has run the Curate command and the update included a change to the fileaccessrequest flag, resulting in the file access request updates not being reflected in the published version + +select da.id, dv.id, ta.id, da.fileaccessrequest, ta.fileaccessrequest, dv.releasetime +from datasetversion dv, termsofuseandaccess ta, dataset da +where dv.dataset_id=da.id +and dv.termsofuseandaccess_id=ta.id +and ta.fileaccessrequest != da.fileaccessrequest +and dv.versionstate='RELEASED' +and dv.releasetime in (select max(releasetime) +from datasetversion +where dataset_id=da.id); \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java index 6305b871cc0..f6cbd01ece0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java @@ -62,7 +62,7 @@ protected Map addBasicMetadata(DvObject dvObjectIn, Map met metadataTemplate.setContacts(dataset.getLatestVersion().getDatasetContacts()); metadataTemplate.setProducers(dataset.getLatestVersion().getDatasetProducers()); metadataTemplate.setTitle(dvObject.getCurrentName()); - String producerString = dataverseService.findRootDataverse().getName(); + String producerString = dataverseService.getRootDataverseName(); if (producerString.isEmpty() || producerString.equals(DatasetField.NA_VALUE) ) { producerString = UNAVAILABLE; } diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java index 0b868c73c42..c88fac00c27 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java @@ -100,7 +100,7 @@ public void setDatasetFieldType(DatasetFieldType datasetFieldType) { this.datasetFieldType = datasetFieldType; } - @OneToMany(mappedBy = "controlledVocabularyValue", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) + @OneToMany(mappedBy = "controlledVocabularyValue", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}, orphanRemoval=true) private Collection controlledVocabAlternates = new ArrayList<>(); public Collection getControlledVocabAlternates() { diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java index ba503a18d22..815733f1b7b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java @@ -6,6 +6,8 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.AbstractGlobalIdServiceBean.GlobalIdMetadataTemplate; +import edu.harvard.iq.dataverse.branding.BrandingUtil; + import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; @@ -205,7 +207,7 @@ public static String getMetadataFromDvObject(String identifier, Map findByCreatorId(Long creatorId) { + return em.createNamedQuery("DataFile.findByCreatorId").setParameter("creatorId", creatorId).getResultList(); + } + + public List findByReleaseUserId(Long releaseUserId) { + return em.createNamedQuery("DataFile.findByReleaseUserId").setParameter("releaseUserId", releaseUserId).getResultList(); + } + public DataFile findReplacementFile(Long previousFileId){ Query query = em.createQuery("select object(o) from DataFile as o where o.previousDataFileId = :previousFileId"); query.setParameter("previousFileId", previousFileId); @@ -1363,55 +1371,6 @@ public boolean isFileClassPackage (DataFile file) { public void populateFileSearchCard(SolrSearchResult solrSearchResult) { solrSearchResult.setEntity(this.findCheapAndEasy(solrSearchResult.getEntityId())); } - - - /** - * Does this file have a replacement. - * Any file should have AT MOST 1 replacement - * - * @param df - * @return - * @throws java.lang.Exception if a DataFile has more than 1 replacement - * or is unpublished and has a replacement. - */ - public boolean hasReplacement(DataFile df) throws Exception{ - - if (df.getId() == null){ - // An unsaved file cannot have a replacment - return false; - } - - - List dataFiles = em.createQuery("select o from DataFile o" + - " WHERE o.previousDataFileId = :dataFileId", DataFile.class) - .setParameter("dataFileId", df.getId()) - .getResultList(); - - if (dataFiles.isEmpty()){ - return false; - } - - if (!df.isReleased()){ - // An unpublished SHOULD NOT have a replacment - String errMsg = "DataFile with id: [" + df.getId() + "] is UNPUBLISHED with a REPLACEMENT. 
This should NOT happen."; - logger.severe(errMsg); - - throw new Exception(errMsg); - } - - - - else if (dataFiles.size() == 1){ - return true; - }else{ - - String errMsg = "DataFile with id: [" + df.getId() + "] has more than one replacment!"; - logger.severe(errMsg); - - throw new Exception(errMsg); - } - - } public boolean hasBeenDeleted(DataFile df){ Dataset dataset = df.getOwner(); diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 4cf95dda250..cd40e76a304 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -53,6 +53,10 @@ query = "SELECT o.id FROM Dataset o WHERE o.owner.id=:ownerId"), @NamedQuery(name = "Dataset.findByOwnerId", query = "SELECT o FROM Dataset o WHERE o.owner.id=:ownerId"), + @NamedQuery(name = "Dataset.findByCreatorId", + query = "SELECT o FROM Dataset o WHERE o.creator.id=:creatorId"), + @NamedQuery(name = "Dataset.findByReleaseUserId", + query = "SELECT o FROM Dataset o WHERE o.releaseUser.id=:releaseUserId"), }) /* diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 5f596d1b556..b9cc26ab89b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -54,6 +54,8 @@ import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.workflows.WorkflowComment; + import java.io.File; import java.io.FileOutputStream; import java.io.IOException; @@ -1280,6 +1282,10 @@ public boolean canUpdateDataset() { public boolean canPublishDataverse() { return permissionsWrapper.canIssuePublishDataverseCommand(dataset.getOwner()); } + + public boolean canPublishDataset(){ + return permissionsWrapper.canIssuePublishDatasetCommand(dataset); + } public boolean canViewUnpublishedDataset() { return permissionsWrapper.canViewUnpublishedDataset( dvRequestService.getDataverseRequest(), dataset); @@ -1856,7 +1862,9 @@ private String init(boolean initFull) { MakeDataCountEntry entry = new MakeDataCountEntry(FacesContext.getCurrentInstance(), dvRequestService, workingVersion); mdcLogService.logEntry(entry); } - + displayWorkflowComments(); + + if (initFull) { // init the list of FileMetadatas if (workingVersion.isDraft() && canUpdateDataset()) { @@ -2016,6 +2024,22 @@ private String init(boolean initFull) { return null; } + private void displayWorkflowComments() { + List comments = workingVersion.getWorkflowComments(); + for (WorkflowComment wfc : comments) { + if (wfc.isToBeShown() && wfc.getDatasetVersion().equals(workingVersion) + && wfc.getAuthenticatedUser().equals(session.getUser())) { + if (wfc.getType() == WorkflowComment.Type.WORKFLOW_SUCCESS) { + JsfHelper.addSuccessMessage(wfc.getMessage()); + + } else if (wfc.getType() == WorkflowComment.Type.WORKFLOW_FAILURE) { + JsfHelper.addWarningMessage(wfc.getMessage()); + } + datasetService.markWorkflowCommentAsRead(wfc); + } + } + } + private void displayLockInfo(Dataset dataset) { // Various info messages, when the dataset is locked (for various reasons): if (dataset.isLocked() && canUpdateDataset()) { @@ -2055,9 +2079,8 @@ private void displayLockInfo(Dataset dataset) { BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.contactSupport")); } if (dataset.isLockedFor(DatasetLock.Reason.EditInProgress)) { - String rootDataverseName = 
dataverseService.findRootDataverse().getName(); JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message"), - BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null, rootDataverseName)))); + BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null)))); } } @@ -2703,6 +2726,8 @@ public String refresh() { } } + displayWorkflowComments(); + return ""; } @@ -3347,6 +3372,8 @@ public String save() { return ""; } + + // Use the Create or Update command to save the dataset: Command cmd; Map deleteStorageLocations = null; @@ -3371,8 +3398,7 @@ public String save() { if (dataset.isLockedFor(DatasetLock.Reason.EditInProgress) || lockTest.isLockedFor(DatasetLock.Reason.EditInProgress)) { logger.log(Level.INFO, "Couldn''t save dataset: {0}", "It is locked." + ""); - String rootDataverseName = dataverseService.findRootDataverse().getName(); - JH.addMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message"),BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null, rootDataverseName)))); + JH.addMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message"),BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null)))); return returnToDraftVersion(); } } @@ -3450,7 +3476,7 @@ public String save() { } if (addFilesSuccess && dataset.getFiles().size() > 0) { if (nNewFiles == dataset.getFiles().size()) { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess")); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } else { String partialSuccessMessage = BundleUtil.getStringFromBundle("dataset.message.createSuccess.partialSuccessSavingFiles"); partialSuccessMessage = partialSuccessMessage.replace("{0}", "" + dataset.getFiles().size() + ""); @@ -3460,26 +3486,26 @@ public String save() { } else { JsfHelper.addWarningMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess.failedToSaveFiles")); } - } else { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess")); + } else { + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } } if (editMode.equals(EditMode.METADATA)) { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.metadataSuccess")); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.metadataSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } if (editMode.equals(EditMode.LICENSE)) { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.termsSuccess")); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.termsSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } if (editMode.equals(EditMode.FILE)) { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.filesSuccess")); + 
JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.filesSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } } else { // must have been a bulk file update or delete: if (bulkFileDeleteInProgress) { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.bulkFileDeleteSuccess")); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.bulkFileDeleteSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } else { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.bulkFileUpdateSuccess")); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.bulkFileUpdateSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } } @@ -4200,13 +4226,13 @@ public String getTabularDataFileURL(Long fileid) { public List< String[]> getExporters(){ List retList = new ArrayList<>(); String myHostURL = getDataverseSiteUrl(); - for (String [] provider : ExportService.getInstance(settingsService).getExportersLabels() ){ + for (String [] provider : ExportService.getInstance().getExportersLabels() ){ String formatName = provider[1]; String formatDisplayName = provider[0]; Exporter exporter = null; try { - exporter = ExportService.getInstance(settingsService).getExporter(formatName); + exporter = ExportService.getInstance().getExporter(formatName); } catch (ExportException ex) { exporter = null; } @@ -5150,8 +5176,7 @@ public List getDatasetAuthors() { * @return the publisher of the version */ public String getPublisher() { - assert (null != workingVersion); - return workingVersion.getRootDataverseNameforCitation(); + return dataverseService.getRootDataverseName(); } public void downloadRsyncScript() { @@ -5296,7 +5321,7 @@ public boolean isThisLatestReleasedVersion() { public String getJsonLd() { if (isThisLatestReleasedVersion()) { - ExportService instance = ExportService.getInstance(settingsService); + ExportService instance = ExportService.getInstance(); String jsonLd = instance.getExportAsString(dataset, SchemaDotOrgExporter.NAME); if (jsonLd != null) { logger.fine("Returning cached schema.org JSON-LD."); @@ -5462,4 +5487,13 @@ public boolean isFileAccessRequest() { public void setFileAccessRequest(boolean fileAccessRequest) { this.fileAccessRequest = fileAccessRequest; } + + // wrapper method to see if the file has been deleted (or replaced) in the current version + public boolean isFileDeleted (DataFile dataFile) { + if (dataFile.getDeleted() == null) { + dataFile.setDeleted(datafileService.hasBeenDeleted(dataFile)); + } + + return dataFile.getDeleted(); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index c1efe119fd2..224ccfd22f3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -19,6 +19,7 @@ import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean; import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.workflows.WorkflowComment; import java.io.File; @@ -150,6 +151,14 @@ private List findIdsByOwnerId(Long ownerId, boolean onlyPublished) { } } + public List findByCreatorId(Long 
creatorId) { + return em.createNamedQuery("Dataset.findByCreatorId").setParameter("creatorId", creatorId).getResultList(); + } + + public List findByReleaseUserId(Long releaseUserId) { + return em.createNamedQuery("Dataset.findByReleaseUserId").setParameter("releaseUserId", releaseUserId).getResultList(); + } + public List filterByPidQuery(String filterQuery) { // finds only exact matches Dataset ds = findByGlobalId(filterQuery); @@ -719,6 +728,27 @@ public void exportAllDatasets(boolean forceReExport) { } + //get a string to add to save success message + //depends on dataset state and user privileges + public String getReminderString(Dataset dataset, boolean canPublishDataset) { + + if(!dataset.isReleased() ){ + //messages for draft state. + if (canPublishDataset){ + return BundleUtil.getStringFromBundle("dataset.message.publish.remind.draft"); + } else { + return BundleUtil.getStringFromBundle("dataset.message.submit.remind.draft"); + } + } else{ + //messages for new version - post-publish + if (canPublishDataset){ + return BundleUtil.getStringFromBundle("dataset.message.publish.remind.version"); + } else { + return BundleUtil.getStringFromBundle("dataset.message.submit.remind.version"); + } + } + } + public void updateLastExportTimeStamp(Long datasetId) { Date now = new Date(); em.createNativeQuery("UPDATE Dataset SET lastExportTime='"+now.toString()+"' WHERE id="+datasetId).executeUpdate(); @@ -787,6 +817,12 @@ public WorkflowComment addWorkflowComment(WorkflowComment workflowComment) { return workflowComment; } + public void markWorkflowCommentAsRead(WorkflowComment workflowComment) { + workflowComment.setToBeShown(false); + em.merge(workflowComment); + } + + /** * This method used to throw CommandException, which was pretty pointless * seeing how it's called asynchronously. As of v5.0 any CommandException @@ -963,7 +999,7 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras, GetDatas // (i.e., the metadata exports): StorageIO datasetSIO = DataAccess.getStorageIO(dataset); - for (String[] exportProvider : ExportService.getInstance(settingsService).getExportersLabels()) { + for (String[] exportProvider : ExportService.getInstance().getExportersLabels()) { String exportLabel = "export_" + exportProvider[1] + ".cached"; try { total += datasetSIO.getAuxObjectSize(exportLabel); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 067cba23d04..d28ce5175d4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -1391,21 +1391,6 @@ public String getDistributorName() { } return null; } - - // TODO: Consider renaming this method since it's also used for getting the "provider" for Schema.org JSON-LD. 
- public String getRootDataverseNameforCitation(){ - //Get root dataverse name for Citation - Dataverse root = this.getDataset().getOwner(); - while (root.getOwner() != null) { - root = root.getOwner(); - } - String rootDataverseName = root.getName(); - if (!StringUtil.isEmpty(rootDataverseName)) { - return rootDataverseName; - } else { - return ""; - } - } public List getDatasetDistributors() { //todo get distributors from DatasetfieldValues @@ -1888,11 +1873,11 @@ public String getJsonLd() { job.add("includedInDataCatalog", Json.createObjectBuilder() .add("@type", "DataCatalog") - .add("name", this.getRootDataverseNameforCitation()) + .add("name", BrandingUtil.getRootDataverseCollectionName()) .add("url", SystemConfig.getDataverseSiteUrlStatic()) ); - String installationBrandName = BrandingUtil.getInstallationBrandName(getRootDataverseNameforCitation()); + String installationBrandName = BrandingUtil.getInstallationBrandName(); /** * Both "publisher" and "provider" are included but they have the same * values. Some services seem to prefer one over the other. diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index 5aab9ef9a9e..b46333a4287 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -50,6 +50,8 @@ @NamedQuery(name = "Dataverse.findRoot", query = "SELECT d FROM Dataverse d where d.owner.id=null"), @NamedQuery(name = "Dataverse.findByAlias", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.alias)=:alias"), @NamedQuery(name = "Dataverse.findByOwnerId", query="select object(o) from Dataverse as o where o.owner.id =:ownerId order by o.name"), + @NamedQuery(name = "Dataverse.findByCreatorId", query="select object(o) from Dataverse as o where o.creator.id =:creatorId order by o.name"), + @NamedQuery(name = "Dataverse.findByReleaseUserId", query="select object(o) from Dataverse as o where o.releaseUser.id =:releaseUserId order by o.name"), @NamedQuery(name = "Dataverse.filterByAlias", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.alias) LIKE :alias order by dv.alias"), @NamedQuery(name = "Dataverse.filterByAliasNameAffiliation", query="SELECT dv FROM Dataverse dv WHERE (LOWER(dv.alias) LIKE :alias) OR (LOWER(dv.name) LIKE :name) OR (LOWER(dv.affiliation) LIKE :affiliation) order by dv.alias"), @NamedQuery(name = "Dataverse.filterByName", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.name) LIKE :name order by dv.alias") diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 96963bb9cb4..520c3ff14df 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -18,6 +18,7 @@ import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.search.SolrIndexServiceBean; import edu.harvard.iq.dataverse.search.SolrSearchResult; +import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.File; import java.io.IOException; @@ -172,6 +173,14 @@ public List findDataverseIdsForIndexing(boolean skipIndexed) { } + public List findByCreatorId(Long creatorId) { + return em.createNamedQuery("Dataverse.findByCreatorId").setParameter("creatorId", creatorId).getResultList(); + } + + public List findByReleaseUserId(Long releaseUserId) { + return 
em.createNamedQuery("Dataverse.findByReleaseUserId").setParameter("releaseUserId", releaseUserId).getResultList(); + } + public List findByOwnerId(Long ownerId) { return em.createNamedQuery("Dataverse.findByOwnerId").setParameter("ownerId", ownerId).getResultList(); } @@ -195,6 +204,15 @@ public Dataverse findRootDataverse() { return em.createNamedQuery("Dataverse.findRoot", Dataverse.class).getSingleResult(); } + + //Similarly - if the above throws that exception, do we need to catch it here? + //ToDo - consider caching? + public String getRootDataverseName() { + Dataverse root = findRootDataverse(); + String rootDataverseName=root.getName(); + return StringUtil.isEmpty(rootDataverseName) ? "" : rootDataverseName; + } + public List findAllPublishedByOwnerId(Long ownerId) { List retVal = new ArrayList<>(); List previousLevel = findPublishedByOwnerId(ownerId); diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java b/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java index 2a2b02c5b18..c6016939c08 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java @@ -4,9 +4,12 @@ import edu.harvard.iq.dataverse.PermissionServiceBean.StaticPermissionQuery; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; +import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.JsfHelper; import edu.harvard.iq.dataverse.util.SessionUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.IOException; @@ -54,7 +57,10 @@ public class DataverseSession implements Serializable{ @EJB BannerMessageServiceBean bannerMessageService; - + + @EJB + AuthenticationServiceBean authenticationService; + private static final Logger logger = Logger.getLogger(DataverseSession.class.getCanonicalName()); private boolean statusDismissed = false; @@ -84,19 +90,57 @@ public void setDismissedMessages(List dismissedMessages) { private Boolean debug; public User getUser() { + return getUser(false); + } + + /** + * For performance reasons, we only lookup the authenticated user again (to + * check if it has been deleted or deactivated, for example) when we have + * to. + * + * @param lookupAuthenticatedUserAgain A boolean to indicate if we should go + * to the database again to lookup the user to get the latest values that + * may have been updated outside the session. + */ + public User getUser(boolean lookupAuthenticatedUserAgain) { if ( user == null ) { user = GuestUser.get(); } - + if (lookupAuthenticatedUserAgain && user instanceof AuthenticatedUser) { + AuthenticatedUser auFromSession = (AuthenticatedUser) user; + AuthenticatedUser auFreshLookup = authenticationService.findByID(auFromSession.getId()); + if (auFreshLookup == null) { + logger.fine("getUser found user no longer exists (was deleted). Returning GuestUser."); + user = GuestUser.get(); + } else { + if (auFreshLookup.isDeactivated()) { + logger.fine("getUser found user is deactivated. Returning GuestUser."); + user = GuestUser.get(); + } + } + } return user; } + /** + * Sets the user and configures the session timeout. 
+ */ public void setUser(User aUser) { - + // We check for deactivated status here in "setUser" to ensure a common user + // experience across Builtin, Shib, OAuth, and OIDC users. + // If we want a different user experience for Builtin users, we can + // modify getUpdateAuthenticatedUser in AuthenticationServiceBean + // (and probably other places). + if (aUser instanceof AuthenticatedUser && aUser.isDeactivated()) { + logger.info("Login attempt by deactivated user " + aUser.getIdentifier() + "."); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("deactivated.error")); + return; + } FacesContext context = FacesContext.getCurrentInstance(); // Log the login/logout and Change the session id if we're using the UI and have // a session, versus an API call with no session - (i.e. /admin/submitToArchive() // which sets the user in the session to pass it through to the underlying command) + // TODO: reformat to remove tabs etc. if(context != null) { logSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.SessionManagement,(aUser==null) ? "logout" : "login") @@ -104,6 +148,12 @@ public void setUser(User aUser) { //#3254 - change session id when user changes SessionUtil.changeSessionId((HttpServletRequest) context.getExternalContext().getRequest()); + HttpSession httpSession = (HttpSession) context.getExternalContext().getSession(false); + if (httpSession != null) { + // Configure session timeout. + logger.fine("jsession: " + httpSession.getId() + " setting the lifespan of the session to " + systemConfig.getLoginSessionTimeout() + " minutes"); + httpSession.setMaxInactiveInterval(systemConfig.getLoginSessionTimeout() * 60); // session timeout, in seconds + } } this.user = aUser; } @@ -208,15 +258,5 @@ public void dismissMessage(BannerMessage message){ } } - - public void configureSessionTimeout() { - HttpSession httpSession = (HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false); - - if (httpSession != null) { - logger.fine("jsession: "+httpSession.getId()+" setting the lifespan of the session to " + systemConfig.getLoginSessionTimeout() + " minutes"); - httpSession.setMaxInactiveInterval(systemConfig.getLoginSessionTimeout() * 60); // session timeout, in seconds - } - - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index f1041303fdd..09a2ef85893 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -33,7 +33,7 @@ @NamedQuery(name = "DvObject.findByProtocolIdentifierAuthority", query = "SELECT o FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol"), @NamedQuery(name = "DvObject.findByOwnerId", - query = "SELECT o FROM DvObject o WHERE o.owner.id=:ownerId"), + query = "SELECT o FROM DvObject o WHERE o.owner.id=:ownerId order by o.dtype desc, o.id"), @NamedQuery(name = "DvObject.findByAuthenticatedUserId", query = "SELECT o FROM DvObject o WHERE o.creator.id=:ownerId or o.releaseUser.id=:releaseUserId") }) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 89b21d84856..aef3f7d3446 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -36,6 +36,8 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.BundleUtil; import 
edu.harvard.iq.dataverse.util.EjbUtil; +import edu.harvard.iq.dataverse.util.FileMetadataUtil; + import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import java.io.File; import java.io.FileOutputStream; @@ -90,7 +92,11 @@ public class EditDatafilesPage implements java.io.Serializable { public enum FileEditMode { - EDIT, UPLOAD, CREATE, SINGLE, SINGLE_REPLACE + EDIT, UPLOAD, CREATE, REPLACE + }; + + public enum Referrer { + DATASET, FILE }; @EJB @@ -136,6 +142,7 @@ public enum FileEditMode { private String selectedFileIdsString = null; private FileEditMode mode; + private Referrer referrer = Referrer.DATASET; private List selectedFileIdsList = new ArrayList<>(); private List fileMetadatas = new ArrayList<>();; @@ -205,6 +212,16 @@ public FileEditMode getMode() { public void setMode(FileEditMode mode) { this.mode = mode; } + + public Referrer getReferrer() { + return referrer; + } + + public void setReferrer(Referrer referrer) { + this.referrer = referrer; + } + + public List getFileMetadatas() { @@ -501,9 +518,9 @@ public String init() { // ------------------------------------------- // Is this a file replacement operation? // ------------------------------------------- - if (mode == FileEditMode.SINGLE_REPLACE){ + if (mode == FileEditMode.REPLACE){ /* - http://localhost:8080/editdatafiles.xhtml?mode=SINGLE_REPLACE&datasetId=26&fid=726 + http://localhost:8080/editdatafiles.xhtml?mode=REPLACE&datasetId=26&fid=726 */ DataFile fileToReplace = loadFileToReplace(); if (fileToReplace == null){ @@ -525,7 +542,7 @@ public String init() { populateFileMetadatas(); singleFile = getFileToReplace(); - }else if (mode == FileEditMode.EDIT || mode == FileEditMode.SINGLE) { + }else if (mode == FileEditMode.EDIT) { if (selectedFileIdsString != null) { String[] ids = selectedFileIdsString.split(","); @@ -539,7 +556,7 @@ public String init() { test = null; } if (test != null) { - if (mode == FileEditMode.SINGLE) { + if (FileEditMode.EDIT == mode && Referrer.FILE == referrer) { singleFile = datafileService.find(test); } selectedFileIdsList.add(test); @@ -567,7 +584,7 @@ public String init() { return permissionsWrapper.notFound(); } - if (FileEditMode.SINGLE == mode){ + if (FileEditMode.EDIT == mode && Referrer.FILE == referrer){ if (fileMetadatas.get(0).getDatasetVersion().getId() != null){ versionString = "DRAFT"; } @@ -796,8 +813,12 @@ private void deleteFiles(List filesForDelete) { // and let the delete be handled in the command (by adding it to the // filesToBeDeleted list): + // ToDo - FileMetadataUtil.removeFileMetadataFromList should handle these two + // removes so they could be put after this if clause and the else clause could + // be removed. dataset.getEditVersion().getFileMetadatas().remove(markedForDelete); fileMetadatas.remove(markedForDelete); + filesToBeDeleted.add(markedForDelete); } else { logger.fine("this is a brand-new (unsaved) filemetadata"); @@ -810,9 +831,9 @@ private void deleteFiles(List filesForDelete) { // fileMetadatas list. (but doing both just adds a no-op and won't cause an // error) // 1. delete the filemetadata from the local display list: - removeFileMetadataFromList(fileMetadatas, markedForDelete); + FileMetadataUtil.removeFileMetadataFromList(fileMetadatas, markedForDelete); // 2. 
delete the filemetadata from the version: - removeFileMetadataFromList(dataset.getEditVersion().getFileMetadatas(), markedForDelete); + FileMetadataUtil.removeFileMetadataFromList(dataset.getEditVersion().getFileMetadatas(), markedForDelete); } if (markedForDelete.getDataFile().getId() == null) { @@ -821,8 +842,8 @@ private void deleteFiles(List filesForDelete) { // removing it from the fileMetadatas lists (above), we also remove it from // the newFiles list and the dataset's files, so it never gets saved. - removeDataFileFromList(dataset.getFiles(), markedForDelete.getDataFile()); - removeDataFileFromList(newFiles, markedForDelete.getDataFile()); + FileMetadataUtil.removeDataFileFromList(dataset.getFiles(), markedForDelete.getDataFile()); + FileMetadataUtil.removeDataFileFromList(newFiles, markedForDelete.getDataFile()); FileUtil.deleteTempFile(markedForDelete.getDataFile(), dataset, ingestService); // Also remove checksum from the list of newly uploaded checksums (perhaps odd // to delete and then try uploading the same file again, but it seems like it @@ -851,28 +872,6 @@ private void deleteFiles(List filesForDelete) { } - private void removeFileMetadataFromList(List fmds, FileMetadata fmToDelete) { - Iterator fmit = fmds.iterator(); - while (fmit.hasNext()) { - FileMetadata fmd = fmit.next(); - if (fmToDelete.getDataFile().getStorageIdentifier().equals(fmd.getDataFile().getStorageIdentifier())) { - fmit.remove(); - break; - } - } - } - - private void removeDataFileFromList(List dfs, DataFile dfToDelete) { - Iterator dfit = dfs.iterator(); - while (dfit.hasNext()) { - DataFile df = dfit.next(); - if (dfToDelete.getStorageIdentifier().equals(df.getStorageIdentifier())) { - dfit.remove(); - break; - } - } - } - /** @@ -912,7 +911,11 @@ public String saveReplacementFile() throws FileReplaceException{ if (fileReplacePageHelper.runSaveReplacementFile_Phase2()){ JsfHelper.addSuccessMessage(getBundleString("file.message.replaceSuccess")); // It worked!!! Go to page of new file!! - return returnToFileLandingPageAfterReplace(fileReplacePageHelper.getFirstNewlyAddedFile()); + if (Referrer.FILE == referrer) { + return returnToFileLandingPageAfterReplace(fileReplacePageHelper.getFirstNewlyAddedFile()); + } else { + return returnToDraftVersion(); + } }else{ // Uh oh. String errMsg = fileReplacePageHelper.getErrorMessages(); @@ -941,8 +944,7 @@ public String save() { if (dataset.isLockedFor(DatasetLock.Reason.EditInProgress) || lockTest.isLockedFor(DatasetLock.Reason.EditInProgress)) { logger.log(Level.INFO, "Couldn''t save dataset: {0}", "It is locked." 
+ ""); - String rootDataverseName = dataverseService.findRootDataverse().getName(); - JH.addMessage(FacesMessage.SEVERITY_FATAL, getBundleString("dataset.locked.editInProgress.message"),BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null, rootDataverseName)))); + JH.addMessage(FacesMessage.SEVERITY_FATAL, getBundleString("dataset.locked.editInProgress.message"),BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null)))); return null; } } @@ -1131,13 +1133,13 @@ public String save() { workingVersion = dataset.getEditVersion(); logger.fine("working version id: "+workingVersion.getId()); - if (mode == FileEditMode.SINGLE){ + if (FileEditMode.EDIT == mode && Referrer.FILE == referrer){ JsfHelper.addSuccessMessage(getBundleString("file.message.editSuccess")); } else { int nFilesTotal = workingVersion.getFileMetadatas().size(); if (nNewFiles == 0 || nFilesTotal == nExpectedFilesTotal) { - JsfHelper.addSuccessMessage(getBundleString("dataset.message.filesSuccess")); + JsfHelper.addSuccessMessage(getBundleString("dataset.message.filesSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); } else if (nFilesTotal == nOldFiles) { JsfHelper.addErrorMessage(getBundleString("dataset.message.addFiles.Failure")); } else { @@ -1154,7 +1156,7 @@ public String save() { ingestService.startIngestJobsForDataset(dataset, (AuthenticatedUser) session.getUser()); } - if (mode == FileEditMode.SINGLE && fileMetadatas.size() > 0) { + if (FileEditMode.EDIT == mode && Referrer.FILE == referrer && fileMetadatas.size() > 0) { // If this was a "single file edit", i.e. an edit request sent from // the individual File Landing page, we want to redirect back to // the landing page. 
BUT ONLY if the file still exists - i.e., if @@ -1167,6 +1169,10 @@ public String save() { return returnToDraftVersion(); } + public boolean canPublishDataset(){ + return permissionsWrapper.canIssuePublishDatasetCommand(dataset); + } + private void populateDatasetUpdateFailureMessage(){ JH.addMessage(FacesMessage.SEVERITY_FATAL, getBundleString("dataset.message.filesFailure")); @@ -1214,7 +1220,7 @@ public String cancel() { FileUtil.deleteTempFile(newFile, dataset, ingestService); } - if (mode == FileEditMode.SINGLE || mode == FileEditMode.SINGLE_REPLACE ) { + if (Referrer.FILE == referrer) { return returnToFileLandingPage(); } if (workingVersion.getId() != null) { @@ -1271,7 +1277,7 @@ private HttpClient getClient() { * @return */ public boolean isFileReplaceOperation(){ - return (mode == FileEditMode.SINGLE_REPLACE)&&(fileReplacePageHelper!= null); + return (mode == FileEditMode.REPLACE)&&(fileReplacePageHelper!= null); } public boolean allowMultipleFileUpload(){ @@ -1280,7 +1286,7 @@ public boolean allowMultipleFileUpload(){ } public boolean showFileUploadFragment(){ - return mode == FileEditMode.UPLOAD || mode == FileEditMode.CREATE || mode == FileEditMode.SINGLE_REPLACE; + return mode == FileEditMode.UPLOAD || mode == FileEditMode.CREATE || mode == FileEditMode.REPLACE; } diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java index da9aba43498..0937f6f6cf7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java +++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleServiceBean; import edu.harvard.iq.dataverse.engine.command.Command; @@ -30,8 +31,10 @@ import edu.harvard.iq.dataverse.search.SolrIndexServiceBean; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean; +import java.util.Arrays; import java.util.EnumSet; import java.util.Stack; import java.util.logging.Level; @@ -215,7 +218,21 @@ public R submit(Command aCommand) throws CommandException { } DataverseRequest dvReq = aCommand.getRequest(); - + + AuthenticatedUser authenticatedUser = dvReq.getAuthenticatedUser(); + if (authenticatedUser != null) { + AuthenticatedUser auFreshLookup = authentication.findByID(authenticatedUser.getId()); + if (auFreshLookup == null) { + logger.fine("submit method found user no longer exists (was deleted)."); + throw new CommandException(BundleUtil.getStringFromBundle("command.exception.user.deleted", Arrays.asList(aCommand.getClass().getSimpleName())), aCommand); + } else { + if (auFreshLookup.isDeactivated()) { + logger.fine("submit method found user is deactivated."); + throw new CommandException(BundleUtil.getStringFromBundle("command.exception.user.deactivated", Arrays.asList(aCommand.getClass().getSimpleName())), aCommand); + } + } + } + Map affectedDvObjects = aCommand.getAffectedDvObjects(); 
logRec.setInfo(aCommand.describe()); for (Map.Entry> pair : requiredMap.entrySet()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index da315424220..d863500d137 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -304,13 +304,13 @@ public void setVersion(String version) { public List< String[]> getExporters(){ List retList = new ArrayList<>(); String myHostURL = systemConfig.getDataverseSiteUrl(); - for (String [] provider : ExportService.getInstance(settingsService).getExportersLabels() ){ + for (String [] provider : ExportService.getInstance().getExportersLabels() ){ String formatName = provider[1]; String formatDisplayName = provider[0]; Exporter exporter = null; try { - exporter = ExportService.getInstance(settingsService).getExporter(formatName); + exporter = ExportService.getInstance().getExporter(formatName); } catch (ExportException ex) { exporter = null; } @@ -786,57 +786,15 @@ private List allRelatedFiles() { return dataFiles; } - public boolean isDraftReplacementFile(){ - /* - This method tests to see if the file has been replaced in a draft version of the dataset - Since it must must work when you are on prior versions of the dataset - it must accrue all replacement files that may have been created - */ - if(null == dataset) { - dataset = fileMetadata.getDataFile().getOwner(); - } - - DataFile dataFileToTest = fileMetadata.getDataFile(); - - DatasetVersion currentVersion = dataset.getLatestVersion(); - - if (!currentVersion.isDraft()){ - return false; - } - - if (dataset.getReleasedVersion() == null){ - return false; - } - - List dataFiles = new ArrayList<>(); - - dataFiles.add(dataFileToTest); - - while (datafileService.findReplacementFile(dataFileToTest.getId()) != null ){ - dataFiles.add(datafileService.findReplacementFile(dataFileToTest.getId())); - dataFileToTest = datafileService.findReplacementFile(dataFileToTest.getId()); - } - - if(dataFiles.size() <2){ - return false; - } - - int numFiles = dataFiles.size(); - - DataFile current = dataFiles.get(numFiles - 1 ); - - DatasetVersion publishedVersion = dataset.getReleasedVersion(); - - if( datafileService.findFileMetadataByDatasetVersionIdAndDataFileId(publishedVersion.getId(), current.getId()) == null){ - return true; + // wrapper method to see if the file has been deleted (or replaced) in the current version + public boolean isDeletedFile () { + if (file.getDeleted() == null) { + file.setDeleted(datafileService.hasBeenDeleted(file)); } - return false; + return file.getDeleted(); } - - - /** * To help with replace development * @return diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java index 3cc581ee93c..a88c4833f54 100644 --- a/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java @@ -25,6 +25,7 @@ import java.util.Arrays; import java.util.List; import java.util.Locale; +import java.util.Collections; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; @@ -915,6 +916,12 @@ public void setOaiSetsSelectItems(List oaiSetsSelectItems) { private void createOaiSetsSelectItems(List setNames) { setOaiSetsSelectItems(new ArrayList<>()); if (setNames != null) { + + // Let's sort the list - otherwise, if the list is long enough, + // using this pulldown menu may 
be very difficult: + + Collections.sort(setNames, String.CASE_INSENSITIVE_ORDER); + for (String set: setNames) { if (!StringUtils.isEmpty(set)) { getOaiSetsSelectItems().add(new SelectItem(set, set)); diff --git a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java index 38376fa84c0..166c0c081d0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java @@ -171,7 +171,6 @@ public String login() { AuthenticatedUser r = authSvc.getUpdateAuthenticatedUser(credentialsAuthProviderId, authReq); logger.log(Level.FINE, "User authenticated: {0}", r.getEmail()); session.setUser(r); - session.configureSessionTimeout(); if ("dataverse.xhtml".equals(redirectPage)) { redirectPage = redirectToRoot(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index 13a92c9cd27..a0a91e22c32 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -121,12 +121,11 @@ public boolean sendSystemEmail(String to, String subject, String messageText, bo boolean sent = false; - String rootDataverseName = dataverseService.findRootDataverse().getName(); InternetAddress systemAddress = getSystemAddress(); String body = messageText - + (isHtmlContent ? BundleUtil.getStringFromBundle("notification.email.closing.html", Arrays.asList(BrandingUtil.getSupportTeamEmailAddress(systemAddress), BrandingUtil.getSupportTeamName(systemAddress, rootDataverseName))) - : BundleUtil.getStringFromBundle("notification.email.closing", Arrays.asList(BrandingUtil.getSupportTeamEmailAddress(systemAddress), BrandingUtil.getSupportTeamName(systemAddress, rootDataverseName)))); + + (isHtmlContent ? BundleUtil.getStringFromBundle("notification.email.closing.html", Arrays.asList(BrandingUtil.getSupportTeamEmailAddress(systemAddress), BrandingUtil.getSupportTeamName(systemAddress))) + : BundleUtil.getStringFromBundle("notification.email.closing", Arrays.asList(BrandingUtil.getSupportTeamEmailAddress(systemAddress), BrandingUtil.getSupportTeamName(systemAddress)))); logger.fine("Sending email to " + to + ". Subject: <<<" + subject + ">>>. 
Body: " + body); try { @@ -245,8 +244,7 @@ public Boolean sendNotificationEmail(UserNotification notification, String comme Object objectOfNotification = getObjectOfNotification(notification); if (objectOfNotification != null){ String messageText = getMessageTextBasedOnNotification(notification, objectOfNotification, comment, requestor); - String rootDataverseName = dataverseService.findRootDataverse().getName(); - String subjectText = MailUtil.getSubjectTextBasedOnNotification(notification, rootDataverseName, objectOfNotification); + String subjectText = MailUtil.getSubjectTextBasedOnNotification(notification, objectOfNotification); if (!(messageText.isEmpty() || subjectText.isEmpty())){ retval = sendSystemEmail(emailAddress, subjectText, messageText, isHtmlContent); } else { @@ -496,14 +494,33 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio version.getDataset().getOwner().getDisplayName(), getDataverseLink(version.getDataset().getOwner()), optionalReturnReason}; messageText += MessageFormat.format(pattern, paramArrayReturnedDataset); return messageText; + + case WORKFLOW_SUCCESS: + version = (DatasetVersion) targetObject; + pattern = BundleUtil.getStringFromBundle("notification.email.workflow.success"); + + if (comment == null) { + comment = BundleUtil.getStringFromBundle("notification.email.workflow.nullMessage"); + } + String[] paramArrayWorkflowSuccess = {version.getDataset().getDisplayName(), getDatasetLink(version.getDataset()), comment}; + messageText += MessageFormat.format(pattern, paramArrayWorkflowSuccess); + return messageText; + case WORKFLOW_FAILURE: + version = (DatasetVersion) targetObject; + pattern = BundleUtil.getStringFromBundle("notification.email.workflow.failure"); + if (comment == null) { + comment = BundleUtil.getStringFromBundle("notification.email.workflow.nullMessage"); + } + String[] paramArrayWorkflowFailure = {version.getDataset().getDisplayName(), getDatasetLink(version.getDataset()), comment}; + messageText += MessageFormat.format(pattern, paramArrayWorkflowFailure); + return messageText; case CREATEACC: - String rootDataverseName = dataverseService.findRootDataverse().getName(); InternetAddress systemAddress = getSystemAddress(); String accountCreatedMessage = BundleUtil.getStringFromBundle("notification.email.welcome", Arrays.asList( - BrandingUtil.getInstallationBrandName(rootDataverseName), + BrandingUtil.getInstallationBrandName(), systemConfig.getGuidesBaseUrl(), systemConfig.getGuidesVersion(), - BrandingUtil.getSupportTeamName(systemAddress, rootDataverseName), + BrandingUtil.getSupportTeamName(systemAddress), BrandingUtil.getSupportTeamEmailAddress(systemAddress) )); String optionalConfirmEmailAddon = confirmEmailService.optionalConfirmEmailAddonMsg(userNotification.getUser()); @@ -594,6 +611,8 @@ private Object getObjectOfNotification (UserNotification userNotification){ case PUBLISHEDDS: case PUBLISHFAILED_PIDREG: case RETURNEDDS: + case WORKFLOW_SUCCESS: + case WORKFLOW_FAILURE: return versionService.find(userNotification.getObjectId()); case CREATEACC: return userNotification.getUser(); diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index d73aaeb8dbd..79a3ca800e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -590,7 +590,7 @@ public void updateRole(ActionEvent e) { } catch (PermissionException ex) { 
JH.addMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("permission.roleNotSaved"), BundleUtil.getStringFromBundle("permission.permissionsMissing", Arrays.asList(ex.getRequiredPermissions().toString()))); } catch (CommandException ex) { - JH.addMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("permission.roleNotSaved")); + JH.addMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("permission.roleNotSaved").concat(" " + ex.getMessage()) ); logger.log(Level.SEVERE, "Error saving role: " + ex.getMessage(), ex); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java index b31b55b2e4f..6b207ed0e75 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java @@ -376,7 +376,9 @@ public List filterRoleAssignees(String query, DvObject dvObject, L .getResultList().stream() .filter(ra -> roleAssignSelectedRoleAssignees == null || !roleAssignSelectedRoleAssignees.contains(ra)) .forEach((ra) -> { - roleAssigneeList.add(ra); + if (!ra.isDeactivated()) { + roleAssigneeList.add(ra); + } }); // now we add groups to the list, both global and explicit diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java index 6241f120f80..22d2679efb6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java @@ -42,7 +42,7 @@ @NamedQuery( name = "RoleAssignment.listByDefinitionPointId", query = "SELECT r FROM RoleAssignment r WHERE r.definitionPoint.id=:definitionPointId" ), @NamedQuery( name = "RoleAssignment.listByRoleId", - query = "SELECT r FROM RoleAssignment r WHERE r.role=:roleId" ), + query = "SELECT r FROM RoleAssignment r WHERE r.role.id=:roleId" ), @NamedQuery( name = "RoleAssignment.listByPrivateUrlToken", query = "SELECT r FROM RoleAssignment r WHERE r.privateUrlToken=:privateUrlToken" ), @NamedQuery( name = "RoleAssignment.deleteByAssigneeIdentifier_RoleIdDefinition_PointId", diff --git a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java index dd27842573f..363972b48c3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java +++ b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java @@ -130,7 +130,7 @@ public void setUserSum(Long userSum) { public String getMessageTo() { if (recipient == null) { - return BrandingUtil.getSupportTeamName(systemAddress, dataverseService.findRootDataverse().getName()); + return BrandingUtil.getSupportTeamName(systemAddress); } else if (recipient.isInstanceofDataverse()) { return ((Dataverse) recipient).getDisplayName() + " " + BundleUtil.getStringFromBundle("contact.contact"); } else { @@ -140,7 +140,7 @@ public String getMessageTo() { public String getFormHeader() { if (recipient == null) { - return BrandingUtil.getContactHeader(systemAddress, dataverseService.findRootDataverse().getName()); + return BrandingUtil.getContactHeader(systemAddress); } else if (recipient.isInstanceofDataverse()) { return BundleUtil.getStringFromBundle("contact.dataverse.header"); } else { @@ -198,10 +198,8 @@ public void validateUserEmail(FacesContext context, UIComponent component, Objec } public String sendMessage() { - // FIXME: move dataverseService.findRootDataverse() to init - String 
rootDataverseName = dataverseService.findRootDataverse().getName(); - String installationBrandName = BrandingUtil.getInstallationBrandName(rootDataverseName); - String supportTeamName = BrandingUtil.getSupportTeamName(systemAddress, rootDataverseName); + String installationBrandName = BrandingUtil.getInstallationBrandName(); + String supportTeamName = BrandingUtil.getSupportTeamName(systemAddress); List feedbacks = FeedbackUtil.gatherFeedback(recipient, dataverseSession, messageSubject, userMessage, systemAddress, userEmail, systemConfig.getDataverseSiteUrl(), installationBrandName, supportTeamName); if (feedbacks.isEmpty()) { logger.warning("No feedback has been sent!"); diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index f27c7c712dc..d7512dd5cf0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -173,13 +173,13 @@ public boolean isDataFilePIDSequentialDependent(){ public String getSupportTeamName() { String systemEmail = getValueForKey(SettingsServiceBean.Key.SystemEmail); InternetAddress systemAddress = MailUtil.parseSystemAddress(systemEmail); - return BrandingUtil.getSupportTeamName(systemAddress, dataverseService.findRootDataverse().getName()); + return BrandingUtil.getSupportTeamName(systemAddress); } public String getSupportTeamEmail() { String systemEmail = getValueForKey(SettingsServiceBean.Key.SystemEmail); InternetAddress systemAddress = MailUtil.parseSystemAddress(systemEmail); - return BrandingUtil.getSupportTeamEmailAddress(systemAddress) != null ? BrandingUtil.getSupportTeamEmailAddress(systemAddress) : BrandingUtil.getSupportTeamName(systemAddress, dataverseService.findRootDataverse().getName()); + return BrandingUtil.getSupportTeamEmailAddress(systemAddress) != null ? BrandingUtil.getSupportTeamEmailAddress(systemAddress) : BrandingUtil.getSupportTeamName(systemAddress); } public Integer getUploadMethodsCount() { diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java index 889bdaff03a..4ad50320f23 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Shib.java +++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java @@ -338,6 +338,10 @@ public String confirmAndConvertAccount() { logger.fine("builtin username: " + builtinUsername); AuthenticatedUser builtInUserToConvert = authSvc.canLogInAsBuiltinUser(builtinUsername, builtinPassword); if (builtInUserToConvert != null) { + if (builtInUserToConvert.isDeactivated()) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("shib.convert.fail.deactivated")); + return null; + } // TODO: Switch from authSvc.convertBuiltInToShib to authSvc.convertBuiltInUserToRemoteUser AuthenticatedUser au = authSvc.convertBuiltInToShib(builtInUserToConvert, shibAuthProvider.getId(), userIdentifier); if (au != null) { @@ -358,8 +362,8 @@ public String confirmAndConvertAccount() { private void logInUserAndSetShibAttributes(AuthenticatedUser au) { au.setShibIdentityProvider(shibIdp); + // setUser checks for deactivated users. 
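An aside on the comment above: the deactivated-user check it mentions lives inside DataverseSession.setUser, not in this hunk. A minimal sketch of that kind of guard, with every class, field, and message below invented for illustration rather than taken from the patch:

    import java.util.logging.Logger;

    public class SessionGuardSketch {

        private static final Logger logger = Logger.getLogger(SessionGuardSketch.class.getName());

        static class DemoUser {
            final String identifier;
            final boolean deactivated;
            DemoUser(String identifier, boolean deactivated) {
                this.identifier = identifier;
                this.deactivated = deactivated;
            }
        }

        private DemoUser user;

        // Hypothetical guard: never seat a deactivated user in the session.
        public void setUser(DemoUser aUser) {
            if (aUser != null && aUser.deactivated) {
                logger.info("refusing to seat deactivated user " + aUser.identifier);
                return;
            }
            this.user = aUser;
        }

        public static void main(String[] args) {
            SessionGuardSketch session = new SessionGuardSketch();
            session.setUser(new DemoUser("active-user", false));  // accepted
            session.setUser(new DemoUser("retired-user", true));  // refused, session unchanged
            System.out.println("seated: " + (session.user == null ? "none" : session.user.identifier));
        }
    }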
session.setUser(au); - session.configureSessionTimeout(); logger.fine("Groups for user " + au.getId() + " (" + au.getIdentifier() + "): " + getGroups(au)); } diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java index e44c5f6333e..78f8f38206b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java @@ -30,7 +30,7 @@ public enum Type { ASSIGNROLE, REVOKEROLE, CREATEDV, CREATEDS, CREATEACC, SUBMITTEDDS, RETURNEDDS, PUBLISHEDDS, REQUESTFILEACCESS, GRANTFILEACCESS, REJECTFILEACCESS, FILESYSTEMIMPORT, CHECKSUMIMPORT, CHECKSUMFAIL, CONFIRMEMAIL, APIGENERATED, INGESTCOMPLETED, INGESTCOMPLETEDWITHERRORS, - PUBLISHFAILED_PIDREG + PUBLISHFAILED_PIDREG, WORKFLOW_SUCCESS, WORKFLOW_FAILURE }; private static final long serialVersionUID = 1L; diff --git a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java index e395e9a90ec..5707f477a87 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java @@ -140,7 +140,10 @@ private AuthenticatedUser createAuthenticatedUserForView (Object[] dbRowValues, user.setAuthProviderId(UserUtil.getStringOrNull(dbRowValues[11])); user.setAuthProviderFactoryAlias(UserUtil.getStringOrNull(dbRowValues[12])); - + + user.setDeactivated((Boolean)(dbRowValues[13])); + user.setDeactivatedTime(UserUtil.getTimestampOrNull(dbRowValues[14])); + user.setRoles(roles); return user; } @@ -417,7 +420,8 @@ private List getUserListCore(String searchTerm, qstr += " u.affiliation, u.superuser,"; qstr += " u.position,"; qstr += " u.createdtime, u.lastlogintime, u.lastapiusetime, "; - qstr += " prov.id, prov.factoryalias"; + qstr += " prov.id, prov.factoryalias, "; + qstr += " u.deactivated, u.deactivatedtime "; qstr += " FROM authenticateduser u,"; qstr += " authenticateduserlookup prov_lookup,"; qstr += " authenticationproviderrow prov"; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index db0958800f8..6b84a883287 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -26,6 +26,7 @@ import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; +import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.RoleAssignee; import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -69,6 +70,7 @@ import javax.json.JsonValue; import javax.json.JsonValue.ValueType; import javax.persistence.EntityManager; +import javax.persistence.NoResultException; import javax.persistence.PersistenceContext; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.core.Context; @@ -86,6 +88,7 @@ public abstract class AbstractApiBean { private static final Logger logger = Logger.getLogger(AbstractApiBean.class.getName()); private static final String DATAVERSE_KEY_HEADER_NAME = "X-Dataverse-key"; private static final String PERSISTENT_ID_KEY=":persistentId"; + private static final String ALIAS_KEY=":alias"; public static final String STATUS_ERROR = "ERROR"; 
public static final String STATUS_OK = "OK"; public static final String STATUS_WF_IN_PROGRESS = "WORKFLOW_IN_PROGRESS"; @@ -382,6 +385,7 @@ protected AuthenticatedUser findAuthenticatedUserOrDie() throws WrappedResponse private AuthenticatedUser findAuthenticatedUserOrDie( String key, String wfid ) throws WrappedResponse { if (key != null) { + // No check for deactivated user because it's done in authSvc.lookupUser. AuthenticatedUser authUser = authSvc.lookupUser(key); if (authUser != null) { @@ -482,6 +486,37 @@ protected DataFile findDataFileOrDie(String id) throws WrappedResponse { } } } + + protected DataverseRole findRoleOrDie(String id) throws WrappedResponse { + DataverseRole role; + if (id.equals(ALIAS_KEY)) { + String alias = getRequestParameter(ALIAS_KEY.substring(1)); + try { + return em.createNamedQuery("DataverseRole.findDataverseRoleByAlias", DataverseRole.class) + .setParameter("alias", alias) + .getSingleResult(); + + //Should not be a multiple result exception due to table constraint + } catch (NoResultException nre) { + throw new WrappedResponse(notFound(BundleUtil.getStringFromBundle("find.dataverse.role.error.role.not.found.alias", Collections.singletonList(alias)))); + } + + } else { + + try { + role = rolesSvc.find(Long.parseLong(id)); + if (role == null) { + throw new WrappedResponse(notFound(BundleUtil.getStringFromBundle("find.dataverse.role.error.role.not.found.id", Collections.singletonList(id)))); + } else { + return role; + } + + } catch (NumberFormatException nfe) { + throw new WrappedResponse( + badRequest(BundleUtil.getStringFromBundle("find.dataverse.role.error.role.not.found.bad.id", Collections.singletonList(id)))); + } + } + } protected DatasetLinkingDataverse findDatasetLinkingDataverseOrDie(String datasetId, String linkingDataverseId) throws WrappedResponse { DatasetLinkingDataverse dsld; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 2cd7e0dd122..6fc2f066b36 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -275,13 +275,18 @@ private DataFile findDataFileOrDieWrapper(String fileId){ @Produces({"application/xml"}) public DownloadInstance datafile(@PathParam("fileId") String fileId, @QueryParam("gbrecs") boolean gbrecs, @QueryParam("key") String apiToken, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ { + // check first if there's a trailing slash, and chop it: + while (fileId.lastIndexOf('/') == fileId.length() - 1) { + fileId = fileId.substring(0, fileId.length() - 1); + } + if (fileId.indexOf('/') > -1) { // This is for embedding folder names into the Access API URLs; // something like /api/access/datafile/folder/subfolder/1234 // instead of the normal /api/access/datafile/1234 notation. // this is supported only for recreating folders during recursive downloads - // i.e. they are embedded into the URL for the remote client like wget, - // but can be safely ignored here. + // but can be safely ignored here. 
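The reworked findRoleOrDie above accepts either the literal :alias key (resolved through the new DataverseRole.findDataverseRoleByAlias query) or a plain numeric id. A compact, self-contained sketch of that dispatch; the helper name and its return strings are invented for illustration:

    public class RoleLookupSketch {

        // Mirrors the ALIAS_KEY sentinel defined in AbstractApiBean above.
        private static final String ALIAS_KEY = ":alias";

        static String describeLookup(String id, String aliasParam) {
            if (ALIAS_KEY.equals(id)) {
                // The real code runs the DataverseRole.findDataverseRoleByAlias query
                // and maps an empty result to a 404 response.
                return "look up role by alias '" + aliasParam + "'";
            }
            try {
                // The real code calls rolesSvc.find(...) and maps null to a 404.
                return "look up role by numeric id " + Long.parseLong(id);
            } catch (NumberFormatException nfe) {
                // The real code maps this to a 400 Bad Request.
                return "reject '" + id + "': neither :alias nor a number";
            }
        }

        public static void main(String[] args) {
            System.out.println(describeLookup(":alias", "curator"));
            System.out.println(describeLookup("42", null));
            System.out.println(describeLookup("not-a-number", null));
        }
    }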
fileId = fileId.substring(fileId.lastIndexOf('/') + 1); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index fc7cf73d505..52d7f0280dc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.EMailValidator; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.License; import edu.harvard.iq.dataverse.LicenseServiceBean; import edu.harvard.iq.dataverse.UserServiceBean; @@ -74,10 +75,12 @@ import javax.ws.rs.core.Response.Status; import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; import java.util.List; import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationProvidersRegistrationServiceBean; +import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.RoleAssignee; import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; @@ -87,6 +90,10 @@ import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.impl.MergeInAccountCommand; +import edu.harvard.iq.dataverse.engine.command.impl.ChangeUserIdentifierCommand; +import edu.harvard.iq.dataverse.engine.command.impl.DeactivateUserCommand; +import edu.harvard.iq.dataverse.engine.command.impl.DeleteRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.RegisterDvObjectCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -101,6 +108,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Date; +import java.util.function.Consumer; import javax.inject.Inject; import javax.json.JsonArray; import javax.persistence.Query; @@ -152,8 +160,8 @@ public class Admin extends AbstractApiBean { @EJB BannerMessageServiceBean bannerMessageService; @EJB - LicenseServiceBean licenseService; - + LicenseServiceBean licenseService; + // Make the session available @Inject @@ -382,9 +390,40 @@ private Response deleteAuthenticatedUser(AuthenticatedUser au) { return ok("AuthenticatedUser " + au.getIdentifier() + " deleted. 
"); } - - + @POST + @Path("authenticatedUsers/{identifier}/deactivate") + public Response deactivateAuthenticatedUser(@PathParam("identifier") String identifier) { + AuthenticatedUser user = authSvc.getAuthenticatedUser(identifier); + if (user != null) { + return deactivateAuthenticatedUser(user); + } + return error(Response.Status.BAD_REQUEST, "User " + identifier + " not found."); + } + + @POST + @Path("authenticatedUsers/id/{id}/deactivate") + public Response deactivateAuthenticatedUserById(@PathParam("id") Long id) { + AuthenticatedUser user = authSvc.findByID(id); + if (user != null) { + return deactivateAuthenticatedUser(user); + } + return error(Response.Status.BAD_REQUEST, "User " + id + " not found."); + } + + private Response deactivateAuthenticatedUser(AuthenticatedUser userToDisable) { + AuthenticatedUser superuser = authSvc.getAdminUser(); + if (superuser == null) { + return error(Response.Status.INTERNAL_SERVER_ERROR, "Cannot find superuser to execute DeactivateUserCommand."); + } + try { + execCommand(new DeactivateUserCommand(createDataverseRequest(superuser), userToDisable)); + return ok("User " + userToDisable.getIdentifier() + " deactivated."); + } catch (WrappedResponse ex) { + return ex.getResponse(); + } + } + @POST @Path("publishDataverseAsCreator/{id}") public Response publishDataverseAsCreator(@PathParam("id") long id) { @@ -658,6 +697,10 @@ public Response builtin2shib(String content) { boolean knowsExistingPassword = false; BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); if (oldBuiltInUser != null) { + if (builtInUserToConvert.isDeactivated()) { + problems.add("builtin account has been deactivated"); + return error(Status.BAD_REQUEST, problems.build().toString()); + } String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); response.add("old username", usernameOfBuiltinAccountToConvert); AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, @@ -892,6 +935,17 @@ public Response listBuiltinRoles() { } } + @DELETE + @Path("roles/{id}") + public Response deleteRole(@PathParam("id") String id) { + + return response(req -> { + DataverseRole doomed = findRoleOrDie(id); + execCommand(new DeleteRoleCommand(req, doomed)); + return ok("role " + doomed.getName() + " deleted."); + }); + } + @Path("superuser/{identifier}") @POST public Response toggleSuperuser(@PathParam("identifier") String identifier) { @@ -899,6 +953,9 @@ public Response toggleSuperuser(@PathParam("identifier") String identifier) { .setInfo(identifier); try { AuthenticatedUser user = authSvc.getAuthenticatedUser(identifier); + if (user.isDeactivated()) { + return error(Status.BAD_REQUEST, "You cannot make a deactivated user a superuser."); + } user.setSuperuser(!user.isSuperuser()); @@ -1687,7 +1744,7 @@ public Response submitDatasetVersionToArchive(@PathParam("id") String dsid, @Pat // DataverseRequest and is sent to the back-end command where it is used to get // the API Token which is then used to retrieve files (e.g. via S3 direct // downloads) to create the Bag - session.setUser(au); + session.setUser(au); // TODO: Stop using session. Use createDataverseRequest instead. 
Dataset ds = findDatasetOrDie(dsid); DatasetVersion dv = datasetversionService.findByFriendlyVersionNumber(ds.getId(), versionNumber); @@ -2007,5 +2064,5 @@ public Response deleteLicenseByName(@PathParam("name") String name) { } return error(Response.Status.NOT_FOUND, "A license with name " + name + " doesn't exist."); } - + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java index 3cb5f900cd6..646abf51b6c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java @@ -470,15 +470,23 @@ private String parseControlledVocabulary(String[] values) { if (cvv == null) { cvv = new ControlledVocabularyValue(); cvv.setDatasetFieldType(dsv); - //Alt is only for dataload so only add to new - for (int i = 5; i < values.length; i++) { - ControlledVocabAlternate alt = new ControlledVocabAlternate(); - alt.setDatasetFieldType(dsv); - alt.setControlledVocabularyValue(cvv); - alt.setStrValue(values[i]); - cvv.getControlledVocabAlternates().add(alt); - } - } + } + + // Alternate variants for this controlled vocab. value: + + // Note that these are overwritten every time: + cvv.getControlledVocabAlternates().clear(); + // - meaning, if an alternate has been removed from the tsv file, + // it will be removed from the database! -- L.A. 5.4 + + for (int i = 5; i < values.length; i++) { + ControlledVocabAlternate alt = new ControlledVocabAlternate(); + alt.setDatasetFieldType(dsv); + alt.setControlledVocabularyValue(cvv); + alt.setStrValue(values[i]); + cvv.getControlledVocabAlternates().add(alt); + } + cvv.setStrValue(values[2]); cvv.setIdentifier(values[3]); cvv.setDisplayOrder(Integer.parseInt(values[4])); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 69769b8172c..15865766976 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -267,7 +267,7 @@ public Response exportDataset(@QueryParam("persistentId") String persistentId, @ return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found."); } - ExportService instance = ExportService.getInstance(settingsSvc); + ExportService instance = ExportService.getInstance(); InputStream is = instance.getExport(dataset, exporter); @@ -807,11 +807,16 @@ private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){ @PUT @Path("{id}/editMetadata") - public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) throws WrappedResponse{ + public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) { Boolean replaceData = replace != null; - - DataverseRequest req = createDataverseRequest(findUserOrDie()); + DataverseRequest req = null; + try { + req = createDataverseRequest(findUserOrDie()); + } catch (WrappedResponse ex) { + logger.log(Level.SEVERE, "Edit metadata error: " + ex.getMessage(), ex); + return ex.getResponse(); + } return processDatasetUpdate(jsonBody, id, req, replaceData); } @@ -1834,6 +1839,9 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, } catch (DataFileTagException ex) { return error( Response.Status.BAD_REQUEST, ex.getMessage()); } + catch (ClassCastException |
com.google.gson.JsonParseException ex) { + return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing")); + } // ------------------------------------- // (3) Get the file name and content type diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java b/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java index 7c81fe1a892..d9a94ee340b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java @@ -38,9 +38,8 @@ public Response submitFeedback(JsonObject jsonObject) throws AddressException { String userEmail = jsonObject.getString("fromEmail"); String messageSubject = jsonObject.getString("subject"); String baseUrl = systemConfig.getDataverseSiteUrl(); - String rootDataverseName = dataverseSvc.findRootDataverse().getName(); - String installationBrandName = BrandingUtil.getInstallationBrandName(rootDataverseName); - String supportTeamName = BrandingUtil.getSupportTeamName(systemAddress, rootDataverseName); + String installationBrandName = BrandingUtil.getInstallationBrandName(); + String supportTeamName = BrandingUtil.getSupportTeamName(systemAddress); JsonArrayBuilder jab = Json.createArrayBuilder(); List feedbacks = FeedbackUtil.gatherFeedback(recipient, dataverseSession, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName); feedbacks.forEach((feedback) -> { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index defc2c4d9ab..818df72c54e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -212,17 +212,33 @@ public Response replaceFileInDataset( } catch (DataFileTagException ex) { return error(Response.Status.BAD_REQUEST, ex.getMessage()); } - } catch (ClassCastException ex) { - logger.info("Exception parsing string '" + jsonData + "': " + ex); + } catch (ClassCastException | com.google.gson.JsonParseException ex) { + return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing")); } } // (3) Get the file name and content type - if(null == contentDispositionHeader) { - return error(BAD_REQUEST, "You must upload a file."); + String newFilename = null; + String newFileContentType = null; + String newStorageIdentifier = null; + if (null == contentDispositionHeader) { + if (optionalFileParams.hasStorageIdentifier()) { + newStorageIdentifier = optionalFileParams.getStorageIdentifier(); + // ToDo - check that storageIdentifier is valid + if (optionalFileParams.hasFileName()) { + newFilename = optionalFileParams.getFileName(); + if (optionalFileParams.hasMimetype()) { + newFileContentType = optionalFileParams.getMimeType(); + } + } + } else { + return error(BAD_REQUEST, + "You must upload a file or provide a storageidentifier, filename, and mimetype."); + } + } else { + newFilename = contentDispositionHeader.getFileName(); + newFileContentType = formDataBodyPart.getMediaType().toString(); } - String newFilename = contentDispositionHeader.getFileName(); - String newFileContentType = formDataBodyPart.getMediaType().toString(); // (4) Create the AddReplaceFileHelper object msg("REPLACE!"); @@ -254,14 +270,16 @@ public Response replaceFileInDataset( addFileHelper.runForceReplaceFile(fileToReplaceId, newFilename, newFileContentType, + newStorageIdentifier, testFileInputStream, optionalFileParams); }else{ 
addFileHelper.runReplaceFile(fileToReplaceId, newFilename, newFileContentType, + newStorageIdentifier, testFileInputStream, - optionalFileParams); + optionalFileParams); } msg("we're back....."); @@ -357,7 +375,7 @@ public Response updateFileMetadata(@FormDataParam("jsonData") String jsonData, return error(Response.Status.BAD_REQUEST, ex.getMessage()); } } catch (ClassCastException | com.google.gson.JsonParseException ex) { - return error(Response.Status.BAD_REQUEST, "Exception parsing provided json"); + return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing")); } } @@ -601,7 +619,7 @@ public Response redetectDatafile(@PathParam("id") String id, @QueryParam("dryRun private void exportDatasetMetadata(SettingsServiceBean settingsServiceBean, Dataset theDataset) { try { - ExportService instance = ExportService.getInstance(settingsServiceBean); + ExportService instance = ExportService.getInstance(); instance.exportAllFormats(theDataset); } catch (ExportException ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Roles.java b/src/main/java/edu/harvard/iq/dataverse/api/Roles.java index b3f75e00c5a..72add184a24 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Roles.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Roles.java @@ -1,8 +1,10 @@ package edu.harvard.iq.dataverse.api; +import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; import edu.harvard.iq.dataverse.api.dto.RoleDTO; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -11,6 +13,9 @@ import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import edu.harvard.iq.dataverse.engine.command.impl.CreateRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteRoleCommand; +import edu.harvard.iq.dataverse.util.BundleUtil; +import java.util.Arrays; +import java.util.List; import javax.ejb.Stateless; import javax.ws.rs.DELETE; import javax.ws.rs.QueryParam; @@ -26,7 +31,7 @@ public class Roles extends AbstractApiBean { @GET @Path("{id}") - public Response viewRole( @PathParam("id") Long id) { + public Response viewRole( @PathParam("id") String id) { return response( ()-> { final User user = findUserOrDie(); final DataverseRole role = findRoleOrDie(id); @@ -35,14 +40,19 @@ public Response viewRole( @PathParam("id") Long id) { }); } - @DELETE - @Path("{id}") - public Response deleteRole( @PathParam("id") Long id ) { - return response( req -> { - execCommand( new DeleteRoleCommand(req, findRoleOrDie(id)) ); - return ok("role " + id + " deleted."); + @DELETE + @Path("{id}") + public Response deleteRole(@PathParam("id") String id) { + return response(req -> { + DataverseRole role = findRoleOrDie(id); + List args = Arrays.asList(role.getName()); + if (role.getOwner() == null) { + throw new WrappedResponse(forbidden(BundleUtil.getStringFromBundle("find.dataverse.role.error.role.builtin.not.allowed", args))); + } + execCommand(new DeleteRoleCommand(req, role)); + return ok("role " + role.getName() + " deleted."); }); - } + } @POST public Response createNewRole( RoleDTO roleDto, @@ -52,11 +62,4 @@ public Response createNewRole( RoleDTO roleDto, req,findDataverseOrDie(dvoIdtf)))))); } - private DataverseRole findRoleOrDie( long id ) throws WrappedResponse { - DataverseRole role = rolesSvc.find(id); - if ( 
role != null ) { - return role; - } - throw new WrappedResponse(notFound( "role with id " + id + " not found")); - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Users.java b/src/main/java/edu/harvard/iq/dataverse/api/Users.java index 37eedbe7714..ce226ea14b8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Users.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Users.java @@ -10,10 +10,13 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.engine.command.impl.ChangeUserIdentifierCommand; +import edu.harvard.iq.dataverse.engine.command.impl.GetUserTracesCommand; import edu.harvard.iq.dataverse.engine.command.impl.MergeInAccountCommand; +import edu.harvard.iq.dataverse.engine.command.impl.RevokeAllRolesCommand; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import java.util.logging.Logger; import javax.ejb.Stateless; +import javax.json.JsonObjectBuilder; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -191,4 +194,31 @@ public Response getAuthenticatedUserByToken() { } + @POST + @Path("{identifier}/removeRoles") + public Response removeUserRoles(@PathParam("identifier") String identifier) { + try { + AuthenticatedUser userToModify = authSvc.getAuthenticatedUser(identifier); + if (userToModify == null) { + return error(Response.Status.BAD_REQUEST, "Cannot find user based on " + identifier + "."); + } + execCommand(new RevokeAllRolesCommand(userToModify, createDataverseRequest(findUserOrDie()))); + return ok("Roles removed for user " + identifier + "."); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + + @GET + @Path("{identifier}/traces") + public Response getTraces(@PathParam("identifier") String identifier) { + try { + AuthenticatedUser userToQuery = authSvc.getAuthenticatedUser(identifier); + JsonObjectBuilder jsonObj = execCommand(new GetUserTracesCommand(createDataverseRequest(findUserOrDie()), userToQuery)); + return ok(jsonObj); + } catch (WrappedResponse ex) { + return ex.getResponse(); + } + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java index 7c80fe810d2..049b20f605b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java @@ -10,6 +10,8 @@ import java.util.logging.Logger; import javax.ejb.EJB; import javax.inject.Inject; + +import org.apache.commons.lang3.StringUtils; import org.swordapp.server.AuthCredentials; import org.swordapp.server.ServiceDocument; import org.swordapp.server.ServiceDocumentManager; @@ -43,12 +45,9 @@ public ServiceDocument getServiceDocument(String sdUri, AuthCredentials authCred String warning = urlManager.processUrl(sdUri); ServiceDocument service = new ServiceDocument(); SwordWorkspace swordWorkspace = new SwordWorkspace(); - Dataverse rootDataverse = dataverseService.findRootDataverse(); - if (rootDataverse != null) { - String name = rootDataverse.getName(); - if (name != null) { - swordWorkspace.setTitle(name); - } + String name = dataverseService.getRootDataverseName(); + if (!StringUtils.isEmpty(name)) { + swordWorkspace.setTitle(name); } if (warning != null) { swordWorkspace.getWrappedWorkspace().setAttributeValue("warning", warning); diff --git 
a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java index 4a474f62894..13fc37bdc40 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java @@ -36,6 +36,7 @@ public AuthenticatedUser auth(AuthCredentials authCredentials) throws SwordAuthE throw new SwordAuthException(msg); } + // Checking if the user is deactivated is done inside findUserByApiToken. AuthenticatedUser authenticatedUserFromToken = findUserByApiToken(username); if (authenticatedUserFromToken == null) { String msg = "User not found based on API token."; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java index 3d0ad99d062..349a86301a6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java @@ -30,8 +30,10 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailData; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean; +import edu.harvard.iq.dataverse.engine.command.impl.RevokeAllRolesCommand; import edu.harvard.iq.dataverse.passwordreset.PasswordResetData; import edu.harvard.iq.dataverse.passwordreset.PasswordResetServiceBean; +import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean; import edu.harvard.iq.dataverse.workflow.PendingWorkflowInvocation; @@ -121,7 +123,10 @@ public class AuthenticationServiceBean { @EJB ExplicitGroupServiceBean explicitGroupService; - + + @EJB + SavedSearchServiceBean savedSearchService; + @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; @@ -194,10 +199,7 @@ public boolean isOrcidEnabled() { * * Before calling this method, make sure you've deleted all the stuff tied * to the user, including stuff they've created, role assignments, group - * assignments, etc. - * - * Longer term, the intention is to have a "disableAuthenticatedUser" - * method/command. See https://github.com/IQSS/dataverse/issues/2419 + * assignments, etc. See the "removeAuthentictedUserItems" (sic) method. */ public void deleteAuthenticatedUser(Object pk) { AuthenticatedUser user = em.find(AuthenticatedUser.class, pk); @@ -304,7 +306,7 @@ public AuthenticatedUser getUpdateAuthenticatedUser( String authenticationProvid // yay! see if we already have this user. 
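On the SWORD side, the comment above notes that deactivated accounts are filtered inside findUserByApiToken, so a deactivated user's token should now fail before any deposit operation runs. A hypothetical service-document probe, assuming the documented SWORD v2 path and a placeholder host and token:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class SwordServiceDocumentProbe {
        public static void main(String[] args) throws Exception {
            String apiToken = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"; // placeholder
            // SWORD sends the API token as the Basic-auth username with an empty password.
            String basic = Base64.getEncoder()
                    .encodeToString((apiToken + ":").getBytes(StandardCharsets.UTF_8));
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("https://dataverse.example.edu/dvn/api/data-deposit/v1.1/swordv2/service-document"))
                    .header("Authorization", "Basic " + basic)
                    .GET()
                    .build();
            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            // A token belonging to a deactivated account should now fail here.
            System.out.println(response.statusCode());
        }
    }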
AuthenticatedUser user = lookupUser(authenticationProviderId, resp.getUserId()); - if (user != null){ + if (user != null && !user.isDeactivated()) { user = userService.updateLastLogin(user); } @@ -448,7 +450,13 @@ public AuthenticatedUser lookupUser( String apiToken ) { } } - return tkn.getAuthenticatedUser(); + AuthenticatedUser user = tkn.getAuthenticatedUser(); + if (!user.isDeactivated()) { + return user; + } else { + logger.info("attempted access with token from deactivated user: " + apiToken); + return null; + } } public AuthenticatedUser lookupUserForWorkflowInvocationID(String wfId) { @@ -498,6 +506,10 @@ public String getDeleteUserErrorMessages(AuthenticatedUser au) { if (!datasetVersionService.getDatasetVersionUsersByAuthenticatedUser(au).isEmpty()) { reasons.add(BundleUtil.getStringFromBundle("admin.api.deleteUser.failure.versionUser")); } + + if (!savedSearchService.findByAuthenticatedUser(au).isEmpty()) { + reasons.add(BundleUtil.getStringFromBundle("admin.api.deleteUser.failure.savedSearches")); + } if (!reasons.isEmpty()) { retVal = BundleUtil.getStringFromBundle("admin.api.deleteUser.failure.prefix", Arrays.asList(au.getIdentifier())); @@ -538,7 +550,6 @@ private void deletePendingAccessRequests(AuthenticatedUser au){ } - public AuthenticatedUser save( AuthenticatedUser user ) { em.persist(user); em.flush(); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java index 8ca72e7e9bc..12ddf817221 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java @@ -33,18 +33,20 @@ * @author michael */ @NamedQueries({ - @NamedQuery(name = "DataverseRole.findByOwnerId", - query= "SELECT r FROM DataverseRole r WHERE r.owner.id=:ownerId ORDER BY r.name"), - @NamedQuery(name = "DataverseRole.findBuiltinRoles", - query= "SELECT r FROM DataverseRole r WHERE r.owner is null ORDER BY r.name"), + @NamedQuery(name = "DataverseRole.findByOwnerId", + query = "SELECT r FROM DataverseRole r WHERE r.owner.id=:ownerId ORDER BY r.name"), + @NamedQuery(name = "DataverseRole.findBuiltinRoles", + query = "SELECT r FROM DataverseRole r WHERE r.owner is null ORDER BY r.name"), @NamedQuery(name = "DataverseRole.findBuiltinRoleByAlias", - query= "SELECT r FROM DataverseRole r WHERE r.alias=:alias AND r.owner is null"), - @NamedQuery(name = "DataverseRole.findCustomRoleByAliasAndOwner", - query= "SELECT r FROM DataverseRole r WHERE r.alias=:alias and (r.owner is null or r.owner.id=:ownerId)"), - @NamedQuery(name = "DataverseRole.listAll", - query= "SELECT r FROM DataverseRole r"), - @NamedQuery(name = "DataverseRole.deleteById", - query= "DELETE FROM DataverseRole r WHERE r.id=:id") + query = "SELECT r FROM DataverseRole r WHERE r.alias=:alias AND r.owner is null"), + @NamedQuery(name = "DataverseRole.findDataverseRoleByAlias", + query = "SELECT r FROM DataverseRole r WHERE r.alias=:alias"), + @NamedQuery(name = "DataverseRole.findCustomRoleByAliasAndOwner", + query = "SELECT r FROM DataverseRole r WHERE r.alias=:alias and (r.owner is null or r.owner.id=:ownerId)"), + @NamedQuery(name = "DataverseRole.listAll", + query = "SELECT r FROM DataverseRole r"), + @NamedQuery(name = "DataverseRole.deleteById", + query = "DELETE FROM DataverseRole r WHERE r.id=:id") }) @Entity @Table(indexes = {@Index(columnList="owner_id") diff --git 
a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java index fd7231e827c..9ae4e4b0e87 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java @@ -3,8 +3,10 @@ import edu.harvard.iq.dataverse.ValidateEmail; import edu.harvard.iq.dataverse.ValidateUserName; import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo; +import edu.harvard.iq.dataverse.passwordreset.PasswordResetData; import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty; import java.io.Serializable; +import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; @@ -13,6 +15,7 @@ import javax.persistence.Index; import javax.persistence.NamedQueries; import javax.persistence.NamedQuery; +import javax.persistence.OneToOne; import javax.persistence.Table; import javax.persistence.Transient; import javax.validation.constraints.Size; @@ -47,6 +50,10 @@ public class BuiltinUser implements Serializable { private String userName; private int passwordEncryptionVersion; + + @OneToOne(mappedBy = "builtinUser", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) + private PasswordResetData passwordResetData; + private String encryptedPassword; /** diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java index 204d93b5b8f..673839450d6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java @@ -162,7 +162,7 @@ public String init() { } } - if ( session.getUser().isAuthenticated() ) { + if (session.getUser(true).isAuthenticated()) { setCurrentUser((AuthenticatedUser) session.getUser()); userAuthProvider = authenticationService.lookupProvider(currentUser); notificationsList = userNotificationService.findByUser(currentUser.getId()); @@ -284,6 +284,12 @@ public void validateNewPassword(FacesContext context, UIComponent toValidate, Ob public String save() { boolean passwordChanged = false; + + //First reget user to make sure they weren't deactivated or deleted + if (session.getUser().isAuthenticated() && !session.getUser(true).isAuthenticated()) { + return "dataverse.xhtml?alias=" + dataverseService.findRootDataverse().getAlias() + "&faces-redirect=true"; + } + if (editMode == EditMode.CHANGE_PASSWORD) { final AuthenticationProvider prv = getUserAuthProvider(); if (prv.isPasswordUpdateAllowed()) { @@ -327,7 +333,6 @@ public String save() { // Authenticated user registered. Save the new bulitin, and log in. 
builtinUserService.save(builtinUser); session.setUser(au); - session.configureSessionTimeout(); /** * @todo Move this to * AuthenticationServiceBean.createAuthenticatedUser @@ -485,6 +490,8 @@ public void displayNotification() { case PUBLISHEDDS: case PUBLISHFAILED_PIDREG: case RETURNEDDS: + case WORKFLOW_SUCCESS: + case WORKFLOW_FAILURE: userNotification.setTheObject(datasetVersionService.find(userNotification.getObjectId())); break; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java index a3ce3c5bdf7..44f00f797a0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java @@ -183,7 +183,6 @@ public String createNewAccount() { newUser.getDisplayInfo().getPosition()); final AuthenticatedUser user = authenticationSvc.createAuthenticatedUser(newUser.getUserRecordIdentifier(), getUsername(), newAud, true); session.setUser(user); - session.configureSessionTimeout(); /** * @todo Move this to AuthenticationServiceBean.createAuthenticatedUser */ @@ -209,11 +208,14 @@ public String convertExistingAccount() { auReq.putCredential(creds.get(1).getKey(), getPassword()); try { AuthenticatedUser existingUser = authenticationSvc.getUpdateAuthenticatedUser(BuiltinAuthenticationProvider.PROVIDER_ID, auReq); + if (existingUser.isDeactivated()) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("oauth2.convertAccount.failedDeactivated")); + return null; + } authenticationSvc.updateProvider(existingUser, newUser.getServiceId(), newUser.getIdInService()); builtinUserSvc.removeUser(existingUser.getUserIdentifier()); session.setUser(existingUser); - session.configureSessionTimeout(); AuthenticationProvider newUserAuthProvider = authenticationSvc.getAuthenticationProvider(newUser.getServiceId()); JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("oauth2.convertAccount.success", Arrays.asList(newUserAuthProvider.getInfo().getTitle()))); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java index e42f82d48d8..225352dec43 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java @@ -106,8 +106,8 @@ public void exchangeCodeForToken() throws IOException { } else { // login the user and redirect to HOME of intended page (if any). + // setUser checks for deactivated users. 
session.setUser(dvUser); - session.configureSessionTimeout(); final OAuth2TokenData tokenData = oauthUser.getTokenData(); if (tokenData != null) { tokenData.setUser(dvUser); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index 12161eb1a59..db6164e0ac7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -7,6 +7,7 @@ import edu.harvard.iq.dataverse.authorization.AccessRequest; import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo; import edu.harvard.iq.dataverse.authorization.AuthenticatedUserLookup; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2TokenData; import edu.harvard.iq.dataverse.userdata.UserUtil; import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.OrcidOAuth2AP; import edu.harvard.iq.dataverse.util.BundleUtil; @@ -114,6 +115,12 @@ public class AuthenticatedUser implements User, Serializable { private boolean superuser; + @Column(nullable=true) + private boolean deactivated; + + @Column(nullable=true) + private Timestamp deactivatedTime; + /** * @todo Consider storing a hash of *all* potentially interesting Shibboleth * attribute key/value pairs, not just the Identity Provider (IdP). @@ -159,7 +166,10 @@ public List getDatasetLocks() { public void setDatasetLocks(List datasetLocks) { this.datasetLocks = datasetLocks; } - + + @OneToMany(mappedBy = "user", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) + private List oAuth2TokenDatas; + @Override public AuthenticatedUserDisplayInfo getDisplayInfo() { return new AuthenticatedUserDisplayInfo(firstName, lastName, email, affiliation, position); @@ -303,6 +313,23 @@ public void setSuperuser(boolean superuser) { this.superuser = superuser; } + @Override + public boolean isDeactivated() { + return deactivated; + } + + public void setDeactivated(boolean deactivated) { + this.deactivated = deactivated; + } + + public Timestamp getDeactivatedTime() { + return deactivatedTime; + } + + public void setDeactivatedTime(Timestamp deactivatedTime) { + this.deactivatedTime = deactivatedTime; + } + @OneToOne(mappedBy = "authenticatedUser") private AuthenticatedUserLookup authenticatedUserLookup; @@ -360,6 +387,9 @@ public JsonObjectBuilder toJson() { authenicatedUserJson.add("lastLoginTime", UserUtil.getTimestampStringOrNull(this.lastLoginTime)); authenicatedUserJson.add("lastApiUseTime", UserUtil.getTimestampStringOrNull(this.lastApiUseTime)); + authenicatedUserJson.add("deactivated", this.deactivated); + authenicatedUserJson.add("deactivatedTime", UserUtil.getTimestampStringOrNull(this.deactivatedTime)); + return authenicatedUserJson; } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/GuestUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/GuestUser.java index f16fa5afe36..16de1b2eaff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/GuestUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/GuestUser.java @@ -32,7 +32,7 @@ public RoleAssigneeDisplayInfo getDisplayInfo() { public boolean isSuperuser() { return false; } - + @Override public boolean equals( Object o ) { return (o instanceof GuestUser); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/User.java 
b/src/main/java/edu/harvard/iq/dataverse/authorization/users/User.java index ea35f87d178..4655c9c9f0d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/User.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/User.java @@ -14,4 +14,8 @@ public interface User extends RoleAssignee, Serializable { public boolean isSuperuser(); + default boolean isDeactivated() { + return false; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java index ea83fc15ebc..50661ee97fc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java @@ -1,22 +1,44 @@ package edu.harvard.iq.dataverse.branding; +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.Arrays; +import java.util.logging.Logger; + import javax.mail.internet.InternetAddress; public class BrandingUtil { - public static String getInstallationBrandName(String rootDataverseName) { - return rootDataverseName; + private static final Logger logger = Logger.getLogger(BrandingUtil.class.getCanonicalName()); + + private static DataverseServiceBean dataverseService; + private static SettingsServiceBean settingsService; + + public static String getInstallationBrandName() { + + String brandName = settingsService.getValueForKey(SettingsServiceBean.Key.InstallationName); + //Separate if statement simplifies test setup, otherwise could use the getValueForKey method with a default param + if(brandName==null) { + brandName = dataverseService.getRootDataverseName(); + } + return brandName; + } + + // Convenience to access root name without injecting dataverseService (e.g. 
in + // DatasetVersion) + public static String getRootDataverseCollectionName() { + return dataverseService.getRootDataverseName(); } - public static String getSupportTeamName(InternetAddress systemAddress, String rootDataverseName) { + public static String getSupportTeamName(InternetAddress systemAddress) { if (systemAddress != null) { String personalName = systemAddress.getPersonal(); if (personalName != null) { return personalName; } } + String rootDataverseName=dataverseService.getRootDataverseName(); if (rootDataverseName != null && !rootDataverseName.isEmpty()) { return rootDataverseName + " " + BundleUtil.getStringFromBundle("contact.support"); } @@ -31,8 +53,12 @@ public static String getSupportTeamEmailAddress(InternetAddress systemAddress) { return systemAddress.getAddress(); } - public static String getContactHeader(InternetAddress systemAddress, String rootDataverseName) { - return BundleUtil.getStringFromBundle("contact.header", Arrays.asList(getSupportTeamName(systemAddress, rootDataverseName))); + public static String getContactHeader(InternetAddress systemAddress) { + return BundleUtil.getStringFromBundle("contact.header", Arrays.asList(getSupportTeamName(systemAddress))); } + public static void injectServices(DataverseServiceBean dataverseSvc, SettingsServiceBean settingsSvc) { + dataverseService = dataverseSvc; + settingsService = settingsSvc; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java new file mode 100644 index 00000000000..274970f8b8e --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java @@ -0,0 +1,28 @@ +package edu.harvard.iq.dataverse.branding; + +import javax.annotation.PostConstruct; +import javax.ejb.EJB; +import javax.ejb.Singleton; +import javax.ejb.Startup; + +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + + /** + * This is a small helper bean + * As it is a singleton and built at application start (=deployment), it will inject the (stateless) + * dataverse service into the BrandingUtil once it's ready. + */ + @Singleton + @Startup + public class BrandingUtilHelper { + + @EJB + DataverseServiceBean dataverseSvc; + @EJB SettingsServiceBean settingsSvc; + + @PostConstruct + public void injectService() { + BrandingUtil.injectServices(dataverseSvc, settingsSvc); + } + } \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java index 45a04ba4185..823d2c111f2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java @@ -55,7 +55,6 @@ public String init() { if (confirmEmailData != null) { user = confirmEmailData.getAuthenticatedUser(); session.setUser(user); - session.configureSessionTimeout(); // TODO: is this needed here? 
(it can't hurt, but still) JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("confirmEmail.details.success")); return "/dataverse.xhtml?faces-redirect=true"; } diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java index bc3b70326f1..e8748f1e158 100644 --- a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java @@ -121,20 +121,14 @@ private void sendLinkOnEmailChange(AuthenticatedUser aUser, String confirmationU try { String toAddress = aUser.getEmail(); - try { - Dataverse rootDataverse = dataverseService.findRootDataverse(); - if (rootDataverse != null) { - String rootDataverseName = rootDataverse.getName(); - // FIXME: consider refactoring this into MailServiceBean.sendNotificationEmail. CONFIRMEMAIL may be the only type where we don't want an in-app notification. - UserNotification userNotification = new UserNotification(); - userNotification.setType(UserNotification.Type.CONFIRMEMAIL); - String subject = MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null); - logger.fine("sending email to " + toAddress + " with this subject: " + subject); - mailService.sendSystemEmail(toAddress, subject, messageBody); - } - } catch (Exception e) { - logger.info("The root dataverse is not present. Don't send a notification to dataverseAdmin."); - } + + // FIXME: consider refactoring this into MailServiceBean.sendNotificationEmail. + // CONFIRMEMAIL may be the only type where we don't want an in-app notification. + UserNotification userNotification = new UserNotification(); + userNotification.setType(UserNotification.Type.CONFIRMEMAIL); + String subject = MailUtil.getSubjectTextBasedOnNotification(userNotification, null); + logger.fine("sending email to " + toAddress + " with this subject: " + subject); + mailService.sendSystemEmail(toAddress, subject, messageBody); } catch (Exception ex) { /** * @todo get more specific about the exception that's thrown when @@ -169,6 +163,10 @@ public ConfirmEmailExecResponse processToken(String tokenQueried) { long nowInMilliseconds = new Date().getTime(); Timestamp emailConfirmed = new Timestamp(nowInMilliseconds); AuthenticatedUser authenticatedUser = confirmEmailData.getAuthenticatedUser(); + if (authenticatedUser.isDeactivated()) { + logger.fine("User is deactivated."); + return null; + } authenticatedUser.setEmailConfirmed(emailConfirmed); em.remove(confirmEmailData); return goodTokenCanProceed; diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index a8c52414e56..e82e6d7ae37 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -21,6 +21,7 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.impl.AbstractCreateDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand; +import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataFileCommand; import edu.harvard.iq.dataverse.engine.command.impl.RestrictFileCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import 
edu.harvard.iq.dataverse.ingest.IngestServiceBean; @@ -365,12 +366,14 @@ public boolean runAddFile(Dataset dataset, * @param dataset * @param newFileName * @param newFileContentType + * @param newStorageIdentifier * @param newFileInputStream * @return */ public boolean runForceReplaceFile(Long oldFileId, String newFileName, String newFileContentType, + String newStorageIdentifier, InputStream newFileInputStream, OptionalFileParams optionalFileParams){ @@ -392,13 +395,14 @@ public boolean runForceReplaceFile(Long oldFileId, } - return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newFileInputStream, optionalFileParams); + return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newStorageIdentifier, newFileInputStream, optionalFileParams); } public boolean runReplaceFile(Long oldFileId, String newFileName, String newFileContentType, + String newStorageIdentifier, InputStream newFileInputStream, OptionalFileParams optionalFileParams){ @@ -418,7 +422,7 @@ public boolean runReplaceFile(Long oldFileId, if (!this.step_005_loadFileToReplaceById(oldFileId)){ return false; } - return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newFileInputStream, optionalFileParams); + return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newStorageIdentifier, newFileInputStream, optionalFileParams); } @@ -442,10 +446,6 @@ public boolean runReplaceFile(Long oldFileId, * * @return */ - private boolean runAddReplaceFile(Dataset owner, String newFileName, String newFileContentType, - InputStream newFileInputStream, OptionalFileParams optionalFileParams) { - return runAddReplaceFile(owner,newFileName, newFileContentType, null, newFileInputStream, optionalFileParams); - } private boolean runAddReplaceFile(Dataset owner, String newFileName, String newFileContentType, @@ -504,6 +504,18 @@ public boolean runReplaceFromUI_Phase1(Long oldFileId, if (!this.step_005_loadFileToReplaceById(oldFileId)){ return false; } + //Update params to match existing file (except checksum, which should match the new file) + if(fileToReplace != null) { + String checksum = optionalFileParams.getCheckSum(); + ChecksumType checkSumType = optionalFileParams.getCheckSumType(); + try { + optionalFileParams = new OptionalFileParams(fileToReplace); + optionalFileParams.setCheckSum(checksum, checkSumType); + } catch (DataFileTagException e) { + // Shouldn't happen since fileToReplace should have valid tags + e.printStackTrace(); + } + } return this.runAddReplacePhase1(fileToReplace.getOwner(), newFileName, @@ -574,6 +586,26 @@ private boolean runAddReplacePhase1(Dataset owner, return false; } + // if the fileToReplace hasn't been released, + if (fileToReplace != null && !fileToReplace.isReleased()) { + DataFile df = finalFileList.get(0); // step_055 uses a loop and assumes only one file + // set the replacement file's previous and root datafileIds to match (unless + // they are the defaults) + if (fileToReplace.getPreviousDataFileId() != null) { + df.setPreviousDataFileId(fileToReplace.getPreviousDataFileId()); + df.setRootDataFileId(fileToReplace.getRootDataFileId()); + } + // Reuse any file PID during a replace operation (if File PIDs are in use) + if (systemConfig.isFilePIDsEnabled()) { + df.setGlobalId(fileToReplace.getGlobalId()); + df.setGlobalIdCreateTime(fileToReplace.getGlobalIdCreateTime()); + // Should be true or fileToReplace wouldn't have an identifier (since it's not + // yet released in this if
statement) + df.setIdentifierRegistered(fileToReplace.isIdentifierRegistered()); + fileToReplace.setGlobalId(null); + } + } + return true; } @@ -1061,16 +1093,6 @@ private boolean step_005_loadFileToReplaceById(Long dataFileId){ if (!step_015_auto_check_permissions(existingFile.getOwner())){ return false; }; - - - - // Is the file published? - // - if (!existingFile.isReleased()){ - addError(getBundleErr("unpublished_file_cannot_be_replaced")); - return false; - } - // Is the file in the latest dataset version? // if (!step_007_auto_isReplacementInLatestVersion(existingFile)){ @@ -1532,7 +1554,22 @@ private boolean step_070_run_update_dataset_command(){ } Command update_cmd; - update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, clone); + String deleteStorageLocation = null; + long deleteFileId=-1; + if(isFileReplaceOperation()) { + List filesToDelete = new ArrayList(); + filesToDelete.add(fileToReplace.getFileMetadata()); + + if(!fileToReplace.isReleased()) { + //If file is only in draft version, also need to delete the physical file + deleteStorageLocation = fileService.getPhysicalFileToDelete(fileToReplace); + deleteFileId=fileToReplace.getId(); + } + //Adding the file to the delete list for the command will delete this filemetadata and, if the file hasn't been released, the datafile itself. + update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, filesToDelete, clone); + } else { + update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, clone); + } ((UpdateDatasetVersionCommand) update_cmd).setValidateLenient(true); try { @@ -1554,89 +1591,23 @@ private boolean step_070_run_update_dataset_command(){ this.addErrorSevere("add.add_file_error (see logs)"); logger.severe(ex.getMessage()); return false; - } - return true; - } - - - /** - * Go through the working DatasetVersion and remove the - * FileMetadata of the file to replace - * - * @return - */ - private boolean step_085_auto_remove_filemetadata_to_replace_from_working_version(){ - - msgt("step_085_auto_remove_filemetadata_to_replace_from_working_version 1"); - - if (!isFileReplaceOperation()){ - // Shouldn't happen! 
- this.addErrorSevere(getBundleErr("only_replace_operation") + " (step_085_auto_remove_filemetadata_to_replace_from_working_version"); - return false; - } - msg("step_085_auto_remove_filemetadata_to_replace_from_working_version 2"); - - if (this.hasError()){ - return false; } - - - msgt("File to replace getId: " + fileToReplace.getId()); - - Iterator fmIt = workingVersion.getFileMetadatas().iterator(); - msgt("Clear file to replace"); - int cnt = 0; - while (fmIt.hasNext()) { - cnt++; - - FileMetadata fm = fmIt.next(); - msg(cnt + ") next file: " + fm); - msg(" getDataFile().getId(): " + fm.getDataFile().getId()); - if (fm.getDataFile().getId() != null) { - if (Objects.equals(fm.getDataFile().getId(), fileToReplace.getId())) { - msg("Let's remove it!"); - - // If this is a tabular data file with a UNF, we'll need - // to recalculate the version UNF, once the file is removed: - - boolean recalculateUNF = !StringUtils.isEmpty(fm.getDataFile().getUnf()); - - if (workingVersion.getId() != null) { - // If this is an existing draft (i.e., this draft version - // is already saved in the dataset, we'll also need to remove this filemetadata - // explicitly: - msg(" this is an existing draft version..."); - fileService.removeFileMetadata(fm); - - // remove the filemetadata from the list of filemetadatas - // attached to the datafile object as well, for a good - // measure: - fileToReplace.getFileMetadatas().remove(fm); - // (and yes, we can do .remove(fm) safely - if this released - // file is part of an existing draft, we know that the - // filemetadata object also exists in the database, and thus - // has the id, and can be identified unambiguously. - } - - // and remove it from the list of filemetadatas attached - // to the version object, via the iterator: - fmIt.remove(); - - if (recalculateUNF) { - msg("recalculating the UNF"); - ingestService.recalculateDatasetVersionUNF(workingVersion); - msg("UNF recalculated: "+workingVersion.getUNF()); - } - - return true; + //Sanity check + if(isFileReplaceOperation()) { + if (deleteStorageLocation != null) { + // Finalize the delete of the physical file + // (File service will double-check that the datafile no + // longer exists in the database, before proceeding to + // delete the physical file) + try { + fileService.finalizeFileDelete(deleteFileId, deleteStorageLocation); + } catch (IOException ioex) { + logger.warning("Failed to delete the physical file associated with the deleted datafile id=" + + deleteFileId + ", storage location: " + deleteStorageLocation); } } } - - msg("No matches found!"); - addErrorSevere(getBundleErr("failed_to_remove_old_file_from_dataset")); - runMajorCleanup(); - return false; + return true; } @@ -1711,13 +1682,6 @@ private boolean step_080_run_update_dataset_command_for_replace(){ return false; } - // ----------------------------------------------------------- - // Remove the "fileToReplace" from the current working version - // ----------------------------------------------------------- - if (!step_085_auto_remove_filemetadata_to_replace_from_working_version()){ - return false; - } - // ----------------------------------------------------------- // Set the "root file ids" and "previous file ids" // THIS IS A KEY STEP - SPLIT IT OUT @@ -1727,26 +1691,27 @@ private boolean step_080_run_update_dataset_command_for_replace(){ // ----------------------------------------------------------- - /* - Check the root file id on fileToReplace, updating it if necessary - */ - if 
(fileToReplace.getRootDataFileId().equals(DataFile.ROOT_DATAFILE_ID_DEFAULT)){ + if (fileToReplace.isReleased()) { + /* + * Check the root file id on fileToReplace, updating it if necessary + */ + if (fileToReplace.getRootDataFileId().equals(DataFile.ROOT_DATAFILE_ID_DEFAULT)) { - fileToReplace.setRootDataFileId(fileToReplace.getId()); - fileToReplace = fileService.save(fileToReplace); - } - - /* - Go through the final file list, settting the rootFileId and previousFileId - */ - for (DataFile df : finalFileList){ - df.setPreviousDataFileId(fileToReplace.getId()); - - df.setRootDataFileId(fileToReplace.getRootDataFileId()); - - } + fileToReplace.setRootDataFileId(fileToReplace.getId()); + fileToReplace = fileService.save(fileToReplace); + } + + /* + * Go through the final file list, setting the rootFileId and previousFileId + */ + for (DataFile df : finalFileList) { + df.setPreviousDataFileId(fileToReplace.getId()); + + df.setRootDataFileId(fileToReplace.getRootDataFileId()); - // Call the update dataset command + } + } + // Call the update dataset command which will delete the replaced filemetadata and, if the file is not released, the file itself // return step_070_run_update_dataset_command(); diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileReplacePageHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileReplacePageHelper.java index 3832e15b85e..6e0c16a93e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileReplacePageHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileReplacePageHelper.java @@ -19,7 +19,7 @@ * Phase 1: File successfully uploaded and unpersisted DataFile is in memory * Phase 2: Save the files * - http://localhost:8080/editdatafiles.xhtml?mode=SINGLE_REPLACE&datasetId=26&fid=726 + http://localhost:8080/editdatafiles.xhtml?mode=REPLACE&datasetId=26&fid=726 * This is messy, trying to contain some of it--give me APIs or more time, more time:) * * @author rmp553 @@ -111,13 +111,8 @@ public boolean handleNativeFileUpload(InputStream inputStream, String fullStorag } OptionalFileParams ofp = null; + ofp = new OptionalFileParams(); if(checkSumValue != null) { - try { - ofp = new OptionalFileParams(null); - } catch (DataFileTagException e) { - // Shouldn't happen with null input - e.printStackTrace(); - } ofp.setCheckSum(checkSumValue, checkSumType); } // Run 1st phase of replace diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java index 302bdf915c8..cc75375f979 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java @@ -77,7 +77,9 @@ public class OptionalFileParams { public static final String CHECKSUM_OBJECT_TYPE = "@type"; public static final String CHECKSUM_OBJECT_VALUE = "@value"; - + public OptionalFileParams() { + } + public OptionalFileParams(String jsonData) throws DataFileTagException{ if (jsonData != null){ @@ -108,6 +110,21 @@ public OptionalFileParams(String description, this.restrict = restrict; } + //For use in replace operations - load the file metadata from the file being replaced so it can be applied to the new file + //checksum and mimetype aren't needed + public OptionalFileParams(DataFile df) throws DataFileTagException { + FileMetadata fm = df.getFileMetadata(); + + this.description = fm.getDescription(); + setCategories(fm.getCategoriesByName());
+ this.addFileDataTags(df.getTagLabels()); + this.restrict = fm.isRestricted(); + //Explicitly do not replace the file name - replaces with -force may change the mimetype and extension + //this.label = fm.getLabel(); + this.directoryLabel = fm.getDirectoryLabel(); + this.provFreeForm = fm.getProvFreeForm(); + } + /** * Set description @@ -289,12 +306,7 @@ private void loadParamsFromJson(String jsonData) throws DataFileTagException{ // logger.log(Level.SEVERE, "jsonData is null"); } JsonObject jsonObj; - try { - jsonObj = new Gson().fromJson(jsonData, JsonObject.class); - } catch (ClassCastException ex) { - logger.info("Exception parsing string '" + jsonData + "': " + ex); - return; - } + jsonObj = new Gson().fromJson(jsonData, JsonObject.class); // ------------------------------- // get description as string diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java index f6bd1316e44..8ba1d181609 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.authorization.RoleAssignee; import edu.harvard.iq.dataverse.authorization.groups.GroupException; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroup; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -49,6 +50,12 @@ public ExplicitGroup execute(CommandContext ctxt) throws CommandException { if ( ra == null ) { nonexistentRAs.add( rai ); } else { + if (ra instanceof AuthenticatedUser) { + AuthenticatedUser user = (AuthenticatedUser) ra; + if (user.isDeactivated()) { + throw new IllegalCommandException("User " + user.getUserIdentifier() + " is deactivated and cannot be added to a group.", this); + } + } try { explicitGroup.add(ra); } catch (GroupException ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java index 34263599ff0..276f52a5802 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java @@ -10,10 +10,12 @@ import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.RoleAssignee; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import java.util.Collections; import java.util.Map; import java.util.Set; @@ -49,6 +51,12 @@ public AssignRoleCommand(RoleAssignee anAssignee, DataverseRole aRole, DvObject @Override public RoleAssignment execute(CommandContext ctxt) throws CommandException { + if (grantee instanceof 
AuthenticatedUser) { + AuthenticatedUser user = (AuthenticatedUser) grantee; + if (user.isDeactivated()) { + throw new IllegalCommandException("User " + user.getUserIdentifier() + " is deactivated and cannot be given a role.", this); + } + } // TODO make sure the role is defined on the dataverse. RoleAssignment roleAssignment = new RoleAssignment(role, grantee, defPoint, privateUrlToken); return ctxt.roles().save(roleAssignment); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java index ff28021146d..cb9b0a3c774 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.api.AbstractApiBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -11,6 +12,8 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.util.BundleUtil; +import javax.persistence.NoResultException; /** * Create a new role in a dataverse. @@ -34,9 +37,19 @@ public DataverseRole execute(CommandContext ctxt) throws CommandException { User user = getUser(); //todo: temporary for 4.0 - only superusers can create and edit roles if ((!(user instanceof AuthenticatedUser) || !user.isSuperuser())) { - throw new IllegalCommandException("Roles can only be created or edited by superusers.",this); + throw new IllegalCommandException(BundleUtil.getStringFromBundle("permission.role.must.be.created.by.superuser"),this); + } + //Test to see if the role already exists in DB + try { + DataverseRole testRole = ctxt.em().createNamedQuery("DataverseRole.findDataverseRoleByAlias", DataverseRole.class) + .setParameter("alias", created.getAlias()) + .getSingleResult(); + if (!(testRole == null)) { + throw new IllegalCommandException(BundleUtil.getStringFromBundle("permission.role.not.created.alias.already.exists"), this); + } + } catch (NoResultException nre) { + // we want no results because that means we can create a role } - dv.addRole(created); return ctxt.roles().save(created); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index 373454bf694..a86f90d03eb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -61,7 +61,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // final DatasetVersion editVersion = getDataset().getEditVersion(); tidyUpFields(updateVersion); - // Merge the new version into out JPA context + // Merge the new version into our JPA context ctxt.em().merge(updateVersion); @@ -71,7 +71,8 @@ public Dataset execute(CommandContext ctxt) throws CommandException { updateVersion.setTermsOfUseAndAccess(newTerms); //Put old terms on version that
will be deleted.... getDataset().getEditVersion().setTermsOfUseAndAccess(oldTerms); - + //Also set the fileaccessrequest boolean on the dataset to match the new terms + getDataset().setFileAccessRequest(updateVersion.getTermsOfUseAndAccess().isFileAccessRequest()); List newComments = getDataset().getEditVersion().getWorkflowComments(); if (newComments!=null && newComments.size() >0) { for(WorkflowComment wfc: newComments) { @@ -162,7 +163,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // Update modification time on the published version and the dataset updateVersion.setLastUpdateTime(getTimestamp()); tempDataset.setModificationTime(getTimestamp()); - + ctxt.em().merge(updateVersion); Dataset savedDataset = ctxt.em().merge(tempDataset); // Flush before calling DeleteDatasetVersion which calls @@ -184,7 +185,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { //And the exported metadata files try { - ExportService instance = ExportService.getInstance(ctxt.settings()); + ExportService instance = ExportService.getInstance(); instance.exportAllFormats(getDataset()); } catch (ExportException ex) { // Just like with indexing, a failure to export is not a fatal condition. diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java index 6aa5e0e250a..391e798e285 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java @@ -75,7 +75,7 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException { boolean doNormalSolrDocCleanUp = true; - ExportService instance = ExportService.getInstance(ctxt.settings()); + ExportService instance = ExportService.getInstance(); if (managed.getDataset().getReleasedVersion() != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeactivateUserCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeactivateUserCommand.java new file mode 100644 index 00000000000..1dab8120767 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeactivateUserCommand.java @@ -0,0 +1,44 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import java.sql.Timestamp; +import java.util.Date; + +// Superuser-only enforced below. 
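The "superuser-only enforced below" note marks a pattern both new commands in this patch (DeactivateUserCommand here and GetUserTracesCommand further down) share: an empty @RequiredPermissions set, so the command engine itself checks nothing, with the real gate inside the command body. A minimal sketch of the shape, with a hypothetical command name and a placeholder body; only the guard itself is taken from this patch:

    @RequiredPermissions({})
    public class SuperuserGatedCommandSketch extends AbstractVoidCommand {

        public SuperuserGatedCommandSketch(DataverseRequest aRequest) {
            super(aRequest, (DvObject) null);
        }

        @Override
        protected void executeImpl(CommandContext ctxt) throws CommandException {
            // With no declared permissions, anyone can submit the command;
            // this in-body check is the only real gate.
            if (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser()) {
                throw new PermissionException("Superusers only.", this, null, null);
            }
            // privileged work would go here
        }
    }
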
+@RequiredPermissions({}) +public class DeactivateUserCommand extends AbstractCommand { + + private DataverseRequest request; + private AuthenticatedUser userToDeactivate; + + public DeactivateUserCommand(DataverseRequest request, AuthenticatedUser userToDeactivate) { + super(request, (DvObject) null); + this.request = request; + this.userToDeactivate = userToDeactivate; + } + + @Override + public AuthenticatedUser execute(CommandContext ctxt) throws CommandException { + if (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser()) { + throw new PermissionException("Deactivate user command can only be called by superusers.", this, null, null); + } + if (userToDeactivate == null) { + throw new CommandException("Cannot deactivate user. User not found.", this); + } + ctxt.engine().submit(new RevokeAllRolesCommand(userToDeactivate, request)); + ctxt.authentication().removeAuthentictedUserItems(userToDeactivate); + ctxt.notifications().findByUser(userToDeactivate.getId()).forEach(ctxt.notifications()::delete); + userToDeactivate.setDeactivated(true); + userToDeactivate.setDeactivatedTime(new Timestamp(new Date().getTime())); + AuthenticatedUser deactivatedUser = ctxt.authentication().save(userToDeactivate); + return deactivatedUser; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePidCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePidCommand.java index 841c84793ec..274aeb3c3fd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePidCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePidCommand.java @@ -13,12 +13,12 @@ import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; +import org.apache.commons.httpclient.HttpException; + import java.util.Arrays; import java.util.Collections; import java.util.logging.Logger; -import javax.xml.ws.http.HTTPException; - /** * No required permissions because we check for superuser status. */ @@ -51,8 +51,8 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { dataset.setGlobalIdCreateTime(null); dataset.setIdentifierRegistered(false); ctxt.datasets().merge(dataset); - } catch (HTTPException hex) { - String message = BundleUtil.getStringFromBundle("pids.deletePid.failureExpected", Arrays.asList(dataset.getGlobalId().asString(), Integer.toString(hex.getStatusCode()))); + } catch (HttpException hex) { + String message = BundleUtil.getStringFromBundle("pids.deletePid.failureExpected", Arrays.asList(dataset.getGlobalId().asString(), Integer.toString(hex.getReasonCode()))); logger.info(message); throw new IllegalCommandException(message, this); } catch (Exception ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index c2f186f1e8c..4fa07dedede 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -186,7 +186,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { //Should this be in onSuccess()? 
ctxt.workflows().getDefaultWorkflow(TriggerType.PostPublishDataset).ifPresent(wf -> { try { - ctxt.workflows().start(wf, buildContext(ds, TriggerType.PostPublishDataset, datasetExternallyReleased)); + ctxt.workflows().start(wf, buildContext(ds, TriggerType.PostPublishDataset, datasetExternallyReleased), false); } catch (CommandException ex) { ctxt.datasets().removeDatasetLocks(ds, DatasetLock.Reason.Workflow); logger.log(Level.SEVERE, "Error invoking post-publish workflow: " + ex.getMessage(), ex); @@ -246,7 +246,7 @@ public boolean onSuccess(CommandContext ctxt, Object r) { } } - exportMetadata(dataset, ctxt.settings()); + exportMetadata(dataset); ctxt.datasets().updateLastExportTimeStamp(dataset.getId()); @@ -257,10 +257,10 @@ public boolean onSuccess(CommandContext ctxt, Object r) { * Attempting to run metadata export, for all the formats for which we have * metadata Exporters. */ - private void exportMetadata(Dataset dataset, SettingsServiceBean settingsServiceBean) { + private void exportMetadata(Dataset dataset) { try { - ExportService instance = ExportService.getInstance(settingsServiceBean); + ExportService instance = ExportService.getInstance(); instance.exportAllFormats(dataset); } catch (Exception ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java new file mode 100644 index 00000000000..41a1708e4c5 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java @@ -0,0 +1,228 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetVersionUser; +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.GuestbookResponse; +import edu.harvard.iq.dataverse.RoleAssignment; +import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroup; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; +import java.math.BigDecimal; +import java.util.List; +import java.util.Set; +import javax.json.Json; +import javax.json.JsonArrayBuilder; +import javax.json.JsonObjectBuilder; + +// Superuser-only enforced below. 
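Before the second superuser-gated command, a usage note: DeactivateUserCommand above is presumably exposed through an admin API endpoint elsewhere in this PR (the Api hunks are not shown here), so the path below is an assumption modeled on Dataverse's /api/admin conventions, for illustration only. A hedged sketch of driving it with the Java 11 HttpClient:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class DeactivateUserSketch {
        public static void main(String[] args) throws Exception {
            // Assumed endpoint path; the admin API is typically reachable from localhost only.
            HttpRequest req = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:8080/api/admin/authenticatedUsers/jdoe/deactivate"))
                    .POST(HttpRequest.BodyPublishers.noBody())
                    .build();
            HttpResponse<String> resp = HttpClient.newHttpClient()
                    .send(req, HttpResponse.BodyHandlers.ofString());
            // Expect an OK payload once the user is deactivated, and an error
            // if the user cannot be found.
            System.out.println(resp.statusCode() + " " + resp.body());
        }
    }
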
+@RequiredPermissions({}) +public class GetUserTracesCommand extends AbstractCommand { + + private DataverseRequest request; + private AuthenticatedUser user; + + public GetUserTracesCommand(DataverseRequest request, AuthenticatedUser user) { + super(request, (DvObject) null); + this.request = request; + this.user = user; + } + + @Override + public JsonObjectBuilder execute(CommandContext ctxt) throws CommandException { + if (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser()) { + throw new PermissionException("Get user traces command can only be called by superusers.", this, null, null); + } + if (user == null) { + throw new CommandException("Cannot get traces. User not found.", this); + } + Long userId = user.getId(); + JsonObjectBuilder traces = Json.createObjectBuilder(); +// List roleAssignments = ctxt.permissions().getDvObjectsUserHasRoleOn(user); + List roleAssignments = ctxt.roleAssignees().getAssignmentsFor(user.getIdentifier()); + if (roleAssignments != null && !roleAssignments.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (RoleAssignment roleAssignment : roleAssignments) { + jab.add(NullSafeJsonBuilder.jsonObjectBuilder() + .add("id", roleAssignment.getId()) + .add("definitionPointName", roleAssignment.getDefinitionPoint().getCurrentName()) + .add("definitionPointIdentifier", roleAssignment.getDefinitionPoint().getIdentifier()) + .add("definitionPointId", roleAssignment.getDefinitionPoint().getId()) + .add("roleAlias", roleAssignment.getRole().getAlias()) + .add("roleName", roleAssignment.getRole().getName()) + ); + } + job.add("count", roleAssignments.size()); + job.add("items", jab); + traces.add("roleAssignments", job); + } + List dataversesCreated = ctxt.dataverses().findByCreatorId(userId); + if (dataversesCreated != null && !dataversesCreated.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (Dataverse dataverse : dataversesCreated) { + jab.add(Json.createObjectBuilder() + .add("id", dataverse.getId()) + .add("alias", dataverse.getAlias()) + ); + } + job.add("count", dataversesCreated.size()); + job.add("items", jab); + traces.add("dataverseCreator", job); + } + List dataversesPublished = ctxt.dataverses().findByReleaseUserId(userId); + if (dataversesPublished != null && !dataversesPublished.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (Dataverse dataverse : dataversesPublished) { + jab.add(Json.createObjectBuilder() + .add("id", dataverse.getId()) + .add("alias", dataverse.getAlias()) + ); + } + job.add("count", dataversesPublished.size()); + job.add("items", jab); + traces.add("dataversePublisher", job); + } + List datasetsCreated = ctxt.datasets().findByCreatorId(userId); + if (datasetsCreated != null && !datasetsCreated.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (Dataset dataset : datasetsCreated) { + jab.add(Json.createObjectBuilder() + .add("id", dataset.getId()) + .add("pid", dataset.getGlobalId().asString()) + ); + } + job.add("count", datasetsCreated.size()); + job.add("items", jab); + traces.add("datasetCreator", job); + } + List datasetsPublished = ctxt.datasets().findByReleaseUserId(userId); + if (datasetsPublished != null && !datasetsPublished.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab 
= Json.createArrayBuilder(); + for (Dataset dataset : datasetsPublished) { + jab.add(Json.createObjectBuilder() + .add("id", dataset.getId()) + .add("pid", dataset.getGlobalId().asString()) + ); + } + job.add("count", datasetsPublished.size()); + job.add("items", jab); + traces.add("datasetPublisher", job); + } + List dataFilesCreated = ctxt.files().findByCreatorId(userId); + if (dataFilesCreated != null && !dataFilesCreated.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (DataFile dataFile : dataFilesCreated) { + jab.add(Json.createObjectBuilder() + .add("id", dataFile.getId()) + .add("filename", dataFile.getCurrentName()) + .add("datasetPid", dataFile.getOwner().getGlobalId().asString()) + ); + } + job.add("count", dataFilesCreated.size()); + job.add("items", jab); + traces.add("dataFileCreator", job); + } + // TODO: Consider removing this because we don't seem to populate releaseuser_id for files. + List dataFilesPublished = ctxt.files().findByReleaseUserId(userId); + if (dataFilesPublished != null && !dataFilesPublished.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (DataFile dataFile : dataFilesPublished) { + jab.add(Json.createObjectBuilder() + .add("id", dataFile.getId()) + .add("filename", dataFile.getCurrentName()) + .add("datasetPid", dataFile.getOwner().getGlobalId().asString()) + ); + } + job.add("count", dataFilesPublished.size()); + job.add("items", jab); + traces.add("dataFilePublisher", job); + } + // These are the users who have published a version (or created a draft). + List datasetVersionUsers = ctxt.datasetVersion().getDatasetVersionUsersByAuthenticatedUser(user); + if (datasetVersionUsers != null && !datasetVersionUsers.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (DatasetVersionUser datasetVersionUser : datasetVersionUsers) { + jab.add(Json.createObjectBuilder() + .add("id", datasetVersionUser.getId()) + .add("dataset", datasetVersionUser.getDatasetVersion().getDataset().getGlobalId().asString()) + .add("version", datasetVersionUser.getDatasetVersion().getSemanticVersion()) + ); + } + job.add("count", datasetVersionUsers.size()); + job.add("items", jab); + traces.add("datasetVersionUsers", job); + } + Set explicitGroups = ctxt.explicitGroups().findDirectlyContainingGroups(user); + if (explicitGroups != null && !explicitGroups.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (ExplicitGroup explicitGroup : explicitGroups) { + jab.add(Json.createObjectBuilder() + .add("id", explicitGroup.getId()) + .add("name", explicitGroup.getDisplayName()) + ); + } + job.add("count", explicitGroups.size()); + job.add("items", jab); + traces.add("explicitGroups", job); + } + List guestbookResponses = ctxt.responses().findByAuthenticatedUserId(user); + if (guestbookResponses != null && !guestbookResponses.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + // The feeling is that this is too much detail for now so we only show a count. +// JsonArrayBuilder jab = Json.createArrayBuilder(); +// for (GuestbookResponse guestbookResponse : guestbookResponses) { +// jab.add(Json.createObjectBuilder() +// .add("id", guestbookResponse.getId()) +// .add("downloadType", guestbookResponse.getDownloadtype()) +// .add("filename", guestbookResponse.getDataFile().getCurrentName()) +// .add("date", guestbookResponse.getResponseDate()) +// .add("guestbookName", guestbookResponse.getGuestbook().getName()) +// .add("dataset", guestbookResponse.getDatasetVersion().getDataset().getGlobalId().asString()) +// .add("version", guestbookResponse.getDatasetVersion().getSemanticVersion()) +// ); +// } + job.add("count", guestbookResponses.size()); +// job.add("items", jab); + traces.add("guestbookEntries", job); + } + List savedSearchs = ctxt.savedSearches().findByAuthenticatedUser(user); + if (savedSearchs != null && !savedSearchs.isEmpty()) { + JsonObjectBuilder job = Json.createObjectBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + for (SavedSearch savedSearch : savedSearchs) { + jab.add(Json.createObjectBuilder() + .add("id", savedSearch.getId()) + ); + } + job.add("count", savedSearchs.size()); + job.add("items", jab); + traces.add("savedSearches", job); + } + JsonObjectBuilder result = Json.createObjectBuilder(); + result.add("user", Json.createObjectBuilder() + .add("identifier", user.getIdentifier()) + .add("name", user.getName()) + ); + result.add("traces", traces); + return result; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GrantSuperuserStatusCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GrantSuperuserStatusCommand.java index 29f1b891c91..42af43b7247 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GrantSuperuserStatusCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GrantSuperuserStatusCommand.java @@ -40,6 +40,10 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { this, null, null); } + if (targetUser.isDeactivated()) { + throw new CommandException("User " + targetUser.getIdentifier() + " has been deactivated and cannot become a superuser.", this); + } + try { targetUser.setSuperuser(true); ctxt.em().merge(targetUser); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java index 28db9b890e9..1ec51764d73 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java @@ -25,6 +25,7 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.passwordreset.PasswordResetData; import edu.harvard.iq.dataverse.search.IndexResponse; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch; import edu.harvard.iq.dataverse.workflows.WorkflowComment; @@ -57,7 +58,15 @@ public MergeInAccountCommand(DataverseRequest createDataverseRequest, Authentica @Override protected void executeImpl(CommandContext ctxt) throws CommandException { - + + if (consumedAU.getId().equals(ongoingAU.getId())) { + throw new IllegalCommandException("You cannot merge an account into itself.", this); + } + + if (consumedAU.isDeactivated() &&
!ongoingAU.isDeactivated() || !consumedAU.isDeactivated() && ongoingAU.isDeactivated()) { + throw new IllegalCommandException("User accounts can only be merged if they are either both active or both deactivated.", this); + } + List baseRAList = ctxt.roleAssignees().getAssignmentsFor(ongoingAU.getIdentifier()); List consumedRAList = ctxt.roleAssignees().getAssignmentsFor(consumedAU.getIdentifier()); @@ -185,8 +194,8 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { ctxt.em().remove(consumedAUL); ctxt.em().remove(consumedAU); BuiltinUser consumedBuiltinUser = ctxt.builtinUsers().findByUserName(consumedAU.getUserIdentifier()); - if (consumedBuiltinUser != null){ - ctxt.em().remove(consumedBuiltinUser); + if (consumedBuiltinUser != null) { + ctxt.builtinUsers().removeUser(consumedBuiltinUser.getUserName()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java index 41645078a25..f1bab1901d0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java @@ -96,7 +96,7 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException // We start a workflow theDataset = ctxt.em().merge(theDataset); ctxt.em().flush(); - ctxt.workflows().start(prePubWf.get(), buildContext(theDataset, TriggerType.PrePublishDataset, datasetExternallyReleased)); + ctxt.workflows().start(prePubWf.get(), buildContext(theDataset, TriggerType.PrePublishDataset, datasetExternallyReleased), true); return new PublishDatasetResult(theDataset, Status.Workflow); } else{ diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java index ef20ec76e12..8eeca0cb4cd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java @@ -89,7 +89,7 @@ public DataFile execute(CommandContext ctxt) throws CommandException { logger.info("Exception while reindexing files during file type redetection: " + ex.getLocalizedMessage()); } try { - ExportService instance = ExportService.getInstance(ctxt.settings()); + ExportService instance = ExportService.getInstance(); instance.exportAllFormats(dataset); } catch (ExportException ex) { // Just like with indexing, a failure to export is not a fatal condition. 
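For orientation, the JSON that GetUserTracesCommand (above) assembles has roughly this shape; the keys come from the builder calls in the command, while the values here are made up:

    {
      "user": { "identifier": "@jdoe", "name": "Jane Doe" },
      "traces": {
        "roleAssignments": {
          "count": 1,
          "items": [ { "id": 42, "definitionPointName": "Root", "definitionPointIdentifier": "root",
                       "definitionPointId": 1, "roleAlias": "admin", "roleName": "Admin" } ]
        },
        "datasetCreator": { "count": 1, "items": [ { "id": 17, "pid": "doi:10.5072/FK2/ABCDEF" } ] },
        "guestbookEntries": { "count": 3 }
      }
    }

The UpdateDatasetVersionCommand hunk that follows leans on FileMetadataUtil.removeFileMetadataFromList, whose source is not included in these hunks. A hedged reconstruction of what such a helper has to do, given the comment below about remove(fmd) on an unpersisted fmd removing the first element found (java.util.Collection/Iterator imports and the FileMetadata entity assumed):

    // Hypothetical reconstruction, not the actual FileMetadataUtil source:
    // match persisted FileMetadatas by id and unpersisted ones by object identity,
    // so a null-id fmd can never knock an unrelated element out of the list.
    public static void removeFileMetadataFromList(Collection<FileMetadata> fmds, FileMetadata fmdToRemove) {
        Iterator<FileMetadata> it = fmds.iterator();
        while (it.hasNext()) {
            FileMetadata fmd = it.next();
            boolean sameId = fmd.getId() != null && fmd.getId().equals(fmdToRemove.getId());
            if (sameId || fmd == fmdToRemove) {
                it.remove();
                return;
            }
        }
    }
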
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java index fefa8707c8b..4da9e2fef2f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -9,6 +9,8 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.util.FileMetadataUtil; + import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -165,38 +167,77 @@ public Dataset execute(CommandContext ctxt) throws CommandException { } // we have to merge to update the database but not flush because // we don't want to create two draft versions! - // Dataset tempDataset = ctxt.em().merge(theDataset); - //SEK 5/30/2019 - // This interim merge is causing: - // java.lang.IllegalArgumentException: Cannot merge an entity that has been removed: edu.harvard.iq.dvn.core.study.FileMetadata - // at the merge at line 177 - //Is this merge needed to add the lock? - seems to be 'no' so what is it needed for? - - // theDataset = ctxt.em().merge(theDataset); + // Although not completely tested, it looks like this merge handles the + // thumbnail case - if the filemetadata is removed from the context below and + // the dataset still references it, that could cause an issue. Merging here + // avoids any dangling reference from it being the dataset thumbnail + theDataset = ctxt.em().merge(theDataset); + /* + * This code has to handle many cases, and anyone making changes should + * carefully check tests and basic methods that update the dataset version. The + * differences between the cases stem primarily from differences in whether the + * files to add, and their filemetadata, and files to delete, and their + * filemetadata have been persisted at this point, which manifests itself as to + * whether they have id numbers or not, and apparently, whether or not they + * exist in lists, e.g. the getFileMetadatas() list of a datafile. + * + * To handle this, the code is carefully checking to make sure that deletions + * are deleting the right things and not, for example, doing a remove(fmd) when + * the fmd.getId() is null, which just removes the first element found. + */ for (FileMetadata fmd : filesToDelete) { + logger.fine("Deleting fmd: " + fmd.getId() + " for file: " + fmd.getDataFile().getId()); + // if file is draft (ie. new to this version), delete it. Otherwise just remove + // the filemetadata object + // There are a few cases to handle: + // * the fmd has an id (has been persisted) and is the one in the current + // (draft) version + // * the fmd has an id (has been persisted) but it is from a published version + // so we need the corresponding one from the draft version (i.e. created during a getEditVersion call) + // * the fmd has no id (hasn't been persisted) so we have to use non-id based + // means to identify it and remove it from lists + + if (fmd.getId() != null) { + // If the datasetversion doesn't match, we have the fmd from a published version + // and we need to remove the one for the newly created draft instead, so we find + // it here + logger.fine("Edit ver: " + theDataset.getEditVersion().getId()); + logger.fine("fmd ver: " + fmd.getDatasetVersion().getId()); + if (!theDataset.getEditVersion().equals(fmd.getDatasetVersion())) { + fmd = FileMetadataUtil.getFmdForFileInEditVersion(fmd, theDataset.getEditVersion()); + } + } + fmd = ctxt.em().merge(fmd); + + // There are two datafile cases as well - the file has been released, so we're + // just removing it from the current draft version or it is only in the draft + // version and we completely remove the file. if (!fmd.getDataFile().isReleased()) { - // if file is draft (ie. new to this version, delete; otherwise just remove - // filemetadata object) + // remove the file ctxt.engine().submit(new DeleteDataFileCommand(fmd.getDataFile(), getRequest())); + // and remove the file from the dataset's list theDataset.getFiles().remove(fmd.getDataFile()); - theDataset.getEditVersion().getFileMetadatas().remove(fmd); - // added this check to handle issue where you could not deleter a file that - // shared a category with a new file - // the relation ship does not seem to cascade, yet somehow it was trying to - // merge the filemetadata - // todo: clean this up some when we clean the create / update dataset methods - for (DataFileCategory cat : theDataset.getCategories()) { - cat.getFileMetadatas().remove(fmd); - } } else { - FileMetadata mergedFmd = ctxt.em().merge(fmd); - ctxt.em().remove(mergedFmd); - fmd.getDataFile().getFileMetadatas().remove(mergedFmd); - theDataset.getEditVersion().getFileMetadatas().remove(mergedFmd); + // if we aren't removing the file, we need to explicitly remove the fmd from the + // context and then remove it from the datafile's list + ctxt.em().remove(fmd); + FileMetadataUtil.removeFileMetadataFromList(fmd.getDataFile().getFileMetadatas(), fmd); + } + // In either case, to fully remove the fmd, we have to remove any other possible + // references + // From the datasetversion + FileMetadataUtil.removeFileMetadataFromList(theDataset.getEditVersion().getFileMetadatas(), fmd); + // and from the list associated with each category + for (DataFileCategory cat : theDataset.getCategories()) { + FileMetadataUtil.removeFileMetadataFromList(cat.getFileMetadatas(), fmd); + } } - + for(FileMetadata fmd: theDataset.getEditVersion().getFileMetadatas()) { + logger.fine("FMD: " + fmd.getId() + " for file: " + fmd.getDataFile().getId() + " is in final draft version"); + } + if (recalculateUNF) { ctxt.ingest().recalculateDatasetVersionUNF(theDataset.getEditVersion()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java index 9eaf0dbced0..52de3cf1d78 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java @@ -45,28 +45,12 @@ public class ExportService { private static ExportService service; private ServiceLoader loader; - static SettingsServiceBean settingsService; private ExportService() { loader = ServiceLoader.load(Exporter.class); } - /** - * @deprecated Use `getInstance(SettingsServiceBean settingsService)`
- * instead. For privacy reasons, we need to pass in settingsService so that - * we can make a decision whether not not to exclude email addresses. No new - * code should call this method and it would be nice to remove calls from - * existing code. - */ - @Deprecated public static synchronized ExportService getInstance() { - return getInstance(null); - } - - public static synchronized ExportService getInstance(SettingsServiceBean settingsService) { - ExportService.settingsService = settingsService; - // We pass settingsService into the JsonPrinter so it can check the :ExcludeEmailFromExport setting in calls to JsonPrinter.jsonAsDatasetDto(). - JsonPrinter.setSettingsService(settingsService); if (service == null) { service = new ExportService(); } @@ -75,7 +59,7 @@ public static synchronized ExportService getInstance(SettingsServiceBean setting public List< String[]> getExportersLabels() { List retList = new ArrayList<>(); - Iterator exporters = ExportService.getInstance(null).loader.iterator(); + Iterator exporters = ExportService.getInstance().loader.iterator(); while (exporters.hasNext()) { Exporter e = exporters.next(); String[] temp = new String[2]; diff --git a/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java index f77821a59e2..367ac4bbc5b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java @@ -35,7 +35,7 @@ public void exportDataset(DatasetVersion version, JsonObject json, OutputStream try { InputStream ddiInputStream; try { - ddiInputStream = ExportService.getInstance(ExportService.settingsService).getExport(version.getDataset(), "ddi"); + ddiInputStream = ExportService.getInstance().getExport(version.getDataset(), "ddi"); } catch(ExportException | IOException e) { throw new ExportException ("Cannot open export_ddi cached file"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java index 6cfcb590681..87adc1b4c5b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java @@ -25,7 +25,7 @@ public class OAI_OREExporter implements Exporter { public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException { try { - new OREMap(version, ExportService.settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false)).writeOREMap(outputStream); + new OREMap(version).writeOREMap(outputStream); } catch (Exception e) { logger.severe(e.getMessage()); e.printStackTrace(); diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 1ce1a2119a7..4fed5913263 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -27,6 +27,8 @@ import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_TAG; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_UNF; import edu.harvard.iq.dataverse.export.DDIExporter; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + import static edu.harvard.iq.dataverse.util.SystemConfig.FQDN; import static edu.harvard.iq.dataverse.util.SystemConfig.SITE_URL; import 
edu.harvard.iq.dataverse.util.json.JsonUtil; @@ -55,6 +57,7 @@ import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; import org.w3c.dom.Document; +import org.apache.commons.lang3.StringUtils; import org.w3c.dom.DOMException; // For write operation @@ -79,8 +82,8 @@ public class DdiExportUtil { public static final String LEVEL_DV = "dv"; - @EJB - VariableServiceBean variableService; + + static SettingsServiceBean settingsService; public static final String NOTE_TYPE_CONTENTTYPE = "DATAVERSE:CONTENTTYPE"; public static final String NOTE_SUBJECT_CONTENTTYPE = "Content/MIME Type"; @@ -195,7 +198,19 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto) writeProducersElement(xmlw, version); xmlw.writeStartElement("distStmt"); - if (datasetDto.getPublisher() != null && !datasetDto.getPublisher().equals("")) { + //The default is to add Dataverse Repository as a distributor. The ExportInstallationAsDistributorOnlyWhenNotSet setting turns that off if a distributor is already defined in the metadata + boolean distributorSet=false; + MetadataBlockDTO citationDTO= version.getMetadataBlocks().get("citation"); + if(citationDTO!=null) { + if(citationDTO.getField(DatasetFieldConstant.distributor)!=null) { + distributorSet=true; + } + } + logger.info("Distributor set?: " + distributorSet); + logger.info("Pub?: " + datasetDto.getPublisher()); + boolean excludeRepository = settingsService.isTrueForKey(SettingsServiceBean.Key.ExportInstallationAsDistributorOnlyWhenNotSet, false); + logger.info("Exclude: " + excludeRepository); + if (!StringUtils.isEmpty(datasetDto.getPublisher()) && !(excludeRepository && distributorSet)) { xmlw.writeStartElement("distrbtr"); writeAttribute(xmlw, "source", "archive"); xmlw.writeCharacters(datasetDto.getPublisher()); @@ -308,7 +323,8 @@ private static void writeDocDescElement (XMLStreamWriter xmlw, DatasetDTO datase xmlw.writeEndElement(); // IDNo xmlw.writeEndElement(); // titlStmt xmlw.writeStartElement("distStmt"); - if (datasetDto.getPublisher() != null && !datasetDto.getPublisher().equals("")) { + //The doc is always published by the Dataverse Repository + if (!StringUtils.isEmpty(datasetDto.getPublisher())) { xmlw.writeStartElement("distrbtr"); writeAttribute(xmlw, "source", "archive"); xmlw.writeCharacters(datasetDto.getPublisher()); @@ -1795,4 +1811,8 @@ public static void datasetHtmlDDI(InputStream datafile, OutputStream outputStrea } + public static void injectSettingsService(SettingsServiceBean settingsSvc) { + settingsService=settingsSvc; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java new file mode 100644 index 00000000000..149c6791a7e --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java @@ -0,0 +1,25 @@ +package edu.harvard.iq.dataverse.export.ddi; + +import javax.annotation.PostConstruct; +import javax.ejb.EJB; +import javax.ejb.Singleton; +import javax.ejb.Startup; + +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + + /** + * This is a small helper bean. + * As it is a singleton and built at application start (=deployment), it will inject the (stateless) + * settings service into DdiExportUtil once it's ready.
+ */ + @Singleton + @Startup + public class DdiExportUtilHelper { + + @EJB SettingsServiceBean settingsSvc; + + @PostConstruct + public void injectService() { + DdiExportUtil.injectSettingsService(settingsSvc); + } + } \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java index 973c712b5c8..02e7675a776 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java @@ -229,7 +229,7 @@ public void markOaiRecordsAsRemoved(Collection records, Date updateTi public void exportAllFormats(Dataset dataset) { try { - ExportService exportServiceInstance = ExportService.getInstance(settingsService); + ExportService exportServiceInstance = ExportService.getInstance(); logger.log(Level.FINE, "Attempting to run export on dataset {0}", dataset.getGlobalId()); exportServiceInstance.exportAllFormats(dataset); datasetService.updateLastExportTimeStamp(dataset.getId()); @@ -240,7 +240,7 @@ public void exportAllFormats(Dataset dataset) { @TransactionAttribute(REQUIRES_NEW) public void exportAllFormatsInNewTransaction(Dataset dataset) throws ExportException { try { - ExportService exportServiceInstance = ExportService.getInstance(settingsService); + ExportService exportServiceInstance = ExportService.getInstance(); exportServiceInstance.exportAllFormats(dataset); datasetService.updateLastExportTimeStamp(dataset.getId()); } catch (Exception e) { diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java index c68cd98de4f..5d4c02a87e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java @@ -132,11 +132,11 @@ private Context createContext() { } private void addSupportedMetadataFormats(Context context) { - for (String[] provider : ExportService.getInstance(settingsService).getExportersLabels()) { + for (String[] provider : ExportService.getInstance().getExportersLabels()) { String formatName = provider[1]; Exporter exporter; try { - exporter = ExportService.getInstance(settingsService).getExporter(formatName); + exporter = ExportService.getInstance().getExporter(formatName); } catch (ExportException ex) { exporter = null; } @@ -177,7 +177,7 @@ private RepositoryConfiguration createRepositoryConfiguration() { // some of the settings below - such as the max list numbers - // need to be configurable! - String dataverseName = dataverseService.findRootDataverse().getName(); + String dataverseName = dataverseService.getRootDataverseName(); String repositoryName = StringUtils.isEmpty(dataverseName) || "Root".equals(dataverseName) ? 
"Test Dataverse OAI Archive" : dataverseName + " Dataverse OAI Archive"; InternetAddress internetAddress = MailUtil.parseSystemAddress(settingsService.getValueForKey(SettingsServiceBean.Key.SystemEmail)); diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java index 78af2a31dc2..a3150161c52 100644 --- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java +++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java @@ -28,7 +28,9 @@ @NamedQuery(name="PasswordResetData.findByUser", query="SELECT prd FROM PasswordResetData prd WHERE prd.builtinUser = :user"), @NamedQuery(name="PasswordResetData.findByToken", - query="SELECT prd FROM PasswordResetData prd WHERE prd.token = :token") + query="SELECT prd FROM PasswordResetData prd WHERE prd.token = :token"), + @NamedQuery(name="PasswordResetData.deleteByUser", + query="DELETE FROM PasswordResetData prd WHERE prd.builtinUser = :user"), }) @Entity public class PasswordResetData implements Serializable { diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java index 532c0216038..aea910c496e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java @@ -121,13 +121,15 @@ public String sendPasswordResetLink() { actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.BuiltinUser, "passwordResetSent") .setInfo("Email Address: " + emailAddress) ); } else { - /** - * @todo remove "single" when it's no longer necessary. See - * https://github.com/IQSS/dataverse/issues/844 and - * https://github.com/IQSS/dataverse/issues/1141 - */ - logger.log(Level.INFO, "Couldn''t find single account using {0}", emailAddress); + logger.log(Level.INFO, "Cannot find account (or it's deactivated) given {0}", emailAddress); } + /** + * We show this "an email will be sent" message no matter what (if + * the account can be found or not, if the account has been + * deactivated or not) to prevent hackers from figuring out if you + * have an account based on your email address. Yes, this is a white + * lie sometimes, in the name of security. 
+ */ FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("passwdVal.passwdReset.resetInitiated"), BundleUtil.getStringFromBundle("passwdReset.successSubmit.tip", Arrays.asList(emailAddress)))); } catch (PasswordResetException ex) { @@ -146,7 +148,6 @@ public String resetPassword() { String builtinAuthProviderId = BuiltinAuthenticationProvider.PROVIDER_ID; AuthenticatedUser au = authSvc.lookupUser(builtinAuthProviderId, user.getUserName()); session.setUser(au); - session.configureSessionTimeout(); return "/dataverse.xhtml?alias=" + dataverseService.findRootDataverse().getAlias() + "faces-redirect=true"; } else { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, response.getMessageSummary(), response.getMessageDetail())); diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java index 507c31f5595..c8db23985d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java @@ -52,13 +52,23 @@ public class PasswordResetServiceBean { * Initiate the password reset process. * * @param emailAddress - * @return {@link PasswordResetInitResponse} + * @return {@link PasswordResetInitResponse} with empty PasswordResetData if + * the reset won't continue (no user, deactivated user). * @throws edu.harvard.iq.dataverse.passwordreset.PasswordResetException */ // inspired by Troy Hunt: Everything you ever wanted to know about building a secure password reset feature - http://www.troyhunt.com/2012/05/everything-you-ever-wanted-to-know.html public PasswordResetInitResponse requestReset(String emailAddress) throws PasswordResetException { deleteAllExpiredTokens(); AuthenticatedUser authUser = authService.getAuthenticatedUserByEmail(emailAddress); + // This null check is for the NPE reported in https://github.com/IQSS/dataverse/issues/5462 + if (authUser == null) { + logger.info("Cannot find a user based on " + emailAddress + ". 
Cannot reset password."); + return new PasswordResetInitResponse(false); + } + if (authUser.isDeactivated()) { + logger.info("Cannot reset password for " + emailAddress + " because account is deactivated."); + return new PasswordResetInitResponse(false); + } BuiltinUser user = dataverseUserService.findByUserName(authUser.getUserIdentifier()); if (user != null) { @@ -186,6 +196,12 @@ private long deleteAllExpiredTokens() { return numDeleted; } + public void deleteResetDataByDataverseUser(BuiltinUser user) { + TypedQuery typedQuery = em.createNamedQuery("PasswordResetData.deleteByUser", PasswordResetData.class); + typedQuery.setParameter("user", user); + int numRowsAffected = typedQuery.executeUpdate(); + } + public PasswordChangeAttemptResponse attemptPasswordReset(BuiltinUser user, String newPassword, String token) { final String messageSummarySuccess = "Password Reset Successfully"; diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java index e8bc9fc3da7..b0658f10b34 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java @@ -35,7 +35,6 @@ public String init() { String draftDatasetPageToBeRedirectedTo = privateUrlRedirectData.getDraftDatasetPageToBeRedirectedTo() + "&faces-redirect=true"; PrivateUrlUser privateUrlUser = privateUrlRedirectData.getPrivateUrlUser(); session.setUser(privateUrlUser); - session.configureSessionTimeout(); logger.info("Redirecting PrivateUrlUser '" + privateUrlUser.getIdentifier() + "' to " + draftDatasetPageToBeRedirectedTo); return draftDatasetPageToBeRedirectedTo; } catch (Exception ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 5b2d63c43eb..f61f879eee7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -23,6 +23,7 @@ import edu.harvard.iq.dataverse.dataaccess.DataAccessRequest; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.datavariable.DataVariable; +import edu.harvard.iq.dataverse.datavariable.VariableMetadata; import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil; import edu.harvard.iq.dataverse.datavariable.VariableServiceBean; import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; @@ -45,7 +46,9 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.Future; +import java.util.function.Function; import java.util.logging.Logger; +import java.util.stream.Collectors; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import javax.ejb.AsyncResult; @@ -698,7 +701,7 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset) throws Sol return addOrUpdateDataset(indexableDataset, null); } - private String addOrUpdateDataset(IndexableDataset indexableDataset, Set datafilesInDraftVersion) throws SolrServerException, IOException { + private String addOrUpdateDataset(IndexableDataset indexableDataset, Set datafilesInDraftVersion) throws SolrServerException, IOException { IndexableDataset.DatasetState state = indexableDataset.getDatasetState(); Dataset dataset = indexableDataset.getDatasetVersion().getDataset(); logger.fine("adding or updating Solr document for dataset id " + dataset.getId()); @@ -934,8 +937,7 @@ private String 
addOrUpdateDataset(IndexableDataset indexableDataset, Set d */ if ((fileMetadata.getDataFile().isRestricted() == releasedFileMetadata.getDataFile().isRestricted())) { if (fileMetadata.contentEquals(releasedFileMetadata) - /* SEK 3/12/2020 remove variable metadata indexing*/ - // && variableMetadataUtil.compareVariableMetadata(releasedFileMetadata,fileMetadata) + && variableMetadataUtil.compareVariableMetadata(releasedFileMetadata,fileMetadata) ) { indexThisMetadata = false; logger.fine("This file metadata hasn't changed since the released version; skipping indexing."); @@ -1155,6 +1157,14 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset, Set d // names and labels: if (fileMetadata.getDataFile().isTabularData()) { List<DataVariable> variables = fileMetadata.getDataFile().getDataTable().getDataVariables(); + + Map<Long, VariableMetadata> variableMap = null; + List<VariableMetadata> variablesByMetadata = variableService.findVarMetByFileMetaId(fileMetadata.getId()); + + variableMap = + variablesByMetadata.stream().collect(Collectors.toMap(VariableMetadata::getId, Function.identity())); + + for (DataVariable var : variables) { // Hard-coded search fields, for now: // TODO: eventually: review, decide how datavariables should @@ -1169,21 +1179,14 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset, Set d if (var.getName() != null && !var.getName().equals("")) { datafileSolrInputDocument.addField(SearchFields.VARIABLE_NAME, var.getName()); } - -/* SEK 3/12/2020 remove variable metadata indexing - List<VariableMetadata> vmList = variableService.findByDataVarIdAndFileMetaId(var.getId(), fileMetadata.getId()); - VariableMetadata vm = null; - if (vmList != null && vmList.size() >0) { - vm = vmList.get(0); - } - - if (vmList.size() == 0 ) { + + VariableMetadata vm = variableMap.get(var.getId()); + if (vm == null) { //Variable Label if (var.getLabel() != null && !var.getLabel().equals("")) { datafileSolrInputDocument.addField(SearchFields.VARIABLE_LABEL, var.getLabel()); } - - } else if (vm != null) { + } else { if (vm.getLabel() != null && !vm.getLabel().equals("") ) { datafileSolrInputDocument.addField(SearchFields.VARIABLE_LABEL, vm.getLabel()); } @@ -1204,7 +1207,6 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset, Set d } } -*/ } // TABULAR DATA TAGS: diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 1f53281b2fa..02637bfa8df 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -422,7 +422,18 @@ Whether Harvesting (OAI) service is enabled /** * Sort Date Facets Chronologically instead of presenting them in order of # of hits as other facets are.
Default is true */ - ChronologicalDateFacets + ChronologicalDateFacets, + + /** + * Used where BrandingUtil.getInstallationBrandName is called, overrides the default use of the root Dataverse collection name + */ + InstallationName, + /** + * In metadata exports that set a 'distributor' this flag determines whether the + * Installation Brand Name is always included (default/false) or is not included + * when the Distributor field (citation metadatablock) is set (true) + */ + ExportInstallationAsDistributorOnlyWhenNotSet ; @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSource.java b/src/main/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSource.java index 597fdb314d2..838cd415819 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSource.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSource.java @@ -23,7 +23,7 @@ public class DbSettingConfigSource implements ConfigSource { private static final ConcurrentHashMap<String, String> properties = new ConcurrentHashMap<>(); private static Instant lastUpdate; private static SettingsServiceBean settingsSvc; - static final String PREFIX = "dataverse.settings.fromdb"; + public static final String PREFIX = "dataverse.settings.fromdb"; /** * Let the SettingsServiceBean be injected by DbSettingConfigHelper with PostConstruct */ @@ -39,18 +39,19 @@ public static void injectSettingsService(SettingsServiceBean injected) { */ public static void updateProperties() { // skip if the service has not been injected yet - if (settingsSvc == null) return; - + if (settingsSvc == null) { + return; - + } + + properties.clear(); Set<Setting> dbSettings = settingsSvc.listAll(); - dbSettings.forEach(s -> properties.put(PREFIX+"."+s.getName()+ (s.getLang() == null ? "" : "."+s.getLang()), s.getContent())); + dbSettings.forEach(s -> properties.put(PREFIX+"."+s.getName().substring(1) + (s.getLang() == null ? "" : "."+s.getLang()), s.getContent())); lastUpdate = Instant.now(); } @Override public Map<String, String> getProperties() { // if the cache is at least XX number of seconds old, update before serving data. - if (lastUpdate == null || Instant.now().minus(Duration.ofSeconds(60)).isBefore(lastUpdate)) { + if (lastUpdate == null || Instant.now().minus(Duration.ofSeconds(60)).isAfter(lastUpdate)) { updateProperties(); } return properties; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileMetadataUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileMetadataUtil.java new file mode 100644 index 00000000000..cd5db359344 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileMetadataUtil.java @@ -0,0 +1,95 @@ +/* + Copyright (C) 2005-2012, by the President and Fellows of Harvard College. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Dataverse Network - A web application to share, preserve and analyze research data. + Developed at the Institute for Quantitative Social Science, Harvard University. + Version 3.0.
+*/ + +package edu.harvard.iq.dataverse.util; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.FileMetadata; + +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.logging.Logger; + +public class FileMetadataUtil implements java.io.Serializable { + private static final Logger logger = Logger.getLogger(FileMetadataUtil.class.getCanonicalName()); + + //Delete the filemetadata from the list if and only if it is in the list + public static void removeFileMetadataFromList(Collection<FileMetadata> collection, FileMetadata fmToDelete) { + // With an id, the standard remove will work + if (fmToDelete.getId() != null) { + collection.remove(fmToDelete); + } else { + Iterator<FileMetadata> fmit = collection.iterator(); + while (fmit.hasNext()) { + FileMetadata fmd = fmit.next(); + // If not, we can remove based on a match based on the id of the related + // datafile + if (fmToDelete.getDataFile().getStorageIdentifier().equals(fmd.getDataFile().getStorageIdentifier())) { + // and a match on the datasetversion + if(fmToDelete.getDatasetVersion() == null) { + //not yet associated with a version (i.e. deleting from the upload screen), so the match on datafile is good enough + fmit.remove(); + } else if (fmToDelete.getDatasetVersion().getId() == null) { + // If the fmd to delete is for a datasetversion with no id, we assume match to + // any other fmd with a datasetversion with no id (since there should be only + // one) + // Otherwise, we don't delete anything (this fmd hasn't been persisted and isn't + // in the list.) + if (fmd.getDatasetVersion().getId() == null) { + fmit.remove(); + break; + } + } else if (fmToDelete.getDatasetVersion().getId().equals(fmd.getDatasetVersion().getId())) { + fmit.remove(); + break; + } + } + } + } + } + + //Delete datafile from the list even if its id is null (hasn't yet been persisted) + public static void removeDataFileFromList(List<DataFile> dfList, DataFile dfToDelete) { + // With an id, the standard remove will work + if (dfToDelete.getId() != null) { + dfList.remove(dfToDelete); + } else { + Iterator<DataFile> dfit = dfList.iterator(); + while (dfit.hasNext()) { + DataFile df = dfit.next(); + if (dfToDelete.getStorageIdentifier().equals(df.getStorageIdentifier())) { + dfit.remove(); + break; + } + } + } + } + + public static FileMetadata getFmdForFileInEditVersion(FileMetadata fmd, DatasetVersion editVersion) { + for(FileMetadata editFmd: editVersion.getFileMetadatas()) { + if(editFmd.getDataFile().getId().equals(fmd.getDataFile().getId())) { + return editFmd; + } + } + return null; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java index 37667d16b55..7ca702cabbe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java @@ -29,8 +29,8 @@ public static InternetAddress parseSystemAddress(String systemEmail) { return null; } - public static String getSubjectTextBasedOnNotification(UserNotification userNotification, String rootDataverseName, Object objectOfNotification) { - List<String> rootDvNameAsList = Arrays.asList(BrandingUtil.getInstallationBrandName(rootDataverseName)); + public static String getSubjectTextBasedOnNotification(UserNotification userNotification, Object objectOfNotification) { + List<String> rootDvNameAsList = Arrays.asList(BrandingUtil.getInstallationBrandName()); switch (userNotification.getType()) { case ASSIGNROLE: return
BundleUtil.getStringFromBundle("notification.email.assign.role.subject", rootDvNameAsList); @@ -54,6 +54,10 @@ public static String getSubjectTextBasedOnNotification(UserNotification userNoti return BundleUtil.getStringFromBundle("notification.email.publishFailure.dataset.subject", rootDvNameAsList); case RETURNEDDS: return BundleUtil.getStringFromBundle("notification.email.returned.dataset.subject", rootDvNameAsList); + case WORKFLOW_SUCCESS: + return BundleUtil.getStringFromBundle("notification.email.workflow.success.subject", rootDvNameAsList); + case WORKFLOW_FAILURE: + return BundleUtil.getStringFromBundle("notification.email.workflow.failure.subject", rootDvNameAsList); case CREATEACC: return BundleUtil.getStringFromBundle("notification.email.create.account.subject", rootDvNameAsList); case CHECKSUMFAIL: diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index a1ed4877f39..f5d129405b7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -650,7 +650,7 @@ public String getDefaultAuthProvider() { } public String getNameOfInstallation() { - return dataverseService.findRootDataverse().getName(); + return dataverseService.getRootDataverseName(); } public AbstractOAuth2AuthenticationProvider.DevOAuthAccountType getDevOAuthAccountType() { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java index 225144a0bd9..7c3db485e47 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java @@ -93,7 +93,6 @@ public class BagGenerator { private int timeout = 60; private RequestConfig config = RequestConfig.custom().setConnectTimeout(timeout * 1000) .setConnectionRequestTimeout(timeout * 1000).setSocketTimeout(timeout * 1000).build(); - private static HttpClientContext localContext = HttpClientContext.create(); protected CloseableHttpClient client; private PoolingHttpClientConnectionManager cm = null; @@ -986,7 +985,8 @@ public InputStream get() { HttpGet getMap = createNewGetRequest(new URI(uri), null); logger.finest("Retrieving " + tries + ": " + uri); CloseableHttpResponse response; - response = client.execute(getMap, localContext); + //Note - if we ever need to pass an HttpClientContext, we need a new one per thread. 
+ response = client.execute(getMap); if (response.getStatusLine().getStatusCode() == 200) { logger.finest("Retrieved: " + uri); return response.getEntity().getContent(); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java index 5520de3954e..38303eb1f41 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java @@ -9,8 +9,9 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; +import edu.harvard.iq.dataverse.branding.BrandingUtil; import edu.harvard.iq.dataverse.export.OAI_OREExporter; -import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.json.JsonLDNamespace; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; @@ -20,7 +21,6 @@ import java.time.LocalDate; import java.util.List; import java.util.Map; -import java.util.ResourceBundle; import java.util.TreeMap; import java.util.Map.Entry; @@ -33,14 +33,21 @@ public class OREMap { + static SettingsServiceBean settingsService; + public static final String NAME = "OREMap"; private Map localContext = new TreeMap(); private DatasetVersion version; - private boolean excludeEmail = false; + private Boolean excludeEmail = null; - public OREMap(DatasetVersion version, boolean excludeEmail) { + public OREMap(DatasetVersion version) { this.version = version; - this.excludeEmail = excludeEmail; + } + + //Used when the ExcludeEmailFromExport needs to be overridden, i.e. for archiving + public OREMap(DatasetVersion dv, boolean exclude) { + this.version = dv; + this.excludeEmail = exclude; } public void writeOREMap(OutputStream outputStream) throws Exception { @@ -50,6 +57,11 @@ public void writeOREMap(OutputStream outputStream) throws Exception { public JsonObject getOREMap() throws Exception { + //Set this flag if it wasn't provided + if(excludeEmail==null) { + excludeEmail = settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false); + } + // Add namespaces we'll definitely use to Context // Additional namespaces are added as needed below localContext.putIfAbsent(JsonLDNamespace.ore.getPrefix(), JsonLDNamespace.ore.getUrl()); @@ -166,7 +178,7 @@ public JsonObject getOREMap() throws Exception { } aggBuilder.add(JsonLDTerm.schemaOrg("includedInDataCatalog").getLabel(), - dataset.getDataverseContext().getDisplayName()); + BrandingUtil.getRootDataverseCollectionName()); // The aggregation aggregates aggregatedresources (Datafiles) which each have // their own entry and metadata @@ -247,7 +259,7 @@ public JsonObject getOREMap() throws Exception { JsonObject oremap = Json.createObjectBuilder() .add(JsonLDTerm.dcTerms("modified").getLabel(), LocalDate.now().toString()) .add(JsonLDTerm.dcTerms("creator").getLabel(), - BundleUtil.getStringFromBundle("institution.name")) + BrandingUtil.getInstallationBrandName()) .add("@type", JsonLDTerm.ore("ResourceMap").getLabel()) // Define an id for the map itself (separate from the @id of the dataset being // described @@ -379,4 +391,7 @@ private JsonLDTerm getTermFor(String type, String subType) { return null; } + public static void injectSettingsService(SettingsServiceBean settingsSvc) { + settingsService = settingsSvc; + } } diff --git
a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java new file mode 100644 index 00000000000..bced52a7752 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java @@ -0,0 +1,25 @@ +package edu.harvard.iq.dataverse.util.bagit; + +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + +import javax.annotation.PostConstruct; +import javax.ejb.EJB; +import javax.ejb.Singleton; +import javax.ejb.Startup; + +/** + * This is a small helper bean + * As it is a singleton and built at application start (=deployment), it will inject the (stateless) + * settings service into the OREMap once it's ready. + */ +@Singleton +@Startup +public class OREMapHelper { + @EJB + SettingsServiceBean settingsSvc; + + @PostConstruct + public void injectService() { + OREMap.injectSettingsService(settingsSvc); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 7a5334114e7..7e69d9325c0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -36,6 +36,7 @@ import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderRow; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.branding.BrandingUtil; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -58,6 +59,9 @@ import java.util.stream.Collector; import java.util.stream.Collectors; import static java.util.stream.Collectors.toList; + +import javax.ejb.EJB; +import javax.ejb.Singleton; import javax.json.JsonArray; import javax.json.JsonObject; @@ -66,14 +70,15 @@ * * @author michael */ +@Singleton public class JsonPrinter { private static final Logger logger = Logger.getLogger(JsonPrinter.class.getCanonicalName()); - static SettingsServiceBean settingsService = null; + @EJB + static SettingsServiceBean settingsService; - // Passed to DatasetFieldWalker so it can check the :ExcludeEmailFromExport setting - public static void setSettingsService(SettingsServiceBean ssb) { + public static void injectSettingsService(SettingsServiceBean ssb) { settingsService = ssb; } @@ -109,6 +114,8 @@ public static JsonObjectBuilder json(AuthenticatedUser authenticatedUser) { .add("lastName", authenticatedUser.getLastName()) .add("email", authenticatedUser.getEmail()) .add("superuser", authenticatedUser.isSuperuser()) + .add("deactivated", authenticatedUser.isDeactivated()) + .add("deactivatedTime", authenticatedUser.getDeactivatedTime()) .add("affiliation", authenticatedUser.getAffiliation()) .add("position", authenticatedUser.getPosition()) .add("persistentUserId", authenticatedUser.getAuthenticatedUserLookup().getPersistentUserId()) @@ -321,7 +328,7 @@ public static JsonObjectBuilder json(Dataset ds) { .add("persistentUrl", ds.getPersistentURL()) .add("protocol", ds.getProtocol()) .add("authority", ds.getAuthority()) - .add("publisher", getRootDataverseNameforCitation(ds)) + .add("publisher", BrandingUtil.getInstallationBrandName()) .add("publicationDate", ds.getPublicationDateFormattedYYYYMMDD()) .add("storageIdentifier", ds.getStorageIdentifier()); } @@ -391,19 +398,6 @@ public static JsonObjectBuilder 
jsonDataFileList(List dataFiles){ return bld; } - private static String getRootDataverseNameforCitation(Dataset dataset) { - Dataverse root = dataset.getOwner(); - while (root.getOwner() != null) { - root = root.getOwner(); - } - String rootDataverseName = root.getName(); - if (!StringUtil.isEmpty(rootDataverseName)) { - return rootDataverseName; - } else { - return ""; - } - } - private static String getLicenseInfo(DatasetVersion dsv) { if (dsv.getTermsOfUseAndAccess().getLicense() != null && dsv.getTermsOfUseAndAccess().getLicense().equals(TermsOfUseAndAccess.License.CC0)) { return "CC0 Waiver"; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java new file mode 100644 index 00000000000..62f3569bb8d --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java @@ -0,0 +1,25 @@ +package edu.harvard.iq.dataverse.util.json; + +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + +import javax.annotation.PostConstruct; +import javax.ejb.EJB; +import javax.ejb.Singleton; +import javax.ejb.Startup; + +/** + * This is a small helper bean + * As it is a singleton and built at application start (=deployment), it will inject the (stateless) + * settings service into the JsonPrinter once it's ready. + */ +@Singleton +@Startup +public class JsonPrinterHelper { + @EJB + SettingsServiceBean settingsSvc; + + @PostConstruct + public void injectService() { + JsonPrinter.injectSettingsService(settingsSvc); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java index b6aa35ede7b..11b2ede9d76 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java @@ -5,6 +5,9 @@ import edu.harvard.iq.dataverse.DataverseRequestServiceBean; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.RoleAssigneeServiceBean; +import edu.harvard.iq.dataverse.UserNotification; +import edu.harvard.iq.dataverse.UserNotification.Type; +import edu.harvard.iq.dataverse.UserNotificationServiceBean; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -16,11 +19,14 @@ import edu.harvard.iq.dataverse.workflow.internalspi.InternalWorkflowStepSP; import edu.harvard.iq.dataverse.workflow.step.Failure; import edu.harvard.iq.dataverse.workflow.step.Pending; +import edu.harvard.iq.dataverse.workflow.step.Success; import edu.harvard.iq.dataverse.workflow.step.WorkflowStep; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult; +import edu.harvard.iq.dataverse.workflows.WorkflowComment; -import java.util.Date; +import java.sql.Timestamp; +import java.time.Instant; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -63,6 +69,9 @@ public class WorkflowServiceBean { @EJB SystemConfig systemConfig; + @EJB + UserNotificationServiceBean userNotificationService; + @EJB EjbDataverseEngine engine; @@ -99,18 +108,43 @@ public WorkflowServiceBean() { */ //ToDo - should this be @Async? or just the forward() method?
@Asynchronous - public void start(Workflow wf, WorkflowContext ctxt) throws CommandException { - - // Since we are calling this asynchronously anyway - sleep here - // for a few seconds, just in case, to make sure the database update of - // the dataset initiated by the PublishDatasetCommand has finished, - // to avoid any concurrency/optimistic lock issues. - try { - Thread.sleep(1000); - } catch (Exception ex) { - logger.warning("Failed to sleep for a second."); + public void start(Workflow wf, WorkflowContext ctxt, boolean findDataset) throws CommandException { + /* + * Workflows appear to start running prior to the caller's transaction + * completing which can result in exceptions in setting the lock below. To avoid + * this, there are two work-arounds - wait briefly for that transaction to end, + * or refresh the dataset from the db - so the lock is written based on the + * current db state. The latter works for pre-publication workflows (since the + * only changes to the Dataset in the Publish command are edits to the version + * number in the draft version (which aren't valid for the draft anyway)), while + * the former is required for post-publication workflows which may need to see + * the final version number, update times and other changes made in the Finalize + * Publication command. Not waiting saves significant time when many datasets + * are processed, so is preferable when it makes sense. + * + * This code should be reconsidered if/when the launching of pre/post + * publication workflows is moved to command onSuccess methods (and when + * onSuccess methods are guaranteed to be after the transaction completes (see + * #7568) or other changes are made that can guarantee the dataset in the + * WorkflowContext is up-to-date/usable in further transactions in the workflow. + * (e.g. if this method is not asynchronous) + * + */ + + if (!findDataset) { + /* + * Sleep here briefly to make sure the database update from the caller's + * transaction completes which avoids any concurrency/optimistic lock issues. + * Note: 1 second appears long enough, but shorter delays may work + */ + try { + Thread.sleep(1000); + } catch (Exception ex) { + logger.warning("Failed to sleep for a second."); + } } - ctxt = refresh(ctxt, retrieveRequestedSettings( wf.getRequiredSettings()), getCurrentApiToken(ctxt.getRequest().getAuthenticatedUser())); + //Refresh will only em.find the dataset if findDataset is true.
(otherwise the dataset is em.merged) + ctxt = refresh(ctxt, retrieveRequestedSettings( wf.getRequiredSettings()), getCurrentApiToken(ctxt.getRequest().getAuthenticatedUser()), findDataset); lockDataset(ctxt, new DatasetLock(DatasetLock.Reason.Workflow, ctxt.getRequest().getAuthenticatedUser())); forward(wf, ctxt); } @@ -184,10 +218,22 @@ private void doResume(PendingWorkflowInvocation pending, String body) { final WorkflowContext ctxt = refresh(newCtxt,retrieveRequestedSettings( wf.getRequiredSettings()), getCurrentApiToken(newCtxt.getRequest().getAuthenticatedUser())); WorkflowStepResult res = pendingStep.resume(ctxt, pending.getLocalData(), body); if (res instanceof Failure) { + logger.warning(((Failure) res).getReason()); + userNotificationService.sendNotification(ctxt.getRequest().getAuthenticatedUser(), Timestamp.from(Instant.now()), UserNotification.Type.WORKFLOW_FAILURE, ctxt.getDataset().getLatestVersion().getId(), ((Failure) res).getMessage()); + //UserNotification isn't meant to be a long-term record and doesn't store the comment, so we'll also keep it as a workflow comment + WorkflowComment wfc = new WorkflowComment(ctxt.getDataset().getLatestVersion(), WorkflowComment.Type.WORKFLOW_FAILURE, ((Failure) res).getMessage(), ctxt.getRequest().getAuthenticatedUser()); + datasets.addWorkflowComment(wfc); rollback(wf, ctxt, (Failure) res, pending.getPendingStepIdx() - 1); } else if (res instanceof Pending) { pauseAndAwait(wf, ctxt, (Pending) res, pending.getPendingStepIdx()); } else { + if (res instanceof Success) { + logger.info(((Success) res).getReason()); + userNotificationService.sendNotification(ctxt.getRequest().getAuthenticatedUser(), Timestamp.from(Instant.now()), UserNotification.Type.WORKFLOW_SUCCESS, ctxt.getDataset().getLatestVersion().getId(), ((Success) res).getMessage()); + //UserNotification isn't meant to be a long-term record and doesn't store the comment, so we'll also keep it as a workflow comment + WorkflowComment wfc = new WorkflowComment(ctxt.getDataset().getLatestVersion(), WorkflowComment.Type.WORKFLOW_SUCCESS, ((Success) res).getMessage(), ctxt.getRequest().getAuthenticatedUser()); + datasets.addWorkflowComment(wfc); + } executeSteps(wf, ctxt, pending.getPendingStepIdx() + 1); } } @@ -466,18 +512,39 @@ private WorkflowStep createStep(WorkflowStepData wsd) { private WorkflowContext refresh( WorkflowContext ctxt ) { return refresh(ctxt, ctxt.getSettings(), ctxt.getApiToken()); } - - private WorkflowContext refresh( WorkflowContext ctxt, Map settings, ApiToken apiToken ) { - /* An earlier version of this class used em.find() to 'refresh' the Dataset in the context. - * For a PostPublication workflow, this had the consequence of hiding/removing changes to the Dataset - * made in the FinalizeDatasetPublicationCommand (i.e. the fact that the draft version is now released and - * has a version number). It is not clear to me if the em.merge below is needed or if it handles the case of - * resumed workflows. (The overall method is needed to allow the context to be updated in the start() method with the - * settings and APItoken retrieved by the WorkflowServiceBean) - JM - 9/18. 
- */ - WorkflowContext newCtxt =new WorkflowContext( ctxt.getRequest(), - em.merge(ctxt.getDataset()), ctxt.getNextVersionNumber(), - ctxt.getNextMinorVersionNumber(), ctxt.getType(), settings, apiToken, ctxt.getDatasetExternallyReleased(), ctxt.getInvocationId(), ctxt.getLockId()); + + private WorkflowContext refresh(WorkflowContext ctxt, Map settings, ApiToken apiToken) { + return refresh(ctxt, settings, apiToken, false); + } + + private WorkflowContext refresh(WorkflowContext ctxt, Map settings, ApiToken apiToken, + boolean findDataset) { + /* + * An earlier version of this class used em.find() to 'refresh' the Dataset in + * the context. For a PostPublication workflow, this had the consequence of + * hiding/removing changes to the Dataset made in the + * FinalizeDatasetPublicationCommand (i.e. the fact that the draft version is + * now released and has a version number). It is not clear to me if the em.merge + * below is needed or if it handles the case of resumed workflows. (The overall + * method is needed to allow the context to be updated in the start() method + * with the settings and APItoken retrieved by the WorkflowServiceBean) - JM - + * 9/18. + */ + /* + * Introduced the findDataset boolean to optionally revert above change. + * Refreshing the Dataset just before trying to set the workflow lock greatly + * reduces the number of OptimisticLockExceptions. JvM 2/21 + */ + WorkflowContext newCtxt; + if (findDataset) { + newCtxt = new WorkflowContext(ctxt.getRequest(), datasets.find(ctxt.getDataset().getId()), + ctxt.getNextVersionNumber(), ctxt.getNextMinorVersionNumber(), ctxt.getType(), settings, apiToken, + ctxt.getDatasetExternallyReleased(), ctxt.getInvocationId(), ctxt.getLockId()); + } else { + newCtxt = new WorkflowContext(ctxt.getRequest(), em.merge(ctxt.getDataset()), ctxt.getNextVersionNumber(), + ctxt.getNextMinorVersionNumber(), ctxt.getType(), settings, apiToken, + ctxt.getDatasetExternallyReleased(), ctxt.getInvocationId(), ctxt.getLockId()); + } return newCtxt; } diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java index 244abdb017c..bbe200aaeb3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java @@ -9,6 +9,8 @@ import edu.harvard.iq.dataverse.workflow.step.Success; import edu.harvard.iq.dataverse.workflow.step.WorkflowStep; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult; +import edu.harvard.iq.dataverse.workflows.WorkflowUtil; + import static edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult.OK; import java.io.StringReader; @@ -77,29 +79,7 @@ public WorkflowStepResult run(WorkflowContext context) { @Override public WorkflowStepResult resume(WorkflowContext context, Map internalData, String externalData) { - try (StringReader reader = new StringReader(externalData)) { - JsonObject response = Json.createReader(reader).readObject(); - String status = response.getString("Status"); - String reason = null; - String message = null; - if (response.containsKey("Reason")) { - reason = response.getString("Reason"); - } - if (response.containsKey("Message")) { - message = response.getString("Message"); - } - switch (status) { - case "Success": - logger.log(Level.FINE, "AuthExt Worfklow Step Succeeded: " + reason); - return new Success(reason, message); - case "Failure": - 
logger.log(Level.WARNING, "Remote system indicates workflow failed: {0}", reason); - return new Failure(reason, message); - } - } catch (Exception e) { - logger.log(Level.WARNING, "Remote system returned a bad reposonse: {0}", externalData); - } - return new Failure("Workflow failure: Response from remote server could not be parsed:" + externalData, null); + return WorkflowUtil.parseResponse(externalData); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/InternalWorkflowStepSP.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/InternalWorkflowStepSP.java index df5f2de9058..ef11d306cd3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/InternalWorkflowStepSP.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/InternalWorkflowStepSP.java @@ -17,6 +17,8 @@ public WorkflowStep getStep(String stepType, Map stepParameters) return new LoggingWorkflowStep(stepParameters); case "pause": return new PauseStep(stepParameters); + case "pause/message": + return new PauseWithMessageStep(stepParameters); case "http/sr": return new HttpSendReceiveClientStep(stepParameters); case "http/authExt": diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/PauseWithMessageStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/PauseWithMessageStep.java new file mode 100644 index 00000000000..f7332611697 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/PauseWithMessageStep.java @@ -0,0 +1,48 @@ +package edu.harvard.iq.dataverse.workflow.internalspi; + +import edu.harvard.iq.dataverse.workflow.WorkflowContext; +import edu.harvard.iq.dataverse.workflow.step.Failure; +import edu.harvard.iq.dataverse.workflow.step.Pending; +import edu.harvard.iq.dataverse.workflow.step.WorkflowStep; +import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult; +import edu.harvard.iq.dataverse.workflows.WorkflowUtil; + +import java.util.HashMap; +import java.util.Map; +import java.util.logging.Logger; + + +/** + * A sample step that pauses the workflow. + * + * @author michael + */ +public class PauseWithMessageStep implements WorkflowStep { + + /** Constant used by testing to simulate a failed step. 
*/ + public static final String FAILURE_RESPONSE="fail"; + + private final Map params = new HashMap<>(); + + public PauseWithMessageStep( Map paramSet ) { + params.putAll(paramSet); + } + + @Override + public WorkflowStepResult run(WorkflowContext context) { + final Pending result = new Pending(); + result.getData().putAll(params); + return result; + } + + @Override + public WorkflowStepResult resume(WorkflowContext context, Map internalData, String externalData) { + return WorkflowUtil.parseResponse(externalData); + } + + @Override + public void rollback(WorkflowContext context, Failure reason) { + // nothing to roll back + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/step/Failure.java b/src/main/java/edu/harvard/iq/dataverse/workflow/step/Failure.java index 0487313fd0d..950eed52245 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/step/Failure.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/step/Failure.java @@ -11,7 +11,7 @@ public class Failure implements WorkflowStepResult { private final String message; public Failure( String reason ) { - this(reason, reason); + this(reason, null); } /** diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/step/Success.java b/src/main/java/edu/harvard/iq/dataverse/workflow/step/Success.java index 621afdc61f6..0a00c4f279e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflow/step/Success.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflow/step/Success.java @@ -4,34 +4,37 @@ * Result returned when step execution succeeds. */ public class Success implements WorkflowStepResult { - + private final String reason; private final String message; - - public Success( String reason ) { + + public Success(String reason) { this(reason, null); } - + /** * Constructs a new success message. - * @param reason Technical reason (for logs etc.). - * @param message Human readable reason. + * + * @param reason Technical comment (for logs etc.). + * @param message Human readable comment. */ public Success(String reason, String message) { this.reason = reason; this.message = message; } - + /** - * Holds the technical reason for the success, useful for debugging the problem. - * @return the technical reason for the problem. + * Holds a technical comment about the success. + * + * @return the technical comment about the processing. */ public String getReason() { return reason; } /** - * Holds the user-friendly message explaining the failure. + * Holds the user-friendly message describing what was successfully done. + * * @return user-friendly message for the success. 
*/ public String getMessage() { @@ -42,6 +45,5 @@ public String getMessage() { public String toString() { return "WorkflowStepResult.Success{" + "reason=" + reason + ", message=" + message + '}'; } - - -} + +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java index 26ddb9b1573..d03afcaa91a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java @@ -22,7 +22,9 @@ public class WorkflowComment implements Serializable { More may be added in future releases, */ public enum Type { - RETURN_TO_AUTHOR //, SUBMIT_FOR_REVIEW not available in this release but may be added in the future + RETURN_TO_AUTHOR, //, SUBMIT_FOR_REVIEW not available in this release but may be added in the future + WORKFLOW_SUCCESS, + WORKFLOW_FAILURE }; @Id @@ -61,20 +63,22 @@ public enum Type { @Column(nullable = false) private Timestamp created; + private boolean toBeShown; + // TODO: Consider support editing in the GUI some day, like GitHub issue comments (show "Edited" in the UI). We won't send a second email, however. You only get one shot to prevent spam. // @Transient // private Timestamp modified; // TODO: How should we best associate these entries to notifications, which can go to multiple authors and curators? + //FWIW: Workflow success/failure messages get shown to the user running the workflow if/when on the relevant dataset version page // @Transient // private List notifications; public WorkflowComment(DatasetVersion version, WorkflowComment.Type type, String message, AuthenticatedUser authenticatedUser) { this.type = type; - if (this.type.equals(WorkflowComment.Type.RETURN_TO_AUTHOR)) { - this.datasetVersion = version; - } + this.datasetVersion = version; this.message = message; this.authenticatedUser = authenticatedUser; this.created = new Timestamp(new Date().getTime()); + this.setToBeShown(true); } /** @@ -123,4 +127,12 @@ public void setDatasetVersion(DatasetVersion dv) { datasetVersion=dv; } + public boolean isToBeShown() { + return toBeShown; + } + + public void setToBeShown(boolean toBeShown) { + this.toBeShown = toBeShown; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java index e28f8525c9a..aeb8bcf6c87 100644 --- a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java @@ -2,14 +2,29 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.api.Util; + +import java.io.StringReader; import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + import javax.json.Json; import javax.json.JsonArrayBuilder; +import javax.json.JsonObject; + import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; +import edu.harvard.iq.dataverse.workflow.internalspi.PauseWithMessageStep; +import edu.harvard.iq.dataverse.workflow.step.Failure; +import edu.harvard.iq.dataverse.workflow.step.Success; +import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult; + import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; +import static org.apache.commons.lang3.StringEscapeUtils.escapeHtml4; public class WorkflowUtil { + private static final Logger logger = Logger.getLogger(WorkflowUtil.class.getName()); + public 
static JsonArrayBuilder getAllWorkflowComments(DatasetVersion datasetVersion) { JsonArrayBuilder workflowCommentsAsJson = Json.createArrayBuilder(); List<WorkflowComment> workflowComments = datasetVersion.getWorkflowComments(); @@ -26,4 +41,49 @@ public static JsonArrayBuilder getAllWorkflowComments(DatasetVersion datasetVers } return workflowCommentsAsJson; } + + public static WorkflowStepResult parseResponse(String externalData) { + try (StringReader reader = new StringReader(externalData)) { + JsonObject response = Json.createReader(reader).readObject(); + String status = null; + //Lower case is documented, upper case is deprecated + if(response.containsKey("status")) { + status= response.getString("status"); + }else if(response.containsKey("Status")) { + status= response.getString("Status"); + } + String reason = null; + String message = null; + if (response.containsKey("reason")) { + reason = response.getString("reason"); + }else if (response.containsKey("Reason")) { + reason = response.getString("Reason"); + } + if (response.containsKey("message")) { + message = response.getString("message"); + }else if (response.containsKey("Message")) { + message = response.getString("Message"); + } + switch (status) { + case "success": + case "Success": + logger.log(Level.FINE, "AuthExt Workflow Step Succeeded: " + reason); + return new Success(reason, message); + case "failure": + case "Failure": + logger.log(Level.WARNING, "Remote system indicates workflow failed: {0}", reason); + return new Failure(reason, message); + default: + logger.log(Level.WARNING, "Remote system returned a response with no \"status\" key or bad status value: {0}", escapeHtml4(externalData)); + return new Failure("Workflow failure: Response from remote server doesn't have valid \"status\":" + escapeHtml4(externalData), null); + } + } catch (Exception e) { + logger.log(Level.WARNING, "Remote system returned a bad response: {0}", externalData); + } + //In general, the remote workflow service creating the response is trusted, but, if it's causing an error, escape the result to avoid issues in the UI + return new Failure("Workflow failure: Response from remote server could not be parsed:" + escapeHtml4(externalData), null); + + } + + } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index abcd502d638..ab5352c8efd 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -168,7 +168,6 @@ contact.context.support.intro={0},\n\nThe following message was sent from {1}.\n contact.context.support.ending=\n\n---\n\nMessage sent from Support contact form. # dataverseuser.xhtml -institution.name=A Dataverse Instance account.info=Account Information account.edit=Edit Account account.apiToken=API Token @@ -202,6 +201,9 @@ notification.wasSubmittedForReview={0} was submitted for review to be published notification.wasReturnedByReviewer={0} was returned by the curator of {1}. notification.wasPublished={0} was published in {1}. notification.publishFailedPidReg={0} in {1} could not be published due to a failure to register, or update the Global Identifier for the dataset or one of the files in it. Contact support if this continues to happen. +notification.workflowFailed=An external workflow run on {0} in {1} has failed. Check your email and/or view the Dataset page which may have additional details. Contact support if this continues to happen. +notification.workflowSucceeded=An external workflow run on {0} in {1} has succeeded.
Check your email and/or view the Dataset page which may have additional details. + notification.ingestCompleted=Dataset {1} ingest has successfully finished. notification.ingestCompletedWithErrors=Dataset {1} ingest has finished with errors. notification.generic.objectDeleted=The dataverse, dataset, or file for this notification has been deleted. @@ -249,7 +251,7 @@ user.acccountterms.tip=The terms and conditions for using the application and se user.acccountterms.required=Please check the box to indicate your acceptance of the General Terms of Use. user.acccountterms.iagree=I have read and accept the Dataverse General Terms of Use as outlined above. user.createBtn=Create Account -user.updatePassword.welcome=Welcome to Dataverse {0}, {1} +user.updatePassword.welcome=Welcome to Dataverse {0} user.updatePassword.warning=With the release of our new Dataverse 4.0 upgrade, the password requirements and General Terms of Use have updated. As this is the first time you are using Dataverse since the update, you need to create a new password and agree to the new General Terms of Use. user.updatePassword.password={0} user.password=Password @@ -368,7 +370,7 @@ shib.dataverseUsername=Dataverse Username shib.currentDataversePassword=Current Dataverse Password shib.accountInformation=Account Information shib.offerToCreateNewAccount=This information is provided by your institution and will be used to create your Dataverse account. -shib.passwordRejected=Validation Error - Your account can only be converted if you provide the correct password for your existing account. +shib.passwordRejected=Validation Error - Your account can only be converted if you provide the correct password for your existing account. If your existing account has been deactivated by an administrator, you cannot convert your account. # oauth2/firstLogin.xhtml oauth2.btn.convertAccount=Convert Existing Account @@ -403,14 +405,18 @@ oauth2.newAccount.emailInvalid=Invalid email address. oauth2.convertAccount.explanation=Please enter your {0} account username or email and password to convert your account to the {1} log in option. Learn more about converting your account. oauth2.convertAccount.username=Existing username oauth2.convertAccount.password=Password -oauth2.convertAccount.authenticationFailed=Authentication failed - bad username or password. +oauth2.convertAccount.authenticationFailed=Your account can only be converted if you provide the correct username and password for your existing account. If your existing account has been deactivated by an administrator, you cannot convert your account. oauth2.convertAccount.buttonTitle=Convert Account oauth2.convertAccount.success=Your Dataverse account is now associated with your {0} account. +oauth2.convertAccount.failedDeactivated=Your existing account cannot be converted because it has been deactivated. # oauth2/callback.xhtml oauth2.callback.page.title=OAuth Callback oauth2.callback.message=Authentication Error - Dataverse could not authenticate your login with the provider that you selected. Please make sure you authorize your account to connect with Dataverse. For more details about the information being requested, see the User Guide. +# deactivated user accounts +deactivated.error=Sorry, your account has been deactivated. + # tab on dataverseuser.xhtml apitoken.title=API Token apitoken.message=Your API Token is valid for a year. Check out our {0}API Guide{1} for more information on using your API Token with the Dataverse APIs. 
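For context on the notification.workflowFailed/notification.workflowSucceeded strings above: they are fed by WorkflowUtil.parseResponse, shown earlier in this patch. Below is a minimal, illustrative sketch of the JSON a remote workflow service is expected to return and how it maps to a Success or Failure step result. The class name and the literal values are invented for illustration; only the JSON keys and parseResponse itself come from the patch.

import javax.json.Json;
import edu.harvard.iq.dataverse.workflows.WorkflowUtil;
import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult;

public class WorkflowResponseSketch {
    public static void main(String[] args) {
        // Lower-case keys are the documented form; the capitalized
        // variants ("Status"/"Reason"/"Message") are accepted but deprecated.
        String body = Json.createObjectBuilder()
                .add("status", "success")                       // or "failure"
                .add("reason", "checksum validation passed")    // technical note, goes to the logs
                .add("message", "All files passed validation.") // user-facing notification/workflow comment
                .build().toString();
        // Yields Success("checksum validation passed", "All files passed validation.");
        // a missing or unrecognized "status" yields a Failure instead.
        WorkflowStepResult result = WorkflowUtil.parseResponse(body);
        System.out.println(result);
    }
}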
@@ -608,6 +614,7 @@ dashboard.list_users.tbl_header.authProviderFactoryAlias=Authentication dashboard.list_users.tbl_header.createdTime=Created Time dashboard.list_users.tbl_header.lastLoginTime=Last Login Time dashboard.list_users.tbl_header.lastApiUseTime=Last API Use Time +dashboard.list_users.tbl_header.deactivated=deactivated dashboard.list_users.tbl_header.roles.removeAll=Remove All dashboard.list_users.tbl_header.roles.removeAll.header=Remove All Roles dashboard.list_users.tbl_header.roles.removeAll.confirmationText=Are you sure you want to remove all roles for user {0}? @@ -656,6 +663,11 @@ notification.email.submit.dataset.subject={0}: Your dataset has been submitted f notification.email.publish.dataset.subject={0}: Your dataset has been published notification.email.publishFailure.dataset.subject={0}: Failed to publish your dataset notification.email.returned.dataset.subject={0}: Your dataset has been returned +notification.email.workflow.success.subject={0}: Your dataset has been processed +notification.email.workflow.success=A workflow running on {0} (view at {1}) succeeded: {2} +notification.email.workflow.failure.subject={0}: Failed to process your dataset +notification.email.workflow.failure=A workflow running on {0} (view at {1}) failed: {2} +notification.email.workflow.nullMessage=No additional message sent from the workflow. notification.email.create.account.subject={0}: Your account has been created notification.email.assign.role.subject={0}: You have been assigned a role notification.email.revoke.role.subject={0}: Your role has been revoked @@ -967,6 +979,8 @@ dataverse.permissions.roles.add=Add New Role dataverse.permissions.roles.description=All the roles set up in your dataverse, that you can assign to users and groups. dataverse.permissions.roles.edit=Edit Role dataverse.permissions.roles.copy=Copy Role +dataverse.permissions.roles.alias.required=Please enter a unique identifier for this role. +dataverse.permissions.roles.name.required=Please enter a name for this role. # permissions-manage-files.xhtml dataverse.permissionsFiles.title=Restricted File Permissions @@ -1398,15 +1412,19 @@ dataset.message.locked.downloadNotAllowedInReview=Dataset file(s) may not be dow dataset.message.locked.downloadNotAllowed=Dataset file(s) may not be downloaded due to dataset lock. dataset.message.locked.editNotAllowed=Dataset cannot be edited due to dataset lock. dataset.message.locked.publishNotAllowed=Dataset cannot be published due to dataset lock. -dataset.message.createSuccess=This dataset has been created +dataset.message.createSuccess=This dataset has been created. dataset.message.createSuccess.failedToSaveFiles=Partial Success: The dataset has been created. But the file(s) could not be saved. Please try uploading the file(s) again. dataset.message.createSuccess.partialSuccessSavingFiles=Partial Success: The dataset has been created. But only {0} out of {1} files have been saved. Please try uploading the missing file(s) again. dataset.message.linkSuccess= {0} has been successfully linked to {1}. dataset.message.metadataSuccess=The metadata for this dataset has been updated. -dataset.message.termsSuccess=The terms for this dataset has been updated. +dataset.message.termsSuccess=The terms for this dataset have been updated. dataset.message.filesSuccess=The files for this dataset have been updated. dataset.message.addFiles.Failure=Failed to add files to the dataset. Please try uploading the file(s) again. 
dataset.message.addFiles.partialSuccess=Partial success: only {0} files out of {1} have been saved. Please try uploading the missing file(s) again. +dataset.message.publish.remind.draft=If it's ready for sharing, please publish it. +dataset.message.submit.remind.draft=If it's ready for sharing, please submit it for review. +dataset.message.publish.remind.version=If it's ready for sharing, please publish it so that others can see these changes. +dataset.message.submit.remind.version=If it's ready for sharing, please submit it for review so that others can see these changes. dataset.message.publishSuccess=This dataset has been published. dataset.message.only.authenticatedUsers=Only authenticated users may release Datasets. dataset.message.deleteSuccess=This dataset has been deleted. @@ -1520,10 +1538,8 @@ file.notFound.search=There are no files that match your search. Please change th file.noSelectedFiles.tip=There are no selected files to display. file.noUploadedFiles.tip=Files you upload will appear here. file.replace=Replace -file.replaced.warning.header=Edit File -file.replaced.warning.draft.warningMessage=You can not replace a file that has been replaced in a dataset draft. In order to replace it with a different file you must delete the dataset draft. Note that doing so will discard any other changes within this draft. -file.replaced.warning.previous.warningMessage=You can not edit a file that has been replaced in a previous dataset version. In order to edit it you must go to the most recently published version of the file. -file.alreadyDeleted.previous.warningMessage=This file has already been deleted in current version. It may not be edited. +file.alreadyDeleted.warning.header=Edit File +file.alreadyDeleted.previous.warningMessage=This file has already been deleted (or replaced) in the current version. It may not be edited. file.delete=Delete file.delete.duplicate.multiple=Delete Duplicate Files file.delete.duplicate.single=Delete Duplicate File @@ -1903,9 +1919,9 @@ file.metadata.datafiletag.not_tabular=You cannot add Tabular Data Tags to a non- file.metadata.filedirectory.invalidCharacters=Directory Name cannot contain invalid characters. Valid characters are a-Z, 0-9, '_', '-', '.', '\\', '/' and ' ' (white space). # File Edit Success -file.message.editSuccess=This file has been updated. +file.message.editSuccess=The file has been updated. file.message.deleteSuccess=The file has been deleted. -file.message.replaceSuccess=This file has been replaced. +file.message.replaceSuccess=The file has been replaced. # File Add/Replace operation messages file.addreplace.file_size_ok=File size is in range. @@ -1913,6 +1929,7 @@ file.addreplace.error.byte_abrev=B file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1}. file.addreplace.error.dataset_is_null=The dataset cannot be null. file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null. +file.addreplace.error.parsing=Error in parsing provided json file.addreplace.warning.unzip.failed=Failed to unzip the file. Saving the file as is. file.addreplace.warning.unzip.failed.size=A file contained in this zip file exceeds the size limit of {0}. This Dataverse installation will save and display the zipped file, rather than unpacking and displaying files. find.dataset.error.dataset_id_is_null=When accessing a dataset based on Persistent ID, a {0} query parameter must be present. 
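A quick sketch of how the numbered placeholders in these bundle strings resolve at runtime. BundleUtil.getStringFromBundle and the key are from this patch (the same call pattern appears in the MailUtil changes above); the argument values are made up for illustration.

import java.util.Arrays;
import edu.harvard.iq.dataverse.util.BundleUtil;

public class BundleMessageSketch {
    public static void main(String[] args) {
        // Resolves {0} and {1}, printing:
        // "Partial success: only 3 files out of 5 have been saved. Please try uploading the missing file(s) again."
        String msg = BundleUtil.getStringFromBundle(
                "dataset.message.addFiles.partialSuccess",
                Arrays.asList("3", "5"));
        System.out.println(msg);
    }
}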
@@ -1926,6 +1943,10 @@ find.dataverselinking.error.not.found.bad.ids=Bad dataverse ID number: {0} or da
 find.datafile.error.datafile.not.found.id=File with ID {0} not found.
 find.datafile.error.datafile.not.found.bad.id=Bad file ID number: {0}.
 find.datafile.error.dataset.not.found.persistentId=Datafile with Persistent ID {0} not found.
+find.dataverse.role.error.role.not.found.id=Dataverse Role with ID {0} not found.
+find.dataverse.role.error.role.not.found.bad.id=Bad Dataverse Role ID number: {0}.
+find.dataverse.role.error.role.not.found.alias=Dataverse Role with alias {0} not found.
+find.dataverse.role.error.role.builtin.not.allowed=May not delete Built In Role {0}.
 file.addreplace.error.dataset_id_not_found=There was no dataset found for ID:
 file.addreplace.error.no_edit_dataset_permission=You do not have permission to edit this dataset.
 file.addreplace.error.filename_undetermined=The file name cannot be determined.
@@ -2216,6 +2237,7 @@ shib.invalidEmailAddress=The SAML assertion contained an invalid email address:
 shib.emailAddress.error=A single valid address could not be found.
 shib.nullerror=The SAML assertion for "{0}" was null. Please contact support.
 dataverse.shib.success=Your Dataverse account is now associated with your institutional account.
+shib.convert.fail.deactivated=Your existing account cannot be converted because it has been deactivated.
 shib.createUser.fail=Couldn't create user.
 shib.duplicate.email.error=Cannot login, because the e-mail address associated with it has changed since previous login and is already in use by another account.
 
@@ -2235,6 +2257,8 @@ permission.permissionsMissing=Permissions {0} missing.
 permission.CannotAssigntDefaultPermissions=Cannot assign default permissions.
 permission.default.contributor.role.none.decription=A person who has no permissions on a newly created dataset. Not recommended for dataverses with human contributors.
 permission.default.contributor.role.none.name=None
+permission.role.must.be.created.by.superuser=Roles can only be created or edited by superusers.
+permission.role.not.created.alias.already.exists=Role with this alias already exists.
 
 #ManageFilePermissionsPage.java
 permission.roleNotAbleToBeRemoved=The role assignment was not able to be removed.
@@ -2277,6 +2301,8 @@ pid.allowedCharacters=^[A-Za-z0-9._/:\\-]*
 
 #General Command Exception
 command.exception.only.superusers={1} can only be called by superusers.
+command.exception.user.deactivated={0} failed: User account has been deactivated.
+command.exception.user.deleted={0} failed: User account has been deleted.
 
 #Admin-API
 admin.api.auth.mustBeSuperUser=Forbidden. You must be a superuser.
@@ -2290,8 +2316,7 @@ admin.api.deleteUser.failure.dvobjects= the user has created Dataverse object(s)
 admin.api.deleteUser.failure.gbResps= the user is associated with file download (Guestbook Response) record(s)
 admin.api.deleteUser.failure.roleAssignments=the user is associated with role assignment record(s)
 admin.api.deleteUser.failure.versionUser=the user has contributed to dataset version(s)
-admin.api.deleteUser.failure.groupMember=the user is a member of Explicit Group(s)
-admin.api.deleteUser.failure.pendingRequests=the user has pending File Access Request(s)
+admin.api.deleteUser.failure.savedSearches=the user has created saved searches
 admin.api.deleteUser.success=Authenticated User {0} deleted.
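Reviewer note on the parameterized strings added above: the {0}/{1}/{2} placeholders are MessageFormat-style arguments filled in at runtime. A minimal sketch of how a caller would resolve one of the new workflow strings, assuming the existing two-argument BundleUtil.getStringFromBundle(key, arguments) overload and a hypothetical installationBrandName variable (neither is part of this hunk):

    import edu.harvard.iq.dataverse.util.BundleUtil;
    import java.util.Arrays;

    // Sketch only, not part of this patch. Yields, e.g.,
    // "Demo Dataverse: Your dataset has been processed".
    String subject = BundleUtil.getStringFromBundle(
            "notification.email.workflow.success.subject",
            Arrays.asList(installationBrandName)); // {0} = installation name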
#Files.java diff --git a/src/main/resources/META-INF/microprofile-aliases.properties b/src/main/resources/META-INF/microprofile-aliases.properties index 4efaf02b083..ebde2910d52 100644 --- a/src/main/resources/META-INF/microprofile-aliases.properties +++ b/src/main/resources/META-INF/microprofile-aliases.properties @@ -1,4 +1,5 @@ # NOTE # This file is a placeholder for future aliases of deprecated config settings. # Format: -# dataverse.new.config.option=dataverse.old.deprecated.option \ No newline at end of file +# dataverse.new.config.option=dataverse.old.deprecated.option +dataverse.export.distributor.excludeinstallationifset=dataverse.settings.fromdb.ExportInstallationAsDistributorOnlyWhenNotSet \ No newline at end of file diff --git a/src/main/resources/db/migration/V5.3.0.5__7564-workflow.sql b/src/main/resources/db/migration/V5.3.0.5__7564-workflow.sql new file mode 100644 index 00000000000..d2a8d48726e --- /dev/null +++ b/src/main/resources/db/migration/V5.3.0.5__7564-workflow.sql @@ -0,0 +1,2 @@ +ALTER TABLE workflowcomment +ADD COLUMN IF NOT EXISTS tobeshown boolean; diff --git a/src/main/resources/db/migration/V5.3.0.6__2419-deactivate-users.sql b/src/main/resources/db/migration/V5.3.0.6__2419-deactivate-users.sql new file mode 100644 index 00000000000..a5e4b69e00b --- /dev/null +++ b/src/main/resources/db/migration/V5.3.0.6__2419-deactivate-users.sql @@ -0,0 +1,6 @@ +-- Users can be deactivated. +ALTER TABLE authenticateduser ADD COLUMN IF NOT EXISTS deactivated BOOLEAN; +-- Prevent old users from having null for deactivated. +UPDATE authenticateduser SET deactivated = false WHERE deactivated IS NULL; +-- A timestamp of when the user was deactivated. +ALTER TABLE authenticateduser ADD COLUMN IF NOT EXISTS deactivatedtime timestamp without time zone; diff --git a/src/main/webapp/dashboard-users.xhtml b/src/main/webapp/dashboard-users.xhtml index a9e7461f1fb..3f6087cf01c 100644 --- a/src/main/webapp/dashboard-users.xhtml +++ b/src/main/webapp/dashboard-users.xhtml @@ -65,6 +65,7 @@ + @@ -84,7 +85,8 @@ - + + diff --git a/src/main/webapp/dataverseuser.xhtml b/src/main/webapp/dataverseuser.xhtml index 5de0154f49c..3e48e16404c 100644 --- a/src/main/webapp/dataverseuser.xhtml +++ b/src/main/webapp/dataverseuser.xhtml @@ -312,6 +312,28 @@ + + + + + #{item.theObject.getDataset().getDisplayName()} + + + #{item.theObject.getDataset().getOwner().getDisplayName()} + + + + + + + + #{item.theObject.getDataset().getDisplayName()} + + + #{item.theObject.getDataset().getOwner().getDisplayName()} + + + diff --git a/src/main/webapp/editdatafiles.xhtml b/src/main/webapp/editdatafiles.xhtml index 01c5ccc9123..aa4eb5f228c 100644 --- a/src/main/webapp/editdatafiles.xhtml +++ b/src/main/webapp/editdatafiles.xhtml @@ -15,7 +15,7 @@ - + @@ -25,14 +25,15 @@ + - - + + @@ -53,9 +54,8 @@ - +
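A note on the V5.3.0.6 migration above: because it backfills deactivated = false for existing rows, code can treat the flag as effectively non-null after the migration runs. A minimal sketch of the entity-side fields these columns would map to, assuming conventional JPA naming on AuthenticatedUser (the actual field declarations are outside this patch, so the names here are illustrative):

    import java.sql.Timestamp;
    import javax.persistence.Column;

    // Sketch only (assumed names): how the columns added by
    // V5.3.0.6__2419-deactivate-users.sql would surface on the entity.
    @Column
    private Boolean deactivated;

    @Column
    private Timestamp deactivatedTime;

    public boolean isDeactivated() {
        // The migration backfills false, so null can only mean the row
        // predates the migration.
        return deactivated != null && deactivated;
    }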

diff --git a/src/main/webapp/file-edit-button-fragment.xhtml b/src/main/webapp/file-edit-button-fragment.xhtml index 6b47cac4057..ef5df7acb3d 100644 --- a/src/main/webapp/file-edit-button-fragment.xhtml +++ b/src/main/webapp/file-edit-button-fragment.xhtml @@ -22,7 +22,6 @@ dataserVersion - the datasetversion object to associate with this view fileMetadata - for single file, the fileMetadata object of that file fileMetadataForAction - used by DatasetPage popups to identify single file (also clears for selected file) - isDraftReplacementFile - for single file, if the file is a draft and already replacing a past file configureTools - for single file, list of configureTools for the file bean - the named value of the backing bean for the below method(s), also used by isFilePg param unrestrictFileAction - name of the method on the above bean to call for unrestrict (method must take a boolean) @@ -38,7 +37,7 @@
  • @@ -71,22 +70,13 @@ - - - -
  • - - - -
  • - - -
  • - - - -
  • -
    + + +
  • + + + +
  • diff --git a/src/main/webapp/file-edit-popup-fragment.xhtml b/src/main/webapp/file-edit-popup-fragment.xhtml index 7fe8fb4c083..707a5f26721 100644 --- a/src/main/webapp/file-edit-popup-fragment.xhtml +++ b/src/main/webapp/file-edit-popup-fragment.xhtml @@ -13,7 +13,17 @@ restrictFileAction - name of the method on the above bean to call for restrict (method must take a boolean) deleteFileAction - name of method on the above bean to delete files --> - + + + +

    #{bundle['file.alreadyDeleted.previous.warningMessage']}

    +
    + +
    +
    @@ -71,8 +81,8 @@ - - + +
    - - + + \ No newline at end of file diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index 34717bfae3b..06ff420a9d1 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -210,15 +210,15 @@
    - + #{bundle['file.editBtn']} - +
    + and !FilePage.deletedFile}"> @@ -642,31 +641,7 @@ #{bundle.close}
    - - -

    #{bundle['file.replaced.warning.draft.warningMessage']}

    -
    - -
    -
    - - - -

    #{bundle['file.replaced.warning.previous.warningMessage']}

    -
    - -

    #{bundle['file.alreadyDeleted.previous.warningMessage']}

    -
    -
    - -
    -
    +

    #{bundle['file.compute.fileAccessDenied']}

    diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index f3523b63853..c18dcf0d41a 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -530,7 +530,18 @@
    + and DatasetPage.canUpdateDataset() + and DatasetPage.isFileDeleted(fileMetadata.dataFile)}"> + + #{bundle['file.optionsBtn']} + +
    + diff --git a/src/main/webapp/roles-edit.xhtml b/src/main/webapp/roles-edit.xhtml index a59d53bcdae..e236ef180b3 100644 --- a/src/main/webapp/roles-edit.xhtml +++ b/src/main/webapp/roles-edit.xhtml @@ -8,28 +8,37 @@
    +
    - - + +
    + - - + + +
    +
    @@ -65,9 +74,12 @@
    + oncomplete="if (args && !args.validationFailed) addRoleCommand();" + update="roleInputTextFragment @([id$=Messages])" + > + + + diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index a1bcc0b08fd..c2049705cb1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -358,6 +358,63 @@ public void testConvertShibUserToBuiltin() throws Exception { } + /** + * Here we are asserting that deactivated users cannot be converted into + * shib users. + */ + @Test + public void testConvertDeactivateUserToShib() { + + Response createUserToConvert = UtilIT.createRandomUser(); + createUserToConvert.then().assertThat().statusCode(OK.getStatusCode()); + createUserToConvert.prettyPrint(); + + long idOfUserToConvert = createUserToConvert.body().jsonPath().getLong("data.authenticatedUser.id"); + String emailOfUserToConvert = createUserToConvert.body().jsonPath().getString("data.authenticatedUser.email"); + String usernameOfUserToConvert = UtilIT.getUsernameFromResponse(createUserToConvert); + + Response deactivateUser = UtilIT.deactivateUser(usernameOfUserToConvert); + deactivateUser.prettyPrint(); + deactivateUser.then().assertThat().statusCode(OK.getStatusCode()); + + String password = usernameOfUserToConvert; + String newEmailAddressToUse = "builtin2shib." + UUID.randomUUID().toString().substring(0, 8) + "@mailinator.com"; + String data = emailOfUserToConvert + ":" + password + ":" + newEmailAddressToUse; + + Response builtinToShibAnon = UtilIT.migrateBuiltinToShib(data, ""); + builtinToShibAnon.prettyPrint(); + builtinToShibAnon.then().assertThat().statusCode(FORBIDDEN.getStatusCode()); + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response getAuthProviders = UtilIT.getAuthProviders(superuserApiToken); + getAuthProviders.prettyPrint(); + if (!getAuthProviders.body().asString().contains(BuiltinAuthenticationProvider.PROVIDER_ID)) { + System.out.println("Can't proceed with test without builtin provider."); + return; + } + + Response makeShibUser = UtilIT.migrateBuiltinToShib(data, superuserApiToken); + makeShibUser.prettyPrint(); + makeShibUser.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("[\"builtin account has been deactivated\"]")); + + Response userIsStillBuiltin = UtilIT.getAuthenticatedUser(usernameOfUserToConvert, superuserApiToken); + userIsStillBuiltin.prettyPrint(); + userIsStillBuiltin.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.id", equalTo(Long.valueOf(idOfUserToConvert).intValue())) + .body("data.identifier", equalTo("@" + usernameOfUserToConvert)) + .body("data.authenticationProviderId", equalTo("builtin")); + + } + @Test public void testConvertOAuthUserToBuiltin() throws Exception { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java new file mode 100644 index 00000000000..de2a1d422c0 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java @@ -0,0 +1,282 @@ +package edu.harvard.iq.dataverse.api; + +import 
com.jayway.restassured.RestAssured; +import com.jayway.restassured.path.json.JsonPath; +import com.jayway.restassured.response.Response; +import edu.harvard.iq.dataverse.authorization.DataverseRole; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import static javax.ws.rs.core.Response.Status.BAD_REQUEST; +import static javax.ws.rs.core.Response.Status.CREATED; +import static javax.ws.rs.core.Response.Status.FORBIDDEN; +import static javax.ws.rs.core.Response.Status.OK; +import static javax.ws.rs.core.Response.Status.UNAUTHORIZED; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.startsWith; +import org.junit.BeforeClass; +import org.junit.Test; + +public class DeactivateUsersIT { + + @BeforeClass + public static void setUp() { + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); + } + + @Test + public void testDeactivateUser() { + + Response createSuperuser = UtilIT.createRandomUser(); + createSuperuser.then().assertThat().statusCode(OK.getStatusCode()); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createDataverse = UtilIT.createRandomDataverse(superuserApiToken); + createDataverse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + Integer dataverseId = UtilIT.getDataverseIdFromResponse(createDataverse); + + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, superuserApiToken); + createDataset.prettyPrint(); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDataset); + + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response grantRoleBeforeDeactivate = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.ADMIN.toString(), "@" + username, superuserApiToken); + grantRoleBeforeDeactivate.prettyPrint(); + grantRoleBeforeDeactivate.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.assignee", equalTo("@" + username)) + .body("data._roleAlias", equalTo("admin")); + + String aliasInOwner = "groupFor" + dataverseAlias; + String displayName = "Group for " + dataverseAlias; + String user2identifier = "@" + username; + Response createGroup = UtilIT.createGroup(dataverseAlias, aliasInOwner, displayName, superuserApiToken); + createGroup.prettyPrint(); + createGroup.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + String groupIdentifier = JsonPath.from(createGroup.asString()).getString("data.identifier"); + + List roleAssigneesToAdd = new ArrayList<>(); + roleAssigneesToAdd.add(user2identifier); + Response addToGroup = UtilIT.addToGroup(dataverseAlias, aliasInOwner, roleAssigneesToAdd, superuserApiToken); + addToGroup.prettyPrint(); + addToGroup.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response userTracesBeforeDeactivate = UtilIT.getUserTraces(username, superuserApiToken); + userTracesBeforeDeactivate.prettyPrint(); + userTracesBeforeDeactivate.then().assertThat() + .statusCode(OK.getStatusCode()) 
+                .body("data.traces.roleAssignments.items[0].definitionPointName", equalTo(dataverseAlias))
+                .body("data.traces.roleAssignments.items[0].definitionPointId", equalTo(dataverseId))
+                .body("data.traces.explicitGroups.items[0].name", equalTo("Group for " + dataverseAlias));
+
+        Response deactivateUser = UtilIT.deactivateUser(username);
+        deactivateUser.prettyPrint();
+        deactivateUser.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response getUser = UtilIT.getAuthenticatedUser(username, superuserApiToken);
+        getUser.prettyPrint();
+        getUser.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.deactivated", equalTo(true));
+
+        Response findUser = UtilIT.filterAuthenticatedUsers(superuserApiToken, username, null, 100, null);
+        findUser.prettyPrint();
+        findUser.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.users[0].userIdentifier", equalTo(username))
+                .body("data.users[0].deactivated", equalTo(true))
+                .body("data.users[0].deactivatedTime", startsWith("2"));
+
+        Response getUserDeactivated = UtilIT.getAuthenticatedUserByToken(apiToken);
+        getUserDeactivated.prettyPrint();
+        getUserDeactivated.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
+
+        Response userTracesAfterDeactivate = UtilIT.getUserTraces(username, superuserApiToken);
+        userTracesAfterDeactivate.prettyPrint();
+        userTracesAfterDeactivate.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                /**
+                 * Here we are showing that the following were deleted:
+                 *
+                 * - role assignments
+                 *
+                 * - membership in explicit groups.
+                 */
+                .body("data.traces", equalTo(Collections.EMPTY_MAP));
+
+        Response grantRoleAfterDeactivate = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.ADMIN.toString(), "@" + username, superuserApiToken);
+        grantRoleAfterDeactivate.prettyPrint();
+        grantRoleAfterDeactivate.then().assertThat()
+                .statusCode(FORBIDDEN.getStatusCode())
+                .body("message", equalTo("User " + username + " is deactivated and cannot be given a role."));
+
+        Response addToGroupAfter = UtilIT.addToGroup(dataverseAlias, aliasInOwner, roleAssigneesToAdd, superuserApiToken);
+        addToGroupAfter.prettyPrint();
+        addToGroupAfter.then().assertThat()
+                .statusCode(FORBIDDEN.getStatusCode())
+                .body("message", equalTo("User " + username + " is deactivated and cannot be added to a group."));
+
+        Response grantRoleOnDataset = UtilIT.grantRoleOnDataset(datasetPersistentId, DataverseRole.ADMIN.toString(), "@" + username, superuserApiToken);
+        grantRoleOnDataset.prettyPrint();
+        grantRoleOnDataset.then().assertThat()
+                .statusCode(FORBIDDEN.getStatusCode())
+                .body("message", equalTo("User " + username + " is deactivated and cannot be given a role."));
+
+    }
+
+    @Test
+    public void testDeactivateUserById() {
+
+        Response createUser = UtilIT.createRandomUser();
+        createUser.prettyPrint();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+
+        Long userId = JsonPath.from(createUser.body().asString()).getLong("data.authenticatedUser.id");
+        Response deactivateUser = UtilIT.deactivateUser(userId);
+        deactivateUser.prettyPrint();
+        deactivateUser.then().assertThat().statusCode(OK.getStatusCode());
+    }
+
+    @Test
+    public void testMergeDeactivatedIntoNonDeactivatedUser() {
+
+        Response createSuperuser = UtilIT.createRandomUser();
+        String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser);
+        String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser);
+        Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername);
+        
toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUserMergeTarget = UtilIT.createRandomUser(); + createUserMergeTarget.prettyPrint(); + String usernameMergeTarget = UtilIT.getUsernameFromResponse(createUserMergeTarget); + + Response createUserToMerge = UtilIT.createRandomUser(); + createUserToMerge.prettyPrint(); + String usernameToMerge = UtilIT.getUsernameFromResponse(createUserToMerge); + + Response deactivateUser = UtilIT.deactivateUser(usernameToMerge); + deactivateUser.prettyPrint(); + deactivateUser.then().assertThat().statusCode(OK.getStatusCode()); + + // User accounts can only be merged if they are either both active or both deactivated. + Response mergeAccounts = UtilIT.mergeAccounts(usernameMergeTarget, usernameToMerge, superuserApiToken); + mergeAccounts.prettyPrint(); + mergeAccounts.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + } + + @Test + public void testMergeNonDeactivatedIntoDeactivatedUser() { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUserMergeTarget = UtilIT.createRandomUser(); + createUserMergeTarget.prettyPrint(); + String usernameMergeTarget = UtilIT.getUsernameFromResponse(createUserMergeTarget); + + Response createUserToMerge = UtilIT.createRandomUser(); + createUserToMerge.prettyPrint(); + String usernameToMerge = UtilIT.getUsernameFromResponse(createUserToMerge); + + Response deactivateUser = UtilIT.deactivateUser(usernameMergeTarget); + deactivateUser.prettyPrint(); + deactivateUser.then().assertThat().statusCode(OK.getStatusCode()); + + // User accounts can only be merged if they are either both active or both deactivated. + Response mergeAccounts = UtilIT.mergeAccounts(usernameMergeTarget, usernameToMerge, superuserApiToken); + mergeAccounts.prettyPrint(); + mergeAccounts.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + } + + @Test + public void testMergeDeactivatedIntoDeactivatedUser() { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUserMergeTarget = UtilIT.createRandomUser(); + createUserMergeTarget.prettyPrint(); + String usernameMergeTarget = UtilIT.getUsernameFromResponse(createUserMergeTarget); + + Response createUserToMerge = UtilIT.createRandomUser(); + createUserToMerge.prettyPrint(); + String usernameToMerge = UtilIT.getUsernameFromResponse(createUserToMerge); + + Response deactivatedUserMergeTarget = UtilIT.deactivateUser(usernameMergeTarget); + deactivatedUserMergeTarget.prettyPrint(); + deactivatedUserMergeTarget.then().assertThat().statusCode(OK.getStatusCode()); + + Response deactivatedUserToMerge = UtilIT.deactivateUser(usernameToMerge); + deactivatedUserToMerge.prettyPrint(); + deactivatedUserToMerge.then().assertThat().statusCode(OK.getStatusCode()); + + // User accounts can only be merged if they are either both active or both deactivated. 
+ Response mergeAccounts = UtilIT.mergeAccounts(usernameMergeTarget, usernameToMerge, superuserApiToken); + mergeAccounts.prettyPrint(); + mergeAccounts.then().assertThat().statusCode(OK.getStatusCode()); + } + + @Test + public void testMergeUserIntoSelf() { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUserToMerge = UtilIT.createRandomUser(); + createUserToMerge.prettyPrint(); + String usernameToMerge = UtilIT.getUsernameFromResponse(createUserToMerge); + + String usernameMergeTarget = usernameToMerge; + + Response mergeAccounts = UtilIT.mergeAccounts(usernameMergeTarget, usernameToMerge, superuserApiToken); + mergeAccounts.prettyPrint(); + mergeAccounts.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + } + + @Test + public void testTurnDeactivatedUserIntoSuperuser() { + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String username = UtilIT.getUsernameFromResponse(createUser); + + Response deactivateUser = UtilIT.deactivateUser(username); + deactivateUser.prettyPrint(); + deactivateUser.then().assertThat().statusCode(OK.getStatusCode()); + + Response toggleSuperuser = UtilIT.makeSuperUser(username); + toggleSuperuser.prettyPrint(); + toggleSuperuser.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()); + + } + +} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java new file mode 100644 index 00000000000..cae1d0e210a --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java @@ -0,0 +1,701 @@ +package edu.harvard.iq.dataverse.api; + +import com.jayway.restassured.RestAssured; +import com.jayway.restassured.path.json.JsonPath; +import com.jayway.restassured.response.Response; +import edu.harvard.iq.dataverse.authorization.DataverseRole; +import java.io.File; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import javax.json.Json; +import javax.json.JsonObjectBuilder; +import static javax.ws.rs.core.Response.Status.BAD_REQUEST; +import static javax.ws.rs.core.Response.Status.CREATED; +import static javax.ws.rs.core.Response.Status.OK; +import static javax.ws.rs.core.Response.Status.UNAUTHORIZED; +import static junit.framework.Assert.assertEquals; +import static org.hamcrest.CoreMatchers.equalTo; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * The following query has been helpful in discovering places where user ids + * appear throughout the database. Here's a summary of how user deletion affects + * these tables. + * + * - apitoken: Not a concern. Tokens are deleted. + * + * - authenticateduserlookup: Not a concern. Rows are deleted. + * + * - confirmemaildata: Not a concern. Rows are deleted. + * + * - datasetlock: Not a concern, locks are deleted. + * + * - datasetversionuser: Definitely a concern. This table is what feeds the + * "Contributors" list under the "Version" tab on the dataset page. You can't + * delete the user. You can merge the user but the name under "Contributors" + * will change to the user you merged into. 
There is talk of implementing the
+ * concept of disabling users to handle this.
+ *
+ * - dvobject (creator_id): Definitely a concern. You can't delete a user. You
+ * have to merge instead.
+ *
+ * - dvobject (releaseuser_id): Definitely a concern. You can't delete a user.
+ * You have to merge instead. It seems that for files, releaseuser_id is not
+ * populated.
+ *
+ * - explicitgroup: Not a concern. Group membership is deleted.
+ *
+ * - fileaccessrequests: Not a concern. File requests are deleted.
+ *
+ * - guestbookresponse: Definitely a concern, but it's possible to null out the
+ * user id. You can't delete a user but you can merge instead. There is talk of
+ * a deactivate feature which would probably null out the id. In all cases the
+ * name and email address in the rows are left alone.
+ *
+ * - oauth2tokendata: Not a concern. Rows are deleted.
+ *
+ * - savedsearch: Definitely a concern. You can't delete a user. You have to
+ * merge.
+ *
+ * - userbannermessage: Not a concern. Rows are deleted.
+ *
+ * - usernotification (user_id): Not a concern. Deleted by a cascade.
+ *
+ * - usernotification (requestor_id): Not a big concern because of other
+ * constraints. This is only populated by "submit for review" (so that the
+ * curator has the name and email address of the author). All these
+ * notifications would be deleted by a cascade, but deleting the user itself is
+ * prevented because the user is recorded in the datasetversionuser table. (Both
+ * "submit for review" and "return to author" add you to that table.) So the
+ * bottom line is that the user can't be deleted. It has to be merged.
+ *
+ * - workflowcomment: Not a big concern because of other constraints. A workflow
+ * comment is optionally added as part of "return to author", but this also
+ * creates a row in the datasetversionuser table, which means the user can't be
+ * deleted. It has to be merged instead.
+ *
+ *
+ * The tables that aren't captured above are actionlogrecord and roleassignment
+ * because the relationship is to the identifier (username) rather than the id.
+ * So we'll list them separately:
+ *
+ * - actionlogrecord: Not a concern. Delete can go through. On merge, they are
+ * changed from one user identifier to another.
+ *
+ * - roleassignment: Not a concern. Delete can go through. On merge, they are
+ * changed from one user identifier to another.
+ */
+/*
+           table_name            |                         constraint_name
+---------------------------------+----------------------------------------------------------------
+ apitoken                        | fk_apitoken_authenticateduser_id
+ authenticateduserlookup         | fk_authenticateduserlookup_authenticateduser_id
+ confirmemaildata                | fk_confirmemaildata_authenticateduser_id
+ datasetlock                     | fk_datasetlock_user_id
+ datasetversionuser              | fk_datasetversionuser_authenticateduser_id
+ dvobject                        | fk_dvobject_creator_id
+ dvobject                        | fk_dvobject_releaseuser_id
+ explicitgroup_authenticateduser | explicitgroup_authenticateduser_containedauthenticatedusers_id
+ fileaccessrequests              | fk_fileaccessrequests_authenticated_user_id
+ guestbookresponse               | fk_guestbookresponse_authenticateduser_id
+ oauth2tokendata                 | fk_oauth2tokendata_user_id
+ savedsearch                     | fk_savedsearch_creator_id
+ userbannermessage               | fk_userbannermessage_user_id
+ usernotification                | fk_usernotification_user_id
+ usernotification                | fk_usernotification_requestor_id
+ workflowcomment                 | fk_workflowcomment_authenticateduser_id
+(16 rows)
+
+-- https://stackoverflow.com/questions/5347050/postgresql-sql-script-to-get-a-list-of-all-tables-that-has-a-particular-column
+select R.TABLE_NAME, R.CONSTRAINT_NAME
+from INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE u
+inner join INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS FK
+    on U.CONSTRAINT_CATALOG = FK.UNIQUE_CONSTRAINT_CATALOG
+    and U.CONSTRAINT_SCHEMA = FK.UNIQUE_CONSTRAINT_SCHEMA
+    and U.CONSTRAINT_NAME = FK.UNIQUE_CONSTRAINT_NAME
+inner join INFORMATION_SCHEMA.KEY_COLUMN_USAGE R
+    ON R.CONSTRAINT_CATALOG = FK.CONSTRAINT_CATALOG
+    AND R.CONSTRAINT_SCHEMA = FK.CONSTRAINT_SCHEMA
+    AND R.CONSTRAINT_NAME = FK.CONSTRAINT_NAME
+WHERE U.COLUMN_NAME = 'id'
+-- AND U.TABLE_CATALOG = 'b'
+-- AND U.TABLE_SCHEMA = 'c'
+    AND U.TABLE_NAME = 'authenticateduser'
+ORDER BY R.TABLE_NAME;
+ */
+public class DeleteUsersIT {
+
+    @BeforeClass
+    public static void setUp() {
+        RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
+    }
+
+    @Test
+    public void testDeleteRolesAndUnpublishedDataverse() {
+
+        Response createSuperuser = UtilIT.createRandomUser();
+        String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser);
+        String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser);
+        Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername);
+        toggleSuperuser.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        Response createUser = UtilIT.createRandomUser();
+        createUser.prettyPrint();
+        String usernameForCreateDV = UtilIT.getUsernameFromResponse(createUser);
+        String normalApiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response getTraces1 = UtilIT.getUserTraces(usernameForCreateDV, superuserApiToken);
+        getTraces1.prettyPrint();
+        getTraces1.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.user.identifier", equalTo("@" + usernameForCreateDV))
+                // traces is {} when user hasn't left a trace
+                .body("data.traces", equalTo(Collections.emptyMap()));
+
+        Response createDataverse = UtilIT.createRandomDataverse(normalApiToken);
+        createDataverse.prettyPrint();
+        createDataverse.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse);
+
+        Response getTraces2 = UtilIT.getUserTraces(usernameForCreateDV, superuserApiToken);
+        getTraces2.prettyPrint();
+        getTraces2.then().assertThat().statusCode(OK.getStatusCode());
+
+        createUser = UtilIT.createRandomUser();
+        createUser.prettyPrint();
+        String 
usernameForAssignedRole = UtilIT.getUsernameFromResponse(createUser); + String roleApiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response assignRole = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.EDITOR.toString(), + "@" + usernameForAssignedRole, superuserApiToken); + + // Shouldn't be able to delete user with a role + Response deleteUserRole = UtilIT.deleteUser(usernameForAssignedRole); + + deleteUserRole.prettyPrint(); + deleteUserRole.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Could not delete Authenticated User @" + usernameForAssignedRole + " because the user is associated with role assignment record(s).")); + + // Now remove that role + Response removeRoles1 = UtilIT.deleteUserRoles(usernameForAssignedRole, superuserApiToken); + removeRoles1.prettyPrint(); + removeRoles1.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Roles removed for user " + usernameForAssignedRole + ".")); + + // Now the delete should work + Response deleteUserRole2 = UtilIT.deleteUser(usernameForAssignedRole); + deleteUserRole2.prettyPrint(); + deleteUserRole2.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("AuthenticatedUser @" + usernameForAssignedRole + " deleted. ")); + + // The owner of the dataverse that was just created is dataverseAdmin because it created the parent dataverse (root). + Response getTraces3 = UtilIT.getUserTraces(usernameForCreateDV, superuserApiToken); + getTraces3.prettyPrint(); + getTraces3.then().assertThat().statusCode(OK.getStatusCode()); + + // Removing roles here but could equally just delete the dataverse. + Response removeRoles2 = UtilIT.deleteUserRoles(usernameForCreateDV, superuserApiToken); + removeRoles2.prettyPrint(); + removeRoles2.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Shouldn't be able to delete a user who has created a DV + Response deleteUserCreateDV = UtilIT.deleteUser(usernameForCreateDV); + deleteUserCreateDV.prettyPrint(); + deleteUserCreateDV.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Could not delete Authenticated User @" + usernameForCreateDV + " because the user has created Dataverse object(s).")); + + Response deleteDataverse = UtilIT.deleteDataverse(dataverseAlias, superuserApiToken); + deleteDataverse.prettyPrint(); + deleteDataverse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Should be able to delete user after dv is deleted + Response deleteUserAfterDeleteDV = UtilIT.deleteUser(usernameForCreateDV); + deleteUserAfterDeleteDV.prettyPrint(); + deleteUserAfterDeleteDV.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response deleteSuperuser = UtilIT.deleteUser(superuserUsername); + deleteSuperuser.prettyPrint(); + assertEquals(200, deleteSuperuser.getStatusCode()); + + } + + @Test + public void testDeleteUserWithUnPublishedDataverse() { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response 
createDataverse = UtilIT.createRandomDataverse(apiToken); + createDataverse.prettyPrint(); + createDataverse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + + Response removeRoles1 = UtilIT.deleteUserRoles(username, superuserApiToken); + removeRoles1.prettyPrint(); + removeRoles1.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Roles removed for user " + username + ".")); + + Response deleteUser1 = UtilIT.deleteUser(username); + deleteUser1.prettyPrint(); + deleteUser1.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Could not delete Authenticated User @" + username + " because the user has created Dataverse object(s).")); + + Response traces = UtilIT.getUserTraces(username, superuserApiToken); + traces.prettyPrint(); + traces.then().assertThat().statusCode(OK.getStatusCode()); + + // You can't delete. You have to merge. + Response mergeAccounts = UtilIT.mergeAccounts(superuserUsername, username, superuserApiToken); + mergeAccounts.prettyPrint(); + mergeAccounts.then().assertThat().statusCode(OK.getStatusCode()); + } + + /** + * You can't delete an account with guestbook entries so you have to merge + * it instead. + */ + @Test + public void testDeleteUserWithGuestbookEntries() throws IOException { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String authorUsername = UtilIT.getUsernameFromResponse(createUser); + String authorApiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response downloader = UtilIT.createRandomUser(); + downloader.prettyPrint(); + String downloaderUsername = UtilIT.getUsernameFromResponse(downloader); + String downloaderApiToken = UtilIT.getApiTokenFromResponse(downloader); + + Response createDataverse = UtilIT.createRandomDataverse(authorApiToken); + createDataverse.prettyPrint(); + createDataverse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, authorApiToken); + createDataset.prettyPrint(); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId"); + + Path pathtoReadme = Paths.get(java.nio.file.Files.createTempDirectory(null) + File.separator + "README.md"); + java.nio.file.Files.write(pathtoReadme, "In the beginning...".getBytes()); + + Response uploadReadme = UtilIT.uploadFileViaNative(datasetId.toString(), pathtoReadme.toString(), authorApiToken); + uploadReadme.prettyPrint(); + uploadReadme.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.files[0].label", equalTo("README.md")); + + int fileId = JsonPath.from(uploadReadme.body().asString()).getInt("data.files[0].dataFile.id"); + + Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, authorApiToken); + 
publishDataverse.then().assertThat().statusCode(OK.getStatusCode());
+        Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPid, "major", authorApiToken);
+        publishDataset.then().assertThat().statusCode(OK.getStatusCode());
+        // This download creates a guestbook entry.
+        Response downloadFile = UtilIT.downloadFile(fileId, downloaderApiToken);
+        downloadFile.then().assertThat().statusCode(OK.getStatusCode());
+
+        // We can't delete the downloader because a guestbook record (a download) has been created.
+        Response deleteDownloaderFail = UtilIT.deleteUser(downloaderUsername);
+        deleteDownloaderFail.prettyPrint();
+        deleteDownloaderFail.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode());
+
+        // Let's see why we can't delete.
+        Response getTraces = UtilIT.getUserTraces(downloaderUsername, superuserApiToken);
+        getTraces.prettyPrint();
+        getTraces.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.traces.guestbookEntries.count", equalTo(1));
+
+        // We can't delete so we do a merge instead.
+        Response mergeAccounts = UtilIT.mergeAccounts(superuserUsername, downloaderUsername, superuserApiToken);
+        mergeAccounts.prettyPrint();
+        mergeAccounts.then().assertThat().statusCode(OK.getStatusCode());
+
+    }
+
+    @Test
+    public void testDatasetLocks() throws IOException {
+
+        Response createSuperuser = UtilIT.createRandomUser();
+        String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser);
+        String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser);
+        Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername);
+        toggleSuperuser.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        Response createUser = UtilIT.createRandomUser();
+        createUser.prettyPrint();
+        String authorUsername = UtilIT.getUsernameFromResponse(createUser);
+        String authorApiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response downloader = UtilIT.createRandomUser();
+        downloader.prettyPrint();
+        String downloaderUsername = UtilIT.getUsernameFromResponse(downloader);
+        String downloaderApiToken = UtilIT.getApiTokenFromResponse(downloader);
+
+        Response createDataverse = UtilIT.createRandomDataverse(authorApiToken);
+        createDataverse.prettyPrint();
+        createDataverse.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse);
+
+        Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, authorApiToken);
+        createDataset.prettyPrint();
+        createDataset.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+
+        Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset);
+        String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId");
+
+        Response lockDatasetResponse = UtilIT.lockDataset(datasetId.longValue(), "Ingest", superuserApiToken);
+        lockDatasetResponse.prettyPrint();
+        lockDatasetResponse.then().assertThat()
+                .body("data.message", equalTo("dataset locked with lock type Ingest"))
+                .statusCode(200);
+
+        Response checkDatasetLocks = UtilIT.checkDatasetLocks(datasetId.longValue(), "Ingest", superuserApiToken);
+        checkDatasetLocks.prettyPrint();
+        checkDatasetLocks.then().assertThat()
+                .body("data[0].lockType", equalTo("Ingest"))
+                .statusCode(200);
+        Response deleteUserWhoCreatedLock = UtilIT.deleteUser(superuserUsername);
+        deleteUserWhoCreatedLock.prettyPrint();
+        deleteUserWhoCreatedLock.then().assertThat()
+                .statusCode(OK.getStatusCode());
+    }
+
+    @Test
+    public void 
testDeleteUserWhoIsMemberOfGroup() throws IOException { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String authorUsername = UtilIT.getUsernameFromResponse(createUser); + String authorApiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response downloader = UtilIT.createRandomUser(); + downloader.prettyPrint(); + String downloaderUsername = UtilIT.getUsernameFromResponse(downloader); + String downloaderApiToken = UtilIT.getApiTokenFromResponse(downloader); + + Response createDataverse = UtilIT.createRandomDataverse(authorApiToken); + createDataverse.prettyPrint(); + createDataverse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + + Response createGroupMember = UtilIT.createRandomUser(); + createGroupMember.prettyPrint(); + String groupMemberUsername = UtilIT.getUsernameFromResponse(createGroupMember); + String groupMemberApiToken = UtilIT.getApiTokenFromResponse(createGroupMember); + + String aliasInOwner = "groupFor" + dataverseAlias; + String displayName = "Group for " + dataverseAlias; + String user2identifier = "@" + groupMemberUsername; + Response createGroup = UtilIT.createGroup(dataverseAlias, aliasInOwner, displayName, superuserApiToken); + createGroup.prettyPrint(); + createGroup.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + String groupIdentifier = JsonPath.from(createGroup.asString()).getString("data.identifier"); + + List roleAssigneesToAdd = new ArrayList<>(); + roleAssigneesToAdd.add(user2identifier); + Response addToGroup = UtilIT.addToGroup(dataverseAlias, aliasInOwner, roleAssigneesToAdd, superuserApiToken); + addToGroup.prettyPrint(); + addToGroup.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response getTraces = UtilIT.getUserTraces(groupMemberUsername, superuserApiToken); + getTraces.prettyPrint(); + getTraces.then().assertThat().statusCode(OK.getStatusCode()); + + Response deleteUserInGroup = UtilIT.deleteUser(groupMemberUsername); + deleteUserInGroup.prettyPrint(); + deleteUserInGroup.then().assertThat() + .statusCode(OK.getStatusCode()); + + } + + @Test + public void testDeleteUserWithFileAccessRequests() throws IOException { + + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String authorUsername = UtilIT.getUsernameFromResponse(createUser); + String authorApiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response fileRequester = UtilIT.createRandomUser(); + fileRequester.prettyPrint(); + String fileRequesterUsername = UtilIT.getUsernameFromResponse(fileRequester); + String fileRequesterApiToken = UtilIT.getApiTokenFromResponse(fileRequester); + + Response createDataverse = UtilIT.createRandomDataverse(authorApiToken); + createDataverse.prettyPrint(); + 
createDataverse.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse);
+
+        Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, authorApiToken);
+        createDataset.prettyPrint();
+        createDataset.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+
+        Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset);
+        String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId");
+
+        Path pathtoReadme = Paths.get(java.nio.file.Files.createTempDirectory(null) + File.separator + "README.md");
+        java.nio.file.Files.write(pathtoReadme, "In the beginning...".getBytes());
+
+        Response uploadReadme = UtilIT.uploadFileViaNative(datasetId.toString(), pathtoReadme.toString(), authorApiToken);
+        uploadReadme.prettyPrint();
+        uploadReadme.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.files[0].label", equalTo("README.md"));
+
+        Integer fileId = JsonPath.from(uploadReadme.body().asString()).getInt("data.files[0].dataFile.id");
+
+        Response restrictResponse = UtilIT.restrictFile(fileId.toString(), true, authorApiToken);
+        restrictResponse.prettyPrint();
+        restrictResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        //Update Dataset to allow requests
+        Response allowAccessRequestsResponse = UtilIT.allowAccessRequests(datasetPid, true, authorApiToken);
+        allowAccessRequestsResponse.prettyPrint();
+        allowAccessRequestsResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, authorApiToken);
+        publishDataverse.then().assertThat().statusCode(OK.getStatusCode());
+        Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPid, "major", authorApiToken);
+        publishDataset.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response requestFileAccessResponse = UtilIT.requestFileAccess(fileId.toString(), fileRequesterApiToken);
+        requestFileAccessResponse.prettyPrint();
+        requestFileAccessResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Let's see what traces the file access request left.
+        Response getTraces = UtilIT.getUserTraces(fileRequesterUsername, superuserApiToken);
+        getTraces.prettyPrint();
+        getTraces.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        // Even if users have outstanding file requests, they can be deleted. 
+ Response deleteDownloaderSuccess = UtilIT.deleteUser(fileRequesterUsername); + deleteDownloaderSuccess.prettyPrint(); + deleteDownloaderSuccess.then().assertThat() + .statusCode(OK.getStatusCode()); + } + + @Test + public void testCuratorSendsCommentsToAuthor() throws InterruptedException { + Response createSuperuser = UtilIT.createRandomUser(); + String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + Response toggleSuperuser = UtilIT.makeSuperUser(superuserUsername); + toggleSuperuser.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response createCurator1 = UtilIT.createRandomUser(); + createCurator1.prettyPrint(); + createCurator1.then().assertThat() + .statusCode(OK.getStatusCode()); + String curator1Username = UtilIT.getUsernameFromResponse(createCurator1); + String curator1ApiToken = UtilIT.getApiTokenFromResponse(createCurator1); + + Response createCurator2 = UtilIT.createRandomUser(); + createCurator2.prettyPrint(); + createCurator2.then().assertThat() + .statusCode(OK.getStatusCode()); + String curator2Username = UtilIT.getUsernameFromResponse(createCurator2); + String curator2ApiToken = UtilIT.getApiTokenFromResponse(createCurator2); + + Response createDataverseResponse = UtilIT.createRandomDataverse(curator1ApiToken); + createDataverseResponse.prettyPrint(); + createDataverseResponse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response makeCurator2Admin = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.ADMIN.toString(), "@" + curator2Username, curator1ApiToken); + makeCurator2Admin.prettyPrint(); + makeCurator2Admin.then().assertThat() + .body("data.assignee", equalTo("@" + curator2Username)) + .body("data._roleAlias", equalTo("admin")) + .statusCode(OK.getStatusCode()); + + Response createAuthor1 = UtilIT.createRandomUser(); + createAuthor1.prettyPrint(); + createAuthor1.then().assertThat() + .statusCode(OK.getStatusCode()); + String author1Username = UtilIT.getUsernameFromResponse(createAuthor1); + String author1ApiToken = UtilIT.getApiTokenFromResponse(createAuthor1); + + Response createAuthor2 = UtilIT.createRandomUser(); + createAuthor2.prettyPrint(); + createAuthor2.then().assertThat() + .statusCode(OK.getStatusCode()); + String author2Username = UtilIT.getUsernameFromResponse(createAuthor2); + String author2ApiToken = UtilIT.getApiTokenFromResponse(createAuthor2); + + Response grantAuthor1AddDataset = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.DS_CONTRIBUTOR.toString(), "@" + author1Username, curator1ApiToken); + grantAuthor1AddDataset.prettyPrint(); + grantAuthor1AddDataset.then().assertThat() + .body("data.assignee", equalTo("@" + author1Username)) + .body("data._roleAlias", equalTo("dsContributor")) + .statusCode(OK.getStatusCode()); + + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, author1ApiToken); + createDataset.prettyPrint(); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + + // FIXME: have the initial create return the DOI or Handle to obviate the need for this call. 
+ Response getDatasetJsonBeforePublishing = UtilIT.nativeGet(datasetId, author1ApiToken); + getDatasetJsonBeforePublishing.prettyPrint(); + String protocol = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.protocol"); + String authority = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.authority"); + String identifier = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.identifier"); + + String datasetPersistentId = protocol + ":" + authority + "/" + identifier; + System.out.println("datasetPersistentId: " + datasetPersistentId); + +// Response grantAuthor2ContributorOnDataset = UtilIT.grantRoleOnDataset(datasetPersistentId, DataverseRole.DS_CONTRIBUTOR.toString(), "@" + author2Username, curatorApiToken); + // TODO: Tighten this down to something more realistic than ADMIN. + Response grantAuthor2ContributorOnDataset = UtilIT.grantRoleOnDataset(datasetPersistentId, DataverseRole.ADMIN.toString(), "@" + author2Username, curator1ApiToken); + grantAuthor2ContributorOnDataset.prettyPrint(); + grantAuthor2ContributorOnDataset.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.assignee", equalTo("@" + author2Username)) + .body("data._roleAlias", equalTo("admin")); + +// // Whoops, the author tries to publish but isn't allowed. The curator will take a look. +// Response noPermToPublish = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", author1ApiToken); +// noPermToPublish.prettyPrint(); +// noPermToPublish.then().assertThat() +// .body("message", equalTo("User @" + author1Username + " is not permitted to perform requested action.")) +// .statusCode(UNAUTHORIZED.getStatusCode()); + Response submitForReview = UtilIT.submitDatasetForReview(datasetPersistentId, author2ApiToken); + submitForReview.prettyPrint(); + submitForReview.then().assertThat() + .statusCode(OK.getStatusCode()); + + // curator2 returns dataset to author. This makes curator2 a contributor. + String comments = "You forgot to upload any files."; + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("reasonForReturn", comments); + Response returnToAuthor = UtilIT.returnDatasetToAuthor(datasetPersistentId, jsonObjectBuilder.build(), curator2ApiToken); + returnToAuthor.prettyPrint(); + returnToAuthor.then().assertThat() + .body("data.inReview", equalTo(false)) + .statusCode(OK.getStatusCode()); + + Response getTracesForCurator2 = UtilIT.getUserTraces(curator2Username, superuserApiToken); + getTracesForCurator2.prettyPrint(); + getTracesForCurator2.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response removeRolesFromCurator2 = UtilIT.deleteUserRoles(curator2Username, superuserApiToken); + removeRolesFromCurator2.prettyPrint(); + removeRolesFromCurator2.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("Roles removed for user " + curator2Username + ".")); + + // Because curator2 returned the dataset to the authors, curator2 is now a contributor + // and cannot be deleted. + Response deleteCurator2Fail = UtilIT.deleteUser(curator2Username); + deleteCurator2Fail.prettyPrint(); + deleteCurator2Fail.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Could not delete Authenticated User @" + curator2Username + + " because the user has contributed to dataset version(s).")); + + // What should we do with curator2 instead of deleting? 
The only option is to merge
+        // curator2 into some other account. Once implemented, we'll deactivate curator2's account
+        // so that curator2 continues to be displayed as a contributor.
+        //
+        // TODO: deactivate curator2 here
+        //
+        // Show the error if you don't have permission.
+        Response failToRemoveRole = UtilIT.deleteUserRoles(author2Username, curator2ApiToken);
+        failToRemoveRole.prettyPrint();
+        failToRemoveRole.then().assertThat()
+                .statusCode(UNAUTHORIZED.getStatusCode())
+                .body("message", equalTo("User @" + curator2Username + " is not permitted to perform requested action."));
+
+        Response removeRolesFromAuthor2 = UtilIT.deleteUserRoles(author2Username, superuserApiToken);
+        removeRolesFromAuthor2.prettyPrint();
+        removeRolesFromAuthor2.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.message", equalTo("Roles removed for user " + author2Username + "."));
+
+        // Similarly, we can't delete author2 because author2 submitted
+        // the dataset for review, which makes one a contributor.
+        Response deleteAuthor2Fail = UtilIT.deleteUser(author2Username);
+        deleteAuthor2Fail.prettyPrint();
+        deleteAuthor2Fail.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode())
+                .body("message", equalTo("Could not delete Authenticated User @" + author2Username
+                        + " because the user has contributed to dataset version(s)."));
+
+    }
+
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index 8d4369fa85b..9fa06e28a0d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -241,16 +241,12 @@ public void testAddFileBadJson() {
 
         Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, junkJson, apiToken);
 
+        String parseError = BundleUtil.getStringFromBundle("file.addreplace.error.parsing");
+
         addResponse.then().assertThat()
-        .body("status", equalTo(AbstractApiBean.STATUS_OK))
-        .body("data.files[0].categories", nullValue())
-        .body("data.files[0].dataFile.contentType", equalTo("image/png"))
-        .body("data.files[0].dataFile.description", equalTo(""))
-        .body("data.files[0].dataFile.tabularTags", nullValue())
-        .body("data.files[0].label", equalTo("dataverseproject.png"))
-        // not sure why description appears in two places
-        .body("data.files[0].description", equalTo(""))
-        .statusCode(OK.getStatusCode());
+        .statusCode(BAD_REQUEST.getStatusCode())
+        .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+        .body("message", equalTo(parseError));
     }
 
     @Test
@@ -372,12 +368,25 @@ public void test_006_ReplaceFileGood() throws InterruptedException {
                 .add("categories", Json.createArrayBuilder()
                         .add("Data")
                 );
+
+        /*
+         * ToDo: When the dataset is still locked, the replaceFile call below returns an
+         * 'OK' status with an empty 'data' array. The sleepForLock avoids that, so this
+         * test exercises the normal replace functionality directly, but a new test to check
+         * that, when the dataset is locked, the call fails instead of returning OK
+         * would be useful (along with making the replace call do that)
+         */
+        /*
+         * ToDo: make sleep time shorter for this? Add sleepForLock before subsequent
+         * calls as well? (Or is it only needed here because it is still locked from the
+         * publish call above?) 
+         */
+
+        UtilIT.sleepForLock(datasetId, null, apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION);
 
         Response replaceResp = UtilIT.replaceFile(origFileId.toString(), pathToFile2, json.build(), apiToken);
 
         msgt(replaceResp.prettyPrint());
 
-        String successMsg2 = BundleUtil.getStringFromBundle("file.addreplace.success.replace");
-
         replaceResp.then().assertThat()
                 /**
                  * @todo We have a need to show human readable success messages
@@ -621,6 +630,9 @@ public void testForceReplaceAndUpdate() {
                 .add("categories", Json.createArrayBuilder()
                         .add("Data")
                 );
+
+        UtilIT.sleepForLock(datasetId, null, apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION);
+
         Response replaceResp = UtilIT.replaceFile(origFileId.toString(), pathToFile2, json.build(), apiToken);
 
         replaceResp.prettyPrint();
@@ -733,14 +745,11 @@ public void test_007_ReplaceFileUnpublishedAndBadIds() {
         String pathToFile2 = "src/main/webapp/resources/images/cc0.png";
         Response replaceResp = UtilIT.replaceFile(origFileId.toString(), pathToFile2, apiToken);
 
-        String errMsgUnpublished = BundleUtil.getStringFromBundle("file.addreplace.error.unpublished_file_cannot_be_replaced");
-
         replaceResp.then().assertThat()
-                .statusCode(BAD_REQUEST.getStatusCode())
-                .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
-                .body("message", Matchers.startsWith(errMsgUnpublished))
-                ;
-
+                .body("data.files[0].dataFile.contentType", equalTo("image/png"))
+                .body("data.files[0].label", equalTo("cc0.png"))
+                .statusCode(OK.getStatusCode());
+
         // -------------------------
         // Publish dataset
         // -------------------------
@@ -903,10 +912,11 @@ public void testReplaceFileBadJson() {
         Response replaceResp = UtilIT.replaceFile(origFileId.toString(), pathToFile2, jsonAsString, apiToken);
 
         msgt("replace resp: " + replaceResp.prettyPrint());
-
+        String parseError = BundleUtil.getStringFromBundle("file.addreplace.error.parsing");
         replaceResp.then().assertThat()
-                .statusCode(OK.getStatusCode())
-                .body("status", equalTo(AbstractApiBean.STATUS_OK));
+                .statusCode(BAD_REQUEST.getStatusCode())
+                .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+                .body("message", equalTo(parseError));
     }
 
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java
new file mode 100644
index 00000000000..d1e2ffb2426
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java
@@ -0,0 +1,101 @@
+
+package edu.harvard.iq.dataverse.api;
+
+import com.jayway.restassured.RestAssured;
+import com.jayway.restassured.path.json.JsonPath;
+import com.jayway.restassured.response.Response;
+import java.util.logging.Logger;
+import static junit.framework.Assert.assertEquals;
+import static org.hamcrest.CoreMatchers.equalTo;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ * @author skraffmi
+ */
+public class RolesIT {
+
+    private static final Logger logger = Logger.getLogger(RolesIT.class.getCanonicalName());
+
+    @BeforeClass
+    public static void setUp() {
+        RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
+    }
+
+    @Test
+    public void testCreateDeleteRoles() {
+
+        Response createUser = UtilIT.createRandomUser();
+        createUser.prettyPrint();
+
+        String username = UtilIT.getUsernameFromResponse(createUser);
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+        UtilIT.makeSuperUser(username);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.prettyPrint();
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        String
pathToJsonFile = "scripts/api/data/role-test-addRole.json";
+        Response addBuiltinRoleResponse = UtilIT.addBuiltInRole(pathToJsonFile);
+        addBuiltinRoleResponse.prettyPrint();
+        String body = addBuiltinRoleResponse.getBody().asString();
+        String status = JsonPath.from(body).getString("status");
+        assertEquals("OK", status);
+
+        // Try to delete via the non-admin API - should fail.
+
+        Response deleteBuiltinRoleResponseError = UtilIT.deleteDataverseRole("testRole", apiToken);
+        deleteBuiltinRoleResponseError.prettyPrint();
+        body = deleteBuiltinRoleResponseError.getBody().asString();
+        status = JsonPath.from(body).getString("status");
+        assertEquals("ERROR", status);
+
+        deleteBuiltinRoleResponseError.then().assertThat().body("message", equalTo("May not delete Built In Role Test Role."));
+
+
+        Response deleteBuiltinRoleResponseSucceed = UtilIT.deleteBuiltInRole("testRole");
+        deleteBuiltinRoleResponseSucceed.prettyPrint();
+        body = deleteBuiltinRoleResponseSucceed.getBody().asString();
+        status = JsonPath.from(body).getString("status");
+        assertEquals("OK", status);
+
+        // Add as a dataverse role.
+        Response addDataverseRoleResponse = UtilIT.addDataverseRole(pathToJsonFile, dataverseAlias, apiToken);
+        addDataverseRoleResponse.prettyPrint();
+        body = addDataverseRoleResponse.getBody().asString();
+        status = JsonPath.from(body).getString("status");
+        assertEquals("OK", status);
+
+        Response viewDataverseRoleResponse = UtilIT.viewDataverseRole("testRole", apiToken);
+        viewDataverseRoleResponse.prettyPrint();
+        body = viewDataverseRoleResponse.getBody().asString();
+        String idString = JsonPath.from(body).getString("data.id");
+
+        System.out.print("idString: " + idString);
+
+        Response deleteDataverseRoleResponseBadAlias = UtilIT.deleteDataverseRole("badAlias", apiToken);
+        deleteDataverseRoleResponseBadAlias.prettyPrint();
+        body = deleteDataverseRoleResponseBadAlias.getBody().asString();
+        status = JsonPath.from(body).getString("status");
+        assertEquals("ERROR", status);
+        deleteDataverseRoleResponseBadAlias.then().assertThat().body("message", equalTo("Dataverse Role with alias badAlias not found."));
+
+        Long idBad = Long.parseLong(idString) + 10;
+        Response deleteDataverseRoleResponseBadId = UtilIT.deleteDataverseRoleById(idBad.toString(), apiToken);
+        deleteDataverseRoleResponseBadId.prettyPrint();
+        body = deleteDataverseRoleResponseBadId.getBody().asString();
+        status = JsonPath.from(body).getString("status");
+        assertEquals("ERROR", status);
+        deleteDataverseRoleResponseBadId.then().assertThat().body("message", equalTo("Dataverse Role with ID " + idBad.toString() + " not found."));
+
+        Response deleteDataverseRoleResponseSucceed = UtilIT.deleteDataverseRoleById(idString, apiToken);
+        deleteDataverseRoleResponseSucceed.prettyPrint();
+        body = deleteDataverseRoleResponseSucceed.getBody().asString();
+        status = JsonPath.from(body).getString("status");
+        assertEquals("OK", status);
+
+    }
+
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index c5f4da033d1..c7f8986f73a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -977,12 +977,38 @@ static public Response grantRoleOnDataverse(String definitionPoint, String role,
                 .post("api/dataverses/" + definitionPoint + "/assignments?key=" + apiToken);
     }
 
+    public static Response deactivateUser(String username) {
+        Response deactivateUserResponse = given()
+                .post("/api/admin/authenticatedUsers/" +
username + "/deactivate"); + return deactivateUserResponse; + } + + public static Response deactivateUser(Long userId) { + Response deactivateUserResponse = given() + .post("/api/admin/authenticatedUsers/id/" + userId + "/deactivate"); + return deactivateUserResponse; + } + public static Response deleteUser(String username) { Response deleteUserResponse = given() .delete("/api/admin/authenticatedUsers/" + username + "/"); return deleteUserResponse; } + public static Response deleteUserRoles(String username, String apiToken) { + Response deleteUserResponse = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .post("/api/users/" + username + "/removeRoles"); + return deleteUserResponse; + } + + public static Response getUserTraces(String username, String apiToken) { + Response response = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/users/" + username + "/traces"); + return response; + } + public static Response reingestFile(Long fileId, String apiToken) { Response response = given() .header(API_TOKEN_HTTP_HEADER, apiToken) @@ -1201,6 +1227,7 @@ static Response listAuthenticatedUsers(String apiToken) { return response; } + // TODO: Consider removing apiToken since it isn't used by the API itself. static Response getAuthenticatedUser(String userIdentifier, String apiToken) { Response response = given() .header(API_TOKEN_HTTP_HEADER, apiToken) @@ -2232,7 +2259,7 @@ static Boolean sleepForLock(String idOrPersistentId, String lockType, String api } catch (InterruptedException ex) { Logger.getLogger(UtilIT.class.getName()).log(Level.SEVERE, null, ex); } - } while (lockedForIngest.body().prettyPrint().contains(lockType)); + } while (lockedForIngest.body().jsonPath().getList("data").size() >0 && (lockType==null || lockedForIngest.body().prettyPrint().contains(lockType))); return i <= duration; @@ -2495,6 +2522,66 @@ static Response addBannerMessage(String pathToJsonFile) { return addBannerMessageResponse; } + static Response addBuiltInRole(String pathToJsonFile) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response addBannerMessageResponse = given() + .body(jsonIn) + .contentType("application/json") + .post("/api/admin/roles"); + return addBannerMessageResponse; + } + + static Response deleteBuiltInRole(String roleAlias) { + + Response addBannerMessageResponse = given() + .delete("/api/admin/roles/:alias?alias=" +roleAlias); + return addBannerMessageResponse; + } + + static Response addDataverseRole(String pathToJsonFile, String dvAlias, String apiToken) { + String jsonIn = getDatasetJson(pathToJsonFile); + + Response addBannerMessageResponse = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonIn) + .contentType("application/json") + .post("/api/roles?dvo="+dvAlias); + return addBannerMessageResponse; + } + + static Response deleteDataverseRole( String roleAlias, String apiToken) { + + Response addBannerMessageResponse = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .delete("/api/roles/:alias?alias="+roleAlias); + return addBannerMessageResponse; + } + + static Response deleteDataverseRoleById( String id, String apiToken) { + + Response addBannerMessageResponse = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .delete("/api/roles/"+id); + return addBannerMessageResponse; + } + + static Response viewDataverseRole( String roleAlias, String apiToken) { + + Response addBannerMessageResponse = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/roles/:alias?alias="+roleAlias); + return addBannerMessageResponse; + } + + static Response 
viewDataverseRoleById(String id, String apiToken) {
+
+        Response viewDataverseRoleByIdResponse = given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/roles/" + id);
+        return viewDataverseRoleByIdResponse;
+    }
+
     static Response getBannerMessages() {
 
         Response getBannerMessagesResponse = given()
diff --git a/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java
index a57f73bd7a7..32f6a487994 100644
--- a/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java
@@ -1,32 +1,65 @@
 package edu.harvard.iq.dataverse.branding;
 
+import edu.harvard.iq.dataverse.DataverseServiceBean;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.UnsupportedEncodingException;
 import java.util.Arrays;
 import javax.mail.internet.AddressException;
 import javax.mail.internet.InternetAddress;
 import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import org.junit.jupiter.api.Order;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
+import org.junit.jupiter.api.MethodOrderer.OrderAnnotation;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+@ExtendWith(MockitoExtension.class)
+@TestMethodOrder(OrderAnnotation.class)
 public class BrandingUtilTest {
 
+    @Mock
+    DataverseServiceBean dataverseSvc;
+    @Mock
+    SettingsServiceBean settingsSvc;
+
     @Test
+    @Order(1)
     public void testGetInstallationBrandName() {
         System.out.println("testGetInstallationBrandName");
-        assertEquals("LibraScholar", BrandingUtil.getInstallationBrandName("LibraScholar"));
-        assertEquals(null, BrandingUtil.getInstallationBrandName(null));// misconfiguration to set to null
-        assertEquals("", BrandingUtil.getInstallationBrandName(""));// misconfiguration to set to empty string
+
+        Mockito.when(settingsSvc.getValueForKey(SettingsServiceBean.Key.InstallationName)).thenReturn(null);
+
+        //And configure the mock DataverseService to pretend the root collection name is as shown
+        Mockito.when(dataverseSvc.getRootDataverseName()).thenReturn("LibraScholar");
+        BrandingUtil.injectServices(dataverseSvc, settingsSvc);
+
+        assertEquals("LibraScholar", BrandingUtil.getInstallationBrandName()); //Defaults to root collection name
+
+        Mockito.when(settingsSvc.getValueForKey(SettingsServiceBean.Key.InstallationName)).thenReturn("NotLibraScholar");
+
+        assertEquals("NotLibraScholar", BrandingUtil.getInstallationBrandName()); //uses setting
     }
 
     @Test
     public void testGetSupportTeamName() throws AddressException, UnsupportedEncodingException {
         System.out.println("testGetSupportTeamName");
-        assertEquals("Support", BrandingUtil.getSupportTeamName(null, null));
-        assertEquals("Support", BrandingUtil.getSupportTeamName(null, ""));
-        assertEquals("LibraScholar Support", BrandingUtil.getSupportTeamName(null, "LibraScholar"));
-        assertEquals("LibraScholar Support", BrandingUtil.getSupportTeamName(new InternetAddress("support@librascholar.edu"), "LibraScholar"));
-        assertEquals("LibraScholar Support Team", BrandingUtil.getSupportTeamName(new InternetAddress("support@librascholar.edu", "LibraScholar Support Team"), "LibraScholar"));
-        assertEquals("", BrandingUtil.getSupportTeamName(new InternetAddress("support@librascholar.edu", ""), "LibraScholar")); // misconfiguration to set to empty
string
+        Mockito.when(dataverseSvc.getRootDataverseName()).thenReturn(null);
+        BrandingUtil.injectServices(dataverseSvc, settingsSvc);
+        assertEquals("Support", BrandingUtil.getSupportTeamName(null));
+        Mockito.when(dataverseSvc.getRootDataverseName()).thenReturn("");
+        BrandingUtil.injectServices(dataverseSvc, settingsSvc);
+        assertEquals("Support", BrandingUtil.getSupportTeamName(null));
+        Mockito.when(dataverseSvc.getRootDataverseName()).thenReturn("LibraScholar");
+        BrandingUtil.injectServices(dataverseSvc, settingsSvc);
+        assertEquals("LibraScholar Support", BrandingUtil.getSupportTeamName(null));
+        assertEquals("LibraScholar Support", BrandingUtil.getSupportTeamName(new InternetAddress("support@librascholar.edu")));
+        assertEquals("LibraScholar Support Team", BrandingUtil.getSupportTeamName(new InternetAddress("support@librascholar.edu", "LibraScholar Support Team")));
+        assertEquals("", BrandingUtil.getSupportTeamName(new InternetAddress("support@librascholar.edu", ""))); // misconfiguration to set to empty string
     }
 
     @Test
@@ -103,7 +136,9 @@ public void testEmailSubject() {
     @Test
     public void testGetContactHeader() {
         System.out.println("testGetContactHeader");
-        assertEquals("Contact Support", BrandingUtil.getContactHeader(null, null));
+        Mockito.when(dataverseSvc.getRootDataverseName()).thenReturn(null);
+        BrandingUtil.injectServices(dataverseSvc, settingsSvc);
+        assertEquals("Contact Support", BrandingUtil.getContactHeader(null));
     }
 
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java
index ea39bb1bc77..243285e69ab 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java
@@ -9,13 +9,19 @@ import edu.harvard.iq.dataverse.authorization.users.GuestUser;
 import edu.harvard.iq.dataverse.engine.TestCommandContext;
 import edu.harvard.iq.dataverse.engine.TestDataverseEngine;
+import edu.harvard.iq.dataverse.engine.TestEntityManager;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
+import javax.persistence.EntityManager;
+import javax.persistence.TypedQuery;
 import static org.junit.Assert.assertTrue;
 import org.junit.Before;
 import org.junit.Test;
+import org.mockito.Matchers;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 /**
  *
@@ -36,6 +42,12 @@ public DataverseRole save(DataverseRole aRole) {
             }
         };
     }
+
+    @Override
+    public EntityManager em() {
+        return new LocalTestEntityManager();
+
+    }
 });
 
 @Before
@@ -94,4 +106,33 @@ public void testGuestUsersCantAddRoles() throws CommandException {
         engine.submit(sut);
     }
 
+    private class LocalTestEntityManager extends TestEntityManager {
+
+        @Override
+        public <T> T merge(T entity) {
+            return entity;
+        }
+
+        @Override
+        public void persist(Object entity) {
+            //
+        }
+
+        @Override
+        public void flush() {
+            //nothing to do here
+        }
+
+        @Override
+        public <T> TypedQuery<T> createNamedQuery(String name, Class<T> resultClass) {
+            //Mocking a query to return no results when
+            //checking for existing role in DB
+            TypedQuery mockedQuery = mock(TypedQuery.class);
+            when(mockedQuery.setParameter(Matchers.anyString(),
Matchers.anyObject())).thenReturn(mockedQuery); + when(mockedQuery.getSingleResult()).thenReturn(null); + return mockedQuery; + } + + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java index e70e375ace9..3a5ef60a0e4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java @@ -2,7 +2,6 @@ import edu.harvard.iq.dataverse.ControlledVocabularyValue; import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.DatasetFieldType; import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; import edu.harvard.iq.dataverse.mocks.MockDatasetFieldSvc; @@ -17,9 +16,7 @@ import java.nio.file.Paths; import java.time.Year; import java.util.Arrays; -import java.util.HashMap; import java.util.HashSet; -import java.util.Map; import java.util.Set; import javax.json.Json; import javax.json.JsonObject; @@ -145,12 +142,11 @@ public void testCitation() throws Exception { version.setVersionState(DatasetVersion.VersionState.DRAFT); Dataset dataset = new Dataset(); version.setDataset(dataset); - Dataverse dataverse = new Dataverse(); - dataset.setOwner(dataverse); + dataset.setOwner(new Dataverse()); String citation = version.getCitation(); System.out.println("citation: " + citation); int currentYear = Year.now().getValue(); - assertEquals("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", DRAFT VERSION", citation); + assertEquals("Finch, Fiona, " + currentYear + ", \"Darwin's Finches\", LibraScholar, DRAFT VERSION", citation); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java index a76ce8475f2..dce34385274 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java @@ -1,30 +1,42 @@ package edu.harvard.iq.dataverse.export.ddi; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.xml.XmlPrinter; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; -import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Paths; import java.util.logging.Logger; import edu.harvard.iq.dataverse.util.xml.html.HtmlPrinter; -import org.junit.Test; -import javax.json.JsonObject; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; +import org.junit.jupiter.api.MethodOrderer.OrderAnnotation; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; import static org.junit.Assert.*; +@ExtendWith(MockitoExtension.class) +@TestMethodOrder(OrderAnnotation.class) public class DdiExportUtilTest { private static final Logger logger = Logger.getLogger(DdiExportUtilTest.class.getCanonicalName()); + @Mock + SettingsServiceBean settingsSvc; + @Test + @Order(1) public void testJson2DdiNoFiles() throws Exception { + Mockito.when(settingsSvc.isTrueForKey(SettingsServiceBean.Key.ExportInstallationAsDistributorOnlyWhenNotSet, false)).thenReturn(false); + DdiExportUtil.injectSettingsService(settingsSvc); 
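+        // The injectSettingsService call above is the pattern this patch uses to
+        // make static utility classes (DdiExportUtil, BrandingUtil, JsonPrinter)
+        // unit-testable without a CDI container: the application injects the real
+        // service bean once, while tests hand in a Mockito mock. A minimal sketch
+        // of the receiving side, assuming a static holder field and a hypothetical
+        // distributorOnly helper (the real DdiExportUtil may differ in detail):
+        //
+        //     private static SettingsServiceBean settingsService;
+        //
+        //     public static void injectSettingsService(SettingsServiceBean svc) {
+        //         settingsService = svc;
+        //     }
+        //
+        //     private static boolean distributorOnly() {
+        //         return settingsService != null && settingsService.isTrueForKey(
+        //                 SettingsServiceBean.Key.ExportInstallationAsDistributorOnlyWhenNotSet, false);
+        //     }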
File datasetVersionJson = new File("src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json");
         String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath())));
 
         File ddiFile = new File("src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml");
diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSourceTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSourceTest.java
index c886ef0ba69..9ceca24aadf 100644
--- a/src/test/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSourceTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/settings/source/DbSettingConfigSourceTest.java
@@ -36,7 +36,7 @@ void testEmptyIfNoSettingsService() {
     @Test
     @Order(2)
     void testDataRetrieval() {
-        Set<Setting> settings = new HashSet<>(Arrays.asList(new Setting("FooBar", "hello"), new Setting("FooBarI18N", "de", "hallo")));
+        Set<Setting> settings = new HashSet<>(Arrays.asList(new Setting(":FooBar", "hello"), new Setting(":FooBarI18N", "de", "hallo")));
 
         Mockito.when(settingsSvc.listAll()).thenReturn(settings);
         DbSettingConfigSource.injectSettingsService(settingsSvc);
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java
index af0c414d356..4363d1dd3c9 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java
@@ -1,19 +1,36 @@
 package edu.harvard.iq.dataverse.util;
 
+import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.UserNotification;
+import edu.harvard.iq.dataverse.branding.BrandingUtil;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+
 import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import org.junit.jupiter.api.Order;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
+import org.junit.jupiter.api.MethodOrderer.OrderAnnotation;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.junit.jupiter.MockitoExtension;
 import org.junit.Before;
 
+@ExtendWith(MockitoExtension.class)
+@TestMethodOrder(OrderAnnotation.class)
 public class MailUtilTest {
 
-    private String rootDataverseName;
     UserNotification userNotification = new UserNotification();
 
+    @Mock
+    DataverseServiceBean dataverseSvc;
+    @Mock
+    SettingsServiceBean settingsSvc;
+
     @Before
     public void setUp() {
-        rootDataverseName = "LibraScholar";
         userNotification = new UserNotification();
+
     }
 
     @Test
@@ -31,93 +48,99 @@ public void testParseSystemAddress() {
     }
 
     @Test
+    @Order(1)
     public void testSubjectCreateAccount() {
+        Mockito.when(settingsSvc.getValueForKey(SettingsServiceBean.Key.InstallationName)).thenReturn(null);
+        //And configure the mock DataverseService to pretend the root collection name is as shown
+        Mockito.when(dataverseSvc.getRootDataverseName()).thenReturn("LibraScholar");
+        BrandingUtil.injectServices(dataverseSvc, settingsSvc);
+
         userNotification.setType(UserNotification.Type.CREATEACC);
-        assertEquals("LibraScholar: Your account has been created", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null));
+        assertEquals("LibraScholar: Your account has been created", MailUtil.getSubjectTextBasedOnNotification(userNotification, null));
     }
 
     @Test
     public void testSubjectAssignRole() {
         userNotification.setType(UserNotification.Type.ASSIGNROLE);
-
assertEquals("LibraScholar: You have been assigned a role", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: You have been assigned a role", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectCreateDataverse() { userNotification.setType(UserNotification.Type.CREATEDV); - assertEquals("LibraScholar: Your dataverse has been created", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your dataverse has been created", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectRevokeRole() { userNotification.setType(UserNotification.Type.REVOKEROLE); - assertEquals("LibraScholar: Your role has been revoked", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your role has been revoked", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectRequestFileAccess() { userNotification.setType(UserNotification.Type.REQUESTFILEACCESS); - assertEquals("LibraScholar: Access has been requested for a restricted file", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Access has been requested for a restricted file", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectGrantFileAccess() { userNotification.setType(UserNotification.Type.GRANTFILEACCESS); - assertEquals("LibraScholar: You have been granted access to a restricted file", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: You have been granted access to a restricted file", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectRejectFileAccess() { userNotification.setType(UserNotification.Type.REJECTFILEACCESS); - assertEquals("LibraScholar: Your request for access to a restricted file has been rejected", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your request for access to a restricted file has been rejected", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectCreateDataset() { userNotification.setType(UserNotification.Type.CREATEDS); - assertEquals("LibraScholar: Your dataset has been created", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your dataset has been created", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectSubmittedDS() { userNotification.setType(UserNotification.Type.SUBMITTEDDS); - assertEquals("LibraScholar: Your dataset has been submitted for review", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your dataset has been submitted for review", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectPublishedDS() { userNotification.setType(UserNotification.Type.PUBLISHEDDS); - assertEquals("LibraScholar: Your dataset has been published", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your dataset has been published", 
MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectReturnedDS() { userNotification.setType(UserNotification.Type.RETURNEDDS); - assertEquals("LibraScholar: Your dataset has been returned", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your dataset has been returned", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectChecksumFail() { userNotification.setType(UserNotification.Type.CHECKSUMFAIL); - assertEquals("LibraScholar: Your upload failed checksum validation", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your upload failed checksum validation", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectFileSystemImport() { userNotification.setType(UserNotification.Type.FILESYSTEMIMPORT); //TODO SEK add a dataset version to get the Dataset Title which is actually used in the subject now - assertEquals("Dataset LibraScholar has been successfully uploaded and verified", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName , null)); + assertEquals("Dataset LibraScholar has been successfully uploaded and verified", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectChecksumImport() { userNotification.setType(UserNotification.Type.CHECKSUMIMPORT); - assertEquals("LibraScholar: Your file checksum job has completed", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Your file checksum job has completed", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } @Test public void testSubjectConfirmEmail() { userNotification.setType(UserNotification.Type.CONFIRMEMAIL); - assertEquals("LibraScholar: Verify your email address", MailUtil.getSubjectTextBasedOnNotification(userNotification, rootDataverseName, null)); + assertEquals("LibraScholar: Verify your email address", MailUtil.getSubjectTextBasedOnNotification(userNotification, null)); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java index f8f0fdc7554..32f7b86a9c9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java @@ -201,7 +201,7 @@ public void testDatasetContactOutOfBoxNoPrivacy() { fields.add(datasetContactField); SettingsServiceBean nullServiceBean = null; - JsonPrinter.setSettingsService(nullServiceBean); + JsonPrinter.injectSettingsService(nullServiceBean); JsonObject jsonObject = JsonPrinter.json(block, fields).build(); assertNotNull(jsonObject); @@ -242,7 +242,7 @@ public void testDatasetContactWithPrivacy() { datasetContactField.setDatasetFieldCompoundValues(vals); fields.add(datasetContactField); - JsonPrinter.setSettingsService(new MockSettingsSvc()); + JsonPrinter.injectSettingsService(new MockSettingsSvc()); JsonObject jsonObject = JsonPrinter.json(block, fields).build(); assertNotNull(jsonObject); diff --git a/tests/jenkins/ec2/Jenkinsfile b/tests/jenkins/ec2/Jenkinsfile index 4a16f865886..7c35da867c7 100644 --- a/tests/jenkins/ec2/Jenkinsfile +++ b/tests/jenkins/ec2/Jenkinsfile @@ -24,9 +24,9 @@ pipeline { env.EC2_REPO = env.GIT_URL } } - sh '/usr/bin/curl -O 
https://raw.githubusercontent.com/GlobalDataverseCommunityConsortium/dataverse-ansible/master/ec2/ec2-create-instance.sh' + sh '/usr/bin/curl -O https://raw.githubusercontent.com/GlobalDataverseCommunityConsortium/dataverse-ansible/develop/ec2/ec2-create-instance.sh' sh '/bin/rm -f groupvars.yml' - sh '/usr/bin/curl -o groupvars.yml https://raw.githubusercontent.com/GlobalDataverseCommunityConsortium/dataverse-ansible/master/tests/group_vars/jenkins.yml' + sh '/usr/bin/curl -o groupvars.yml https://raw.githubusercontent.com/GlobalDataverseCommunityConsortium/dataverse-ansible/develop/tests/group_vars/jenkins.yml' sh '/usr/bin/bash ec2-create-instance.sh -b ${CHANGE_BRANCH} -r ${EC2_REPO} -t jenkins_delete_me -l target -g groupvars.yml -s t3a.large -d' } } From ad48ad711049f99d17b3494fab3d923016bfc799 Mon Sep 17 00:00:00 2001 From: chenganj Date: Mon, 19 Apr 2021 16:58:39 -0400 Subject: [PATCH 0117/1551] cleanup : removed redundant code from Phase 1 --- .../edu/harvard/iq/dataverse/DatasetPage.java | 4 - .../iq/dataverse/DatasetServiceBean.java | 55 ++- .../harvard/iq/dataverse/api/Datasets.java | 326 +----------- .../harvard/iq/dataverse/api/GlobusApi.java | 464 ------------------ .../datasetutility/AddReplaceFileHelper.java | 18 +- .../harvard/iq/dataverse/globus/FileG.java | 67 --- .../iq/dataverse/globus/FilesList.java | 60 --- .../dataverse/globus/GlobusServiceBean.java | 264 ---------- .../iq/dataverse/globus/Identities.java | 16 - .../harvard/iq/dataverse/globus/Identity.java | 67 --- .../harvard/iq/dataverse/globus/MkDir.java | 22 - .../iq/dataverse/globus/MkDirResponse.java | 50 -- .../dataverse/globus/PermissionsResponse.java | 58 --- .../dataverse/globus/SuccessfulTransfer.java | 35 -- .../edu/harvard/iq/dataverse/globus/Task.java | 69 --- .../harvard/iq/dataverse/globus/Tasklist.java | 17 - .../iq/dataverse/globus/Transferlist.java | 18 - .../harvard/iq/dataverse/globus/UserInfo.java | 68 --- 18 files changed, 46 insertions(+), 1632 deletions(-) delete mode 100644 src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/FileG.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/FilesList.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Identities.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Identity.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/MkDir.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/MkDirResponse.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/PermissionsResponse.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/SuccessfulTransfer.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Task.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Tasklist.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Transferlist.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/UserInfo.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 522fe65cea8..5030f4ffeca 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -1,6 +1,5 @@ package edu.harvard.iq.dataverse; -import edu.harvard.iq.dataverse.globus.GlobusServiceBean; import edu.harvard.iq.dataverse.provenance.ProvPopupFragmentBean; import 
edu.harvard.iq.dataverse.api.AbstractApiBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; @@ -234,9 +233,6 @@ public enum DisplayMode { @Inject MakeDataCountLoggingServiceBean mdcLogService; @Inject DataverseHeaderFragment dataverseHeaderFragment; - @Inject - protected GlobusServiceBean globusService; - private Dataset dataset = new Dataset(); private Long id = null; diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 319e6ac1c10..8b715788172 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -1067,13 +1067,12 @@ public void deleteHarvestedDataset(Dataset dataset, DataverseRequest request, Lo @Asynchronous public void globusUpload(String jsonData, ApiToken token, Dataset dataset, String httpRequestUrl, User authUser) throws ExecutionException, InterruptedException, MalformedURLException { + Integer countAll = 0; + Integer countSuccess = 0; + Integer countError = 0; String logTimestamp = logFormatter.format(new Date()); Logger globusLogger = Logger.getLogger("edu.harvard.iq.dataverse.upload.client.DatasetServiceBean." + "GlobusUpload" + logTimestamp); - - - //Logger.getLogger(DatasetServiceBean.class.getCanonicalName()); - //Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.DatasetServiceBean." + "ExportAll" + logTimestamp); - String logFileName = "../logs" + File.separator + "globusUpload" + dataset.getId()+"_"+authUser.getIdentifier()+"_"+ logTimestamp + ".log"; + String logFileName = "../logs" + File.separator + "globusUpload_id_" + dataset.getId() + "_" + logTimestamp + ".log"; FileHandler fileHandler; boolean fileHandlerSuceeded; try { @@ -1131,28 +1130,31 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin for (JsonObject fileJsonObject : filesJsonArray.getValuesAs(JsonObject.class)) { - // storageIdentifier s3://gcs5-bucket1:1781cfeb8a7-748c270a227c from victoria + // storageIdentifier s3://gcs5-bucket1:1781cfeb8a7-748c270a227c from externalTool String storageIdentifier = fileJsonObject.getString("storageIdentifier"); - String fileName = fileJsonObject.getString("fileName"); String[] bits = storageIdentifier.split(":"); - String fileId = bits[bits.length-1]; String bucketName = bits[1].replace("/", ""); + String fileId = bits[bits.length-1]; // fullpath s3://gcs5-bucket1/10.5072/FK2/3S6G2E/1781cfeb8a7-4ad9418a5873 String fullPath = storageType + bucketName + "/" + datasetIdentifier +"/" +fileId ; + String fileName = fileJsonObject.getString("fileName"); inputList.add(fileId + "IDsplit" + fullPath + "IDsplit" + fileName); } - // calculate checksum, mimetype + // calculateMissingMetadataFields: checksum, mimetype JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList,globusLogger); JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files"); - JsonArrayBuilder jsonSecondAPI = Json.createArrayBuilder() ; + JsonArrayBuilder jsonDataSecondAPI = Json.createArrayBuilder() ; for (JsonObject fileJsonObject : filesJsonArray.getValuesAs(JsonObject.class)) { + countAll++; String storageIdentifier = fileJsonObject.getString("storageIdentifier"); + String fileName = fileJsonObject.getString("fileName"); + String directoryLabel = fileJsonObject.getString("directoryLabel"); String[] bits = storageIdentifier.split(":"); String fileId = bits[bits.length-1]; @@ -1165,13 +1167,18 @@ public void globusUpload(String 
jsonData, ApiToken token, Dataset dataset, Strin fileJsonObject = path.apply(fileJsonObject); path = Json.createPatchBuilder().add("/mimeType", newfileJsonObject.get(0).getString("mime")).build(); fileJsonObject = path.apply(fileJsonObject); - jsonSecondAPI.add(stringToJsonObjectBuilder(fileJsonObject.toString())); + jsonDataSecondAPI.add(stringToJsonObjectBuilder(fileJsonObject.toString())); + countSuccess++; + } + else { + globusLogger.info(fileName + " will be skipped from adding to dataset by second API due to missing values "); + countError++; } } - String newjsonData = jsonSecondAPI.build().toString(); + String newjsonData = jsonDataSecondAPI.build().toString(); - globusLogger.info("Generated new JsonData with calculated values"); + globusLogger.info("Successfully generated new JsonData for Second API call"); String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST "+httpRequestUrl+"/api/datasets/:persistentId/addFiles?persistentId=doi:" + datasetIdentifier + " -F jsonData='" + newjsonData + "'"; @@ -1180,7 +1187,8 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin String output = addFilesAsync(command , globusLogger ) ; if(output.equalsIgnoreCase("ok")) { - userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.GLOBUSUPLOADSUCCESS, dataset.getId()); + userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.GLOBUSUPLOADSUCCESS, dataset.getId(),""); + globusLogger.info("Successfully completed api/datasets/:persistentId/addFiles call "); } else @@ -1190,6 +1198,11 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin } + globusLogger.info("Files processed: " + countAll.toString()); + globusLogger.info("Files added successfully: " + countSuccess.toString()); + globusLogger.info("Files failures: " + countError.toString()); + globusLogger.info("Finished upload via Globus job."); + if (fileHandlerSuceeded) { fileHandler.close(); } @@ -1310,10 +1323,14 @@ private fileDetailsHolder calculateDetails(String id, Logger globusLogger) throw } while (count < 3); - - String mimeType = calculatemime(fileName); - globusLogger.info(" File Name " + fileName + " File Details " + fileId + " checksum = "+ checksumVal + " mimeType = " + mimeType); - return new fileDetailsHolder(fileId, checksumVal,mimeType); + if(checksumVal.length() > 0 ) { + String mimeType = calculatemime(fileName); + globusLogger.info(" File Name " + fileName + " File Details " + fileId + " checksum = " + checksumVal + " mimeType = " + mimeType); + return new fileDetailsHolder(fileId, checksumVal, mimeType); + } + else { + return null; + } //getBytes(in)+"" ); // calculatemime(fileName)); } @@ -1402,7 +1419,7 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro String logTimestamp = logFormatter.format(new Date()); Logger globusLogger = Logger.getLogger("edu.harvard.iq.dataverse.upload.client.DatasetServiceBean." 
+ "GlobusDownload" + logTimestamp); - String logFileName = "../logs" + File.separator + "globusDownload_" + dataset.getId()+"_"+authUser.getIdentifier()+"_"+logTimestamp + ".log"; + String logFileName = "../logs" + File.separator + "globusDownload_id_" + dataset.getId() + "_" + logTimestamp + ".log"; FileHandler fileHandler; boolean fileHandlerSuceeded; try { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index ca6425fc732..f56674cb351 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2444,315 +2444,6 @@ public Response getTimestamps(@PathParam("identifier") String id) { } - @POST - @Path("{id}/addglobusFilesBkup") - @Consumes(MediaType.MULTIPART_FORM_DATA) - public Response addGlobusFileToDatasetBkup(@PathParam("id") String datasetId, - @FormDataParam("jsonData") String jsonData - ) { - JsonArrayBuilder jarr = Json.createArrayBuilder(); - - if (!systemConfig.isHTTPUpload()) { - return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); - } - - // ------------------------------------- - // (1) Get the user from the API key - // ------------------------------------- - User authUser; - try { - authUser = findUserOrDie(); - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth") - ); - } - - // ------------------------------------- - // (2) Get the Dataset Id - // ------------------------------------- - Dataset dataset; - - try { - dataset = findDatasetOrDie(datasetId); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - - //------------------------------------ - // (2a) Add lock to the dataset page - // -------------------------------------- - - String lockInfoMessage = "Globus Upload API is running "; - DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload, - ((AuthenticatedUser) authUser).getId(), lockInfoMessage); - if (lock != null) { - dataset.addLock(lock); - } else { - logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId()); - } - - //------------------------------------ - // (2b) Make sure dataset does not have package file - // -------------------------------------- - - for (DatasetVersion dv : dataset.getVersions()) { - if (dv.isHasPackageFile()) { - return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") - ); - } - } - - - // ------------------------------------- - // (3) Parse JsonData - // ------------------------------------- - - String taskIdentifier = null; - - msgt("******* (api) jsonData 1: " + jsonData.toString()); - - JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(jsonData)) { - jsonObject = Json.createReader(rdr).readObject(); - } catch (Exception jpe) { - jpe.printStackTrace(); - logger.log(Level.SEVERE, "Error parsing dataset json. 
Json: {0}"); - } - - // ------------------------------------- - // (4) Get taskIdentifier - // ------------------------------------- - - taskIdentifier = jsonObject.getString("taskIdentifier"); - - // ------------------------------------- - // (5) Wait until task completion - // ------------------------------------- - - boolean success = false; - boolean globustype = true; - - do { - try { - String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); - AccessToken clientTokenUser = globusServiceBean.getClientToken(); - - success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskIdentifier); - - } catch (Exception ex) { - ex.printStackTrace(); - logger.info(ex.getMessage()); - return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to get task id"); - } - - } while (!success); - - - try { - StorageIO datasetSIO = DataAccess.getStorageIO(dataset); - - List cachedObjectsTags = datasetSIO.listAuxObjects(); - - DataverseRequest dvRequest = createDataverseRequest(authUser); - AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper( - dvRequest, - ingestService, - datasetService, - fileService, - permissionSvc, - commandEngine, - systemConfig - ); - - // ------------------------------------- - // (6) Parse files information from jsondata - // calculate checksum - // determine mimetype - // ------------------------------------- - - JsonArray filesJson = jsonObject.getJsonArray("files"); - - int totalNumberofFiles = 0; - int successNumberofFiles = 0; - try { - // Start to add the files - if (filesJson != null) { - totalNumberofFiles = filesJson.getValuesAs(JsonObject.class).size(); - for (JsonObject fileJson : filesJson.getValuesAs(JsonObject.class)) { - - String storageIdentifier = fileJson.getString("storageIdentifier"); //"s3://176ce6992af-208dea3661bb50" - //String suppliedContentType = fileJson.getString("contentType"); - String fileName = fileJson.getString("fileName"); - - String fullPath = datasetSIO.getStorageLocation() + "/" + storageIdentifier.replace("s3://", ""); - - String bucketName = System.getProperty("dataverse.files." + storageIdentifier.split(":")[0] + ".bucket-name"); - - String dbstorageIdentifier = storageIdentifier.split(":")[0] + "://" + bucketName + ":" + storageIdentifier.replace("s3://", ""); - - // the storageidentifier should be unique - Query query = em.createQuery("select object(o) from DvObject as o where o.storageIdentifier = :storageIdentifier"); - query.setParameter("storageIdentifier", dbstorageIdentifier); - - if (query.getResultList().size() > 0) { - JsonObjectBuilder fileoutput = Json.createObjectBuilder() - .add("storageIdentifier", storageIdentifier) - .add("message", " The datatable is not updated since the Storage Identifier already exists in dvObject. 
"); - - jarr.add(fileoutput); - } else { - - // calculate mimeType - String finalType = FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT; - - String type = FileUtil.determineFileTypeByExtension(fileName); - - if (!StringUtils.isBlank(type)) { - finalType = type; - } - - JsonPatch path = Json.createPatchBuilder().add("/mimeType", finalType).build(); - fileJson = path.apply(fileJson); - - int count = 0; - // calculate md5 checksum - do { - try { - - StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); - InputStream in = dataFileStorageIO.getInputStream(); - String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); - - path = Json.createPatchBuilder().add("/md5Hash", checksumVal).build(); - fileJson = path.apply(fileJson); - count = 3; - } catch (Exception ex) { - count = count + 1; - ex.printStackTrace(); - logger.info(ex.getMessage()); - Thread.sleep(5000); - msgt(" ***** Try to calculate checksum again for " + fileName); - //error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to calculate checksum"); - } - - } while (count < 3); - - //--------------------------------------- - // Load up optional params via JSON - //--------------------------------------- - - OptionalFileParams optionalFileParams = null; - - try { - optionalFileParams = new OptionalFileParams(fileJson.toString()); - } catch (DataFileTagException ex) { - return error(Response.Status.BAD_REQUEST, ex.getMessage()); - } - - msg("ADD!"); - - //------------------- - // Run "runAddFileByDatasetId" - //------------------- - addFileHelper.runAddFileByDataset(dataset, - fileName, - finalType, - storageIdentifier, - null, - optionalFileParams, - true); - - - if (addFileHelper.hasError()) { - - JsonObjectBuilder fileoutput = Json.createObjectBuilder() - .add("storageIdentifier ", storageIdentifier) - .add("error Code: ", addFileHelper.getHttpErrorCode().toString()) - .add("message ", addFileHelper.getErrorMessagesAsString("\n")); - - jarr.add(fileoutput); - - } else { - String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add"); - - JsonObject successresult = addFileHelper.getSuccessResultAsJsonObjectBuilder().build(); - - try { - logger.fine("successMsg: " + successMsg); - String duplicateWarning = addFileHelper.getDuplicateFileWarning(); - if (duplicateWarning != null && !duplicateWarning.isEmpty()) { - // return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder()); - JsonObjectBuilder fileoutput = Json.createObjectBuilder() - .add("storageIdentifier ", storageIdentifier) - .add("warning message: ", addFileHelper.getDuplicateFileWarning()) - .add("message ", successresult.getJsonArray("files").getJsonObject(0)); - jarr.add(fileoutput); - - } else { - JsonObjectBuilder fileoutput = Json.createObjectBuilder() - .add("storageIdentifier ", storageIdentifier) - .add("message ", successresult.getJsonArray("files").getJsonObject(0)); - jarr.add(fileoutput); - } - - } catch (Exception ex) { - Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); - return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!"); - } - } - } - successNumberofFiles = successNumberofFiles + 1; - } - }// End of adding files - } catch (Exception e) { - Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, e); - return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! 
See administrator!"); - } - - logger.log(Level.INFO, "Total Number of Files " + totalNumberofFiles); - logger.log(Level.INFO, "Success Number of Files " + successNumberofFiles); - DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.GlobusUpload); - if (dcmLock == null) { - logger.log(Level.WARNING, "Dataset not locked for Globus upload"); - } else { - logger.log(Level.INFO, "Dataset remove locked for Globus upload"); - datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.GlobusUpload); - //dataset.removeLock(dcmLock); - } - - try { - Command cmd; - cmd = new UpdateDatasetVersionCommand(dataset, dvRequest); - ((UpdateDatasetVersionCommand) cmd).setValidateLenient(true); - commandEngine.submit(cmd); - } catch (CommandException ex) { - logger.log(Level.WARNING, "==== datasetId :" + dataset.getId() + "====== UpdateDatasetVersionCommand Exception : " + ex.getMessage()); - } - - dataset = datasetService.find(dataset.getId()); - - List s = dataset.getFiles(); - for (DataFile dataFile : s) { - logger.info(" ******** TEST the datafile id is = " + dataFile.getId() + " = " + dataFile.getDisplayName()); - } - - msg("******* pre ingest start in globus API"); - - ingestService.startIngestJobsForDataset(dataset, (AuthenticatedUser) authUser); - - msg("******* post ingest start in globus API"); - - } catch (Exception e) { - String message = e.getMessage(); - msgt("******* datasetId :" + dataset.getId() + " ======= GLOBUS CALL Exception ============== " + message); - e.printStackTrace(); - } - - return ok(Json.createObjectBuilder().add("Files", jarr)); - - } - - @POST @Path("{id}/addglobusFiles") @Consumes(MediaType.MULTIPART_FORM_DATA) @@ -2818,21 +2509,6 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId, logger.info(" ==== (api uriInfo.getRequestUri()) jsonData ====== " + uriInfo.getRequestUri().toString()); } - //logger.info(" ==== (api uriInfo.getRequestUri()) jsonData ====== " + headers.getRequestHeaders() - - MultivaluedMap multivaluedMap = headers.getRequestHeaders(); - - Map result = new HashMap<>(); - multivaluedMap.forEach((name, values) -> { - if (!CollectionUtils.isEmpty(values)) { - result.put(name, (values.size() != 1) ? 
values : values.get(0)); - logger.info(" headers ==== " + name + " ==== "+ values ); - } - }); - - logger.info(" ==== headers.getRequestHeader(origin) ====== " + headers.getRequestHeader("origin") ); - logger.info(" ==== headers.getRequestHeader(referer) ====== " + headers.getRequestHeader("referer") ); - String requestUrl = headers.getRequestHeader("origin").get(0); @@ -3054,7 +2730,7 @@ public Response addFilesToDataset(@PathParam("id") String idSupplied, } catch (Exception e) { String message = e.getMessage(); - msgt("******* datasetId :" + dataset.getId() + " ======= GLOBUS CALL Exception ============== " + message); + msgt("******* datasetId :" + dataset.getId() + " ======= addFilesToDataset CALL Exception ============== " + message); e.printStackTrace(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java b/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java deleted file mode 100644 index 39c1a13842a..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/api/GlobusApi.java +++ /dev/null @@ -1,464 +0,0 @@ -package edu.harvard.iq.dataverse.api; - -import com.amazonaws.services.s3.model.S3ObjectSummary; -import edu.harvard.iq.dataverse.DatasetServiceBean; -import edu.harvard.iq.dataverse.DataverseRequestServiceBean; -import edu.harvard.iq.dataverse.EjbDataverseEngine; -import edu.harvard.iq.dataverse.PermissionServiceBean; -import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.authorization.users.ApiToken; -import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.dataaccess.DataAccess; -import edu.harvard.iq.dataverse.*; - -import edu.harvard.iq.dataverse.dataaccess.StorageIO; -import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper; -import edu.harvard.iq.dataverse.datasetutility.DataFileTagException; -import edu.harvard.iq.dataverse.datasetutility.NoFilesException; -import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams; -import edu.harvard.iq.dataverse.engine.command.Command; -import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; -import edu.harvard.iq.dataverse.globus.AccessToken; -import edu.harvard.iq.dataverse.globus.GlobusServiceBean; -import edu.harvard.iq.dataverse.ingest.IngestServiceBean; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import edu.harvard.iq.dataverse.util.BundleUtil; -import edu.harvard.iq.dataverse.util.FileUtil; -import edu.harvard.iq.dataverse.util.json.JsonParseException; -import edu.harvard.iq.dataverse.util.json.JsonPrinter; -import org.apache.commons.lang.StringUtils; -import org.apache.http.HttpEntity; -import org.apache.http.HttpResponse; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.entity.StringEntity; -import org.apache.http.entity.mime.MultipartEntityBuilder; -import org.apache.http.entity.mime.content.ContentBody; -import org.apache.http.util.EntityUtils; -import org.glassfish.jersey.media.multipart.FormDataBodyPart; -import org.glassfish.jersey.media.multipart.FormDataContentDisposition; -import org.glassfish.jersey.media.multipart.FormDataParam; -import org.json.JSONObject; - - -import javax.ejb.EJB; -import javax.ejb.EJBException; -import javax.ejb.Stateless; -import 
javax.inject.Inject; -import javax.json.*; -import javax.json.stream.JsonParsingException; -import javax.persistence.NoResultException; -import javax.persistence.Query; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.*; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.StringReader; -import java.sql.Timestamp; -import java.text.SimpleDateFormat; -import java.util.*; -import java.util.concurrent.TimeUnit; -import java.util.logging.Level; -import java.util.logging.Logger; - - -import edu.harvard.iq.dataverse.api.Datasets; - -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; -import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; - -@Stateless -@Path("globus") -public class GlobusApi extends AbstractApiBean { - private static final Logger logger = Logger.getLogger(Access.class.getCanonicalName()); - - @EJB - DatasetServiceBean datasetService; - - @EJB - GlobusServiceBean globusServiceBean; - - @EJB - EjbDataverseEngine commandEngine; - - @EJB - PermissionServiceBean permissionService; - - @EJB - IngestServiceBean ingestService; - - - @Inject - DataverseRequestServiceBean dvRequestService; - - - @POST - @Path("{id}/add") - @Consumes(MediaType.MULTIPART_FORM_DATA) - public Response globus(@PathParam("id") String datasetId, - @FormDataParam("jsonData") String jsonData - ) - { - JsonArrayBuilder jarr = Json.createArrayBuilder(); - - // ------------------------------------- - // (1) Get the user from the API key - // ------------------------------------- - User authUser; - try { - authUser = findUserOrDie(); - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, - BundleUtil.getStringFromBundle("file.addreplace.error.auth") - ); - } - - // ------------------------------------- - // (2) Get the Dataset Id - // ------------------------------------- - Dataset dataset; - - try { - dataset = findDatasetOrDie(datasetId); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - - - // ------------------------------------- - // (3) Parse JsonData - // ------------------------------------- - - String taskIdentifier = null; - - msgt("******* (api) jsonData 1: " + jsonData); - - JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(jsonData)) { - jsonObject = Json.createReader(rdr).readObject(); - } catch (Exception jpe) { - jpe.printStackTrace(); - logger.log(Level.SEVERE, "Error parsing dataset json. 
Json: {0}"); - } - - // ------------------------------------- - // (4) Get taskIdentifier - // ------------------------------------- - - - taskIdentifier = jsonObject.getString("taskIdentifier"); - msgt("******* (api) newTaskIdentifier: " + taskIdentifier); - - // ------------------------------------- - // (5) Wait until task completion - // ------------------------------------- - - boolean success = false; - - do { - try { - String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); - basicGlobusToken = "ODA0ODBhNzEtODA5ZC00ZTJhLWExNmQtY2JkMzA1NTk0ZDdhOmQvM3NFd1BVUGY0V20ra2hkSkF3NTZMWFJPaFZSTVhnRmR3TU5qM2Q3TjA9"; - msgt("******* (api) basicGlobusToken: " + basicGlobusToken); - AccessToken clientTokenUser = globusServiceBean.getClientToken(); - - success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskIdentifier); - msgt("******* (api) success: " + success); - - } catch (Exception ex) { - ex.printStackTrace(); - logger.info(ex.getMessage()); - return error(Response.Status.INTERNAL_SERVER_ERROR, "Failed to get task id"); - } - - } while (!success); - - - try - { - StorageIO datasetSIO = DataAccess.getStorageIO(dataset); - - DataverseRequest dvRequest2 = createDataverseRequest(authUser); - AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2, - ingestService, - datasetService, - fileService, - permissionSvc, - commandEngine, - systemConfig); - - // ------------------------------------- - // (6) Parse files information from jsondata - // calculate checksum - // determine mimetype - // ------------------------------------- - - JsonArray filesJson = jsonObject.getJsonArray("files"); - - if (filesJson != null) { - for (JsonObject fileJson : filesJson.getValuesAs(JsonObject.class)) { -/* - for (S3ObjectSummary s3ObjectSummary : datasetSIO.listAuxObjects("")) { - - } - */ - - - String storageIdentifier = fileJson.getString("storageIdentifier"); - String suppliedContentType = fileJson.getString("contentType"); - String fileName = fileJson.getString("fileName"); - - String fullPath = datasetSIO.getStorageLocation() + "/" + storageIdentifier.replace("s3://", ""); - - String bucketName = System.getProperty("dataverse.files." + storageIdentifier.split(":")[0] + ".bucket-name"); - - String dbstorageIdentifier = storageIdentifier.split(":")[0] + "://" + bucketName + ":" + storageIdentifier.replace("s3://", ""); - - Query query = em.createQuery("select object(o) from DvObject as o where o.storageIdentifier = :storageIdentifier"); - query.setParameter("storageIdentifier", dbstorageIdentifier); - - msgt("******* dbstorageIdentifier :" + dbstorageIdentifier + " ======= query.getResultList().size()============== " + query.getResultList().size()); - - - if (query.getResultList().size() > 0) { - - JsonObjectBuilder fileoutput= Json.createObjectBuilder() - .add("storageIdentifier " , storageIdentifier) - .add("message " , " The datatable is not updated since the Storage Identifier already exists in dvObject. "); - - jarr.add(fileoutput); - } else { - - // Default to suppliedContentType if set or the overall undetermined default if a contenttype isn't supplied - String finalType = StringUtils.isBlank(suppliedContentType) ? 
FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; - String type = FileUtil.determineFileTypeByExtension(fileName); - if (!StringUtils.isBlank(type)) { - //Use rules for deciding when to trust browser supplied type - //if (FileUtil.useRecognizedType(finalType, type)) - { - finalType = type; - } - logger.info("Supplied type: " + suppliedContentType + ", finalType: " + finalType); - } - - JsonPatch path = Json.createPatchBuilder().add("/mimeType", finalType).build(); - fileJson = path.apply(fileJson); - - StorageIO dataFileStorageIO = DataAccess.getDirectStorageIO(fullPath); - InputStream in = dataFileStorageIO.getInputStream(); - String checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); - - path = Json.createPatchBuilder().add("/md5Hash", checksumVal).build(); - fileJson = path.apply(fileJson); - - //addGlobusFileToDataset(dataset, fileJson.toString(), addFileHelper, fileName, finalType, storageIdentifier); - - - if (!systemConfig.isHTTPUpload()) { - return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); - } - - - //------------------------------------ - // (1) Make sure dataset does not have package file - // -------------------------------------- - - for (DatasetVersion dv : dataset.getVersions()) { - if (dv.isHasPackageFile()) { - return error(Response.Status.FORBIDDEN, - BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") - ); - } - } - - //--------------------------------------- - // (2) Load up optional params via JSON - //--------------------------------------- - - OptionalFileParams optionalFileParams = null; - msgt("(api) jsonData 2: " + fileJson.toString()); - - try { - optionalFileParams = new OptionalFileParams(fileJson.toString()); - } catch (DataFileTagException ex) { - return error( Response.Status.BAD_REQUEST, ex.getMessage()); - } - - - //------------------- - // (3) Create the AddReplaceFileHelper object - //------------------- - msg("ADD!"); - - //------------------- - // (4) Run "runAddFileByDatasetId" - //------------------- - addFileHelper.runAddFileByDataset(dataset, - fileName, - finalType, - storageIdentifier, - null, - optionalFileParams); - - - if (addFileHelper.hasError()){ - - JsonObjectBuilder fileoutput= Json.createObjectBuilder() - .add("storageIdentifier " , storageIdentifier) - .add("error Code: " ,addFileHelper.getHttpErrorCode().toString()) - .add("message " , addFileHelper.getErrorMessagesAsString("\n")); - - jarr.add(fileoutput); - - }else{ - String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add"); - - JsonObject a1 = addFileHelper.getSuccessResultAsJsonObjectBuilder().build(); - - JsonArray f1 = a1.getJsonArray("files"); - JsonObject file1 = f1.getJsonObject(0); - - try { - //msgt("as String: " + addFileHelper.getSuccessResult()); - - logger.fine("successMsg: " + successMsg); - String duplicateWarning = addFileHelper.getDuplicateFileWarning(); - if (duplicateWarning != null && !duplicateWarning.isEmpty()) { - // return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder()); - JsonObjectBuilder fileoutput= Json.createObjectBuilder() - .add("storageIdentifier " , storageIdentifier) - .add("warning message: " ,addFileHelper.getDuplicateFileWarning()) - .add("message " , file1); - jarr.add(fileoutput); - - } else { - JsonObjectBuilder fileoutput= Json.createObjectBuilder() - .add("storageIdentifier " , storageIdentifier) - .add("message " , file1); - jarr.add(fileoutput); - } - - //"Look at 
that! You added a file! (hey hey, it may have worked)"); - } catch (Exception ex) { - Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); - return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!"); - } - } - } - } - } - } catch (Exception e) { - String message = e.getMessage(); - msgt("******* Exception from globus API call " + message); - msgt("******* datasetId :" + dataset.getId() + " ======= GLOBUS CALL Exception ============== " + message); - e.printStackTrace(); - } - return ok(Json.createObjectBuilder().add("Files", jarr)); - - } - - - - private void msg(String m) { - //System.out.println(m); - logger.info(m); - } - - private void dashes() { - msg("----------------"); - } - - private void msgt(String m) { - //dashes(); - msg(m); - //dashes(); - } - - public Response addGlobusFileToDataset( Dataset dataset, - String jsonData, AddReplaceFileHelper addFileHelper,String fileName, - String finalType, - String storageIdentifier - ){ - - - if (!systemConfig.isHTTPUpload()) { - return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); - } - - - //------------------------------------ - // (1) Make sure dataset does not have package file - // -------------------------------------- - - for (DatasetVersion dv : dataset.getVersions()) { - if (dv.isHasPackageFile()) { - return error(Response.Status.FORBIDDEN, - BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") - ); - } - } - - //--------------------------------------- - // (2) Load up optional params via JSON - //--------------------------------------- - - OptionalFileParams optionalFileParams = null; - msgt("(api) jsonData 2: " + jsonData); - - try { - optionalFileParams = new OptionalFileParams(jsonData); - } catch (DataFileTagException ex) { - return error( Response.Status.BAD_REQUEST, ex.getMessage()); - } - - - //------------------- - // (3) Create the AddReplaceFileHelper object - //------------------- - msg("ADD!"); - - //------------------- - // (4) Run "runAddFileByDatasetId" - //------------------- - addFileHelper.runAddFileByDataset(dataset, - fileName, - finalType, - storageIdentifier, - null, - optionalFileParams); - - - if (addFileHelper.hasError()){ - return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n")); - }else{ - String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add"); - try { - //msgt("as String: " + addFileHelper.getSuccessResult()); - - logger.fine("successMsg: " + successMsg); - String duplicateWarning = addFileHelper.getDuplicateFileWarning(); - if (duplicateWarning != null && !duplicateWarning.isEmpty()) { - return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder()); - } else { - return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder()); - } - - //"Look at that! You added a file! (hey hey, it may have worked)"); - } catch (NoFilesException ex) { - Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); - return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! 
See administrator!"); - - } - } - - - - } // end: addFileToDataset - -} diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index f668d8a9a81..6747427d18e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -100,7 +100,7 @@ public class AddReplaceFileHelper{ public static String FILE_ADD_OPERATION = "FILE_ADD_OPERATION"; public static String FILE_REPLACE_OPERATION = "FILE_REPLACE_OPERATION"; public static String FILE_REPLACE_FORCE_OPERATION = "FILE_REPLACE_FORCE_OPERATION"; - public static String GLOBUSFILE_ADD_OPERATION = "GLOBUSFILE_ADD_OPERATION"; + public static String MULTIPLEFILES_ADD_OPERATION = "MULTIPLEFILES_ADD_OPERATION"; private String currentOperation; @@ -326,14 +326,14 @@ public boolean runAddFileByDataset(Dataset chosenDataset, String newStorageIdentifier, InputStream newFileInputStream, OptionalFileParams optionalFileParams, - boolean globustype) { + boolean multipleFiles) { msgt(">> runAddFileByDatasetId"); initErrorHandling(); - if(globustype) { - this.currentOperation = GLOBUSFILE_ADD_OPERATION; + if(multipleFiles) { + this.currentOperation = MULTIPLEFILES_ADD_OPERATION; } else { this.currentOperation = FILE_ADD_OPERATION; @@ -747,7 +747,7 @@ private boolean runAddReplacePhase2(){ }else{ msgt("step_070_run_update_dataset_command"); - if (!this.isGlobusFileAddOperation()) { + if (!this.isMultipleFilesAddOperation()) { if (!this.step_070_run_update_dataset_command()){ return false; } @@ -813,14 +813,14 @@ public boolean isFileAddOperation(){ return this.currentOperation.equals(FILE_ADD_OPERATION); } /** - * Is this a file add operation via Globus? + * Is this a multiple files add operation ? * * @return */ - public boolean isGlobusFileAddOperation(){ + public boolean isMultipleFilesAddOperation(){ - return this.currentOperation.equals(GLOBUSFILE_ADD_OPERATION); + return this.currentOperation.equals(MULTIPLEFILES_ADD_OPERATION); } /** @@ -1902,7 +1902,7 @@ private boolean step_100_startIngestJobs(){ msg("pre ingest start"); // start the ingest! 
// - if (!this.isGlobusFileAddOperation()) { + if (!this.isMultipleFilesAddOperation()) { ingestService.startIngestJobsForDataset(dataset, dvRequest.getAuthenticatedUser()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/FileG.java b/src/main/java/edu/harvard/iq/dataverse/globus/FileG.java deleted file mode 100644 index bd6a4b3b881..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/globus/FileG.java +++ /dev/null @@ -1,67 +0,0 @@ -package edu.harvard.iq.dataverse.globus; - -public class FileG { - private String DATA_TYPE; - private String group; - private String name; - private String permissions; - private String size; - private String type; - private String user; - - public String getDATA_TYPE() { - return DATA_TYPE; - } - - public String getGroup() { - return group; - } - - public String getName() { - return name; - } - - public String getPermissions() { - return permissions; - } - - public String getSize() { - return size; - } - - public String getType() { - return type; - } - - public String getUser() { - return user; - } - - public void setDATA_TYPE(String DATA_TYPE) { - this.DATA_TYPE = DATA_TYPE; - } - - public void setGroup(String group) { - this.group = group; - } - - public void setName(String name) { - this.name = name; - } - - public void setPermissions(String permissions) { - this.permissions = permissions; - } - - public void setSize(String size) { - this.size = size; - } - - public void setType(String type) { - this.type = type; - } - - public void setUser(String user) { - this.user = user; - } -} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/FilesList.java b/src/main/java/edu/harvard/iq/dataverse/globus/FilesList.java deleted file mode 100644 index 777e37f9b80..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/globus/FilesList.java +++ /dev/null @@ -1,60 +0,0 @@ -package edu.harvard.iq.dataverse.globus; - -import java.util.ArrayList; - -public class FilesList { - private ArrayList DATA; - private String DATA_TYPE; - private String absolute_path; - private String endpoint; - private String length; - private String path; - - public String getEndpoint() { - return endpoint; - } - - public ArrayList getDATA() { - return DATA; - } - - public String getAbsolute_path() { - return absolute_path; - } - - public String getDATA_TYPE() { - return DATA_TYPE; - } - - public String getLength() { - return length; - } - - public String getPath() { - return path; - } - - public void setLength(String length) { - this.length = length; - } - - public void setEndpoint(String endpoint) { - this.endpoint = endpoint; - } - - public void setDATA(ArrayList DATA) { - this.DATA = DATA; - } - - public void setAbsolute_path(String absolute_path) { - this.absolute_path = absolute_path; - } - - public void setDATA_TYPE(String DATA_TYPE) { - this.DATA_TYPE = DATA_TYPE; - } - - public void setPath(String path) { - this.path = path; - } -} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 2230d5bfcaf..a59a2ca77c1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -101,136 +101,6 @@ public void setUserTransferToken(String userTransferToken) { this.userTransferToken = userTransferToken; } - public void onLoad() { - logger.info("Start Globus " + code); - logger.info("State " + state); - - String globusEndpoint = 
settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); - String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); - if (globusEndpoint.equals("") || basicGlobusToken.equals("")) { - JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); - return; - } - String datasetId = state; - logger.info("DatasetId = " + datasetId); - - String directory = getDirectory(datasetId); - if (directory == null) { - logger.severe("Cannot find directory"); - JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); - return; - } - HttpServletRequest origRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest(); - - logger.info(origRequest.getScheme()); - logger.info(origRequest.getServerName()); - - if (code != null ) { - - try { - AccessToken accessTokenUser = getAccessToken(origRequest, basicGlobusToken); - if (accessTokenUser == null) { - logger.severe("Cannot get access user token for code " + code); - JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); - return; - } else { - setUserTransferToken(accessTokenUser.getOtherTokens().get(0).getAccessToken()); - } - - UserInfo usr = getUserInfo(accessTokenUser); - if (usr == null) { - logger.severe("Cannot get user info for " + accessTokenUser.getAccessToken()); - JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); - return; - } - logger.info(accessTokenUser.getAccessToken()); - logger.info(usr.getEmail()); - //AccessToken clientTokenUser = getClientToken(basicGlobusToken); - AccessToken clientTokenUser = getClientToken(); - if (clientTokenUser == null) { - logger.severe("Cannot get client token "); - JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); - return; - } - logger.info(clientTokenUser.getAccessToken()); - - int status = createDirectory(clientTokenUser, directory, globusEndpoint); - if (status == 202) { - int perStatus = givePermission("identity", usr.getSub(), "rw", clientTokenUser, directory, globusEndpoint); - if (perStatus != 201 && perStatus != 200) { - logger.severe("Cannot get permissions "); - JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); - return; - } - } else if (status == 502) { //directory already exists - int perStatus = givePermission("identity", usr.getSub(), "rw", clientTokenUser, directory, globusEndpoint); - if (perStatus == 409) { - logger.info("permissions already exist"); - } else if (perStatus != 201 && perStatus != 200) { - logger.severe("Cannot get permissions "); - JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); - return; - } - } else { - logger.severe("Cannot create directory, status code " + status); - JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); - return; - } - // ProcessBuilder processBuilder = new ProcessBuilder(); - // AuthenticatedUser user = (AuthenticatedUser) session.getUser(); - // ApiToken token = authSvc.findApiTokenByUser(user); - // String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST https://" + origRequest.getServerName() + "/api/globus/" + datasetId; - // logger.info("====command ==== " + command); - // processBuilder.command("bash", "-c", command); - // logger.info("=== Start process"); - // Process process = processBuilder.start(); - // logger.info("=== Going globus"); - 
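(The two redirect helpers that survive this hunk, goGlobusUpload and goGlobusDownload just below, build the Globus file-manager URL by plain string concatenation before handing it to window.location.replace. A hedged sketch of the same URL construction with explicit encoding, in case a dataset directory ever contains characters that are not URL-safe; the method name is illustrative and this is not part of the patch:

// Illustrative only; mirrors the URL shape used by goGlobusUpload/goGlobusDownload.
private static String fileManagerUrl(String endpointId, String path, boolean download) {
    String idParam = download ? "origin_id" : "destination_id";
    String pathParam = download ? "origin_path" : "destination_path";
    return "https://app.globus.org/file-manager?" + idParam + "=" + endpointId
            + "&" + pathParam
            + "=" + java.net.URLEncoder.encode(path, java.nio.charset.StandardCharsets.UTF_8);
}
)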
goGlobusUpload(directory, globusEndpoint); - logger.info("=== Finished globus"); - - - } catch (MalformedURLException ex) { - logger.severe(ex.getMessage()); - logger.severe(ex.getCause().toString()); - JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); - } catch (UnsupportedEncodingException ex) { - logger.severe(ex.getMessage()); - logger.severe(ex.getCause().toString()); - JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); - } catch (IOException ex) { - logger.severe(ex.getMessage()); - logger.severe(ex.getCause().toString()); - JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.GlobusError")); - } - - } - - } - - private void goGlobusUpload(String directory, String globusEndpoint ) { - - String httpString = "window.location.replace('" + "https://app.globus.org/file-manager?destination_id=" + globusEndpoint + "&destination_path=" + directory + "'" +")"; - PrimeFaces.current().executeScript(httpString); - } - - public void goGlobusDownload(String datasetId) { - - String directory = getDirectory(datasetId); - String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); - String httpString = "window.location.replace('" + "https://app.globus.org/file-manager?origin_id=" + globusEndpoint + "&origin_path=" + directory + "'" +")"; - PrimeFaces.current().executeScript(httpString); - } -/* - public void removeGlobusPermission() throws MalformedURLException { - //taskId and ruleId - String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); - AccessToken clientTokenUser = getClientToken(basicGlobusToken); - String directory = getDirectory( dataset.getId()+"" ); - updatePermision(clientTokenUser, directory, "identity", "r"); - } - - */ - ArrayList checkPermisions( AccessToken clientTokenUser, String directory, String globusEndpoint, String principalType, String principal) throws MalformedURLException { URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access_list"); MakeRequestResponse result = makeRequest(url, "Bearer", @@ -348,125 +218,6 @@ public int givePermission(String principalType, String principal, String perm, A return result.status; } - private int createDirectory(AccessToken clientTokenUser, String directory, String globusEndpoint) throws MalformedURLException { - URL url = new URL("https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + globusEndpoint + "/mkdir"); - - MkDir mkDir = new MkDir(); - mkDir.setDataType("mkdir"); - mkDir.setPath(directory); - Gson gson = new GsonBuilder().create(); - - MakeRequestResponse result = makeRequest(url, "Bearer", - clientTokenUser.getOtherTokens().get(0).getAccessToken(),"POST", gson.toJson(mkDir)); - logger.info(result.toString()); - - if (result.status == 502) { - logger.warning("Cannot create directory " + mkDir.getPath() + ", it already exists"); - } else if (result.status == 403) { - logger.severe("Cannot create directory " + mkDir.getPath() + ", permission denied"); - } else if (result.status == 202) { - logger.info("Directory created " + mkDir.getPath()); - } - - return result.status; - - } - - public String getTaskList(String basicGlobusToken, String identifierForFileStorage, String timeWhenAsyncStarted) throws MalformedURLException { - try - { - logger.info("1.getTaskList ====== timeWhenAsyncStarted = " + timeWhenAsyncStarted + " ====== identifierForFileStorage ====== " + identifierForFileStorage); - - String 
globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, ""); - //AccessToken clientTokenUser = getClientToken(basicGlobusToken); - AccessToken clientTokenUser = getClientToken( ); - - URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task_list?filter_endpoint="+globusEndpoint+"&filter_status=SUCCEEDED&filter_completion_time="+timeWhenAsyncStarted); - - //AccessToken accessTokenUser - //accessTokenUser.getOtherTokens().get(0).getAccessToken() - MakeRequestResponse result = makeRequest(url, "Bearer", clientTokenUser.getOtherTokens().get(0).getAccessToken(),"GET", null); - //logger.info("==TEST ==" + result.toString()); - - - - //2019-12-01 18:34:37+00:00 - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - //SimpleDateFormat task_sdf = new SimpleDateFormat("yyyy-MM-ddTHH:mm:ss"); - - Calendar cal1 = Calendar.getInstance(); - cal1.setTime(sdf.parse(timeWhenAsyncStarted)); - - Calendar cal2 = Calendar.getInstance(); - - Tasklist tasklist = null; - //2019-12-01 18:34:37+00:00 - - if (result.status == 200) { - tasklist = parseJson(result.jsonResponse, Tasklist.class, false); - for (int i = 0; i< tasklist.getDATA().size(); i++) { - Task task = tasklist.getDATA().get(i); - Date tastTime = sdf.parse(task.getRequest_time().replace("T" , " ")); - cal2.setTime(tastTime); - - - if ( cal1.before(cal2)) { - - // get /task//successful_transfers - // verify datasetid in "destination_path": "/~/test_godata_copy/file1.txt", - // go to aws and get files and write to database tables - - logger.info("====== timeWhenAsyncStarted = " + timeWhenAsyncStarted + " ====== task.getRequest_time().toString() ====== " + task.getRequest_time()); - - boolean success = getSuccessfulTransfers(clientTokenUser, task.getTask_id() , identifierForFileStorage) ; - - if(success) - { - logger.info("SUCCESS ====== " + timeWhenAsyncStarted + " timeWhenAsyncStarted is before tastTime = TASK time = " + task.getTask_id()); - return task.getTask_id(); - } - } - else - { - //logger.info("====== " + timeWhenAsyncStarted + " timeWhenAsyncStarted is after tastTime = TASK time = " + task.getTask_id()); - //return task.getTask_id(); - } - } - } - } catch (MalformedURLException ex) { - logger.severe(ex.getMessage()); - logger.severe(ex.getCause().toString()); - } catch (Exception e) { - e.printStackTrace(); - } - return null; - } - - public boolean getSuccessfulTransfers(AccessToken clientTokenUser, String taskId, String identifierForFileStorage) throws MalformedURLException { - - URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task/"+taskId+"/successful_transfers"); - - MakeRequestResponse result = makeRequest(url, "Bearer",clientTokenUser.getOtherTokens().get(0).getAccessToken(), - "GET", null); - - Transferlist transferlist = null; - - if (result.status == 200) { - transferlist = parseJson(result.jsonResponse, Transferlist.class, false); - for (int i = 0; i < transferlist.getDATA().size(); i++) { - SuccessfulTransfer successfulTransfer = transferlist.getDATA().get(i); - String pathToVerify = successfulTransfer.getDestination_path(); - logger.info("getSuccessfulTransfers : ======pathToVerify === " + pathToVerify + " ====identifierForFileStorage === " + identifierForFileStorage); - if(pathToVerify.contains(identifierForFileStorage)) - { - logger.info(" SUCCESS ====== " + pathToVerify + " ==== " + identifierForFileStorage); - return true; - } - } - } - return false; - } - public boolean getSuccessfulTransfers(AccessToken clientTokenUser, 
String taskId ) throws MalformedURLException { URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task/"+taskId+"/successful_transfers"); @@ -474,8 +225,6 @@ public boolean getSuccessfulTransfers(AccessToken clientTokenUser, String taskId MakeRequestResponse result = makeRequest(url, "Bearer",clientTokenUser.getOtherTokens().get(0).getAccessToken(), "GET", null); - Transferlist transferlist = null; - if (result.status == 200) { logger.info(" SUCCESS ====== " ); return true; @@ -483,8 +232,6 @@ public boolean getSuccessfulTransfers(AccessToken clientTokenUser, String taskId return false; } - - public AccessToken getClientToken() throws MalformedURLException { String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); URL url = new URL("https://auth.globus.org/v2/oauth2/token?scope=openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all&grant_type=client_credentials"); @@ -525,17 +272,6 @@ public AccessToken getAccessToken(HttpServletRequest origRequest, String basicGl } - public UserInfo getUserInfo(AccessToken accessTokenUser) throws MalformedURLException { - - URL url = new URL("https://auth.globus.org/v2/oauth2/userinfo"); - MakeRequestResponse result = makeRequest(url, "Bearer" , accessTokenUser.getAccessToken() , "GET", null); - UserInfo usr = null; - if (result.status == 200) { - usr = parseJson(result.jsonResponse, UserInfo.class, true); - } - - return usr; - } public MakeRequestResponse makeRequest(URL url, String authType, String authCode, String method, String jsonString) { String str = null; diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Identities.java b/src/main/java/edu/harvard/iq/dataverse/globus/Identities.java deleted file mode 100644 index 6411262b5c9..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/globus/Identities.java +++ /dev/null @@ -1,16 +0,0 @@ -package edu.harvard.iq.dataverse.globus; - -import java.util.ArrayList; - - -public class Identities { - ArrayList identities; - - public void setIdentities(ArrayList identities) { - this.identities = identities; - } - - public ArrayList getIdentities() { - return identities; - } -} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Identity.java b/src/main/java/edu/harvard/iq/dataverse/globus/Identity.java deleted file mode 100644 index 265bd55217a..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/globus/Identity.java +++ /dev/null @@ -1,67 +0,0 @@ -package edu.harvard.iq.dataverse.globus; - -public class Identity { - private String id; - private String username; - private String status; - private String name; - private String email; - private String identityProvider; - private String organization; - - public void setOrganization(String organization) { - this.organization = organization; - } - - public void setIdentityProvider(String identityProvider) { - this.identityProvider = identityProvider; - } - - public void setName(String name) { - this.name = name; - } - - public void setEmail(String email) { - this.email = email; - } - - public void setId(String id) { - this.id = id; - } - - public void setStatus(String status) { - this.status = status; - } - - public void setUsername(String username) { - this.username = username; - } - - public String getOrganization() { - return organization; - } - - public String getIdentityProvider() { - return identityProvider; - } - - public String getName() { - return name; - } - - public String getEmail() { - return email; - } - - public String getId() { - return id; 
- } - - public String getStatus() { - return status; - } - - public String getUsername() { - return username; - } -} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/MkDir.java b/src/main/java/edu/harvard/iq/dataverse/globus/MkDir.java deleted file mode 100644 index 2c906f1f31d..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/globus/MkDir.java +++ /dev/null @@ -1,22 +0,0 @@ -package edu.harvard.iq.dataverse.globus; - -public class MkDir { - private String DATA_TYPE; - private String path; - - public void setDataType(String DATA_TYPE) { - this.DATA_TYPE = DATA_TYPE; - } - - public void setPath(String path) { - this.path = path; - } - - public String getDataType() { - return DATA_TYPE; - } - - public String getPath() { - return path; - } -} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/MkDirResponse.java b/src/main/java/edu/harvard/iq/dataverse/globus/MkDirResponse.java deleted file mode 100644 index d31b34b8e70..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/globus/MkDirResponse.java +++ /dev/null @@ -1,50 +0,0 @@ -package edu.harvard.iq.dataverse.globus; - -public class MkDirResponse { - private String DATA_TYPE; - private String code; - private String message; - private String request_id; - private String resource; - - public void setCode(String code) { - this.code = code; - } - - public void setDataType(String dataType) { - this.DATA_TYPE = dataType; - } - - public void setMessage(String message) { - this.message = message; - } - - public void setRequestId(String requestId) { - this.request_id = requestId; - } - - public void setResource(String resource) { - this.resource = resource; - } - - public String getCode() { - return code; - } - - public String getDataType() { - return DATA_TYPE; - } - - public String getMessage() { - return message; - } - - public String getRequestId() { - return request_id; - } - - public String getResource() { - return resource; - } - -} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/PermissionsResponse.java b/src/main/java/edu/harvard/iq/dataverse/globus/PermissionsResponse.java deleted file mode 100644 index a30b1ecdc04..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/globus/PermissionsResponse.java +++ /dev/null @@ -1,58 +0,0 @@ -package edu.harvard.iq.dataverse.globus; - -public class PermissionsResponse { - private String code; - private String resource; - private String DATA_TYPE; - private String request_id; - private String access_id; - private String message; - - public String getDATA_TYPE() { - return DATA_TYPE; - } - - public String getResource() { - return resource; - } - - public String getRequestId() { - return request_id; - } - - public String getMessage() { - return message; - } - - public String getCode() { - return code; - } - - public String getAccessId() { - return access_id; - } - - public void setDATA_TYPE(String DATA_TYPE) { - this.DATA_TYPE = DATA_TYPE; - } - - public void setResource(String resource) { - this.resource = resource; - } - - public void setRequestId(String requestId) { - this.request_id = requestId; - } - - public void setMessage(String message) { - this.message = message; - } - - public void setCode(String code) { - this.code = code; - } - - public void setAccessId(String accessId) { - this.access_id = accessId; - } -} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/SuccessfulTransfer.java b/src/main/java/edu/harvard/iq/dataverse/globus/SuccessfulTransfer.java deleted file mode 100644 index 6e2e5810a0a..00000000000 --- 
a/src/main/java/edu/harvard/iq/dataverse/globus/SuccessfulTransfer.java +++ /dev/null @@ -1,35 +0,0 @@ -package edu.harvard.iq.dataverse.globus; - -public class SuccessfulTransfer { - - private String DATA_TYPE; - private String destination_path; - - public String getDATA_TYPE() { - return DATA_TYPE; - } - - public void setDATA_TYPE(String DATA_TYPE) { - this.DATA_TYPE = DATA_TYPE; - } - - public String getDestination_path() { - return destination_path; - } - - public void setDestination_path(String destination_path) { - this.destination_path = destination_path; - } - - public String getSource_path() { - return source_path; - } - - public void setSource_path(String source_path) { - this.source_path = source_path; - } - - private String source_path; - - -} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Task.java b/src/main/java/edu/harvard/iq/dataverse/globus/Task.java deleted file mode 100644 index 8d9f13f8ddf..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/globus/Task.java +++ /dev/null @@ -1,69 +0,0 @@ -package edu.harvard.iq.dataverse.globus; - -public class Task { - - private String DATA_TYPE; - private String type; - private String status; - private String owner_id; - private String request_time; - private String task_id; - private String destination_endpoint_display_name; - - public String getDestination_endpoint_display_name() { - return destination_endpoint_display_name; - } - - public void setDestination_endpoint_display_name(String destination_endpoint_display_name) { - this.destination_endpoint_display_name = destination_endpoint_display_name; - } - - public void setRequest_time(String request_time) { - this.request_time = request_time; - } - - public String getRequest_time() { - return request_time; - } - - public String getTask_id() { - return task_id; - } - - public void setTask_id(String task_id) { - this.task_id = task_id; - } - - public String getDATA_TYPE() { - return DATA_TYPE; - } - - public void setDATA_TYPE(String DATA_TYPE) { - this.DATA_TYPE = DATA_TYPE; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public String getStatus() { - return status; - } - - public void setStatus(String status) { - this.status = status; - } - - public String getOwner_id() { - return owner_id; - } - - public void setOwner_id(String owner_id) { - this.owner_id = owner_id; - } - -} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Tasklist.java b/src/main/java/edu/harvard/iq/dataverse/globus/Tasklist.java deleted file mode 100644 index 34e8c6c528e..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/globus/Tasklist.java +++ /dev/null @@ -1,17 +0,0 @@ -package edu.harvard.iq.dataverse.globus; - -import java.util.ArrayList; - -public class Tasklist { - - private ArrayList DATA; - - public void setDATA(ArrayList DATA) { - this.DATA = DATA; - } - - public ArrayList getDATA() { - return DATA; - } - -} diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Transferlist.java b/src/main/java/edu/harvard/iq/dataverse/globus/Transferlist.java deleted file mode 100644 index 0a1bd607ee2..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/globus/Transferlist.java +++ /dev/null @@ -1,18 +0,0 @@ -package edu.harvard.iq.dataverse.globus; - -import java.util.ArrayList; - -public class Transferlist { - - - private ArrayList DATA; - - public void setDATA(ArrayList DATA) { - this.DATA = DATA; - } - - public ArrayList getDATA() { - return DATA; - } - -} diff --git 
a/src/main/java/edu/harvard/iq/dataverse/globus/UserInfo.java b/src/main/java/edu/harvard/iq/dataverse/globus/UserInfo.java deleted file mode 100644 index a195486dd0b..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/globus/UserInfo.java +++ /dev/null @@ -1,68 +0,0 @@ -package edu.harvard.iq.dataverse.globus; - -public class UserInfo implements java.io.Serializable{ - - private String identityProviderDisplayName; - private String identityProvider; - private String organization; - private String sub; - private String preferredUsername; - private String name; - private String email; - - public void setEmail(String email) { - this.email = email; - } - - public void setName(String name) { - this.name = name; - } - - public void setPreferredUsername(String preferredUsername) { - this.preferredUsername = preferredUsername; - } - - public void setSub(String sub) { - this.sub = sub; - } - - public void setIdentityProvider(String identityProvider) { - this.identityProvider = identityProvider; - } - - public void setIdentityProviderDisplayName(String identityProviderDisplayName) { - this.identityProviderDisplayName = identityProviderDisplayName; - } - - public void setOrganization(String organization) { - this.organization = organization; - } - - public String getEmail() { - return email; - } - - public String getPreferredUsername() { - return preferredUsername; - } - - public String getSub() { - return sub; - } - - public String getName() { - return name; - } - - public String getIdentityProvider() { - return identityProvider; - } - - public String getIdentityProviderDisplayName() { - return identityProviderDisplayName; - } - - public String getOrganization() { - return organization; - } -} From a4531f54ab2565c8015493a3bcaa1043bed6137f Mon Sep 17 00:00:00 2001 From: chenganj Date: Tue, 20 Apr 2021 16:55:47 -0400 Subject: [PATCH 0118/1551] update --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index f56674cb351..42f17d53183 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2620,7 +2620,9 @@ public Response addFilesToDataset(@PathParam("id") String idSupplied, } catch (DataFileTagException ex) { return error(Response.Status.BAD_REQUEST, ex.getMessage()); } - + catch (ClassCastException | com.google.gson.JsonParseException ex) { + return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing")); + } // ------------------------------------- // (3) Get the file name and content type // ------------------------------------- @@ -2704,10 +2706,10 @@ public Response addFilesToDataset(@PathParam("id") String idSupplied, logger.log(Level.INFO, "Success Number of Files " + successNumberofFiles); DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.EditInProgress); if (dcmLock == null) { - logger.log(Level.WARNING, "Dataset not locked for Globus upload"); + logger.log(Level.WARNING, "No lock found for dataset"); } else { - logger.log(Level.INFO, "Dataset remove locked for Globus upload"); datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.EditInProgress); + logger.log(Level.INFO, "Removed EditInProgress lock "); //dataset.removeLock(dcmLock); } From dc9b9711d2883f6ea8308dea54b6e23713479ace Mon Sep 17 00:00:00 2001 From: chenganj Date: Mon, 26 Apr 2021 12:52:07 
-0400 Subject: [PATCH 0119/1551] Added error messages in the email notification after the Globus transfer --- .../iq/dataverse/DatasetServiceBean.java | 252 +++++++++++------- .../harvard/iq/dataverse/MailServiceBean.java | 52 +++- .../iq/dataverse/UserNotification.java | 3 +- .../providers/builtin/DataverseUserPage.java | 9 +- .../dataverse/globus/GlobusServiceBean.java | 39 +++ .../edu/harvard/iq/dataverse/globus/Task.java | 92 +++++++ .../harvard/iq/dataverse/util/MailUtil.java | 30 ++- src/main/java/propertyFiles/Bundle.properties | 19 +- src/main/webapp/dataverseuser.xhtml | 22 +- 9 files changed, 396 insertions(+), 122 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/Task.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 8b715788172..823d52814b1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -19,6 +19,7 @@ import edu.harvard.iq.dataverse.export.ExportService; import edu.harvard.iq.dataverse.globus.AccessToken; import edu.harvard.iq.dataverse.globus.GlobusServiceBean; +import edu.harvard.iq.dataverse.globus.Task; import edu.harvard.iq.dataverse.globus.fileDetailsHolder; import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean; import edu.harvard.iq.dataverse.search.IndexServiceBean; @@ -1094,8 +1095,8 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin String datasetIdentifier = dataset.getStorageIdentifier(); - String storageType = datasetIdentifier.substring(0, datasetIdentifier.indexOf("://") +3); - datasetIdentifier = datasetIdentifier.substring(datasetIdentifier.indexOf("://") +3); + String storageType = datasetIdentifier.substring(0, datasetIdentifier.indexOf("://") + 3); + datasetIdentifier = datasetIdentifier.substring(datasetIdentifier.indexOf("://") + 3); Thread.sleep(5000); @@ -1110,106 +1111,123 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin String taskIdentifier = jsonObject.getString("taskIdentifier"); - String ruleId = "" ; + String ruleId = ""; try { jsonObject.getString("ruleId"); - }catch (NullPointerException npe){ + } catch (NullPointerException npe) { } // globus task status check - globusStatusCheck(taskIdentifier,globusLogger); - - globusServiceBean.deletePermision(ruleId,globusLogger); - - try { - List inputList = new ArrayList(); - JsonArray filesJsonArray = jsonObject.getJsonArray("files"); + String taskStatus = globusStatusCheck(taskIdentifier, globusLogger); + Boolean taskSkippedFiles = taskSkippedFiles(taskIdentifier, globusLogger); - if (filesJsonArray != null) { + if(ruleId.length() > 0) { + globusServiceBean.deletePermision(ruleId, globusLogger); + } - for (JsonObject fileJsonObject : filesJsonArray.getValuesAs(JsonObject.class)) { + if (taskStatus.startsWith("FAILED") || taskStatus.startsWith("INACTIVE")) { + String comment = "Reason : " + taskStatus.split("#")[1] + "
    Short Description " + taskStatus.split("#")[2]; + userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.GLOBUSUPLOADCOMPLETEDWITHERRORS, dataset.getId(),comment, true); + globusLogger.info("Globus task failed "); + } + else { + try { + List inputList = new ArrayList(); + JsonArray filesJsonArray = jsonObject.getJsonArray("files"); - // storageIdentifier s3://gcs5-bucket1:1781cfeb8a7-748c270a227c from externalTool - String storageIdentifier = fileJsonObject.getString("storageIdentifier"); - String[] bits = storageIdentifier.split(":"); - String bucketName = bits[1].replace("/", ""); - String fileId = bits[bits.length-1]; + if (filesJsonArray != null) { - // fullpath s3://gcs5-bucket1/10.5072/FK2/3S6G2E/1781cfeb8a7-4ad9418a5873 - String fullPath = storageType + bucketName + "/" + datasetIdentifier +"/" +fileId ; - String fileName = fileJsonObject.getString("fileName"); + for (JsonObject fileJsonObject : filesJsonArray.getValuesAs(JsonObject.class)) { - inputList.add(fileId + "IDsplit" + fullPath + "IDsplit" + fileName); - } + // storageIdentifier s3://gcs5-bucket1:1781cfeb8a7-748c270a227c from externalTool + String storageIdentifier = fileJsonObject.getString("storageIdentifier"); + String[] bits = storageIdentifier.split(":"); + String bucketName = bits[1].replace("/", ""); + String fileId = bits[bits.length - 1]; - // calculateMissingMetadataFields: checksum, mimetype - JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList,globusLogger); - JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files"); + // fullpath s3://gcs5-bucket1/10.5072/FK2/3S6G2E/1781cfeb8a7-4ad9418a5873 + String fullPath = storageType + bucketName + "/" + datasetIdentifier + "/" + fileId; + String fileName = fileJsonObject.getString("fileName"); - JsonArrayBuilder jsonDataSecondAPI = Json.createArrayBuilder() ; + inputList.add(fileId + "IDsplit" + fullPath + "IDsplit" + fileName); + } - for (JsonObject fileJsonObject : filesJsonArray.getValuesAs(JsonObject.class)) { + // calculateMissingMetadataFields: checksum, mimetype + JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList, globusLogger); + JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files"); - countAll++; - String storageIdentifier = fileJsonObject.getString("storageIdentifier"); - String fileName = fileJsonObject.getString("fileName"); - String directoryLabel = fileJsonObject.getString("directoryLabel"); - String[] bits = storageIdentifier.split(":"); - String fileId = bits[bits.length-1]; + JsonArrayBuilder jsonDataSecondAPI = Json.createArrayBuilder(); - List newfileJsonObject = IntStream.range(0, newfilesJsonArray.size() ) - .mapToObj(index -> ((JsonObject)newfilesJsonArray.get(index)).getJsonObject(fileId)) - .filter(Objects::nonNull).collect(Collectors.toList()); + for (JsonObject fileJsonObject : filesJsonArray.getValuesAs(JsonObject.class)) { - if(newfileJsonObject != null) { - JsonPatch path = Json.createPatchBuilder().add("/md5Hash", newfileJsonObject.get(0).getString("hash")).build(); - fileJsonObject = path.apply(fileJsonObject); - path = Json.createPatchBuilder().add("/mimeType", newfileJsonObject.get(0).getString("mime")).build(); - fileJsonObject = path.apply(fileJsonObject); - jsonDataSecondAPI.add(stringToJsonObjectBuilder(fileJsonObject.toString())); - countSuccess++; - } - else { - globusLogger.info(fileName + " will be skipped from adding to dataset by second API due to missing values "); - 
countError++; + countAll++; + String storageIdentifier = fileJsonObject.getString("storageIdentifier"); + String fileName = fileJsonObject.getString("fileName"); + String directoryLabel = fileJsonObject.getString("directoryLabel"); + String[] bits = storageIdentifier.split(":"); + String fileId = bits[bits.length - 1]; + + List newfileJsonObject = IntStream.range(0, newfilesJsonArray.size()) + .mapToObj(index -> ((JsonObject) newfilesJsonArray.get(index)).getJsonObject(fileId)) + .filter(Objects::nonNull).collect(Collectors.toList()); + + if (!newfileJsonObject.isEmpty()) { + if (!newfileJsonObject.get(0).getString("hash").equalsIgnoreCase("null")) { + JsonPatch path = Json.createPatchBuilder().add("/md5Hash", newfileJsonObject.get(0).getString("hash")).build(); + fileJsonObject = path.apply(fileJsonObject); + path = Json.createPatchBuilder().add("/mimeType", newfileJsonObject.get(0).getString("mime")).build(); + fileJsonObject = path.apply(fileJsonObject); + jsonDataSecondAPI.add(stringToJsonObjectBuilder(fileJsonObject.toString())); + countSuccess++; + } else { + globusLogger.info(fileName + " will be skipped by the second addFiles API call because its checksum could not be calculated"); + countError++; + } + } else { + globusLogger.info(fileName + " will be skipped by the second addFiles API call because its checksum and MIME type are missing"); + countError++; + } } - String newjsonData = jsonDataSecondAPI.build().toString(); + String newjsonData = jsonDataSecondAPI.build().toString(); - globusLogger.info("Successfully generated new JsonData for Second API call"); + globusLogger.info("Successfully generated new JsonData for Second API call"); - String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST "+httpRequestUrl+"/api/datasets/:persistentId/addFiles?persistentId=doi:" + datasetIdentifier + " -F jsonData='" + newjsonData + "'"; - System.out.println("*******====command ==== " + command); + String command = "curl -H \"X-Dataverse-key:" + token.getTokenString() + "\" -X POST " + httpRequestUrl + "/api/datasets/:persistentId/addFiles?persistentId=doi:" + datasetIdentifier + " -F jsonData='" + newjsonData + "'"; + System.out.println("*******====command ==== " + command); - String output = addFilesAsync(command , globusLogger ) ; - if(output.equalsIgnoreCase("ok")) - { - userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.GLOBUSUPLOADSUCCESS, dataset.getId(),""); + String output = addFilesAsync(command, globusLogger); + if (output.equalsIgnoreCase("ok")) { + //if(!taskSkippedFiles) + if (countError == 0) { + userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.GLOBUSUPLOADCOMPLETED, dataset.getId(), countSuccess + " files added out of " + countAll, true); + } + else { + userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.GLOBUSUPLOADCOMPLETEDWITHERRORS, dataset.getId(), countSuccess + " files added out of " + countAll, true); + } + globusLogger.info("Successfully completed api/datasets/:persistentId/addFiles call"); + } else { + globusLogger.log(Level.SEVERE, "******* Error while executing api/datasets/:persistentId/add call ", command); + } - globusLogger.info("Successfully completed api/datasets/:persistentId/addFiles call "); - } - else - { - globusLogger.log(Level.SEVERE, "******* Error while executing api/datasets/:persistentId/add call ", command); } - } -
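(Taken together, the loop above turns each externalTool-supplied storageIdentifier into a direct S3 path, computes the missing checksum and MIME type, and splices them into the file JSON with a JSON Patch before the second addFiles call. A condensed, self-contained sketch of those two steps; the identifier and dataset values are the examples from the code comments, and the checksum is illustrative:

// 1) Parse "s3://gcs5-bucket1:1781cfeb8a7-748c270a227c" into bucket, file id, and full path.
String storageType = "s3://";
String datasetIdentifier = "10.5072/FK2/3S6G2E";
String storageIdentifier = "s3://gcs5-bucket1:1781cfeb8a7-748c270a227c";
String[] bits = storageIdentifier.split(":");  // ["s3", "//gcs5-bucket1", "1781cfeb8a7-748c270a227c"]
String bucketName = bits[1].replace("/", "");  // "gcs5-bucket1"
String fileId = bits[bits.length - 1];         // "1781cfeb8a7-748c270a227c"
String fullPath = storageType + bucketName + "/" + datasetIdentifier + "/" + fileId;

// 2) Patch the computed metadata into the incoming file object (javax.json).
JsonObject fileJsonObject = Json.createObjectBuilder()
        .add("storageIdentifier", storageIdentifier)
        .add("fileName", "file1.txt")
        .build();
JsonPatch patch = Json.createPatchBuilder()
        .add("/md5Hash", "1b2cf9a9d4d1e7f3a6b8c0d2e4f60718") // illustrative checksum
        .add("/mimeType", "text/plain")
        .build();
fileJsonObject = patch.apply(fileJsonObject);
)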
globusLogger.info("Files processed: " + countAll.toString()); + globusLogger.info("Files added successfully: " + countSuccess.toString()); + globusLogger.info("Files failures: " + countError.toString()); + globusLogger.info("Finished upload via Globus job."); - globusLogger.info("Files processed: " + countAll.toString()); - globusLogger.info("Files added successfully: " + countSuccess.toString()); - globusLogger.info("Files failures: " + countError.toString()); - globusLogger.info("Finished upload via Globus job."); + if (fileHandlerSuceeded) { + fileHandler.close(); + } - if (fileHandlerSuceeded) { - fileHandler.close(); + } catch (Exception e) { + logger.info("Exception from globusUpload call "); + e.printStackTrace(); + globusLogger.info("Exception from globusUpload call " + e.getMessage()); } - - } catch (Exception e) { - logger.info("Exception "); - e.printStackTrace(); } } @@ -1230,23 +1248,62 @@ public static JsonObjectBuilder stringToJsonObjectBuilder(String str) { Executor executor = Executors.newFixedThreadPool(10); - private Boolean globusStatusCheck(String taskId, Logger globusLogger) throws MalformedURLException { - boolean success = false; + private String globusStatusCheck(String taskId, Logger globusLogger) throws MalformedURLException { + boolean taskCompletion = false; + String status = ""; do { try { globusLogger.info("checking globus transfer task " + taskId); Thread.sleep(50000); AccessToken clientTokenUser = globusServiceBean.getClientToken(); - success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskId); + //success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskId); + Task task = globusServiceBean.getTask(clientTokenUser,taskId, globusLogger); + status = task.getStatus(); + if(status != null) { + //The task is in progress. + if (status.equalsIgnoreCase("ACTIVE")) { + if(task.getNice_status().equalsIgnoreCase("ok") || task.getNice_status().equalsIgnoreCase("queued")) { + taskCompletion = false; + } + else { + taskCompletion = true; + status = "FAILED" + "#" + task.getNice_status() + "#" + task.getNice_status_short_description(); + } + } else { + //The task is either succeeded, failed or inactive. 
+ taskCompletion = true; + status = status + "#" + task.getNice_status() + "#" + task.getNice_status_short_description(); + } + } + else { + status = "FAILED"; + taskCompletion = true; + } } catch (Exception ex) { ex.printStackTrace(); } - } while (!success); + } while (!taskCompletion); globusLogger.info("globus transfer task completed"); - return success; + return status; } + + private Boolean taskSkippedFiles(String taskId, Logger globusLogger) throws MalformedURLException { + + try { + globusLogger.info("checking globus transfer task " + taskId); + Thread.sleep(50000); + AccessToken clientTokenUser = globusServiceBean.getClientToken(); + return globusServiceBean.getTaskSkippedErrors(clientTokenUser, taskId, globusLogger); + + } catch (Exception ex) { + ex.printStackTrace(); + } + + return false; + } @@ -1314,7 +1371,11 @@ private fileDetailsHolder calculateDetails(String id, Logger globusLogger) throw in = dataFileStorageIO.getInputStream(); checksumVal = FileUtil.calculateChecksum(in, DataFile.ChecksumType.MD5); count = 3; - } catch (Exception ex) { + } catch (IOException ioex) { + count = 3; + logger.info(ioex.getMessage()); + globusLogger.info("S3AccessIO: DataFile (fullPath " + fullPath + ") does not appear to be an S3 object associated with the driver"); + } catch (Exception ex) { count = count + 1; ex.printStackTrace(); logger.info(ex.getMessage()); @@ -1323,14 +1384,13 @@ private fileDetailsHolder calculateDetails(String id, Logger globusLogger) throw } while (count < 3); - if(checksumVal.length() > 0 ) { - String mimeType = calculatemime(fileName); - globusLogger.info(" File Name " + fileName + " File Details " + fileId + " checksum = " + checksumVal + " mimeType = " + mimeType); - return new fileDetailsHolder(fileId, checksumVal, mimeType); - } - else { - return null; + if (checksumVal.length() == 0) { + checksumVal = "NULL"; } + + String mimeType = calculatemime(fileName); + globusLogger.info(" File Name " + fileName + " File Details " + fileId + " checksum = " + checksumVal + " mimeType = " + mimeType); + return new fileDetailsHolder(fileId, checksumVal, mimeType); //getBytes(in)+"" ); // calculatemime(fileName)); } @@ -1457,15 +1517,27 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro } // globus task status check - globusStatusCheck(taskIdentifier,globusLogger); - - // what if some files failed during download? + String taskStatus = globusStatusCheck(taskIdentifier, globusLogger); + Boolean taskSkippedFiles = taskSkippedFiles(taskIdentifier, globusLogger); if(ruleId.length() > 0) { globusServiceBean.deletePermision(ruleId, globusLogger); } - userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.GLOBUSDOWNLOADSUCCESS, dataset.getId()); + + if (taskStatus.startsWith("FAILED") || taskStatus.startsWith("INACTIVE")) { + String comment = "Reason : " + taskStatus.split("#")[1] + "
    Short Description : " + taskStatus.split("#")[2]; + userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.GLOBUSDOWNLOADCOMPLETEDWITHERRORS, dataset.getId(),comment, true); + globusLogger.info("Globus task failed during download process"); + } + else { + if(!taskSkippedFiles) { + userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.GLOBUSDOWNLOADCOMPLETED, dataset.getId()); + } + else { + userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.GLOBUSDOWNLOADCOMPLETEDWITHERRORS, dataset.getId(), ""); + } + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index e476a4e55b0..329058aa7a4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -546,23 +546,48 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio logger.fine("fileImportMsg: " + fileImportMsg); return messageText += fileImportMsg; - case GLOBUSUPLOADSUCCESS: + case GLOBUSUPLOADCOMPLETED: dataset = (Dataset) targetObject; - String fileMsg = BundleUtil.getStringFromBundle("notification.mail.import.globus", Arrays.asList( + messageText = BundleUtil.getStringFromBundle("notification.email.greeting.html"); + String uploadCompletedMessage = messageText + BundleUtil.getStringFromBundle("notification.mail.globus.upload.completed", Arrays.asList( systemConfig.getDataverseSiteUrl(), dataset.getGlobalIdString(), - dataset.getDisplayName() - )); - return messageText += fileMsg; + dataset.getDisplayName(), + comment + )) ; + return uploadCompletedMessage; - case GLOBUSDOWNLOADSUCCESS: + case GLOBUSDOWNLOADCOMPLETED: dataset = (Dataset) targetObject; - String fileDownloadMsg = BundleUtil.getStringFromBundle("notification.mail.download.globus", Arrays.asList( + messageText = BundleUtil.getStringFromBundle("notification.email.greeting.html"); + String downloadCompletedMessage = messageText + BundleUtil.getStringFromBundle("notification.mail.globus.download.completed", Arrays.asList( systemConfig.getDataverseSiteUrl(), dataset.getGlobalIdString(), - dataset.getDisplayName() - )); - return messageText += fileDownloadMsg; + dataset.getDisplayName(), + comment + )) ; + return downloadCompletedMessage; + case GLOBUSUPLOADCOMPLETEDWITHERRORS: + dataset = (Dataset) targetObject; + messageText = BundleUtil.getStringFromBundle("notification.email.greeting.html"); + String uploadCompletedWithErrorsMessage = messageText + BundleUtil.getStringFromBundle("notification.mail.globus.upload.completedWithErrors", Arrays.asList( + systemConfig.getDataverseSiteUrl(), + dataset.getGlobalIdString(), + dataset.getDisplayName(), + comment + )) ; + return uploadCompletedWithErrorsMessage; + + case GLOBUSDOWNLOADCOMPLETEDWITHERRORS: + dataset = (Dataset) targetObject; + messageText = BundleUtil.getStringFromBundle("notification.email.greeting.html"); + String downloadCompletedWithErrorsMessage = messageText + BundleUtil.getStringFromBundle("notification.mail.globus.download.completedWithErrors", Arrays.asList( + systemConfig.getDataverseSiteUrl(), + dataset.getGlobalIdString(), + dataset.getDisplayName(), + comment + )) ; + return downloadCompletedWithErrorsMessage; case CHECKSUMIMPORT: version = (DatasetVersion) targetObject; @@ -638,9 
+663,10 @@ private Object getObjectOfNotification (UserNotification userNotification){ return datasetService.find(userNotification.getObjectId()); case FILESYSTEMIMPORT: return versionService.find(userNotification.getObjectId()); - case GLOBUSUPLOADSUCCESS: - return datasetService.find(userNotification.getObjectId()); - case GLOBUSDOWNLOADSUCCESS: + case GLOBUSUPLOADCOMPLETED: + case GLOBUSUPLOADCOMPLETEDWITHERRORS: + case GLOBUSDOWNLOADCOMPLETED: + case GLOBUSDOWNLOADCOMPLETEDWITHERRORS: return datasetService.find(userNotification.getObjectId()); case CHECKSUMIMPORT: return versionService.find(userNotification.getObjectId()); diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java index 78ef2bb6783..8a8f3d7d620 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java @@ -30,7 +30,8 @@ public enum Type { ASSIGNROLE, REVOKEROLE, CREATEDV, CREATEDS, CREATEACC, SUBMITTEDDS, RETURNEDDS, PUBLISHEDDS, REQUESTFILEACCESS, GRANTFILEACCESS, REJECTFILEACCESS, FILESYSTEMIMPORT, CHECKSUMIMPORT, CHECKSUMFAIL, CONFIRMEMAIL, APIGENERATED, INGESTCOMPLETED, INGESTCOMPLETEDWITHERRORS, - PUBLISHFAILED_PIDREG, WORKFLOW_SUCCESS, WORKFLOW_FAILURE, GLOBUSUPLOADSUCCESS,GLOBUSDOWNLOADSUCCESS; + PUBLISHFAILED_PIDREG, WORKFLOW_SUCCESS, WORKFLOW_FAILURE, GLOBUSUPLOADCOMPLETED, GLOBUSUPLOADCOMPLETEDWITHERRORS, + GLOBUSDOWNLOADCOMPLETED, GLOBUSDOWNLOADCOMPLETEDWITHERRORS; }; private static final long serialVersionUID = 1L; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java index 4596ac8b3cc..4c7c35bfc73 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java @@ -507,11 +507,10 @@ public void displayNotification() { userNotification.setTheObject(datasetVersionService.find(userNotification.getObjectId())); break; - case GLOBUSUPLOADSUCCESS: - userNotification.setTheObject(datasetService.find(userNotification.getObjectId())); - break; - - case GLOBUSDOWNLOADSUCCESS: + case GLOBUSUPLOADCOMPLETED: + case GLOBUSUPLOADCOMPLETEDWITHERRORS: + case GLOBUSDOWNLOADCOMPLETED: + case GLOBUSDOWNLOADCOMPLETEDWITHERRORS: userNotification.setTheObject(datasetService.find(userNotification.getObjectId())); break; diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index a59a2ca77c1..9cfbf432790 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -232,6 +232,45 @@ public boolean getSuccessfulTransfers(AccessToken clientTokenUser, String taskId return false; } + public Task getTask(AccessToken clientTokenUser, String taskId, Logger globusLogger) throws MalformedURLException { + + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task/" + taskId); + + MakeRequestResponse result = makeRequest(url, "Bearer", clientTokenUser.getOtherTokens().get(0).getAccessToken(), + "GET", null); + + Task task = null; + + if (result.status == 200) { + task = parseJson(result.jsonResponse, Task.class, false); + } else { + globusLogger.warning("Cannot find information for the task " + taskId + " : Reason : " + result.jsonResponse.toString()); + } + + return task; + } + + public Boolean getTaskSkippedErrors(AccessToken clientTokenUser, String taskId, Logger globusLogger) throws MalformedURLException { + + URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task/" + taskId); + + MakeRequestResponse result = makeRequest(url, "Bearer", clientTokenUser.getOtherTokens().get(0).getAccessToken(), + "GET", null); + + Task task = null; + + if (result.status == 200) { + task = parseJson(result.jsonResponse, Task.class, false); + return task.getSkip_source_errors(); + } + + return false; + } + public AccessToken getClientToken() throws MalformedURLException { String basicGlobusToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.BasicGlobusToken, ""); URL url = new URL("https://auth.globus.org/v2/oauth2/token?scope=openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all&grant_type=client_credentials"); diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/Task.java b/src/main/java/edu/harvard/iq/dataverse/globus/Task.java new file mode 100644 index 00000000000..911c84c0d34 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/globus/Task.java @@ -0,0 +1,92 @@ +package edu.harvard.iq.dataverse.globus; + +public class Task { + + private String DATA_TYPE; + private String type; + private String status; + private String owner_id; + private String request_time; + private String task_id; + private String destination_endpoint_display_name; + private boolean skip_source_errors; + private String nice_status; + private String nice_status_short_description; + + public String getDestination_endpoint_display_name() { + return destination_endpoint_display_name; + } + + public void setDestination_endpoint_display_name(String destination_endpoint_display_name) { + this.destination_endpoint_display_name = destination_endpoint_display_name; + } + + public void setRequest_time(String request_time) { + this.request_time = request_time; + } + + public String getRequest_time() { + return request_time; + } + + public String getTask_id() { + return task_id; + } + + public void setTask_id(String task_id) { + this.task_id = task_id; + } + + public String getDATA_TYPE() { + return DATA_TYPE; + } + + public void setDATA_TYPE(String DATA_TYPE) { + this.DATA_TYPE = DATA_TYPE; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public String getOwner_id() { + return owner_id; + } + + public void setOwner_id(String owner_id) { + this.owner_id = owner_id; + } + + public Boolean getSkip_source_errors() { + return skip_source_errors; + } + + public void setSkip_source_errors(Boolean skip_source_errors) { + this.skip_source_errors = skip_source_errors; + } + + public String getNice_status() { + return nice_status; + } + + public void setNice_status(String nice_status) { + this.nice_status = nice_status; + } + + public String getNice_status_short_description() { return nice_status_short_description; } + +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java index ec665561860..94a2da72b8a 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java @@ -70,13 +70,37 @@ public static String getSubjectTextBasedOnNotification(UserNotification userNoti } catch (Exception e) { return BundleUtil.getStringFromBundle("notification.email.import.filesystem.subject", rootDvNameAsList); } - case GLOBUSUPLOADSUCCESS: + case GLOBUSUPLOADCOMPLETED: try { DatasetVersion version = (DatasetVersion)objectOfNotification; List dsNameAsList = Arrays.asList(version.getDataset().getDisplayName()); - return BundleUtil.getStringFromBundle("notification.email.import.globus.subject", dsNameAsList); + return BundleUtil.getStringFromBundle("notification.email.globus.uploadCompleted.subject", dsNameAsList); } catch (Exception e) { - return BundleUtil.getStringFromBundle("notification.email.import.globus.subject", rootDvNameAsList); + return BundleUtil.getStringFromBundle("notification.email.globus.uploadCompleted.subject", rootDvNameAsList); + } + case GLOBUSDOWNLOADCOMPLETED: + try { + DatasetVersion version = (DatasetVersion)objectOfNotification; + List dsNameAsList = Arrays.asList(version.getDataset().getDisplayName()); + return BundleUtil.getStringFromBundle("notification.email.globus.downloadCompleted.subject", dsNameAsList); + } catch (Exception e) { + return BundleUtil.getStringFromBundle("notification.email.globus.downloadCompleted.subject", rootDvNameAsList); + } + case GLOBUSUPLOADCOMPLETEDWITHERRORS: + try { + DatasetVersion version = (DatasetVersion)objectOfNotification; + List dsNameAsList = Arrays.asList(version.getDataset().getDisplayName()); + return BundleUtil.getStringFromBundle("notification.email.globus.uploadCompletedWithErrors.subject", dsNameAsList); + } catch (Exception e) { + return BundleUtil.getStringFromBundle("notification.email.globus.uploadCompletedWithErrors.subject", rootDvNameAsList); + } + case GLOBUSDOWNLOADCOMPLETEDWITHERRORS: + try { + DatasetVersion version = (DatasetVersion)objectOfNotification; + List dsNameAsList = Arrays.asList(version.getDataset().getDisplayName()); + return BundleUtil.getStringFromBundle("notification.email.globus.downloadCompletedWithErrors.subject", dsNameAsList); + } catch (Exception e) { + return BundleUtil.getStringFromBundle("notification.email.globus.downloadCompletedWithErrors.subject", rootDvNameAsList); } case CHECKSUMIMPORT: diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index c4adba1a94e..35487d74cf7 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -219,11 +219,15 @@ notification.checksumfail=One or more files in your upload failed checksum valid notification.ingest.completed=Dataset
    {2} ingest process has successfully finished.

    Ingested files:{3}
    notification.ingest.completedwitherrors=Dataset {2} ingest process has finished with errors.

    Ingested files:{3}
    notification.mail.import.filesystem=Dataset {2} ({0}/dataset.xhtml?persistentId={1}) has been successfully uploaded and verified. -notification.mail.import.globus=Dataset {2} ({0}/dataset.xhtml?persistentId={1}) has been successfully uploaded via Globus and verified. -notification.mail.download.globus=Files from the dataset {2} ({0}/dataset.xhtml?persistentId={1}) has been successfully downloaded via Globus and verified. +notification.mail.globus.upload.completed=Dataset {2} has been successfully uploaded via Globus and verified.

    {3}
+notification.mail.globus.download.completed=Files from the dataset {2} have been successfully downloaded via Globus.

    {3}
+notification.mail.globus.upload.completedWithErrors=Dataset {2}: the upload of files via Globus completed with errors.

    {3}
+notification.mail.globus.download.completedWithErrors=Files from the dataset {2}: the download via Globus completed with errors.

    {3}
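
As the MailServiceBean hunk above shows, these bundle strings are filled in with BundleUtil.getStringFromBundle(key, Arrays.asList(...)), which ultimately hands the parameters to java.text.MessageFormat: {0} and {1} carry the site URL and persistent ID, {2} the dataset display name, and {3} the free-form comment. A minimal standalone sketch of that substitution, with hypothetical sample values (the pattern text is the notification.mail.globus.upload.completed message above):

    import java.text.MessageFormat;

    public class GlobusNotificationTextDemo {
        public static void main(String[] args) {
            // Placeholder convention used by the Globus notification properties above;
            // {0} and {1} are supplied even when a given pattern does not reference them.
            String pattern = "Dataset {2} has been successfully uploaded via Globus and verified.\n\n{3}";
            String text = MessageFormat.format(pattern,
                    "https://demo.dataverse.org",    // {0} site URL (hypothetical)
                    "doi:10.5072/FK2/EXAMPLE",       // {1} persistent ID (hypothetical)
                    "My Dataset",                    // {2} dataset display name
                    "All files were transferred.");  // {3} comment built by DatasetServiceBean
            System.out.println(text);
        }
    }
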
notification.import.filesystem=Dataset {1} has been successfully uploaded and verified. -notification.import.globus=Dataset {1} has been successfully uploaded via Globus and verified. -notification.download.globus=Files from the dataset {1} has been successfully downloaded via Globus and verified. +notification.globus.upload.completed=Dataset {1} has been successfully uploaded via Globus and verified. +notification.globus.download.completed=Files from the dataset {1} have been successfully downloaded via Globus. +notification.globus.upload.completedWithErrors=Dataset {1}: the upload of files via Globus completed with errors. +notification.globus.download.completedWithErrors=Files from the dataset {1}: the download via Globus completed with errors. notification.import.checksum={1}, dataset had file checksums added via a batch job. removeNotification=Remove Notification groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned. @@ -712,8 +716,11 @@ contact.delegation={0} on behalf of {1} notification.email.info.unavailable=Unavailable notification.email.apiTokenGenerated=Hello {0} {1},\n\nAPI Token has been generated. Please keep it secure as you would do with a password. notification.email.apiTokenGenerated.subject=API Token was generated -notification.email.import.globus.subject=Dataset {0} has been successfully uploaded via Globus and verified -notification.email.download.globus.subject=Files from the dataset {0} has been successfully downloaded via Globus and verified +notification.email.globus.uploadCompleted.subject={0}: Files uploaded successfully via Globus and verified +notification.email.globus.downloadCompleted.subject={0}: Files downloaded successfully via Globus +notification.email.globus.uploadCompletedWithErrors.subject={0}: Files uploaded via Globus with errors +notification.email.globus.downloadCompletedWithErrors.subject={0}: Files downloaded via Globus with errors + # dataverse.xhtml dataverse.name=Dataverse Name diff --git a/src/main/webapp/dataverseuser.xhtml b/src/main/webapp/dataverseuser.xhtml index 05ebf5f3b7a..2bb65578517 100644 --- a/src/main/webapp/dataverseuser.xhtml +++ b/src/main/webapp/dataverseuser.xhtml @@ -286,16 +286,30 @@ - + - + - + - + + + + + + + + + + + + + + + From 9dfdb2f2d4e2d0c45f1bf8f56e346847ba0a9f5b Mon Sep 17 00:00:00 2001 From: chenganj Date: Mon, 26 Apr 2021 14:36:42 -0400 Subject: [PATCH 0120/1551] remove lock, if globus transfer failed due to GC not connected --- .../edu/harvard/iq/dataverse/DatasetServiceBean.java | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 823d52814b1..1ed64ee69cf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -1127,9 +1127,18 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin } if (taskStatus.startsWith("FAILED") || taskStatus.startsWith("INACTIVE")) { - String comment = "Reason : " + taskStatus.split("#") [1] + "
    Short Description " + taskStatus.split("#")[2]; + String comment = "Reason : " + taskStatus.split("#") [1] + "
    Short Description : " + taskStatus.split("#")[2]; userNotificationService.sendNotification((AuthenticatedUser) authUser, new Timestamp(new Date().getTime()), UserNotification.Type.GLOBUSUPLOADCOMPLETEDWITHERRORS, dataset.getId(),comment, true); globusLogger.info("Globus task failed "); + + DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.EditInProgress); + if (dcmLock == null) { + logger.log(Level.WARNING, "No lock found for dataset"); + } else { + removeDatasetLocks(dataset, DatasetLock.Reason.EditInProgress); + logger.log(Level.INFO, "Removed EditInProgress lock "); + //dataset.removeLock(dcmLock); + } } else { try { From 0cf73590bd74a3f771e3104fa2d49abab9350bd9 Mon Sep 17 00:00:00 2001 From: mderuijter Date: Wed, 28 Apr 2021 10:01:43 +0200 Subject: [PATCH 0121/1551] added license info to publish dataset popup dialog --- src/main/java/propertyFiles/Bundle.properties | 4 +- src/main/resources/META-INF/persistence.xml | 2 +- src/main/webapp/dataset.xhtml | 3453 ++++++++++------- src/main/webapp/resources/css/structure.css | 1 + 4 files changed, 2001 insertions(+), 1459 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index ab5352c8efd..e7f12971a6a 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1635,7 +1635,7 @@ file.dataFilesTab.metadata.addBtn=Add + Edit Metadata file.dataFilesTab.terms.header=Terms file.dataFilesTab.terms.editTermsBtn=Edit Terms Requirements file.dataFilesTab.terms.list.termsOfUse.header=Terms of Use -file.dataFilesTab.terms.list.termsOfUse.waiver=Waiver +file.dataFilesTab.terms.list.termsOfUse.waiver=License/DUA file.dataFilesTab.terms.list.termsOfUse.waiver.title=The waiver informs data downloaders how they can use this dataset. file.dataFilesTab.terms.list.termsOfUse.waiver.txt=CC0 - "Public Domain Dedication" file.cc0.icon.alttxt=Creative Commons CC0 1.0 Public Domain Dedication icon @@ -1667,7 +1667,7 @@ file.dataFilesTab.terms.list.termsOfAccess.header=Restricted Files + Terms of Ac file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=Restricted Files file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=The number of restricted files in this dataset. file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=There {0, choice, 0#are|1#is|2#are} {0} restricted {0, choice, 0#files|1#file|2#files} in this dataset. -file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Terms of Access +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Terms of Access for Restricted Files file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title=Information on how and if users can gain access to the restricted files in this dataset. file.dataFilesTab.terms.list.termsOfAccess.requestAccess=Request Access file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title=If checked, users can request access to the restricted files in this dataset. 
diff --git a/src/main/resources/META-INF/persistence.xml b/src/main/resources/META-INF/persistence.xml index 45552f36939..fe80cbfefee 100644 --- a/src/main/resources/META-INF/persistence.xml +++ b/src/main/resources/META-INF/persistence.xml @@ -11,7 +11,7 @@ - + diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 0198e303b06..fdcd08ab24f 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -11,684 +11,868 @@ xmlns:cc="http://java.sun.com/jsf/composite" xmlns:o="http://omnifaces.org/ui" xmlns:of="http://omnifaces.org/functions"> - - - - - - - - - - - + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    -
    -
    -
    - #{DatasetPage.datasetVersionUI.title.value} -
    - - - - - - - - -
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    +
    +
    + #{DatasetPage.datasetVersionUI.title.value} +
    + + + + + + + +
    +
    - -
    -
    - - - - - - - -
    + +
    +
    + + + + + + + +
    -
    - -
    - -
    -
    - - + + + +
    - +
    + - -
    -
    - - - - - - - - - - - - - #{bundle['dataset.publishBtn']} - - - -
    + +
    +
    + + + + + + + + + + + + + #{bundle['dataset.publishBtn']} + + +
    - +
    + - -
    -
    - -
    - +
    + - -
    - - #{bundle['dataset.linkBtn']} - -
    - + +
    + + #{bundle['dataset.linkBtn']} + +
    + - -
    - - - - - - #{bundle['dataset.contactBtn']} - - - #{bundle['dataset.shareBtn']} - -
    - + +
    + + + + + + #{bundle['dataset.contactBtn']} + + + #{bundle['dataset.shareBtn']} +
    - +
    + +
    -
    - -
    -
    - #{bundle['metrics.dataset.title']} - - - - - - -
    -
    - -
    - - - - -
    - -
    - - - +
    + +
    +
    + #{bundle['metrics.dataset.title']} + -
    - -
    - - + data-toggle="tooltip" data-placement="auto top" + data-trigger="hover" + data-original-title="#{bundle['metrics.dataset.tip.default']}"> + + + + +
    +
    + +
    + + + + +
    + +
    + + + + +
    + +
    + + + + +
    + +
    + + + - -
    - -
    - - - - - - -
    + +
    -
    + +
    - -
    - + +
    + - -
    -
    -
    #{bundle['dataset.deaccession.reason']}
    -

    #{DatasetPage.workingVersion.versionNote}

    - -

    #{bundle['dataset.beAccessedAt']} #{DatasetPage.workingVersion.archiveNote}

    -
    -
    + +
    +
    +
    #{bundle['dataset.deaccession.reason']}
    +

    #{DatasetPage.workingVersion.versionNote}

    + +

    #{bundle['dataset.beAccessedAt']} #{DatasetPage.workingVersion.archiveNote} +

    +
    - +
    + - - - +
    +
    - - - +
    + + + - - - -
    - -
    - -

    #{DatasetPage.dataset.owner.name}

    - -
    - -

    - #{bundle['dataset.host.tip']} -

    -
    - - - - - - - - - - - - - - -
    -
    -
    -
    -
    - -
    + + + +
    + +
    + +

    + #{DatasetPage.dataset.owner.name}

    + +
    +

    - #{bundle['dataset.template.tip']} + #{bundle['dataset.host.tip']}

    - - - - - -
    +
    + + + + + + + + + + + + + + +
    +
    -
    - +
    +
    + +
    +

    + #{bundle['dataset.template.tip']} +

    + + + + +
    - - - - - - - - - - - -
    - - - -
    - - - -
    - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + - - - - - + + + + -
    - + - - #{bundle['file.dataFilesTab.metadata.addBtn']} - -
    + + #{bundle['file.dataFilesTab.metadata.addBtn']} + +
    - - -
    + +
    - - - - - - - - - - - - - - - - - - - - - - +
    + + + + + + + + +
    + + + + + + + + + + + + +
    +
    + + +
    + #{bundle['file.metadataTip']} +
    + +
    + + + + + + +
    + + + + + + + + + + + + + +

    #{bundle['dataset.share.datasetShare.tip']}

    +
    +
    +
    - - -
    - #{bundle['file.metadataTip']} + + +

    + + + + +

    +
    +
    + #{bundle['metrics.citations.dialog.empty']} +
    + + +
    - -
    - - - - - - +
    +
    - - - - - - - - - - - + + +

    + #{bundle['dataset.noValidSelectedFilesForDownload']}

    + +

    #{bundle['dataset.requestAccessToRestrictedFiles']}

    - -

    #{bundle['dataset.share.datasetShare.tip']}

    -
    -
    - -
    -
    - +
    + +
    +
    + +

    + #{bundle['dataset.mixedSelectedFilesForDownload']}

    + + + + + + +
    #{resFile.label}
    +
    +

    #{bundle['dataset.downloadUnrestricted']}

    + + + +
    +
    + +

    + #{bundle['dataset.mixedSelectedFilesForDownload']}

    + + + + + + +
    #{resFile.label}
    +
    +

    #{bundle['dataset.downloadUnrestricted']}

    + + + +
    +
    + +

    + #{bundle['file.deleteDialog.tip']}

    +
    + + +
    +
    + +

    + #{bundle['file.deleteDraftDialog.tip']}

    +
    + + +
    +
    + +

    - +

    -
    -
    - #{bundle['metrics.citations.dialog.empty']} +
    +
    +

    #{bundle['dataset.privateurl.absent']}

    - - - -
    -
    - -
    - - -

    #{bundle['dataset.noValidSelectedFilesForDownload']}

    - -

    #{bundle['dataset.requestAccessToRestrictedFiles']}

    -
    -
    - -
    -
    - -

    #{bundle['dataset.mixedSelectedFilesForDownload']}

    - - - - - - -
    #{resFile.label}
    -
    -

    #{bundle['dataset.downloadUnrestricted']}

    - - - -
    -
    - -

    #{bundle['dataset.mixedSelectedFilesForDownload']}

    - - - - - - -
    #{resFile.label}
    -
    -

    #{bundle['dataset.downloadUnrestricted']}

    - - - -
    -
    - -

    #{bundle['file.deleteDialog.tip']}

    -
    - - -
    -
    - -

    #{bundle['file.deleteDraftDialog.tip']}

    -
    - - -
    -
    - - -

    - - - - -

    -
    -
    -

    #{bundle['dataset.privateurl.absent']}

    -
    -
    -

    #{bundle['dataset.privateurl.createdSuccess']}

    - -

    - #{privateUrlLink} -

    -
    -
    -
    - - - - -
    -
    - -

    #{bundle['dataset.privateurl.cannotCreate']}

    -
    - +

    + #{bundle['dataset.privateurl.createdSuccess']}

    + +

    + #{privateUrlLink} +

    -
    -
    - -

    #{bundle['dataset.privateurl.disableConfirmationText']}

    -
    - -
    -
    - -

    #{bundle['file.deleteFileDialog.multiple.immediate']}

    - -

    #{bundle['file.deleteFileDialog.failed.tip']}

    -
    - - -
    -
    - -

    #{bundle['dataset.compute.computeBatchRestricted']}

    -
    -
    -
    - - - - - -

    #{bundle['file.deaccessionDialog.tip']}

    -
    - - - - - -
    -
    - - - - - - - - - - - - -
    -
    - - - - - -
    -
    - - - - -
    -
    - - - - -
    -
    -
    - -

    #{bundle['file.deaccessionDialog.deaccession.tip']}

    + +

    + #{bundle['dataset.privateurl.cannotCreate']}

    - - -
    -
    - -

    #{bundle['file.deaccessionDialog.deaccessionDataset.tip']}

    -
    - - -
    -
    -