From 36098f245264e11516cd31f161b0e3c5f1117cdf Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 1 Jul 2020 13:34:14 -0400 Subject: [PATCH 01/17] fix mimetype of error pages --- src/main/webapp/WEB-INF/web.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml index ce3e7d6e37b..9e007d48a11 100644 --- a/src/main/webapp/WEB-INF/web.xml +++ b/src/main/webapp/WEB-INF/web.xml @@ -185,6 +185,11 @@ <extension>webmanifest</extension> <mime-type>application/manifest+json</mime-type> + <mime-mapping> + <extension>xhtml</extension> + <mime-type>text/html</mime-type> + </mime-mapping> + From 5e1edafd0e83f1f4ce4ad737116fb56e6a1014eb Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 31 Oct 2023 12:51:16 +0100 Subject: [PATCH 02/17] Use StandardCharsets.UTF_8 And do not try to catch UnsupportedEncodingException --- .../edu/harvard/iq/dataverse/DataCitation.java | 5 +++-- src/main/java/edu/harvard/iq/dataverse/Shib.java | 7 ++++--- .../java/edu/harvard/iq/dataverse/api/Admin.java | 5 +++-- .../providers/builtin/PasswordEncryption.java | 6 +++--- .../dataaccess/TabularSubsetGenerator.java | 1 + .../iq/dataverse/dataset/DatasetUtil.java | 10 ++-------- .../impl/DuraCloudSubmitToArchiveCommand.java | 4 ++-- .../impl/GoogleCloudSubmitToArchiveCommand.java | 4 ++-- .../command/impl/S3SubmitToArchiveCommand.java | 3 ++- .../iq/dataverse/export/JSONExporter.java | 4 ++-- .../iq/dataverse/export/OAI_OREExporter.java | 4 ++-- .../dataverse/export/SchemaDotOrgExporter.java | 3 ++- .../impl/plugins/dta/DTAFileReader.java | 3 ++- .../tabulardata/impl/plugins/dta/DataReader.java | 3 ++- .../impl/plugins/dta/NewDTAFileReader.java | 9 +++++---- .../impl/plugins/por/PORFileReader.java | 6 +++--- .../impl/plugins/rdata/RDATAFileReader.java | 7 +++---- .../impl/plugins/sav/SAVFileReader.java | 7 ++----- .../pidproviders/handle/HandlePidProvider.java | 16 ++++++++-------- .../provenance/ProvPopupFragmentBean.java | 3 ++- .../dataverse/rserve/RemoteDataFrameService.java | 3 ++- .../harvard/iq/dataverse/util/StringUtil.java | 10 +++++----- .../harvard/iq/dataverse/util/UrlSignerUtil.java | 6 +++--- .../iq/dataverse/util/bagit/BagGenerator.java | 9 ++------- .../harvard/iq/dataverse/util/bagit/OREMap.java | 3 ++- 25 files changed, 69 insertions(+), 72 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java index a012175deae..3977023fc4b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java @@ -15,6 +15,7 @@ import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; +import java.nio.charset.StandardCharsets; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; @@ -253,7 +254,7 @@ public String toBibtexString() { public void writeAsBibtexCitation(OutputStream os) throws IOException { // Use UTF-8 - Writer out = new BufferedWriter(new OutputStreamWriter(os, "utf-8")); + Writer out = new BufferedWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8)); if(getFileTitle() !=null && isDirect()) { out.write("@incollection{"); } else { @@ -317,7 +318,7 @@ public String toRISString() { public void writeAsRISCitation(OutputStream os) throws IOException { // Use UTF-8 - Writer out = new BufferedWriter(new OutputStreamWriter(os, "utf-8")); + Writer out = new BufferedWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8)); out.write("Provider: " + publisher + "\r\n"); out.write("Content: text/plain; charset=\"utf-8\"" + "\r\n"); // Using type "DATA" - see 
https://github.com/IQSS/dataverse/issues/4816 diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java index f9cf061e771..21c39950604 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Shib.java +++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java @@ -19,6 +19,7 @@ import org.apache.commons.lang3.StringUtils; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; @@ -458,9 +459,9 @@ private String getRequiredValueFromAssertion(String key) throws Exception { if (attributeValue.isEmpty()) { throw new Exception(key + " was empty"); } - if(systemConfig.isShibAttributeCharacterSetConversionEnabled()) { - attributeValue= new String( attributeValue.getBytes("ISO-8859-1"), "UTF-8"); - } + if (systemConfig.isShibAttributeCharacterSetConversionEnabled()) { + attributeValue= new String( attributeValue.getBytes("ISO-8859-1"), StandardCharsets.UTF_8); + } String trimmedValue = attributeValue.trim(); logger.fine("The SAML assertion for \"" + key + "\" (required) was \"" + attributeValue + "\" and was trimmed to \"" + trimmedValue + "\"."); return trimmedValue; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 154fa2350bd..a8a8f8455af 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -65,6 +65,7 @@ import java.io.InputStream; import java.io.StringReader; +import java.nio.charset.StandardCharsets; import java.util.Map; import java.util.Map.Entry; import java.util.logging.Level; @@ -1153,7 +1154,7 @@ public void write(OutputStream os) throws IOException, os.write(",\n".getBytes()); } - os.write(output.build().toString().getBytes("UTF8")); + os.write(output.build().toString().getBytes(StandardCharsets.UTF_8)); if (!wroteObject) { wroteObject = true; @@ -1267,7 +1268,7 @@ public void write(OutputStream os) throws IOException, os.write(",\n".getBytes()); } - os.write(output.build().toString().getBytes("UTF8")); + os.write(output.build().toString().getBytes(StandardCharsets.UTF_8)); if (!wroteObject) { wroteObject = true; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/PasswordEncryption.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/PasswordEncryption.java index 4446f68228d..aef8b375b63 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/PasswordEncryption.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/PasswordEncryption.java @@ -1,6 +1,6 @@ package edu.harvard.iq.dataverse.authorization.providers.builtin; -import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import org.apache.commons.lang3.RandomStringUtils; @@ -36,13 +36,13 @@ public interface Algorithm { public String encrypt(String plainText) { try { MessageDigest md = MessageDigest.getInstance("SHA"); - md.update(plainText.getBytes("UTF-8")); + md.update(plainText.getBytes(StandardCharsets.UTF_8)); byte[] raw = md.digest(); //String hash = Base64.encodeToString(raw, true); String hash = Base64.getEncoder().encodeToString(raw); return hash; - } catch (NoSuchAlgorithmException | UnsupportedEncodingException e) { + } catch (NoSuchAlgorithmException e) { throw new RuntimeException(e); } } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java index c369010c8cd..e9c28b9fc7c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java @@ -42,6 +42,7 @@ import java.math.RoundingMode; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; +import java.nio.charset.StandardCharsets; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.logging.Logger; diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index 98bd26b51d6..c1b95d27938 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -21,8 +21,8 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.UnsupportedEncodingException; import java.nio.channels.FileChannel; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; import java.util.*; @@ -409,14 +409,8 @@ public static InputStream getThumbnailAsInputStream(Dataset dataset, int size) { String base64Image = datasetThumbnail.getBase64image(); String leadingStringToRemove = FileUtil.DATA_URI_SCHEME; String encodedImg = base64Image.substring(leadingStringToRemove.length()); - byte[] decodedImg = null; - try { - decodedImg = Base64.getDecoder().decode(encodedImg.getBytes("UTF-8")); + byte[] decodedImg = Base64.getDecoder().decode(encodedImg.getBytes(StandardCharsets.UTF_8)); logger.fine("returning this many bytes for " + "dataset id: " + dataset.getId() + ", persistentId: " + dataset.getIdentifier() + " :" + decodedImg.length); - } catch (UnsupportedEncodingException ex) { - logger.info("dataset thumbnail could not be decoded for dataset id " + dataset.getId() + ": " + ex); - return null; - } ByteArrayInputStream nonDefaultDatasetThumbnail = new ByteArrayInputStream(decodedImg); logger.fine("For dataset id " + dataset.getId() + " a thumbnail was found and is being returned."); return nonDefaultDatasetThumbnail; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java index d6d7b49d172..15c469ec769 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java @@ -14,7 +14,7 @@ import java.io.IOException; import java.io.PipedInputStream; import java.io.PipedOutputStream; -import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.security.DigestInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; @@ -117,7 +117,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t public void run() { try (PipedOutputStream dataciteOut = new PipedOutputStream(dataciteIn)) { - dataciteOut.write(dataciteXml.getBytes(Charset.forName("utf-8"))); + dataciteOut.write(dataciteXml.getBytes(StandardCharsets.UTF_8)); dataciteOut.close(); success=true; } catch (Exception e) { diff --git 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java index 512987866d4..506d23e124e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java @@ -26,7 +26,7 @@ import java.io.IOException; import java.io.PipedInputStream; import java.io.PipedOutputStream; -import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.security.DigestInputStream; import java.security.MessageDigest; import java.util.Map; @@ -82,7 +82,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t public void run() { try (PipedOutputStream dataciteOut = new PipedOutputStream(dataciteIn)) { - dataciteOut.write(dataciteXml.getBytes(Charset.forName("utf-8"))); + dataciteOut.write(dataciteXml.getBytes(StandardCharsets.UTF_8)); dataciteOut.close(); success = true; } catch (Exception e) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java index f02edd54b86..41bd2ec71b8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java @@ -17,6 +17,7 @@ import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; +import java.nio.charset.StandardCharsets; import java.util.Map; import java.util.logging.Logger; @@ -86,7 +87,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t spaceName = getSpaceName(dataset); String dataciteXml = getDataCiteXml(dv); - try (ByteArrayInputStream dataciteIn = new ByteArrayInputStream(dataciteXml.getBytes("UTF-8"))) { + try (ByteArrayInputStream dataciteIn = new ByteArrayInputStream(dataciteXml.getBytes(StandardCharsets.UTF_8))) { // Add datacite.xml file ObjectMetadata om = new ObjectMetadata(); om.setContentLength(dataciteIn.available()); diff --git a/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java index a54e61c7c1e..cf3afd1a39a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java @@ -7,10 +7,10 @@ import io.gdcc.spi.export.Exporter; import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.OutputStream; +import java.nio.charset.StandardCharsets; import java.util.Locale; import java.util.Optional; -import jakarta.json.JsonObject; import jakarta.ws.rs.core.MediaType; @@ -35,7 +35,7 @@ public String getDisplayName(Locale locale) { @Override public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException { try{ - outputStream.write(dataProvider.getDatasetJson().toString().getBytes("UTF8")); + outputStream.write(dataProvider.getDatasetJson().toString().getBytes(StandardCharsets.UTF_8)); outputStream.flush(); } catch (Exception e){ throw new ExportException("Unknown exception caught during JSON export."); diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java index feec4403570..86af45195d7 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java @@ -7,11 +7,11 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.OutputStream; +import java.nio.charset.StandardCharsets; import java.util.Locale; import java.util.Optional; import java.util.logging.Logger; -import jakarta.json.JsonObject; import jakarta.ws.rs.core.MediaType; @AutoService(Exporter.class) @@ -25,7 +25,7 @@ public class OAI_OREExporter implements Exporter { public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException { try { - outputStream.write(dataProvider.getDatasetORE().toString().getBytes("UTF8")); + outputStream.write(dataProvider.getDatasetORE().toString().getBytes(StandardCharsets.UTF_8)); outputStream.flush(); } catch (Exception e) { logger.severe(e.getMessage()); diff --git a/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java index 5428715b905..0c4b39fd641 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java @@ -7,6 +7,7 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.IOException; import java.io.OutputStream; +import java.nio.charset.StandardCharsets; import java.util.Locale; import java.util.logging.Logger; import jakarta.ws.rs.core.MediaType; @@ -75,7 +76,7 @@ public class SchemaDotOrgExporter implements Exporter { @Override public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException { try { - outputStream.write(dataProvider.getDatasetSchemaDotOrg().toString().getBytes("UTF8")); + outputStream.write(dataProvider.getDatasetSchemaDotOrg().toString().getBytes(StandardCharsets.UTF_8)); } catch (IOException ex) { logger.info("IOException calling outputStream.write: " + ex); } diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java index 73818f8fb62..2d2b0c4f6a2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java @@ -29,6 +29,7 @@ import java.io.PrintWriter; import java.nio.ByteBuffer; import java.nio.ByteOrder; +import java.nio.charset.StandardCharsets; import java.text.DecimalFormat; import java.text.NumberFormat; import java.text.ParseException; @@ -1701,7 +1702,7 @@ private void decodeData(BufferedInputStream stream, boolean saveWithVariableHead ingesteddata.setTabDelimitedFile(tabDelimitedDataFile); fileOutTab = new FileOutputStream(tabDelimitedDataFile); - pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true); + pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true); /* Should we lose this dateFormat thing in 4.0? 
* the UNF should be calculatable on the app side solely from the data diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java index 0822f6eed72..40b3e5935cc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java @@ -4,6 +4,7 @@ import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.logging.Logger; @@ -287,7 +288,7 @@ public String readString(int n) throws IOException { */ public String readUtfString(int n) throws IOException { - String ret = new String(readBytes(n), "UTF8"); + String ret = new String(readBytes(n), StandardCharsets.UTF_8); // Remove the terminating and/or padding zero bytes: if (ret.indexOf(0) > -1) { diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java index 53607d541de..96a80da11f1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java @@ -7,6 +7,7 @@ import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; import java.text.DecimalFormat; import java.text.NumberFormat; import java.text.ParseException; @@ -735,7 +736,7 @@ private void readData(DataReader reader, String variableHeaderLine) throws IOExc ingesteddata.setTabDelimitedFile(tabDelimitedDataFile); FileOutputStream fileOutTab = new FileOutputStream(tabDelimitedDataFile); - PrintWriter pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true); + PrintWriter pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true); // add the variable header here, if needed if (variableHeaderLine != null) { @@ -1001,7 +1002,7 @@ private void readSTRLs(DataReader reader) throws IOException { File finalTabFile = File.createTempFile("finalTabfile.", ".tab"); FileOutputStream fileOutTab = new FileOutputStream(finalTabFile); - PrintWriter pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true); + PrintWriter pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true); logger.fine("Setting the tab-delimited file to " + finalTabFile.getName()); ingesteddata.setTabDelimitedFile(finalTabFile); @@ -1130,7 +1131,7 @@ private String readGSO(DataReader reader, long v, long o) throws IOException { String gsoString; if (binary) { - gsoString = new String(contents, "utf8"); + gsoString = new String(contents, StandardCharsets.UTF_8); } else { gsoString = new String(contents, 0, (int) length - 1, "US-ASCII"); } @@ -1226,7 +1227,7 @@ private void readValueLabels(DataReader reader) throws IOException { } label_length = (int)(label_end - label_offset); - category_value_labels[i] = new String(Arrays.copyOfRange(labelBytes, (int)label_offset, (int)label_end-1), "UTF8"); + category_value_labels[i] = new String(Arrays.copyOfRange(labelBytes, (int)label_offset, (int)label_end-1), StandardCharsets.UTF_8); total_label_bytes += label_length; } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java index 2ee966c3e31..f3be10fb1a3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java @@ -31,7 +31,7 @@ import java.io.PrintWriter; import java.io.Writer; import java.nio.ByteBuffer; - +import java.nio.charset.StandardCharsets; import java.text.DecimalFormat; import java.text.NumberFormat; import java.text.SimpleDateFormat; @@ -567,7 +567,7 @@ private File decodeHeader(BufferedInputStream stream) throws IOException { try { tempPORfile = File.createTempFile("tempPORfile.", ".por"); fileOutPOR = new FileOutputStream(tempPORfile); - fileWriter = new BufferedWriter(new OutputStreamWriter(fileOutPOR, "utf8")); + fileWriter = new BufferedWriter(new OutputStreamWriter(fileOutPOR, StandardCharsets.UTF_8)); porScanner = new Scanner(stream); // Because 64-bit and 32-bit machines decode POR's first 40-byte @@ -1115,7 +1115,7 @@ private void decodeData(BufferedReader reader, boolean storeWithVariableHeader) try { fileOutTab = new FileOutputStream(tabDelimitedDataFile); - pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true); + pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true); variableFormatTypeList = new String[varQnty]; for (int i = 0; i < varQnty; i++) { diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java index 50f2f89e354..bc2c0e5fbc1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java @@ -22,12 +22,11 @@ import java.io.*; import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import java.text.*; import java.util.logging.*; import java.util.*; -import jakarta.inject.Inject; - // Rosuda Wrappers and Methods for R-calls to Rserve import edu.harvard.iq.dataverse.settings.JvmSettings; import org.rosuda.REngine.REXP; @@ -504,7 +503,7 @@ public TabularDataIngest read(BufferedInputStream stream, boolean saveWithVariab // created! // - L.A. 
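// [Editor's note -- an illustration, not part of this patch: the whole series
// swaps charset-name strings ("UTF-8", "utf8", "UTF8") for the
// java.nio.charset.StandardCharsets constants. A minimal sketch of the
// difference, with a hypothetical file name:
//
//   // Old style: the name is looked up at runtime, so the constructor
//   // declares the checked UnsupportedEncodingException.
//   Reader r1 = new InputStreamReader(new FileInputStream("data.csv"), "UTF-8");
//
//   // New style: the Charset constant is guaranteed present on every JVM,
//   // and this overload throws no checked encoding exception.
//   Reader r2 = new InputStreamReader(new FileInputStream("data.csv"), StandardCharsets.UTF_8);
// ]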
RTabFileParser csvFileReader = new RTabFileParser('\t'); - BufferedReader localBufferedReader = new BufferedReader(new InputStreamReader(new FileInputStream(localCsvFile), "UTF-8")); + BufferedReader localBufferedReader = new BufferedReader(new InputStreamReader(new FileInputStream(localCsvFile), StandardCharsets.UTF_8)); File tabFileDestination = File.createTempFile("data-", ".tab"); PrintWriter tabFileWriter = new PrintWriter(tabFileDestination.getAbsolutePath(), "UTF-8"); @@ -685,7 +684,7 @@ private static String readLocalResource(String path) { // Try opening a buffered reader stream try { - BufferedReader rd = new BufferedReader(new InputStreamReader(resourceStream, "UTF-8")); + BufferedReader rd = new BufferedReader(new InputStreamReader(resourceStream, StandardCharsets.UTF_8)); String line = null; while ((line = rd.readLine()) != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java index 5eecbdfb666..e1628540153 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java @@ -29,7 +29,7 @@ import java.io.UnsupportedEncodingException; import java.nio.ByteBuffer; import java.nio.ByteOrder; - +import java.nio.charset.StandardCharsets; import java.text.DecimalFormat; import java.text.NumberFormat; import java.text.SimpleDateFormat; @@ -58,10 +58,7 @@ import edu.harvard.iq.dataverse.DataTable; import edu.harvard.iq.dataverse.datavariable.DataVariable; -import edu.harvard.iq.dataverse.datavariable.SummaryStatistic; import edu.harvard.iq.dataverse.datavariable.VariableCategory; -import edu.harvard.iq.dataverse.datavariable.VariableRange; - import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataFileReader; import edu.harvard.iq.dataverse.ingest.tabulardata.spi.TabularDataFileReaderSpi; import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataIngest; @@ -2347,7 +2344,7 @@ PrintWriter createOutputWriter (BufferedInputStream stream) throws IOException { fileOutTab = new FileOutputStream(tabDelimitedDataFile); - pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true); + pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true); } catch (FileNotFoundException ex) { ex.printStackTrace(); diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java index 2627bc76fd9..9d61663d034 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java @@ -120,21 +120,21 @@ public void reRegisterHandle(DvObject dvObject) { try { - AdminRecord admin = new AdminRecord(authHandle.getBytes("UTF8"), handlenetIndex, + AdminRecord admin = new AdminRecord(authHandle.getBytes(StandardCharsets.UTF_8), handlenetIndex, true, true, true, true, true, true, true, true, true, true, true, true); int timestamp = (int) (System.currentTimeMillis() / 1000); - HandleValue[] val = {new HandleValue(100, "HS_ADMIN".getBytes("UTF8"), + HandleValue[] val = {new HandleValue(100, "HS_ADMIN".getBytes(StandardCharsets.UTF_8), Encoder.encodeAdminRecord(admin), HandleValue.TTL_TYPE_RELATIVE, 86400, - timestamp, null, true, true, true, false), new 
HandleValue(1, "URL".getBytes("UTF8"), + timestamp, null, true, true, true, false), new HandleValue(1, "URL".getBytes(StandardCharsets.UTF_8), datasetUrl.getBytes(), HandleValue.TTL_TYPE_RELATIVE, 86400, timestamp, null, true, true, true, false)}; - ModifyValueRequest req = new ModifyValueRequest(handle.getBytes("UTF8"), val, auth); + ModifyValueRequest req = new ModifyValueRequest(handle.getBytes(StandardCharsets.UTF_8), val, auth); resolver.traceMessages = true; AbstractResponse response = resolver.processRequest(req); @@ -168,22 +168,22 @@ public Throwable registerNewHandle(DvObject dvObject) { try { - AdminRecord admin = new AdminRecord(authHandle.getBytes("UTF8"), handlenetIndex, + AdminRecord admin = new AdminRecord(authHandle.getBytes(StandardCharsets.UTF_8), handlenetIndex, true, true, true, true, true, true, true, true, true, true, true, true); int timestamp = (int) (System.currentTimeMillis() / 1000); - HandleValue[] val = {new HandleValue(100, "HS_ADMIN".getBytes("UTF8"), + HandleValue[] val = {new HandleValue(100, "HS_ADMIN".getBytes(StandardCharsets.UTF_8), Encoder.encodeAdminRecord(admin), HandleValue.TTL_TYPE_RELATIVE, 86400, - timestamp, null, true, true, true, false), new HandleValue(1, "URL".getBytes("UTF8"), + timestamp, null, true, true, true, false), new HandleValue(1, "URL".getBytes(StandardCharsets.UTF_8), datasetUrl.getBytes(), HandleValue.TTL_TYPE_RELATIVE, 86400, timestamp, null, true, true, true, false)}; CreateHandleRequest req - = new CreateHandleRequest(handle.getBytes("UTF8"), val, auth); + = new CreateHandleRequest(handle.getBytes(StandardCharsets.UTF_8), val, auth); resolver.traceMessages = true; AbstractResponse response = resolver.processRequest(req); diff --git a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java index 6e8a512902a..a8b28d2d79d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java @@ -21,6 +21,7 @@ import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import java.io.OutputStream; import java.io.OutputStreamWriter; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; import java.util.logging.Level; @@ -499,7 +500,7 @@ public void showJsonPreviewNewWindow() throws IOException, WrappedResponse { OutputStream output = ec.getResponseOutputStream(); - OutputStreamWriter osw = new OutputStreamWriter(output, "UTF-8"); + OutputStreamWriter osw = new OutputStreamWriter(output, StandardCharsets.UTF_8); osw.write(provJsonState); //the button calling this will only be rendered if provJsonState exists (e.g. 
a file is uploaded) osw.close(); fc.responseComplete(); diff --git a/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java b/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java index df2e44ecb27..dbcfc039fa1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java +++ b/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -758,7 +759,7 @@ private static String readLocalResource(String path) { // Try opening a buffered reader stream try { - resourceAsString = IOUtils.toString(resourceStream, "UTF-8"); + resourceAsString = IOUtils.toString(resourceStream, StandardCharsets.UTF_8); resourceStream.close(); } catch (IOException ex) { logger.warning(String.format("RDATAFileReader: (readLocalResource) resource stream from path \"%s\" was invalid", path)); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java index 137ae21d793..56f85436773 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java @@ -1,7 +1,7 @@ package edu.harvard.iq.dataverse.util; import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2LoginBackingBean; -import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; import java.nio.ByteBuffer; import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; @@ -152,7 +152,7 @@ public static String encrypt(String value, String password ) { .replaceAll("/", "_"); } catch ( InvalidKeyException | NoSuchAlgorithmException | BadPaddingException - | IllegalBlockSizeException | NoSuchPaddingException | UnsupportedEncodingException | InvalidAlgorithmParameterException ex) { + | IllegalBlockSizeException | NoSuchPaddingException | InvalidAlgorithmParameterException ex) { Logger.getLogger(OAuth2LoginBackingBean.class.getName()).log(Level.SEVERE, null, ex); throw new RuntimeException(ex); } @@ -173,7 +173,7 @@ public static String decrypt(String value, String password ) { return new String(decrypted); } catch ( InvalidKeyException | NoSuchAlgorithmException | BadPaddingException - | IllegalBlockSizeException | NoSuchPaddingException | UnsupportedEncodingException | InvalidAlgorithmParameterException ex) { + | IllegalBlockSizeException | NoSuchPaddingException | InvalidAlgorithmParameterException ex) { Logger.getLogger(OAuth2LoginBackingBean.class.getName()).log(Level.SEVERE, null, ex); throw new RuntimeException(ex); } @@ -209,8 +209,8 @@ public static String sanitizeFileDirectory(String value, boolean aggressively){ } - private static SecretKeySpec generateKeyFromString(final String secKey) throws UnsupportedEncodingException, NoSuchAlgorithmException { - byte[] key = (secKey).getBytes("UTF-8"); + private static SecretKeySpec generateKeyFromString(final String secKey) throws NoSuchAlgorithmException { + byte[] key = (secKey).getBytes(StandardCharsets.UTF_8); MessageDigest sha = MessageDigest.getInstance("SHA-1"); key = sha.digest(key); key = Arrays.copyOf(key, 16); // use only first 128 bits diff --git a/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java index 
29c4e8a6fb9..18ea3771301 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java @@ -2,7 +2,7 @@ import java.net.MalformedURLException; import java.net.URL; -import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; @@ -96,7 +96,7 @@ public static boolean isValidUrl(String signedUrl, String user, String method, S boolean valid = true; try { URL url = new URL(signedUrl); - List<NameValuePair> params = URLEncodedUtils.parse(url.getQuery(), Charset.forName("UTF-8")); + List<NameValuePair> params = URLEncodedUtils.parse(url.getQuery(), StandardCharsets.UTF_8); String hash = null; String dateString = null; String allowedMethod = null; @@ -156,7 +156,7 @@ public static boolean isValidUrl(String signedUrl, String user, String method, S public static boolean hasToken(String urlString) { try { URL url = new URL(urlString); - List<NameValuePair> params = URLEncodedUtils.parse(url.getQuery(), Charset.forName("UTF-8")); + List<NameValuePair> params = URLEncodedUtils.parse(url.getQuery(), StandardCharsets.UTF_8); for (NameValuePair nvp : params) { if (nvp.getName().equals(SIGNED_URL_TOKEN)) { return true; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java index b7c44014b80..e47426149f9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java @@ -9,10 +9,10 @@ import java.io.InputStreamReader; import java.io.OutputStream; import java.io.PrintWriter; -import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; import java.nio.file.Paths; import java.security.KeyManagementException; import java.security.KeyStoreException; @@ -686,12 +686,7 @@ private void createFileFromString(final String relPath, final String content) archiveEntry.setMethod(ZipEntry.DEFLATED); InputStreamSupplier supp = new InputStreamSupplier() { public InputStream get() { - try { - return new ByteArrayInputStream(content.getBytes("UTF-8")); - } catch (UnsupportedEncodingException e) { - e.printStackTrace(); - } - return null; + return new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)); } }; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java index 84bc7834ab9..60ab9407269 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java @@ -11,6 +11,7 @@ import edu.harvard.iq.dataverse.util.json.JsonPrinter; import java.io.OutputStream; +import java.nio.charset.StandardCharsets; import java.time.LocalDate; import java.util.List; import java.util.Map; @@ -68,7 +69,7 @@ public OREMap(DatasetVersion dv, boolean exclude) { } public void writeOREMap(OutputStream outputStream) throws Exception { - outputStream.write(getOREMap().toString().getBytes("UTF8")); + outputStream.write(getOREMap().toString().getBytes(StandardCharsets.UTF_8)); outputStream.flush(); } From 49ad92ce42d8c13c6163099b490be3b10ce42238 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 31 Oct 2023 12:52:49 +0100 Subject: [PATCH 03/17] Remove superfluous implements declaration --- .../engine/command/impl/DuraCloudSubmitToArchiveCommand.java | 3 
+-- .../engine/command/impl/GoogleCloudSubmitToArchiveCommand.java | 3 +-- .../engine/command/impl/S3SubmitToArchiveCommand.java | 3 +-- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java index 15c469ec769..94f983f0c13 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java @@ -5,7 +5,6 @@ import edu.harvard.iq.dataverse.DatasetLock.Reason; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.ApiToken; -import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.workflow.step.Failure; @@ -32,7 +31,7 @@ import org.duracloud.error.ContentStoreException; @RequiredPermissions(Permission.PublishDataset) -public class DuraCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand implements Command { +public class DuraCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand { private static final Logger logger = Logger.getLogger(DuraCloudSubmitToArchiveCommand.class.getName()); private static final String DEFAULT_PORT = "443"; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java index 506d23e124e..7d749262b87 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java @@ -11,7 +11,6 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.ApiToken; -import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.settings.JvmSettings; @@ -33,7 +32,7 @@ import java.util.logging.Logger; @RequiredPermissions(Permission.PublishDataset) -public class GoogleCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand implements Command { +public class GoogleCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand { private static final Logger logger = Logger.getLogger(GoogleCloudSubmitToArchiveCommand.class.getName()); private static final String GOOGLECLOUD_BUCKET = ":GoogleCloudBucket"; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java index 41bd2ec71b8..a660b1a4d59 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java @@ -5,7 +5,6 @@ import edu.harvard.iq.dataverse.DatasetLock.Reason; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.ApiToken; -import 
edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.util.bagit.BagGenerator; @@ -42,7 +41,7 @@ import com.amazonaws.services.s3.transfer.TransferManagerBuilder; @RequiredPermissions(Permission.PublishDataset) -public class S3SubmitToArchiveCommand extends AbstractSubmitToArchiveCommand implements Command { +public class S3SubmitToArchiveCommand extends AbstractSubmitToArchiveCommand { private static final Logger logger = Logger.getLogger(S3SubmitToArchiveCommand.class.getName()); private static final String S3_CONFIG = ":S3ArchiverConfig"; From 088b5fcc55f41651b807063f1e544cd5e6f5df07 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 31 Oct 2023 12:56:44 +0100 Subject: [PATCH 04/17] Use try-with-resources in DatasetUtil --- .../harvard/iq/dataverse/dataset/DatasetUtil.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index c1b95d27938..a299915cb77 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -318,17 +318,17 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat int width = fullSizeImage.getWidth(); int height = fullSizeImage.getHeight(); FileChannel src = null; - try { - src = new FileInputStream(tmpFile).getChannel(); - } catch (FileNotFoundException ex) { + try (FileInputStream fis = new FileInputStream(tmpFile)) { + src = fis.getChannel(); + } catch (IOException ex) { IOUtils.closeQuietly(inputStream); logger.severe(ex.getMessage()); return null; } FileChannel dest = null; - try { - dest = new FileOutputStream(tmpFile).getChannel(); - } catch (FileNotFoundException ex) { + try (FileInputStream fis = new FileInputStream(tmpFile)) { + dest = fis.getChannel(); + } catch (IOException ex) { IOUtils.closeQuietly(inputStream); logger.severe(ex.getMessage()); return null; @@ -410,7 +410,7 @@ public static InputStream getThumbnailAsInputStream(Dataset dataset, int size) { String leadingStringToRemove = FileUtil.DATA_URI_SCHEME; String encodedImg = base64Image.substring(leadingStringToRemove.length()); byte[] decodedImg = Base64.getDecoder().decode(encodedImg.getBytes(StandardCharsets.UTF_8)); - logger.fine("returning this many bytes for " + "dataset id: " + dataset.getId() + ", persistentId: " + dataset.getIdentifier() + " :" + decodedImg.length); + logger.fine("returning this many bytes for " + "dataset id: " + dataset.getId() + ", persistentId: " + dataset.getIdentifier() + " :" + decodedImg.length); ByteArrayInputStream nonDefaultDatasetThumbnail = new ByteArrayInputStream(decodedImg); logger.fine("For dataset id " + dataset.getId() + " a thumbnail was found and is being returned."); return nonDefaultDatasetThumbnail; From db5b91f322730f52ac4608c52533331a49772a7a Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 31 Oct 2023 13:05:31 +0100 Subject: [PATCH 05/17] Use StandardCharsets.US_ASCII --- .../dataverse/ingest/IngestableDataChecker.java | 7 ++++--- .../tabulardata/impl/plugins/dta/DataReader.java | 16 ++++++++-------- .../impl/plugins/dta/NewDTAFileReader.java | 2 +- .../impl/plugins/por/PORFileReader.java | 2 +- .../impl/plugins/sav/SAVFileReader.java | 10 +++++----- 5 files changed, 19 insertions(+), 18 deletions(-) diff 
--git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java index 9b62b62fe61..fa83552a9ec 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java @@ -24,6 +24,7 @@ import java.io.*; import java.nio.*; import java.nio.channels.*; +import java.nio.charset.StandardCharsets; import java.util.*; import java.lang.reflect.*; import java.util.regex.*; @@ -252,7 +253,7 @@ public String testDTAformat(MappedByteBuffer buff) { try { headerBuffer = new byte[STATA_13_HEADER.length()]; buff.get(headerBuffer, 0, STATA_13_HEADER.length()); - headerString = new String(headerBuffer, "US-ASCII"); + headerString = new String(headerBuffer, StandardCharsets.US_ASCII); } catch (Exception ex) { // probably a buffer underflow exception; // we don't have to do anything... null will @@ -273,7 +274,7 @@ public String testDTAformat(MappedByteBuffer buff) { try { headerBuffer = new byte[STATA_14_HEADER.length()]; buff.get(headerBuffer, 0, STATA_14_HEADER.length()); - headerString = new String(headerBuffer, "US-ASCII"); + headerString = new String(headerBuffer, StandardCharsets.US_ASCII); } catch (Exception ex) { // probably a buffer underflow exception; // we don't have to do anything... null will @@ -292,7 +293,7 @@ public String testDTAformat(MappedByteBuffer buff) { try { headerBuffer = new byte[STATA_15_HEADER.length()]; buff.get(headerBuffer, 0, STATA_15_HEADER.length()); - headerString = new String(headerBuffer, "US-ASCII"); + headerString = new String(headerBuffer, StandardCharsets.US_ASCII); } catch (Exception ex) { // probably a buffer underflow exception; // we don't have to do anything... 
null will diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java index 40b3e5935cc..913c0ebeab2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java @@ -274,7 +274,7 @@ public float readFloat() throws IOException { */ public String readString(int n) throws IOException { - String ret = new String(readBytes(n), "US-ASCII"); + String ret = new String(readBytes(n), StandardCharsets.US_ASCII); // Remove the terminating and/or padding zero bytes: if (ret != null && ret.indexOf(0) > -1) { @@ -315,11 +315,11 @@ public byte[] readPrimitiveSection(String tag, int length) throws IOException { } public String readPrimitiveStringSection(String tag) throws IOException { - return new String(readPrimitiveSection(tag), "US-ASCII"); + return new String(readPrimitiveSection(tag), StandardCharsets.US_ASCII); } public String readPrimitiveStringSection(String tag, int length) throws IOException { - return new String(readPrimitiveSection(tag, length), "US-ASCII"); + return new String(readPrimitiveSection(tag, length), StandardCharsets.US_ASCII); } public String readLabelSection(String tag, int limit) throws IOException { @@ -333,7 +333,7 @@ public String readLabelSection(String tag, int limit) throws IOException { logger.fine("length of label: " + lengthOfLabel); String label = null; if (lengthOfLabel > 0) { - label = new String(readBytes(lengthOfLabel), "US-ASCII"); + label = new String(readBytes(lengthOfLabel), StandardCharsets.US_ASCII); } logger.fine("ret: " + label); readClosingTag(tag); @@ -359,7 +359,7 @@ public String readDefinedStringSection(String tag, int limit) throws IOException } String ret = null; if (number > 0) { - ret = new String(readBytes(number), "US-ASCII"); + ret = new String(readBytes(number), StandardCharsets.US_ASCII); } logger.fine("ret: " + ret); readClosingTag(tag); @@ -401,7 +401,7 @@ public boolean checkTag(String tag) throws IOException { int n = tag.length(); if ((this.buffer_size - buffer_byte_offset) >= n) { - return (tag).equals(new String(Arrays.copyOfRange(buffer, buffer_byte_offset, buffer_byte_offset+n),"US-ASCII")); + return (tag).equals(new String(Arrays.copyOfRange(buffer, buffer_byte_offset, buffer_byte_offset+n),StandardCharsets.US_ASCII)); } else{ bufferMoreBytes(); @@ -415,7 +415,7 @@ public void readOpeningTag(String tag) throws IOException { throw new IOException("opening tag must be a non-empty string."); } - String openTagString = new String(readBytes(tag.length() + 2), "US-ASCII"); + String openTagString = new String(readBytes(tag.length() + 2), StandardCharsets.US_ASCII); if (openTagString == null || !openTagString.equals("<"+tag+">")) { throw new IOException("Could not read opening tag <"+tag+">"); } @@ -426,7 +426,7 @@ public void readClosingTag(String tag) throws IOException { throw new IOException("closing tag must be a non-empty string."); } - String closeTagString = new String(readBytes(tag.length() + 3), "US-ASCII"); + String closeTagString = new String(readBytes(tag.length() + 3), StandardCharsets.US_ASCII); logger.fine("closeTagString: " + closeTagString); if (closeTagString == null || !closeTagString.equals("</"+tag+">")) { diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java 
b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java index 96a80da11f1..b0f2c50c997 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java @@ -1133,7 +1133,7 @@ private String readGSO(DataReader reader, long v, long o) throws IOException { if (binary) { gsoString = new String(contents, StandardCharsets.UTF_8); } else { - gsoString = new String(contents, 0, (int) length - 1, "US-ASCII"); + gsoString = new String(contents, 0, (int) length - 1, StandardCharsets.US_ASCII); } logger.fine("GSO " + v + "," + o + ": " + gsoString); diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java index f3be10fb1a3..13325ca8f60 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java @@ -195,7 +195,7 @@ public TabularDataIngest read(BufferedInputStream stream, boolean storeWithVaria BufferedReader bfReader = null; try { - bfReader = new BufferedReader(new InputStreamReader(new FileInputStream(tempPORfile.getAbsolutePath()), "US-ASCII")); + bfReader = new BufferedReader(new InputStreamReader(new FileInputStream(tempPORfile.getAbsolutePath()), StandardCharsets.US_ASCII)); if (bfReader == null){ dbgLog.fine("bfReader is null"); throw new IOException("bufferedReader is null"); diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java index e1628540153..308ff352b2a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java @@ -630,7 +630,7 @@ void decodeRecordType1(BufferedInputStream stream) throws IOException { int offset_end = LENGTH_SPSS_PRODUCT_INFO; // 60 bytes String productInfo = new String(Arrays.copyOfRange(recordType1, offset_start, - offset_end),"US-ASCII"); + offset_end),StandardCharsets.US_ASCII); dbgLog.fine("productInfo:\n"+productInfo+"\n"); dataTable.setOriginalFormatVersion(productInfo); @@ -869,7 +869,7 @@ void decodeRecordType1(BufferedInputStream stream) throws IOException { offset_end += LENGTH_FILE_CREATION_INFO; // 84 bytes String fileCreationInfo = getNullStrippedString(new String(Arrays.copyOfRange(recordType1, offset_start, - offset_end),"US-ASCII")); + offset_end),StandardCharsets.US_ASCII)); dbgLog.fine("fileCreationInfo:\n"+fileCreationInfo+"\n"); @@ -1217,7 +1217,7 @@ void decodeRecordType2(BufferedInputStream stream) throws IOException { // borders. So we always read the bytes, but only use them for // the real variable entries. 
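// [Editor's note -- an illustrative sketch, not part of this patch: these
// readers decode fixed-width, zero-padded byte fields, and no Charset strips
// that padding for you. The recurring idiom, with hypothetical names:
//
//   String raw = new String(fieldBytes, StandardCharsets.US_ASCII);
//   int nul = raw.indexOf(0);
//   String value = (nul > -1) ? raw.substring(0, nul) : raw;
//
// This mirrors what readString(...) and getNullStrippedString(...) do in the
// surrounding hunks.]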
/*String variableLabel = new String(Arrays.copyOfRange(variable_label, - 0, rawVariableLabelLength),"US-ASCII");*/ + 0, rawVariableLabelLength),StandardCharsets.US_ASCII);*/ variableLabelMap.put(variableName, variableLabel); } @@ -2072,7 +2072,7 @@ void decodeRecordType7(BufferedInputStream stream) throws IOException { byte[] work = new byte[unitLength*numberOfUnits]; int nbtyes13 = stream.read(work); - String[] variableShortLongNamePairs = new String(work,"US-ASCII").split("\t"); + String[] variableShortLongNamePairs = new String(work,StandardCharsets.US_ASCII).split("\t"); for (int i=0; i<variableShortLongNamePairs.length; i++){ [...] From [...] Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 31 Oct 2023 13:09:30 +0100 Subject: [PATCH 06/17] Use StandardCharsets.ISO_8859_1 --- .../java/edu/harvard/iq/dataverse/Shib.java | 2 +- .../impl/plugins/dta/DTAFileReader.java | 22 +++++++++---------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java index 21c39950604..a3dfbf81512 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Shib.java +++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java @@ -460,7 +460,7 @@ private String getRequiredValueFromAssertion(String key) throws Exception { throw new Exception(key + " was empty"); } if (systemConfig.isShibAttributeCharacterSetConversionEnabled()) { - attributeValue= new String( attributeValue.getBytes("ISO-8859-1"), StandardCharsets.UTF_8); + attributeValue= new String( attributeValue.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8); } String trimmedValue = attributeValue.trim(); logger.fine("The SAML assertion for \"" + key + "\" (required) was \"" + attributeValue + "\" and was trimmed to \"" + trimmedValue + "\"."); diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java index 2d2b0c4f6a2..f0262af9e33 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java @@ -686,7 +686,7 @@ private void decodeHeader(BufferedInputStream stream) throws IOException { } String data_label = new String(Arrays.copyOfRange(header, dl_offset, - (dl_offset + dataLabelLength)), "ISO-8859-1"); + (dl_offset + dataLabelLength)), StandardCharsets.ISO_8859_1); if (dbgLog.isLoggable(Level.FINE)) { dbgLog.fine("data_label_length=" + data_label.length()); } @@ -711,7 +711,7 @@ private void decodeHeader(BufferedInputStream stream) throws IOException { if (releaseNumber > 104) { int ts_offset = dl_offset + dataLabelLength; String time_stamp = new String(Arrays.copyOfRange(header, ts_offset, - ts_offset + TIME_STAMP_LENGTH), "ISO-8859-1"); + ts_offset + TIME_STAMP_LENGTH), StandardCharsets.ISO_8859_1); if (dbgLog.isLoggable(Level.FINE)) { dbgLog.fine("time_stamp_length=" + time_stamp.length()); } @@ -913,7 +913,7 @@ private void decodeDescriptorVarNameList(BufferedInputStream stream, int nvar) t for (DataVariable dataVariable: dataTable.getDataVariables()) { offset_end += length_var_name; String vari = new String(Arrays.copyOfRange(variableNameBytes, offset_start, - offset_end), "ISO-8859-1"); + offset_end), StandardCharsets.ISO_8859_1); String varName = getNullStrippedString(vari); dataVariable.setName(varName); dbgLog.fine("next name=[" + varName + "]"); @@ -979,7 +979,7 @@ private void decodeDescriptorVariableFormat(BufferedInputStream stream, int nvar for 
(int i = 0; i < nvar; i++) { offset_end += length_var_format; String vari = new String(Arrays.copyOfRange(variableFormatList, offset_start, - offset_end), "ISO-8859-1"); + offset_end), StandardCharsets.ISO_8859_1); String variableFormat = getNullStrippedString(vari); if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine(i + "-th format=[" + variableFormat + "]"); @@ -1046,7 +1046,7 @@ private void decodeDescriptorValueLabel(BufferedInputStream stream, int nvar) th for (int i = 0; i < nvar; i++) { offset_end += length_label_name; String vari = new String(Arrays.copyOfRange(labelNameList, offset_start, - offset_end), "ISO-8859-1"); + offset_end), StandardCharsets.ISO_8859_1); labelNames[i] = getNullStrippedString(vari); dbgLog.fine(i + "-th label=[" + labelNames[i] + "]"); offset_start = offset_end; @@ -1091,7 +1091,7 @@ private void decodeVariableLabels(BufferedInputStream stream) throws IOException for (int i = 0; i < nvar; i++) { offset_end += length_var_label; String vari = new String(Arrays.copyOfRange(variableLabelBytes, offset_start, - offset_end), "ISO-8859-1"); + offset_end), StandardCharsets.ISO_8859_1); String variableLabelParsed = getNullStrippedString(vari); if (dbgLog.isLoggable(Level.FINE)) { @@ -1273,7 +1273,7 @@ void parseValueLabelsRelease105(BufferedInputStream stream) throws IOException { valueLabelHeader, value_label_table_length, (value_label_table_length + length_label_name)), - "ISO-8859-1"); + StandardCharsets.ISO_8859_1); if (dbgLog.isLoggable(Level.FINE)) { dbgLog.fine("rawLabelName(length)=" + rawLabelName.length()); @@ -1336,7 +1336,7 @@ void parseValueLabelsRelease105(BufferedInputStream stream) throws IOException { for (int l = 0; l < no_value_label_pairs; l++) { String string_l = new String(Arrays.copyOfRange(valueLabelTable_i, offset_start, - offset_end), "ISO-8859-1"); + offset_end), StandardCharsets.ISO_8859_1); int null_position = string_l.indexOf(0); if (null_position != -1) { @@ -1486,7 +1486,7 @@ private void parseValueLabelsReleasel108(BufferedInputStream stream) throws IOEx valueLabelHeader, value_label_table_length, (value_label_table_length + length_label_name)), - "ISO-8859-1"); + StandardCharsets.ISO_8859_1); String labelName = getNullStrippedString(rawLabelName); if (dbgLog.isLoggable(Level.FINE)) { @@ -1582,7 +1582,7 @@ private void parseValueLabelsReleasel108(BufferedInputStream stream) throws IOEx String label_segment = new String( Arrays.copyOfRange(valueLabelTable_i, offset_value, - (length_label_segment + offset_value)), "ISO-8859-1"); + (length_label_segment + offset_value)), StandardCharsets.ISO_8859_1); // L.A. -- 2011.2.25: // This assumes that the labels are already stored in the right @@ -1933,7 +1933,7 @@ private void decodeData(BufferedInputStream stream, boolean saveWithVariableHead // String case int strVarLength = StringLengthTable.get(columnCounter); String raw_datum = new String(Arrays.copyOfRange(dataRowBytes, byte_offset, - (byte_offset + strVarLength)), "ISO-8859-1"); + (byte_offset + strVarLength)), StandardCharsets.ISO_8859_1); // TODO: // is it the right thing to do, to default to "ISO-8859-1"? 
// (it may be; since there's no mechanism for specifying From 6544a939974534a6b254c78a7880f6bdd6858817 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Tue, 9 Jul 2024 16:11:41 +0200 Subject: [PATCH 07/17] Remove unused imports, replace charset literal Co-authored-by: Steven Winship <39765413+stevenwinship@users.noreply.github.com> --- .../dataaccess/TabularSubsetGenerator.java | 15 --------------- .../impl/plugins/rdata/RDATAFileReader.java | 2 +- 2 files changed, 1 insertion(+), 16 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java index e9c28b9fc7c..a42bb35615f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java @@ -20,31 +20,16 @@ package edu.harvard.iq.dataverse.dataaccess; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.datavariable.DataVariable; - -import java.io.BufferedInputStream; -import java.io.BufferedOutputStream; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; -import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Scanner; -import java.util.Set; -import java.math.BigDecimal; -import java.math.MathContext; -import java.math.RoundingMode; -import java.nio.ByteBuffer; -import java.nio.channels.FileChannel; -import java.nio.charset.StandardCharsets; -import java.nio.file.Paths; -import java.nio.file.StandardOpenOption; import java.util.logging.Logger; import java.util.regex.Matcher; diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java index bc2c0e5fbc1..215c7a5e6d2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java @@ -506,7 +506,7 @@ public TabularDataIngest read(BufferedInputStream stream, boolean saveWithVariab BufferedReader localBufferedReader = new BufferedReader(new InputStreamReader(new FileInputStream(localCsvFile), StandardCharsets.UTF_8)); File tabFileDestination = File.createTempFile("data-", ".tab"); - PrintWriter tabFileWriter = new PrintWriter(tabFileDestination.getAbsolutePath(), "UTF-8"); + PrintWriter tabFileWriter = new PrintWriter(tabFileDestination.getAbsolutePath(), StandardCharsets.UTF_8); int lineCount = csvFileReader.read(localBufferedReader, dataTable, saveWithVariableHeader, tabFileWriter); From a82c6f94d9c458008c3ef763191f6bb6008583c8 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Mon, 15 Jul 2024 22:34:45 +0200 Subject: [PATCH 08/17] Create FileOutputStream for copy destination I made a copy-paste error when rewriting code with try-with-resources. 
Also:
- fix duplication in variable assignment in SearchIT
- remove unused imports
---
 .../java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java | 5 ++---
 src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java   | 3 +--
 2 files changed, 3 insertions(+), 5 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
index a299915cb77..a5f437b5c9f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
@@ -17,7 +17,6 @@ import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -326,8 +325,8 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat
             return null;
         }
         FileChannel dest = null;
-        try (FileInputStream fis = new FileInputStream(tmpFile)) {
-            dest = fis.getChannel();
+        try (FileOutputStream fos = new FileOutputStream(tmpFile)) {
+            dest = fos.getChannel();
         } catch (IOException ex) {
             IOUtils.closeQuietly(inputStream);
             logger.severe(ex.getMessage());

diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
index 6e4fd5b0bb3..6256a3c3bee 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
@@ -25,7 +25,6 @@ import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
 import java.awt.image.BufferedImage;
 import java.io.IOException;
-import static java.lang.Thread.sleep;
 import javax.imageio.ImageIO;
 import static jakarta.ws.rs.core.Response.Status.CREATED;
 import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
@@ -599,7 +598,7 @@ public void testDatasetThumbnail() {

         String datasetLogo = "src/main/webapp/resources/images/cc0.png";
         File datasetLogoFile = new File(datasetLogo);
-        String datasetLogoAsBase64 = datasetLogoAsBase64 = ImageThumbConverter.generateImageThumbnailFromFileAsBase64(datasetLogoFile, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
+        String datasetLogoAsBase64 = ImageThumbConverter.generateImageThumbnailFromFileAsBase64(datasetLogoFile, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);

         if (datasetLogoAsBase64 == null) {
             Logger.getLogger(SearchIT.class.getName()).log(Level.SEVERE, "Failed to generate a base64 thumbnail from the file dataverseproject.png");

From ab1cda392f1aeae5f2e2e6d81741f514e26cc49c Mon Sep 17 00:00:00 2001
From: Ben Companjen
Date: Mon, 22 Jul 2024 12:14:13 +0200
Subject: [PATCH 09/17] Add messages to log statements

---
 .../harvard/iq/dataverse/dataset/DatasetUtil.java | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
index a5f437b5c9f..17f09185103 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
@@ -280,7 +280,7 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat
         try {
             tmpFile = FileUtil.inputStreamToFile(inputStream);
         } catch (IOException ex) {
-            logger.severe(ex.getMessage());
+            logger.severe("FileUtil.inputStreamToFile failed for tmpFile: " + ex.getMessage());
         }

         StorageIO<Dataset> dataAccess = null;
@@ -306,7 +306,7 @@ public static Dataset
persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat fullSizeImage = ImageIO.read(tmpFile); } catch (IOException ex) { IOUtils.closeQuietly(inputStream); - logger.severe(ex.getMessage()); + logger.severe("ImageIO.read failed for tmpFile: " + ex.getMessage()); return null; } if (fullSizeImage == null) { @@ -321,7 +321,7 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat src = fis.getChannel(); } catch (IOException ex) { IOUtils.closeQuietly(inputStream); - logger.severe(ex.getMessage()); + logger.severe("fis.getChannel failed: " + ex.getMessage()); return null; } FileChannel dest = null; @@ -329,13 +329,13 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat dest = fos.getChannel(); } catch (IOException ex) { IOUtils.closeQuietly(inputStream); - logger.severe(ex.getMessage()); + logger.severe("fos.getChannel failed: " + ex.getMessage()); return null; } try { dest.transferFrom(src, 0, src.size()); } catch (IOException ex) { - logger.severe(ex.getMessage()); + logger.severe("dest.transferFrom failed: " + ex.getMessage()); return null; } File tmpFileForResize = null; @@ -343,7 +343,7 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat //The stream was used around line 274 above, so this creates an empty file (OK since all it is used for is getting a path, but not reusing it here would make it easier to close it above.) tmpFileForResize = FileUtil.inputStreamToFile(inputStream); } catch (IOException ex) { - logger.severe(ex.getMessage()); + logger.severe("FileUtil.inputStreamToFile failed for tmpFileForResize: " + ex.getMessage()); return null; } finally { IOUtils.closeQuietly(inputStream); From 2c7f84006627a25b5221aea41d407d7edd488975 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Mon, 22 Jul 2024 12:47:01 +0200 Subject: [PATCH 10/17] Do channel operations in single try-with-res block --- .../iq/dataverse/dataset/DatasetUtil.java | 19 ++++--------------- 1 file changed, 4 insertions(+), 15 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index 17f09185103..8797fc5f2de 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -317,25 +317,14 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat int width = fullSizeImage.getWidth(); int height = fullSizeImage.getHeight(); FileChannel src = null; - try (FileInputStream fis = new FileInputStream(tmpFile)) { - src = fis.getChannel(); - } catch (IOException ex) { - IOUtils.closeQuietly(inputStream); - logger.severe("fis.getChannel failed: " + ex.getMessage()); - return null; - } FileChannel dest = null; - try (FileOutputStream fos = new FileOutputStream(tmpFile)) { + try (FileInputStream fis = new FileInputStream(tmpFile); FileOutputStream fos = new FileOutputStream(tmpFile)) { + src = fis.getChannel(); dest = fos.getChannel(); - } catch (IOException ex) { - IOUtils.closeQuietly(inputStream); - logger.severe("fos.getChannel failed: " + ex.getMessage()); - return null; - } - try { dest.transferFrom(src, 0, src.size()); } catch (IOException ex) { - logger.severe("dest.transferFrom failed: " + ex.getMessage()); + IOUtils.closeQuietly(inputStream); + logger.severe("Error occurred during transfer using FileChannels: " + ex.getMessage()); return null; } File tmpFileForResize = null; From 
dcca831f78c6555c9a70be38a0f91d88eacfdd08 Mon Sep 17 00:00:00 2001
From: Ben Companjen
Date: Mon, 22 Jul 2024 12:54:43 +0200
Subject: [PATCH 11/17] Use FileUtils.writeStringToFile that is not deprecated

The supported version takes a Charset argument
---
 src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
index 8797fc5f2de..9eae4b7eb0f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
@@ -609,7 +609,7 @@ public static boolean validateDatasetMetadataExternally(Dataset ds, String execu
         try {
             File tempFile = File.createTempFile("datasetMetadataCheck", ".tmp");
-            FileUtils.writeStringToFile(tempFile, jsonMetadata);
+            FileUtils.writeStringToFile(tempFile, jsonMetadata, StandardCharsets.UTF_8);

             // run the external executable:
             String[] params = { executable, tempFile.getAbsolutePath() };

From f028f0a9a3b22e87b7784f49493bb85e9a2cc560 Mon Sep 17 00:00:00 2001
From: Steven Winship <39765413+stevenwinship@users.noreply.github.com>
Date: Thu, 1 Aug 2024 16:37:22 -0400
Subject: [PATCH 12/17] adding publication statuses to search api response

---
 ...ublication-status-to-search-api-results.md | 14 +++++++++++
 doc/sphinx-guides/source/api/search.rst       | 16 +++++++++++++
 .../iq/dataverse/search/SolrSearchResult.java |  3 +++
 .../harvard/iq/dataverse/api/SearchIT.java    |  1 +
 .../search/SolrSearchResultTest.java          | 23 +++++++++++++++++++
 5 files changed, 57 insertions(+)
 create mode 100644 doc/release-notes/10733-add-publication-status-to-search-api-results.md

diff --git a/doc/release-notes/10733-add-publication-status-to-search-api-results.md b/doc/release-notes/10733-add-publication-status-to-search-api-results.md
new file mode 100644
index 00000000000..d015a50a00d
--- /dev/null
+++ b/doc/release-notes/10733-add-publication-status-to-search-api-results.md
@@ -0,0 +1,14 @@
+Search API (/api/search) responses will now include publicationStatuses in the JSON response as long as the list is not empty.
+
+Example:
+```javascript
+"items": [
+    {
+        "name": "Darwin's Finches",
+        ...
+        "publicationStatuses": [
+            "Unpublished",
+            "Draft"
+        ],
+(etc, etc)
+```

diff --git a/doc/sphinx-guides/source/api/search.rst b/doc/sphinx-guides/source/api/search.rst
index e8d0a0b3ea7..297f1283ef7 100755
--- a/doc/sphinx-guides/source/api/search.rst
+++ b/doc/sphinx-guides/source/api/search.rst
@@ -114,6 +114,9 @@ https://demo.dataverse.org/api/search?q=trees
             "identifier_of_dataverse":"dvbe69f5e1",
             "name_of_dataverse":"dvbe69f5e1",
             "citation":"Finch, Fiona; Spruce, Sabrina; Poe, Edgar Allen; Mulligan, Hercules, 2019, \"Darwin's Finches\", https://doi.org/10.70122/FK2/MB5VGR, Root, V3",
+            "publicationStatuses": [
+                "Published"
+            ],
             "storageIdentifier":"file://10.70122/FK2/MB5VGR",
             "subjects":[
                "Astronomy and Astrophysics",
@@ -207,6 +210,9 @@ In this example, ``show_relevance=true`` matches per field are shown.
Available "published_at":"2016-05-10T12:57:45Z", "citationHtml":"Finch, Fiona, 2016, \"Darwin's Finches\", http://dx.doi.org/10.5072/FK2/G2VPE7, Root Dataverse, V1", "citation":"Finch, Fiona, 2016, \"Darwin's Finches\", http://dx.doi.org/10.5072/FK2/G2VPE7, Root Dataverse, V1", + "publicationStatuses": [ + "Published" + ], "matches":[ { "authorName":{ @@ -297,6 +303,9 @@ The above example ``fq=publicationStatus:Published`` retrieves only "RELEASED" v "identifier_of_dataverse": "rahman", "name_of_dataverse": "mdmizanur rahman Dataverse collection", "citation": "Finch, Fiona, 2019, \"Darwin's Finches\", https://doi.org/10.70122/FK2/GUAS41, Demo Dataverse, V1", + "publicationStatuses": [ + "Published" + ], "storageIdentifier": "file://10.70122/FK2/GUAS41", "subjects": [ "Medicine, Health and Life Sciences" @@ -330,6 +339,9 @@ The above example ``fq=publicationStatus:Published`` retrieves only "RELEASED" v "identifier_of_dataverse": "demo", "name_of_dataverse": "Demo Dataverse", "citation": "Finch, Fiona, 2020, \"Darwin's Finches\", https://doi.org/10.70122/FK2/7ZXYRH, Demo Dataverse, V1", + "publicationStatuses": [ + "Published" + ], "storageIdentifier": "file://10.70122/FK2/7ZXYRH", "subjects": [ "Medicine, Health and Life Sciences" @@ -386,6 +398,10 @@ The above example ``metadata_fields=citation:*`` returns under "metadataBlocks" "identifier_of_dataverse": "Sample_data", "name_of_dataverse": "Sample Data", "citation": "Métropole, 2021, \"JDD avec GeoJson 2021-07-13T10:23:46.409Z\", https://doi.org/10.5072/FK2/GIWCKB, Root, DRAFT VERSION", + "publicationStatuses": [ + "Unpublished", + "Draft" + ], "storageIdentifier": "file://10.5072/FK2/GIWCKB", "subjects": [ "Other" diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java index e84c8f133da..b40dcd69f3b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java @@ -534,6 +534,9 @@ public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, bool nullSafeJsonBuilder.add("entity_id", this.entityId); } } + if (!getPublicationStatuses().isEmpty()) { + nullSafeJsonBuilder.add("publicationStatuses", getPublicationStatusesAsJSON()); + } if (this.entity == null) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java index 6e4fd5b0bb3..96c27b1f6c9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java @@ -113,6 +113,7 @@ public void testSearchPermisions() throws InterruptedException { .body("data.total_count", CoreMatchers.is(1)) .body("data.count_in_response", CoreMatchers.is(1)) .body("data.items[0].name", CoreMatchers.is("Darwin's Finches")) + .body("data.items[0].publicationStatuses", CoreMatchers.hasItems("Unpublished", "Draft")) .statusCode(OK.getStatusCode()); Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken1); diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java index 4fb29869db7..d7deaa2dbc1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java @@ -225,6 +225,29 @@ public void testSetPublicationStatuses14() { 
assertTrue(this.solrSearchResult.isDeaccessionedState()); } + @Test + public void testSetPublicationStatusesJson() { + + boolean showRelevance = false; + boolean showEntityIds = false; + boolean showApiUrls = false; + + SolrSearchResult result01 = new SolrSearchResult("myQuery", "myName"); + result01.setType(SearchConstants.DATAVERSES); + result01.setPublicationStatuses(List.of("Unpublished", "Draft")); + JsonObjectBuilder actual01 = result01.json(showRelevance, showEntityIds, showApiUrls); + JsonObject actual = actual01.build(); + System.out.println("actual: " + actual); + + JsonObjectBuilder expResult = Json.createObjectBuilder(); + expResult.add("type", SearchConstants.DATAVERSE); + expResult.add("publicationStatuses", Json.createArrayBuilder().add("Unpublished").add("Draft").build()); + JsonObject expected = expResult.build(); + System.out.println("expect: " + expected); + + assertEquals(expected, actual); + } + @Test public void testJson() { From d3fd895010027e6130eb33d30d86e46a60fa7db3 Mon Sep 17 00:00:00 2001 From: Jan Range Date: Thu, 8 Aug 2024 12:40:30 +0200 Subject: [PATCH 13/17] add rust-dataverse to the docs --- doc/sphinx-guides/source/api/client-libraries.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst index bd0aa55ba99..99df5b29466 100755 --- a/doc/sphinx-guides/source/api/client-libraries.rst +++ b/doc/sphinx-guides/source/api/client-libraries.rst @@ -78,3 +78,10 @@ Ruby https://github.com/libis/dataverse_api is a Ruby gem for Dataverse APIs. It is registered as a library on Rubygems (https://rubygems.org/search?query=dataverse). The gem is created and maintained by the LIBIS team (https://www.libis.be) at the University of Leuven (https://www.kuleuven.be). + +Rust +---- + +https://github.com/gdcc/rust-dataverse + +The Rust Dataverse client is a comprehensive crate designed for seamless interaction with the Dataverse API. It facilitates essential operations such as collection, dataset, and file management. Additionally, the crate includes a user-friendly Command-line Interface (CLI) that brings the full functionality of the library to the command line. This project is actively maintained by `Jan Range `_. From 483d9a6bc96c59b2a2b106c4584eb3d8e6f3fbc5 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 8 Aug 2024 15:11:33 -0400 Subject: [PATCH 14/17] add release note, make small tweaks #10758 --- doc/release-notes/10758-rust-client.md | 3 +++ doc/sphinx-guides/source/api/client-libraries.rst | 2 +- doc/sphinx-guides/source/contributor/index.md | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 doc/release-notes/10758-rust-client.md diff --git a/doc/release-notes/10758-rust-client.md b/doc/release-notes/10758-rust-client.md new file mode 100644 index 00000000000..e206f27ce65 --- /dev/null +++ b/doc/release-notes/10758-rust-client.md @@ -0,0 +1,3 @@ +### Rust API client library + +An API client library for the Rust programming language is now available at https://github.com/gdcc/rust-dataverse and has been added to the [list of client libraries](https://dataverse-guide--10758.org.readthedocs.build/en/10758/api/client-libraries.html) in the API Guide. See also #10758. 
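Each of the client libraries above ultimately wraps plain HTTP calls like the ones PATCH 12 documents for the Search API. As a minimal sketch of what such a client does under the hood (the demo server URL and the query term are placeholders, and `jq` is assumed to be available), the new `publicationStatuses` field can be inspected directly:

```bash
# List each search hit's name and publication statuses.
# publicationStatuses appears only when the item's status list is non-empty.
export SERVER_URL=https://demo.dataverse.org

curl -s "$SERVER_URL/api/search?q=finches" \
  | jq '.data.items[] | {name, publicationStatuses}'
```
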
diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst index 99df5b29466..6279ea8329e 100755 --- a/doc/sphinx-guides/source/api/client-libraries.rst +++ b/doc/sphinx-guides/source/api/client-libraries.rst @@ -84,4 +84,4 @@ Rust https://github.com/gdcc/rust-dataverse -The Rust Dataverse client is a comprehensive crate designed for seamless interaction with the Dataverse API. It facilitates essential operations such as collection, dataset, and file management. Additionally, the crate includes a user-friendly Command-line Interface (CLI) that brings the full functionality of the library to the command line. This project is actively maintained by `Jan Range `_. +The Rust Dataverse client is a comprehensive crate designed for seamless interaction with the Dataverse API. It facilitates essential operations such as collection, dataset, and file management. Additionally, the crate includes a user-friendly command-line interface (CLI) that brings the full functionality of the library to the command line. This project is actively maintained by `Jan Range `_. diff --git a/doc/sphinx-guides/source/contributor/index.md b/doc/sphinx-guides/source/contributor/index.md index e75cc58bccd..1017f15f0ed 100644 --- a/doc/sphinx-guides/source/contributor/index.md +++ b/doc/sphinx-guides/source/contributor/index.md @@ -43,7 +43,7 @@ If you speak multiple languages, you are very welcome to help us translate Datav ## Code -Dataverse is open source and we love code contributions. Developers are not limited to the main Dataverse code in this git repo. We have projects in C, C++, Go, Java, Javascript, Julia, PHP, Python, R, Ruby, TypeScript and more. To get started, please see the following pages: +Dataverse is open source and we love code contributions. Developers are not limited to the main Dataverse code in this git repo. We have projects in C, C++, Go, Java, Javascript, Julia, PHP, Python, R, Ruby, Rust, TypeScript and more. 
To get started, please see the following pages: ```{toctree} :maxdepth: 1 From 93c4b228f28abc2cf012fabdb728f836f0277ec1 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 8 Aug 2024 15:24:24 -0400 Subject: [PATCH 15/17] add rust to short list of codebases to contribute to #10758 --- doc/sphinx-guides/source/contributor/code.md | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/contributor/code.md b/doc/sphinx-guides/source/contributor/code.md index 2a1dec08c05..c7154d14169 100644 --- a/doc/sphinx-guides/source/contributor/code.md +++ b/doc/sphinx-guides/source/contributor/code.md @@ -20,6 +20,7 @@ The primary codebase and issue tracker for Dataverse is (TypeScript) - (Javascript) - (Python) +- (Rust) - (Ansible) - (Javascript) From 7c9b960cc1a102cad7217af99f81c99770cd7ea9 Mon Sep 17 00:00:00 2001 From: Guillermo Portas Date: Mon, 12 Aug 2024 21:57:12 +0200 Subject: [PATCH 16/17] Extends the /dataverses API to get user permissions on a collection (#10751) * Added: new API endpoint getUserPermissionsOnDataverse * Added: release notes for #10749 * Added: docs for #10749 --- ...ataverse-user-permissions-api-extension.md | 1 + doc/sphinx-guides/source/api/native-api.rst | 23 +++++++++ .../harvard/iq/dataverse/api/Dataverses.java | 24 +++++++++ .../iq/dataverse/api/DataversesIT.java | 50 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 6 +++ 5 files changed, 104 insertions(+) create mode 100644 doc/release-notes/10749-dataverse-user-permissions-api-extension.md diff --git a/doc/release-notes/10749-dataverse-user-permissions-api-extension.md b/doc/release-notes/10749-dataverse-user-permissions-api-extension.md new file mode 100644 index 00000000000..706b1f42641 --- /dev/null +++ b/doc/release-notes/10749-dataverse-user-permissions-api-extension.md @@ -0,0 +1 @@ +New API endpoint "dataverses/{identifier}/userPermissions" for obtaining the user permissions on a dataverse. diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index a5f7d03899a..f9f346ce7b5 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -678,6 +678,29 @@ The fully expanded example above (without environment variables) looks like this Note: You must have "Edit Dataverse" permission in the given Dataverse to invoke this endpoint. +Get User Permissions on a Dataverse +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This API call returns the permissions that the calling user has on a particular dataverse. + +In particular, the user permissions that this API call checks, returned as booleans, are the following: + +* Can add a dataverse +* Can add a dataset +* Can view the unpublished dataverse +* Can edit the dataverse +* Can manage the dataverse permissions +* Can publish the dataverse +* Can delete the dataverse + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H "X-Dataverse-key: $API_TOKEN" -X GET "$SERVER_URL/api/dataverses/$ID/userPermissions" + .. 
_create-dataset-command: Create a Dataset in a Dataverse Collection diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 81db5f7d782..3ea3c74c4a0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -111,6 +111,9 @@ public class Dataverses extends AbstractApiBean { @EJB SwordServiceBean swordService; + + @EJB + PermissionServiceBean permissionService; @POST @AuthRequired @@ -1647,4 +1650,25 @@ public Response linkDataverse(@Context ContainerRequestContext crc, @PathParam(" } } + @GET + @AuthRequired + @Path("{identifier}/userPermissions") + public Response getUserPermissionsOnDataverse(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) { + Dataverse dataverse; + try { + dataverse = findDataverseOrDie(dvIdtf); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + User requestUser = getRequestUser(crc); + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("canAddDataverse", permissionService.userOn(requestUser, dataverse).has(Permission.AddDataverse)); + jsonObjectBuilder.add("canAddDataset", permissionService.userOn(requestUser, dataverse).has(Permission.AddDataset)); + jsonObjectBuilder.add("canViewUnpublishedDataverse", permissionService.userOn(requestUser, dataverse).has(Permission.ViewUnpublishedDataverse)); + jsonObjectBuilder.add("canEditDataverse", permissionService.userOn(requestUser, dataverse).has(Permission.EditDataverse)); + jsonObjectBuilder.add("canManageDataversePermissions", permissionService.userOn(requestUser, dataverse).has(Permission.ManageDataversePermissions)); + jsonObjectBuilder.add("canPublishDataverse", permissionService.userOn(requestUser, dataverse).has(Permission.PublishDataverse)); + jsonObjectBuilder.add("canDeleteDataverse", permissionService.userOn(requestUser, dataverse).has(Permission.DeleteDataverse)); + return ok(jsonObjectBuilder); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 09b60e46e7e..c0b762df2ab 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1045,4 +1045,54 @@ public void testAddDataverse() { .statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid metadata block name: \"" + invalidMetadataBlockName + "\"")); } + + @Test + public void testGetUserPermissionsOnDataverse() { + Response createUserResponse = UtilIT.createRandomUser(); + String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + // Call for dataverse created by the user + Response getUserPermissionsOnDataverseResponse = UtilIT.getUserPermissionsOnDataverse(dataverseAlias, apiToken); + getUserPermissionsOnDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + boolean canAddDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canAddDataverse"); + assertTrue(canAddDataverse); + boolean canAddDataset = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canAddDataset"); + 
assertTrue(canAddDataset); + boolean canViewUnpublishedDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canViewUnpublishedDataverse"); + assertTrue(canViewUnpublishedDataverse); + boolean canEditDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canEditDataverse"); + assertTrue(canEditDataverse); + boolean canManageDataversePermissions = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canManageDataversePermissions"); + assertTrue(canManageDataversePermissions); + boolean canPublishDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canPublishDataverse"); + assertTrue(canPublishDataverse); + boolean canDeleteDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canDeleteDataverse"); + assertTrue(canDeleteDataverse); + + // Call for root dataverse + getUserPermissionsOnDataverseResponse = UtilIT.getUserPermissionsOnDataverse("root", apiToken); + getUserPermissionsOnDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + canAddDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canAddDataverse"); + assertTrue(canAddDataverse); + canAddDataset = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canAddDataset"); + assertTrue(canAddDataset); + canViewUnpublishedDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canViewUnpublishedDataverse"); + assertFalse(canViewUnpublishedDataverse); + canEditDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canEditDataverse"); + assertFalse(canEditDataverse); + canManageDataversePermissions = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canManageDataversePermissions"); + assertFalse(canManageDataversePermissions); + canPublishDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canPublishDataverse"); + assertFalse(canPublishDataverse); + canDeleteDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canDeleteDataverse"); + assertFalse(canDeleteDataverse); + + // Call with invalid dataverse alias + Response getUserPermissionsOnDataverseInvalidIdResponse = UtilIT.getUserPermissionsOnDataverse("testInvalidAlias", apiToken); + getUserPermissionsOnDataverseInvalidIdResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 917154c80cc..5a5d5e1c29b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3776,6 +3776,12 @@ static Response getUserPermissionsOnDataset(String datasetId, String apiToken) { .get("/api/datasets/" + datasetId + "/userPermissions"); } + static Response getUserPermissionsOnDataverse(String dataverseAlias, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/dataverses/" + dataverseAlias + "/userPermissions"); + } + static Response getCanDownloadAtLeastOneFile(String datasetId, String versionId, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) From 0966ca9e1c1854410e87e89b15eca82930fc8257 Mon Sep 17 
00:00:00 2001 From: qqmyers Date: Mon, 12 Aug 2024 17:12:04 -0400 Subject: [PATCH 17/17] Add mimetype for 3D Data readable by the Smithsonian's Voyager app (#10760) * 3D Data readable by the Smithsonian's Voyager app * avoid 406 when client requests specific type --- src/main/java/edu/harvard/iq/dataverse/api/Access.java | 2 +- .../propertyFiles/MimeTypeDetectionByFileExtension.properties | 1 + src/main/java/propertyFiles/MimeTypeDisplay.properties | 2 ++ src/main/java/propertyFiles/MimeTypeFacets.properties | 2 ++ 4 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 00da4990996..16ac884180b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -286,7 +286,7 @@ private DataFile findDataFileOrDieWrapper(String fileId){ @GET @AuthRequired @Path("datafile/{fileId:.+}") - @Produces({"application/xml"}) + @Produces({"application/xml","*/*"}) public Response datafile(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId, @QueryParam("gbrecs") boolean gbrecs, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ { // check first if there's a trailing slash, and chop it: diff --git a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties index 97b2eed111c..630539d912e 100644 --- a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties +++ b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties @@ -38,3 +38,4 @@ nf=text/x-nextflow Rmd=text/x-r-notebook rb=text/x-ruby-script dag=text/x-dagman +glb=model/gltf-binary diff --git a/src/main/java/propertyFiles/MimeTypeDisplay.properties b/src/main/java/propertyFiles/MimeTypeDisplay.properties index 8e5a251abbf..549b2b13442 100644 --- a/src/main/java/propertyFiles/MimeTypeDisplay.properties +++ b/src/main/java/propertyFiles/MimeTypeDisplay.properties @@ -219,6 +219,8 @@ video/quicktime=Quicktime Video video/webm=WebM Video # Network Data text/xml-graphml=GraphML Network Data +# 3D Data +model/gltf-binary=3D Model # Other application/octet-stream=Unknown application/x-docker-file=Docker Image File diff --git a/src/main/java/propertyFiles/MimeTypeFacets.properties b/src/main/java/propertyFiles/MimeTypeFacets.properties index 0dad8daff4c..0b0fde89cbd 100644 --- a/src/main/java/propertyFiles/MimeTypeFacets.properties +++ b/src/main/java/propertyFiles/MimeTypeFacets.properties @@ -223,6 +223,8 @@ video/webm=Video # (anything else that looks like image/* will also be indexed as facet type "Video") # Network Data text/xml-graphml=Network Data +# 3D Data +model/gltf-binary=3D Data # Other application/octet-stream=Unknown application/ld+json;\u0020profile\u003d\u0022http\u003a//www.w3.org/ns/json-ld#flattened\u0020http\u003a//www.w3.org/ns/json-ld#compacted\u0020https\u003a//w3id.org/ro/crate\u0022=Metadata
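
A closing note on the `@Produces` change in PATCH 17: under JAX-RS content negotiation, a request whose `Accept` header names a media type outside the resource's declared list is rejected with 406 Not Acceptable before the resource method even runs. Declaring `"*/*"` alongside `application/xml` lets a client request the specific type it expects, such as the newly mapped glTF binary type. A minimal sketch of such a request (the server URL and file id 42 are placeholders for a public datafile):

```bash
# Before the change, @Produces({"application/xml"}) made this request fail
# content negotiation with HTTP 406; with "*/*" added, the file is served.
curl -H "Accept: model/gltf-binary" \
  "https://demo.dataverse.org/api/access/datafile/42" --output model.glb
```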