diff --git a/doc/release-notes/10733-add-publication-status-to-search-api-results.md b/doc/release-notes/10733-add-publication-status-to-search-api-results.md
new file mode 100644
index 00000000000..d015a50a00d
--- /dev/null
+++ b/doc/release-notes/10733-add-publication-status-to-search-api-results.md
@@ -0,0 +1,14 @@
+The Search API (/api/search) response will now include publicationStatuses in the JSON response as long as the list is not empty.
+
+Example:
+```json
+"items": [
+    {
+        "name": "Darwin's Finches",
+        ...
+        "publicationStatuses": [
+            "Unpublished",
+            "Draft"
+        ],
+(etc, etc)
+```
diff --git a/doc/release-notes/10749-dataverse-user-permissions-api-extension.md b/doc/release-notes/10749-dataverse-user-permissions-api-extension.md
new file mode 100644
index 00000000000..706b1f42641
--- /dev/null
+++ b/doc/release-notes/10749-dataverse-user-permissions-api-extension.md
@@ -0,0 +1 @@
+New API endpoint "dataverses/{identifier}/userPermissions" for obtaining the permissions that the calling user has on a dataverse.
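+
+For example (a sketch; the demo server URL and the `root` collection alias below are placeholders):
+
+```bash
+export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+curl -H "X-Dataverse-key: $API_TOKEN" "https://demo.dataverse.org/api/dataverses/root/userPermissions"
+```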
diff --git a/doc/release-notes/10758-rust-client.md b/doc/release-notes/10758-rust-client.md
new file mode 100644
index 00000000000..e206f27ce65
--- /dev/null
+++ b/doc/release-notes/10758-rust-client.md
@@ -0,0 +1,3 @@
+### Rust API client library
+
+An API client library for the Rust programming language is now available at https://github.com/gdcc/rust-dataverse and has been added to the [list of client libraries](https://dataverse-guide--10758.org.readthedocs.build/en/10758/api/client-libraries.html) in the API Guide. See also #10758.
diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst
index bd0aa55ba99..6279ea8329e 100755
--- a/doc/sphinx-guides/source/api/client-libraries.rst
+++ b/doc/sphinx-guides/source/api/client-libraries.rst
@@ -78,3 +78,10 @@ Ruby
 https://github.com/libis/dataverse_api is a Ruby gem for Dataverse APIs. It is registered as a library on Rubygems (https://rubygems.org/search?query=dataverse).
 
 The gem is created and maintained by the LIBIS team (https://www.libis.be) at the University of Leuven (https://www.kuleuven.be).
+
+Rust
+----
+
+https://github.com/gdcc/rust-dataverse
+
+The Rust Dataverse client is a crate for interacting with the Dataverse API. It covers essential operations such as collection, dataset, and file management, and it includes a command-line interface (CLI) that brings the full functionality of the library to the command line. The project is actively maintained by `Jan Range <https://github.com/jr-1991>`_.
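+
+To try the library and its CLI, a typical Cargo workflow applies (a sketch only; see the repository README for authoritative build and usage instructions):
+
+.. code-block:: bash
+
+  git clone https://github.com/gdcc/rust-dataverse
+  cd rust-dataverse
+  cargo build --release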
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 429bcaa0022..259bd784f76 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -678,6 +678,29 @@ The fully expanded example above (without environment variables) looks like this
 
 Note: You must have "Edit Dataverse" permission in the given Dataverse to invoke this endpoint.
 
+Get User Permissions on a Dataverse
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This API call returns the permissions that the calling user has on a particular dataverse.
+
+Specifically, this API call checks the following user permissions and returns each as a boolean:
+
+* Can add a dataverse
+* Can add a dataset
+* Can view the unpublished dataverse
+* Can edit the dataverse
+* Can manage the dataverse permissions
+* Can publish the dataverse
+* Can delete the dataverse
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=root
+
+  curl -H "X-Dataverse-key: $API_TOKEN" -X GET "$SERVER_URL/api/dataverses/$ID/userPermissions"
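+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/dataverses/root/userPermissions"
+
+The response is a JSON object with one boolean per permission listed above; the sample values below are illustrative and depend on the caller's roles:
+
+.. code-block:: json
+
+  {
+    "status": "OK",
+    "data": {
+      "canAddDataverse": true,
+      "canAddDataset": true,
+      "canViewUnpublishedDataverse": true,
+      "canEditDataverse": true,
+      "canManageDataversePermissions": true,
+      "canPublishDataverse": true,
+      "canDeleteDataverse": true
+    }
+  }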
+
 .. _create-dataset-command: 
 
 Create a Dataset in a Dataverse Collection
diff --git a/doc/sphinx-guides/source/api/search.rst b/doc/sphinx-guides/source/api/search.rst
index e8d0a0b3ea7..297f1283ef7 100755
--- a/doc/sphinx-guides/source/api/search.rst
+++ b/doc/sphinx-guides/source/api/search.rst
@@ -114,6 +114,9 @@ https://demo.dataverse.org/api/search?q=trees
                     "identifier_of_dataverse":"dvbe69f5e1",
                     "name_of_dataverse":"dvbe69f5e1",
                     "citation":"Finch, Fiona; Spruce, Sabrina; Poe, Edgar Allen; Mulligan, Hercules, 2019, \"Darwin's Finches\", https://doi.org/10.70122/FK2/MB5VGR, Root, V3",
+                    "publicationStatuses": [
+                        "Published"
+                    ],
                     "storageIdentifier":"file://10.70122/FK2/MB5VGR",
                     "subjects":[  
                        "Astronomy and Astrophysics",
@@ -207,6 +210,9 @@ In this example, ``show_relevance=true`` matches per field are shown. Available
                     "published_at":"2016-05-10T12:57:45Z",
                     "citationHtml":"Finch, Fiona, 2016, \"Darwin's Finches\", <a href=\"http://dx.doi.org/10.5072/FK2/G2VPE7\" target=\"_blank\">http://dx.doi.org/10.5072/FK2/G2VPE7</a>, Root Dataverse, V1",
                     "citation":"Finch, Fiona, 2016, \"Darwin's Finches\", http://dx.doi.org/10.5072/FK2/G2VPE7, Root Dataverse, V1",
+                    "publicationStatuses": [
+                        "Published"
+                    ],
                     "matches":[
                         {
                             "authorName":{
@@ -297,6 +303,9 @@ The above example ``fq=publicationStatus:Published`` retrieves only "RELEASED" v
                     "identifier_of_dataverse": "rahman",
                     "name_of_dataverse": "mdmizanur rahman Dataverse collection",
                     "citation": "Finch, Fiona, 2019, \"Darwin's Finches\", https://doi.org/10.70122/FK2/GUAS41, Demo Dataverse, V1",
+                    "publicationStatuses": [
+                        "Published"
+                    ],
                     "storageIdentifier": "file://10.70122/FK2/GUAS41",
                     "subjects": [
                         "Medicine, Health and Life Sciences"
@@ -330,6 +339,9 @@ The above example ``fq=publicationStatus:Published`` retrieves only "RELEASED" v
                     "identifier_of_dataverse": "demo",
                     "name_of_dataverse": "Demo Dataverse",
                     "citation": "Finch, Fiona, 2020, \"Darwin's Finches\", https://doi.org/10.70122/FK2/7ZXYRH, Demo Dataverse, V1",
+                    "publicationStatuses": [
+                        "Published"
+                    ],
                     "storageIdentifier": "file://10.70122/FK2/7ZXYRH",
                     "subjects": [
                         "Medicine, Health and Life Sciences"
@@ -386,6 +398,10 @@ The above example ``metadata_fields=citation:*`` returns under "metadataBlocks"
                     "identifier_of_dataverse": "Sample_data",
                     "name_of_dataverse": "Sample Data",
                     "citation": "Métropole, 2021, \"JDD avec GeoJson 2021-07-13T10:23:46.409Z\", https://doi.org/10.5072/FK2/GIWCKB, Root, DRAFT VERSION",
+                    "publicationStatuses": [
+                        "Unpublished",
+                        "Draft"
+                    ],
                     "storageIdentifier": "file://10.5072/FK2/GIWCKB",
                     "subjects": [
                         "Other"
diff --git a/doc/sphinx-guides/source/contributor/code.md b/doc/sphinx-guides/source/contributor/code.md
index 2a1dec08c05..c7154d14169 100644
--- a/doc/sphinx-guides/source/contributor/code.md
+++ b/doc/sphinx-guides/source/contributor/code.md
@@ -20,6 +20,7 @@ The primary codebase and issue tracker for Dataverse is <https://github.com/IQSS
 - <https://github.com/IQSS/dataverse-client-javascript> (TypeScript)
 - <https://github.com/gdcc/dataverse-previewers> (Javascript)
 - <https://github.com/gdcc/pyDataverse> (Python)
+- <https://github.com/gdcc/rust-dataverse> (Rust)
 - <https://github.com/gdcc/dataverse-ansible> (Ansible)
 - <https://github.com/gdcc/dv-metrics> (Javascript)
 
diff --git a/doc/sphinx-guides/source/contributor/index.md b/doc/sphinx-guides/source/contributor/index.md
index e75cc58bccd..1017f15f0ed 100644
--- a/doc/sphinx-guides/source/contributor/index.md
+++ b/doc/sphinx-guides/source/contributor/index.md
@@ -43,7 +43,7 @@ If you speak multiple languages, you are very welcome to help us translate Datav
 
 ## Code
 
-Dataverse is open source and we love code contributions. Developers are not limited to the main Dataverse code in this git repo. We have projects in C, C++, Go, Java, Javascript, Julia, PHP, Python, R, Ruby, TypeScript and more. To get started, please see the following pages:
+Dataverse is open source and we love code contributions. Developers are not limited to the main Dataverse code in this git repo. We have projects in C, C++, Go, Java, Javascript, Julia, PHP, Python, R, Ruby, Rust, TypeScript and more. To get started, please see the following pages:
 
 ```{toctree}
 :maxdepth: 1
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
index a012175deae..3977023fc4b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
@@ -15,6 +15,7 @@
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Date;
@@ -253,7 +254,7 @@ public String toBibtexString() {
     
     public void writeAsBibtexCitation(OutputStream os) throws IOException {
         // Use UTF-8
-        Writer out = new BufferedWriter(new OutputStreamWriter(os, "utf-8"));
+        Writer out = new BufferedWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8));
         if(getFileTitle() !=null && isDirect()) {
             out.write("@incollection{");
         } else {
@@ -317,7 +318,7 @@ public String toRISString() {
 
     public void writeAsRISCitation(OutputStream os) throws IOException {
         // Use UTF-8
-        Writer out = new BufferedWriter(new OutputStreamWriter(os, "utf-8"));
+        Writer out = new BufferedWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8));
         out.write("Provider: " + publisher + "\r\n");
         out.write("Content: text/plain; charset=\"utf-8\"" + "\r\n");
         // Using type "DATA" - see https://github.com/IQSS/dataverse/issues/4816
diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java
index f9cf061e771..a3dfbf81512 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Shib.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java
@@ -19,6 +19,7 @@
 import org.apache.commons.lang3.StringUtils;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -458,9 +459,9 @@ private String getRequiredValueFromAssertion(String key) throws Exception {
         if (attributeValue.isEmpty()) {
             throw new Exception(key + " was empty");
         }
-		if(systemConfig.isShibAttributeCharacterSetConversionEnabled()) {
-			attributeValue= new String( attributeValue.getBytes("ISO-8859-1"), "UTF-8");
-		}
+        if (systemConfig.isShibAttributeCharacterSetConversionEnabled()) {
+            // Re-encode the Latin-1-decoded attribute value and reinterpret the bytes as UTF-8 to recover non-ASCII characters.
+            attributeValue = new String(attributeValue.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8);
+        }
         String trimmedValue = attributeValue.trim();
         logger.fine("The SAML assertion for \"" + key + "\" (required) was \"" + attributeValue + "\" and was trimmed to \"" + trimmedValue + "\".");
         return trimmedValue;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
index 00da4990996..16ac884180b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
@@ -286,7 +286,7 @@ private DataFile findDataFileOrDieWrapper(String fileId){
     @GET
     @AuthRequired
     @Path("datafile/{fileId:.+}")
-    @Produces({"application/xml"})
+    // "*/*" allows requests with any Accept header; the response carries the datafile's own MIME type.
+    @Produces({"application/xml", "*/*"})
     public Response datafile(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId, @QueryParam("gbrecs") boolean gbrecs, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
         
         // check first if there's a trailing slash, and chop it: 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
index 7593f65e027..54e5eaf7b84 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
@@ -65,6 +65,7 @@
 
 import java.io.InputStream;
 import java.io.StringReader;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.logging.Level;
@@ -1155,7 +1156,7 @@ public void write(OutputStream os) throws IOException,
                         os.write(",\n".getBytes());
                     }
 
-                    os.write(output.build().toString().getBytes("UTF8"));
+                    os.write(output.build().toString().getBytes(StandardCharsets.UTF_8));
                     
                     if (!wroteObject) {
                         wroteObject = true;
@@ -1269,7 +1270,7 @@ public void write(OutputStream os) throws IOException,
                         os.write(",\n".getBytes());
                     }
 
-                    os.write(output.build().toString().getBytes("UTF8"));
+                    os.write(output.build().toString().getBytes(StandardCharsets.UTF_8));
                     
                     if (!wroteObject) {
                         wroteObject = true;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
index e6375b2f594..58dd6065528 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
@@ -113,6 +113,9 @@ public class Dataverses extends AbstractApiBean {
 
     @EJB
     SwordServiceBean swordService;
+
+    @EJB
+    PermissionServiceBean permissionService;
     
     @POST
     @AuthRequired
@@ -1653,4 +1656,25 @@ public Response linkDataverse(@Context ContainerRequestContext crc, @PathParam("
         }
     }
 
+    @GET
+    @AuthRequired
+    @Path("{identifier}/userPermissions")
+    public Response getUserPermissionsOnDataverse(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) {
+        Dataverse dataverse;
+        try {
+            dataverse = findDataverseOrDie(dvIdtf);
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+        User requestUser = getRequestUser(crc);
+        // Evaluate each relevant permission for this user on the dataverse and report it as a boolean.
+        var userPermissionQuery = permissionService.userOn(requestUser, dataverse);
+        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+        jsonObjectBuilder.add("canAddDataverse", userPermissionQuery.has(Permission.AddDataverse));
+        jsonObjectBuilder.add("canAddDataset", userPermissionQuery.has(Permission.AddDataset));
+        jsonObjectBuilder.add("canViewUnpublishedDataverse", userPermissionQuery.has(Permission.ViewUnpublishedDataverse));
+        jsonObjectBuilder.add("canEditDataverse", userPermissionQuery.has(Permission.EditDataverse));
+        jsonObjectBuilder.add("canManageDataversePermissions", userPermissionQuery.has(Permission.ManageDataversePermissions));
+        jsonObjectBuilder.add("canPublishDataverse", userPermissionQuery.has(Permission.PublishDataverse));
+        jsonObjectBuilder.add("canDeleteDataverse", userPermissionQuery.has(Permission.DeleteDataverse));
+        return ok(jsonObjectBuilder);
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/PasswordEncryption.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/PasswordEncryption.java
index 4446f68228d..aef8b375b63 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/PasswordEncryption.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/PasswordEncryption.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.authorization.providers.builtin;
 
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import org.apache.commons.lang3.RandomStringUtils;
@@ -36,13 +36,13 @@ public interface Algorithm {
         public String encrypt(String plainText) {
             try {
                 MessageDigest md = MessageDigest.getInstance("SHA");
-                md.update(plainText.getBytes("UTF-8"));
+                md.update(plainText.getBytes(StandardCharsets.UTF_8));
                 byte[] raw = md.digest();
                 //String hash = Base64.encodeToString(raw, true);
                 String hash = Base64.getEncoder().encodeToString(raw);
                 return hash;
                 
-            } catch (NoSuchAlgorithmException | UnsupportedEncodingException e) {
+            } catch (NoSuchAlgorithmException e) {
                 throw new RuntimeException(e);
             }
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java
index c369010c8cd..a42bb35615f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java
@@ -20,30 +20,16 @@
 
 package edu.harvard.iq.dataverse.dataaccess;
 
-import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.datavariable.DataVariable;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Scanner;
-import java.util.Set;
-import java.math.BigDecimal;
-import java.math.MathContext;
-import java.math.RoundingMode;
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.file.Paths;
-import java.nio.file.StandardOpenOption;
 import java.util.logging.Logger;
 import java.util.regex.Matcher;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
index 060b8694e9c..4c84384b271 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
@@ -17,12 +17,11 @@
 import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.UnsupportedEncodingException;
 import java.nio.channels.FileChannel;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.*;
@@ -281,7 +280,7 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat
         try {
             tmpFile = FileUtil.inputStreamToFile(inputStream);
         } catch (IOException ex) {
-        	logger.severe(ex.getMessage());
+            logger.severe("FileUtil.inputStreamToFile failed for tmpFile: " + ex.getMessage());
         }
 
         StorageIO<Dataset> dataAccess = null;
@@ -307,7 +306,7 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat
             fullSizeImage = ImageIO.read(tmpFile);
         } catch (IOException ex) {
         	IOUtils.closeQuietly(inputStream);
-            logger.severe(ex.getMessage());
+            logger.severe("ImageIO.read failed for tmpFile: " + ex.getMessage());
             return null;
         }
         if (fullSizeImage == null) {
@@ -318,25 +317,14 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat
         int width = fullSizeImage.getWidth();
         int height = fullSizeImage.getHeight();
         FileChannel src = null;
-        try {
-            src = new FileInputStream(tmpFile).getChannel();
-        } catch (FileNotFoundException ex) {
-        	IOUtils.closeQuietly(inputStream);
-            logger.severe(ex.getMessage());
-            return null;
-        }
         FileChannel dest = null;
-        try {
-            dest = new FileOutputStream(tmpFile).getChannel();
-        } catch (FileNotFoundException ex) {
-        	IOUtils.closeQuietly(inputStream);
-            logger.severe(ex.getMessage());
-            return null;
-        }
-        try {
+        try (FileInputStream fis = new FileInputStream(tmpFile); FileOutputStream fos = new FileOutputStream(tmpFile)) {
+            src = fis.getChannel();
+            dest = fos.getChannel();
             dest.transferFrom(src, 0, src.size());
         } catch (IOException ex) {
-            logger.severe(ex.getMessage());
+            IOUtils.closeQuietly(inputStream);
+            logger.severe("Error occurred during transfer using FileChannels: " + ex.getMessage());
             return null;
         }
         File tmpFileForResize = null;
@@ -344,7 +332,7 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat
         	//The stream was used around line 274 above, so this creates an empty file (OK since all it is used for is getting a path, but not reusing it here would make it easier to close it above.)
             tmpFileForResize = FileUtil.inputStreamToFile(inputStream);
         } catch (IOException ex) {
-            logger.severe(ex.getMessage());
+            logger.severe("FileUtil.inputStreamToFile failed for tmpFileForResize: " + ex.getMessage());
             return null;
         } finally {
         	IOUtils.closeQuietly(inputStream);
@@ -409,14 +397,8 @@ public static InputStream getThumbnailAsInputStream(Dataset dataset, int size) {
             String base64Image = datasetThumbnail.getBase64image();
             String leadingStringToRemove = FileUtil.DATA_URI_SCHEME;
             String encodedImg = base64Image.substring(leadingStringToRemove.length());
-            byte[] decodedImg = null;
-            try {
-                decodedImg = Base64.getDecoder().decode(encodedImg.getBytes("UTF-8"));
-                logger.fine("returning this many bytes for  " + "dataset id: " + dataset.getId() + ", persistentId: " + dataset.getIdentifier() + " :" + decodedImg.length);
-            } catch (UnsupportedEncodingException ex) {
-                logger.info("dataset thumbnail could not be decoded for dataset id " + dataset.getId() + ": " + ex);
-                return null;
-            }
+            byte[] decodedImg = Base64.getDecoder().decode(encodedImg.getBytes(StandardCharsets.UTF_8));
+            logger.fine("returning this many bytes for dataset id: " + dataset.getId() + ", persistentId: " + dataset.getIdentifier() + ": " + decodedImg.length);
             ByteArrayInputStream nonDefaultDatasetThumbnail = new ByteArrayInputStream(decodedImg);
             logger.fine("For dataset id " + dataset.getId() + " a thumbnail was found and is being returned.");
             return nonDefaultDatasetThumbnail;
@@ -627,7 +609,7 @@ public static boolean validateDatasetMetadataExternally(Dataset ds, String execu
         
         try {
             File tempFile = File.createTempFile("datasetMetadataCheck", ".tmp");
-            FileUtils.writeStringToFile(tempFile, jsonMetadata);
+            FileUtils.writeStringToFile(tempFile, jsonMetadata, StandardCharsets.UTF_8);
             
             // run the external executable: 
             String[] params = { executable, tempFile.getAbsolutePath() };
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java
index d6d7b49d172..94f983f0c13 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java
@@ -5,7 +5,6 @@
 import edu.harvard.iq.dataverse.DatasetLock.Reason;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
-import edu.harvard.iq.dataverse.engine.command.Command;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.workflow.step.Failure;
@@ -14,7 +13,7 @@
 import java.io.IOException;
 import java.io.PipedInputStream;
 import java.io.PipedOutputStream;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.security.DigestInputStream;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
@@ -32,7 +31,7 @@
 import org.duracloud.error.ContentStoreException;
 
 @RequiredPermissions(Permission.PublishDataset)
-public class DuraCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand implements Command<DatasetVersion> {
+public class DuraCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand {
 
     private static final Logger logger = Logger.getLogger(DuraCloudSubmitToArchiveCommand.class.getName());
     private static final String DEFAULT_PORT = "443";
@@ -117,7 +116,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t
                             public void run() {
                                 try (PipedOutputStream dataciteOut = new PipedOutputStream(dataciteIn)) {
 
-                                    dataciteOut.write(dataciteXml.getBytes(Charset.forName("utf-8")));
+                                    dataciteOut.write(dataciteXml.getBytes(StandardCharsets.UTF_8));
                                     dataciteOut.close();
                                     success=true;
                                 } catch (Exception e) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java
index 512987866d4..7d749262b87 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java
@@ -11,7 +11,6 @@
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
-import edu.harvard.iq.dataverse.engine.command.Command;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
@@ -26,14 +25,14 @@
 import java.io.IOException;
 import java.io.PipedInputStream;
 import java.io.PipedOutputStream;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.security.DigestInputStream;
 import java.security.MessageDigest;
 import java.util.Map;
 import java.util.logging.Logger;
 
 @RequiredPermissions(Permission.PublishDataset)
-public class GoogleCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand implements Command<DatasetVersion> {
+public class GoogleCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand {
 
     private static final Logger logger = Logger.getLogger(GoogleCloudSubmitToArchiveCommand.class.getName());
     private static final String GOOGLECLOUD_BUCKET = ":GoogleCloudBucket";
@@ -82,7 +81,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t
                             public void run() {
                                 try (PipedOutputStream dataciteOut = new PipedOutputStream(dataciteIn)) {
 
-                                    dataciteOut.write(dataciteXml.getBytes(Charset.forName("utf-8")));
+                                    dataciteOut.write(dataciteXml.getBytes(StandardCharsets.UTF_8));
                                     dataciteOut.close();
                                     success = true;
                                 } catch (Exception e) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java
index f02edd54b86..a660b1a4d59 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java
@@ -5,7 +5,6 @@
 import edu.harvard.iq.dataverse.DatasetLock.Reason;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
-import edu.harvard.iq.dataverse.engine.command.Command;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.util.bagit.BagGenerator;
@@ -17,6 +16,7 @@
 import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.logging.Logger;
 
@@ -41,7 +41,7 @@
 import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
 
 @RequiredPermissions(Permission.PublishDataset)
-public class S3SubmitToArchiveCommand extends AbstractSubmitToArchiveCommand implements Command<DatasetVersion> {
+public class S3SubmitToArchiveCommand extends AbstractSubmitToArchiveCommand {
 
     private static final Logger logger = Logger.getLogger(S3SubmitToArchiveCommand.class.getName());
     private static final String S3_CONFIG = ":S3ArchiverConfig";
@@ -86,7 +86,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t
 
                     spaceName = getSpaceName(dataset);
                     String dataciteXml = getDataCiteXml(dv);
-                    try (ByteArrayInputStream dataciteIn = new ByteArrayInputStream(dataciteXml.getBytes("UTF-8"))) {
+                    try (ByteArrayInputStream dataciteIn = new ByteArrayInputStream(dataciteXml.getBytes(StandardCharsets.UTF_8))) {
                         // Add datacite.xml file
                         ObjectMetadata om = new ObjectMetadata();
                         om.setContentLength(dataciteIn.available());
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java
index a54e61c7c1e..cf3afd1a39a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java
@@ -7,10 +7,10 @@
 import io.gdcc.spi.export.Exporter;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Locale;
 import java.util.Optional;
 
-import jakarta.json.JsonObject;
 import jakarta.ws.rs.core.MediaType;
 
 
@@ -35,7 +35,7 @@ public String getDisplayName(Locale locale) {
     @Override
     public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
         try{
-            outputStream.write(dataProvider.getDatasetJson().toString().getBytes("UTF8"));
+            outputStream.write(dataProvider.getDatasetJson().toString().getBytes(StandardCharsets.UTF_8));
             outputStream.flush();
         } catch (Exception e){
             throw new ExportException("Unknown exception caught during JSON export.");
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java
index feec4403570..86af45195d7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java
@@ -7,11 +7,11 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Locale;
 import java.util.Optional;
 import java.util.logging.Logger;
 
-import jakarta.json.JsonObject;
 import jakarta.ws.rs.core.MediaType;
 
 @AutoService(Exporter.class)
@@ -25,7 +25,7 @@ public class OAI_OREExporter implements Exporter {
     public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream)
             throws ExportException {
         try {
-            outputStream.write(dataProvider.getDatasetORE().toString().getBytes("UTF8"));
+            outputStream.write(dataProvider.getDatasetORE().toString().getBytes(StandardCharsets.UTF_8));
             outputStream.flush();
         } catch (Exception e) {
             logger.severe(e.getMessage());
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java
index 5428715b905..0c4b39fd641 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java
@@ -7,6 +7,7 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Locale;
 import java.util.logging.Logger;
 import jakarta.ws.rs.core.MediaType;
@@ -75,7 +76,7 @@ public class SchemaDotOrgExporter implements Exporter {
     @Override
     public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
         try {
-            outputStream.write(dataProvider.getDatasetSchemaDotOrg().toString().getBytes("UTF8"));
+            outputStream.write(dataProvider.getDatasetSchemaDotOrg().toString().getBytes(StandardCharsets.UTF_8));
         } catch (IOException ex) {
             logger.info("IOException calling outputStream.write: " + ex);
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java
index 9b62b62fe61..fa83552a9ec 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java
@@ -24,6 +24,7 @@
 import java.io.*;
 import java.nio.*;
 import java.nio.channels.*;
+import java.nio.charset.StandardCharsets;
 import java.util.*;
 import java.lang.reflect.*;
 import java.util.regex.*;
@@ -252,7 +253,7 @@ public String testDTAformat(MappedByteBuffer buff) {
             try {
                 headerBuffer = new byte[STATA_13_HEADER.length()];
                 buff.get(headerBuffer, 0, STATA_13_HEADER.length());
-                headerString = new String(headerBuffer, "US-ASCII");
+                headerString = new String(headerBuffer, StandardCharsets.US_ASCII);
             } catch (Exception ex) {
                 // probably a buffer underflow exception; 
                 // we don't have to do anything... null will 
@@ -273,7 +274,7 @@ public String testDTAformat(MappedByteBuffer buff) {
             try {
                 headerBuffer = new byte[STATA_14_HEADER.length()];
                 buff.get(headerBuffer, 0, STATA_14_HEADER.length());
-                headerString = new String(headerBuffer, "US-ASCII");
+                headerString = new String(headerBuffer, StandardCharsets.US_ASCII);
             } catch (Exception ex) {
                 // probably a buffer underflow exception;
                 // we don't have to do anything... null will
@@ -292,7 +293,7 @@ public String testDTAformat(MappedByteBuffer buff) {
             try {
                 headerBuffer = new byte[STATA_15_HEADER.length()];
                 buff.get(headerBuffer, 0, STATA_15_HEADER.length());
-                headerString = new String(headerBuffer, "US-ASCII");
+                headerString = new String(headerBuffer, StandardCharsets.US_ASCII);
             } catch (Exception ex) {
                 // probably a buffer underflow exception;
                 // we don't have to do anything... null will
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java
index 73818f8fb62..f0262af9e33 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java
@@ -29,6 +29,7 @@
 import java.io.PrintWriter;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.nio.charset.StandardCharsets;
 import java.text.DecimalFormat;
 import java.text.NumberFormat;
 import java.text.ParseException;
@@ -685,7 +686,7 @@ private void decodeHeader(BufferedInputStream stream) throws IOException {
         }
 
         String data_label = new String(Arrays.copyOfRange(header, dl_offset,
-                (dl_offset + dataLabelLength)), "ISO-8859-1");
+                (dl_offset + dataLabelLength)), StandardCharsets.ISO_8859_1);
 
         if (dbgLog.isLoggable(Level.FINE)) {
             dbgLog.fine("data_label_length=" + data_label.length());
@@ -710,7 +711,7 @@ private void decodeHeader(BufferedInputStream stream) throws IOException {
         if (releaseNumber > 104) {
             int ts_offset = dl_offset + dataLabelLength;
             String time_stamp = new String(Arrays.copyOfRange(header, ts_offset,
-                    ts_offset + TIME_STAMP_LENGTH), "ISO-8859-1");
+                    ts_offset + TIME_STAMP_LENGTH), StandardCharsets.ISO_8859_1);
             if (dbgLog.isLoggable(Level.FINE)) {
                 dbgLog.fine("time_stamp_length=" + time_stamp.length());
             }
@@ -912,7 +913,7 @@ private void decodeDescriptorVarNameList(BufferedInputStream stream, int nvar) t
         for (DataVariable dataVariable: dataTable.getDataVariables()) {
             offset_end += length_var_name;
             String vari = new String(Arrays.copyOfRange(variableNameBytes, offset_start,
-                    offset_end), "ISO-8859-1");
+                    offset_end), StandardCharsets.ISO_8859_1);
             String varName = getNullStrippedString(vari);
             dataVariable.setName(varName);
             dbgLog.fine("next name=[" + varName + "]");
@@ -978,7 +979,7 @@ private void decodeDescriptorVariableFormat(BufferedInputStream stream, int nvar
         for (int i = 0; i < nvar; i++) {
             offset_end += length_var_format;
             String vari = new String(Arrays.copyOfRange(variableFormatList, offset_start,
-                    offset_end), "ISO-8859-1");
+                    offset_end), StandardCharsets.ISO_8859_1);
             String variableFormat = getNullStrippedString(vari);
             if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine(i + "-th format=[" + variableFormat + "]");
                         
@@ -1045,7 +1046,7 @@ private void decodeDescriptorValueLabel(BufferedInputStream stream, int nvar) th
         for (int i = 0; i < nvar; i++) {
             offset_end += length_label_name;
             String vari = new String(Arrays.copyOfRange(labelNameList, offset_start,
-                    offset_end), "ISO-8859-1");
+                    offset_end), StandardCharsets.ISO_8859_1);
             labelNames[i] = getNullStrippedString(vari);
             dbgLog.fine(i + "-th label=[" + labelNames[i] + "]");
             offset_start = offset_end;
@@ -1090,7 +1091,7 @@ private void decodeVariableLabels(BufferedInputStream stream) throws IOException
         for (int i = 0; i < nvar; i++) {
             offset_end += length_var_label;
             String vari = new String(Arrays.copyOfRange(variableLabelBytes, offset_start,
-                    offset_end), "ISO-8859-1");
+                    offset_end), StandardCharsets.ISO_8859_1);
             
             String variableLabelParsed = getNullStrippedString(vari);
             if (dbgLog.isLoggable(Level.FINE)) {
@@ -1272,7 +1273,7 @@ void parseValueLabelsRelease105(BufferedInputStream stream) throws IOException {
                     valueLabelHeader,
                     value_label_table_length,
                     (value_label_table_length + length_label_name)),
-                    "ISO-8859-1");
+                    StandardCharsets.ISO_8859_1);
 
             if (dbgLog.isLoggable(Level.FINE)) {
                 dbgLog.fine("rawLabelName(length)=" + rawLabelName.length());
@@ -1335,7 +1336,7 @@ void parseValueLabelsRelease105(BufferedInputStream stream) throws IOException {
             for (int l = 0; l < no_value_label_pairs; l++) {
 
                 String string_l = new String(Arrays.copyOfRange(valueLabelTable_i, offset_start,
-                        offset_end), "ISO-8859-1");
+                        offset_end), StandardCharsets.ISO_8859_1);
 
                 int null_position = string_l.indexOf(0);
                 if (null_position != -1) {
@@ -1485,7 +1486,7 @@ private void parseValueLabelsReleasel108(BufferedInputStream stream) throws IOEx
                     valueLabelHeader,
                     value_label_table_length,
                     (value_label_table_length + length_label_name)),
-                    "ISO-8859-1");
+                    StandardCharsets.ISO_8859_1);
             String labelName = getNullStrippedString(rawLabelName);
 
             if (dbgLog.isLoggable(Level.FINE)) {
@@ -1581,7 +1582,7 @@ private void parseValueLabelsReleasel108(BufferedInputStream stream) throws IOEx
             String label_segment = new String(
                     Arrays.copyOfRange(valueLabelTable_i,
                             offset_value,
-                            (length_label_segment + offset_value)), "ISO-8859-1");
+                            (length_label_segment + offset_value)), StandardCharsets.ISO_8859_1);
 
             // L.A. -- 2011.2.25:
             // This assumes that the labels are already stored in the right
@@ -1701,7 +1702,7 @@ private void decodeData(BufferedInputStream stream, boolean saveWithVariableHead
         ingesteddata.setTabDelimitedFile(tabDelimitedDataFile);
 
         fileOutTab = new FileOutputStream(tabDelimitedDataFile);
-        pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true);
+        pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true);
 
         /* Should we lose this dateFormat thing in 4.0? 
          * the UNF should be calculatable on the app side solely from the data
@@ -1932,7 +1933,7 @@ private void decodeData(BufferedInputStream stream, boolean saveWithVariableHead
                         // String case
                         int strVarLength = StringLengthTable.get(columnCounter);
                         String raw_datum = new String(Arrays.copyOfRange(dataRowBytes, byte_offset,
-                                (byte_offset + strVarLength)), "ISO-8859-1");
+                                (byte_offset + strVarLength)), StandardCharsets.ISO_8859_1);
                         // TODO: 
                         // is it the right thing to do, to default to "ISO-8859-1"?
                         // (it may be; since there's no mechanism for specifying
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java
index 0822f6eed72..913c0ebeab2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java
@@ -4,6 +4,7 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.logging.Logger;
 
@@ -273,7 +274,7 @@ public float readFloat() throws IOException {
      */
     public String readString(int n) throws IOException {
 
-        String ret = new String(readBytes(n), "US-ASCII");
+        String ret = new String(readBytes(n), StandardCharsets.US_ASCII);
 
         // Remove the terminating and/or padding zero bytes:
         if (ret != null && ret.indexOf(0) > -1) {
@@ -287,7 +288,7 @@ public String readString(int n) throws IOException {
      */
     public String readUtfString(int n) throws IOException {
 
-        String ret = new String(readBytes(n), "UTF8");
+        String ret = new String(readBytes(n), StandardCharsets.UTF_8);
 
         // Remove the terminating and/or padding zero bytes:
         if (ret.indexOf(0) > -1) {
@@ -314,11 +315,11 @@ public byte[] readPrimitiveSection(String tag, int length) throws IOException {
     }
 
     public String readPrimitiveStringSection(String tag) throws IOException {
-        return new String(readPrimitiveSection(tag), "US-ASCII");
+        return new String(readPrimitiveSection(tag), StandardCharsets.US_ASCII);
     }
 
     public String readPrimitiveStringSection(String tag, int length) throws IOException {
-        return new String(readPrimitiveSection(tag, length), "US-ASCII");
+        return new String(readPrimitiveSection(tag, length), StandardCharsets.US_ASCII);
     }
 
     public String readLabelSection(String tag, int limit) throws IOException {
@@ -332,7 +333,7 @@ public String readLabelSection(String tag, int limit) throws IOException {
         logger.fine("length of label: " + lengthOfLabel);
         String label = null;
         if (lengthOfLabel > 0) {
-            label = new String(readBytes(lengthOfLabel), "US-ASCII");
+            label = new String(readBytes(lengthOfLabel), StandardCharsets.US_ASCII);
         }
         logger.fine("ret: " + label);
         readClosingTag(tag);
@@ -358,7 +359,7 @@ public String readDefinedStringSection(String tag, int limit) throws IOException
         }
         String ret = null;
         if (number > 0) {
-            ret = new String(readBytes(number), "US-ASCII");
+            ret = new String(readBytes(number), StandardCharsets.US_ASCII);
         }
         logger.fine("ret: " + ret);
         readClosingTag(tag);
@@ -400,7 +401,7 @@ public boolean checkTag(String tag) throws IOException {
 
         int n = tag.length();
         if ((this.buffer_size - buffer_byte_offset) >= n) {
-            return (tag).equals(new String(Arrays.copyOfRange(buffer, buffer_byte_offset, buffer_byte_offset+n),"US-ASCII"));
+            return (tag).equals(new String(Arrays.copyOfRange(buffer, buffer_byte_offset, buffer_byte_offset+n),StandardCharsets.US_ASCII));
         }
         else{
             bufferMoreBytes();
@@ -414,7 +415,7 @@ public void readOpeningTag(String tag) throws IOException {
             throw new IOException("opening tag must be a non-empty string.");
         }
 
-        String openTagString = new String(readBytes(tag.length() + 2), "US-ASCII");
+        String openTagString = new String(readBytes(tag.length() + 2), StandardCharsets.US_ASCII);
         if (openTagString == null || !openTagString.equals("<"+tag+">")) {
             throw new IOException("Could not read opening tag <"+tag+">");
         }
@@ -425,7 +426,7 @@ public void readClosingTag(String tag) throws IOException {
             throw new IOException("closing tag must be a non-empty string.");
         }
 
-        String closeTagString = new String(readBytes(tag.length() + 3), "US-ASCII");
+        String closeTagString = new String(readBytes(tag.length() + 3), StandardCharsets.US_ASCII);
         logger.fine("closeTagString: " + closeTagString);
 
         if (closeTagString == null || !closeTagString.equals("</" + tag + ">")) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java
index 53607d541de..b0f2c50c997 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java
@@ -7,6 +7,7 @@
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 import java.text.DecimalFormat;
 import java.text.NumberFormat;
 import java.text.ParseException;
@@ -735,7 +736,7 @@ private void readData(DataReader reader, String variableHeaderLine) throws IOExc
         ingesteddata.setTabDelimitedFile(tabDelimitedDataFile);
 
         FileOutputStream fileOutTab = new FileOutputStream(tabDelimitedDataFile);
-        PrintWriter pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true);
+        PrintWriter pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true);
 
         // add the variable header here, if needed
         if (variableHeaderLine != null) {
@@ -1001,7 +1002,7 @@ private void readSTRLs(DataReader reader) throws IOException {
 
             File finalTabFile = File.createTempFile("finalTabfile.", ".tab");
             FileOutputStream fileOutTab = new FileOutputStream(finalTabFile);
-            PrintWriter pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true);
+            PrintWriter pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true);
 
             logger.fine("Setting the tab-delimited file to " + finalTabFile.getName());
             ingesteddata.setTabDelimitedFile(finalTabFile);
@@ -1130,9 +1131,9 @@ private String readGSO(DataReader reader, long v, long o) throws IOException {
 
         String gsoString;
         if (binary) {
-            gsoString = new String(contents, "utf8"); 
+            gsoString = new String(contents, StandardCharsets.UTF_8); 
         } else {
-            gsoString = new String(contents, 0, (int) length - 1, "US-ASCII");
+            gsoString = new String(contents, 0, (int) length - 1, StandardCharsets.US_ASCII);
         }
 
         logger.fine("GSO " + v + "," + o + ": " + gsoString);
@@ -1226,7 +1227,7 @@ private void readValueLabels(DataReader reader) throws IOException {
                 }
                 label_length = (int)(label_end - label_offset);
 
-                category_value_labels[i] = new String(Arrays.copyOfRange(labelBytes, (int)label_offset, (int)label_end-1), "UTF8");
+                category_value_labels[i] = new String(Arrays.copyOfRange(labelBytes, (int)label_offset, (int)label_end-1), StandardCharsets.UTF_8);
                 total_label_bytes += label_length;
             }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java
index 2ee966c3e31..13325ca8f60 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java
@@ -31,7 +31,7 @@
 import java.io.PrintWriter;
 import java.io.Writer;
 import java.nio.ByteBuffer;
-
+import java.nio.charset.StandardCharsets;
 import java.text.DecimalFormat;
 import java.text.NumberFormat;
 import java.text.SimpleDateFormat;
@@ -195,7 +195,7 @@ public TabularDataIngest read(BufferedInputStream stream, boolean storeWithVaria
         BufferedReader bfReader = null;
         
         try {            
-            bfReader = new BufferedReader(new InputStreamReader(new FileInputStream(tempPORfile.getAbsolutePath()), "US-ASCII"));
+            bfReader = new BufferedReader(new InputStreamReader(new FileInputStream(tempPORfile.getAbsolutePath()), StandardCharsets.US_ASCII));
             if (bfReader == null){
                 dbgLog.fine("bfReader is null");
                 throw new IOException("bufferedReader is null");
@@ -567,7 +567,7 @@ private File decodeHeader(BufferedInputStream stream) throws IOException {
         try {
             tempPORfile = File.createTempFile("tempPORfile.", ".por");
             fileOutPOR = new FileOutputStream(tempPORfile);
-            fileWriter = new BufferedWriter(new OutputStreamWriter(fileOutPOR, "utf8"));
+            fileWriter = new BufferedWriter(new OutputStreamWriter(fileOutPOR, StandardCharsets.UTF_8));
             porScanner = new Scanner(stream);
 
             // Because 64-bit and 32-bit machines decode POR's first 40-byte
@@ -1115,7 +1115,7 @@ private void decodeData(BufferedReader reader, boolean storeWithVariableHeader)
 
         try {
             fileOutTab = new FileOutputStream(tabDelimitedDataFile);
-            pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true);
+            pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true);
 
             variableFormatTypeList = new String[varQnty];
             for (int i = 0; i < varQnty; i++) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java
index 50f2f89e354..215c7a5e6d2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java
@@ -22,12 +22,11 @@
 
 import java.io.*;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.text.*;
 import java.util.logging.*;
 import java.util.*;
 
-import jakarta.inject.Inject;
-
 // Rosuda Wrappers and Methods for R-calls to Rserve
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import org.rosuda.REngine.REXP;
@@ -504,10 +503,10 @@ public TabularDataIngest read(BufferedInputStream stream, boolean saveWithVariab
             // created!
             // - L.A. 
             RTabFileParser csvFileReader = new RTabFileParser('\t');
-            BufferedReader localBufferedReader = new BufferedReader(new InputStreamReader(new FileInputStream(localCsvFile), "UTF-8"));
+            BufferedReader localBufferedReader = new BufferedReader(new InputStreamReader(new FileInputStream(localCsvFile), StandardCharsets.UTF_8));
 
             File tabFileDestination = File.createTempFile("data-", ".tab");
-            PrintWriter tabFileWriter = new PrintWriter(tabFileDestination.getAbsolutePath(), "UTF-8");
+            PrintWriter tabFileWriter = new PrintWriter(tabFileDestination.getAbsolutePath(), StandardCharsets.UTF_8);
         
             int lineCount = csvFileReader.read(localBufferedReader, dataTable, saveWithVariableHeader, tabFileWriter);
 
@@ -685,7 +684,7 @@ private static String readLocalResource(String path) {
 
         // Try opening a buffered reader stream
         try {
-            BufferedReader rd = new BufferedReader(new InputStreamReader(resourceStream, "UTF-8"));
+            BufferedReader rd = new BufferedReader(new InputStreamReader(resourceStream, StandardCharsets.UTF_8));
 
             String line = null;
             while ((line = rd.readLine()) != null) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java
index 5eecbdfb666..308ff352b2a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java
@@ -29,7 +29,7 @@
 import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
-
+import java.nio.charset.StandardCharsets;
 import java.text.DecimalFormat;
 import java.text.NumberFormat;
 import java.text.SimpleDateFormat;
@@ -58,10 +58,7 @@
 
 import edu.harvard.iq.dataverse.DataTable;
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
-import edu.harvard.iq.dataverse.datavariable.SummaryStatistic;
 import edu.harvard.iq.dataverse.datavariable.VariableCategory;
-import edu.harvard.iq.dataverse.datavariable.VariableRange;
-
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataFileReader;
 import edu.harvard.iq.dataverse.ingest.tabulardata.spi.TabularDataFileReaderSpi;
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataIngest;
@@ -633,7 +630,7 @@ void decodeRecordType1(BufferedInputStream stream) throws IOException {
             int offset_end = LENGTH_SPSS_PRODUCT_INFO; // 60 bytes
             
             String productInfo = new String(Arrays.copyOfRange(recordType1, offset_start,
-                offset_end),"US-ASCII");
+                offset_end), StandardCharsets.US_ASCII);
                 
             dbgLog.fine("productInfo:\n"+productInfo+"\n");
             dataTable.setOriginalFormatVersion(productInfo);
@@ -872,7 +869,7 @@ void decodeRecordType1(BufferedInputStream stream) throws IOException {
             offset_end += LENGTH_FILE_CREATION_INFO; // 84 bytes
             
             String fileCreationInfo = getNullStrippedString(new String(Arrays.copyOfRange(recordType1, offset_start,
-                offset_end),"US-ASCII"));
+                offset_end), StandardCharsets.US_ASCII));
                 
             dbgLog.fine("fileCreationInfo:\n"+fileCreationInfo+"\n");
             
@@ -1220,7 +1217,7 @@ void decodeRecordType2(BufferedInputStream stream) throws IOException {
                     // borders. So we always read the bytes, but only use them for
                     // the real variable entries.
                         /*String variableLabel = new String(Arrays.copyOfRange(variable_label,
-                                0, rawVariableLabelLength),"US-ASCII");*/
+                                0, rawVariableLabelLength), StandardCharsets.US_ASCII);*/
 
                         variableLabelMap.put(variableName, variableLabel);
                     }
@@ -2075,7 +2072,7 @@ void decodeRecordType7(BufferedInputStream stream) throws IOException {
                         byte[] work = new byte[unitLength*numberOfUnits];
                         int nbtyes13 = stream.read(work);
 
-                        String[] variableShortLongNamePairs = new String(work,"US-ASCII").split("\t");
+                        String[] variableShortLongNamePairs = new String(work, StandardCharsets.US_ASCII).split("\t");
 
                         for (int i=0; i<variableShortLongNamePairs.length; i++){
                             dbgLog.fine("RT7: "+i+"-th pair"+variableShortLongNamePairs[i]);
@@ -2166,7 +2163,7 @@ void decodeRecordType7(BufferedInputStream stream) throws IOException {
                         byte[] rt7st20bytes = new byte[unitLength*numberOfUnits];
                         int nbytes20 = stream.read(rt7st20bytes);
 
-                        String dataCharSet = new String(rt7st20bytes,"US-ASCII");
+                        String dataCharSet = new String(rt7st20bytes, StandardCharsets.US_ASCII);
 
                         if (dataCharSet != null && !(dataCharSet.equals(""))) {
                             dbgLog.fine("RT7-20: data charset: "+ dataCharSet);
@@ -2347,7 +2344,7 @@ PrintWriter createOutputWriter (BufferedInputStream stream) throws IOException {
 
             fileOutTab = new FileOutputStream(tabDelimitedDataFile);
             
-            pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true);
+            pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true);
 
         } catch (FileNotFoundException ex) {
             ex.printStackTrace();
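
The SAV reader decodes fixed-width byte regions of the SPSS header by slicing with `Arrays.copyOfRange` and decoding as US-ASCII, exactly the sites converted above. A standalone sketch of the slicing idiom with invented field widths:

```java
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class FixedWidthFieldSketch {
    public static void main(String[] args) {
        // Hypothetical 10-byte record: a 4-byte tag followed by a 6-byte label,
        // standing in for the fixed-width regions of the SAV RecordType 1 header.
        byte[] record = "SAV Finch ".getBytes(StandardCharsets.US_ASCII);

        int offsetStart = 0;
        int offsetEnd = 4; // first field is 4 bytes wide
        String tag = new String(Arrays.copyOfRange(record, offsetStart, offsetEnd),
                StandardCharsets.US_ASCII);

        offsetStart = offsetEnd;
        offsetEnd += 6; // second field is 6 bytes wide
        String label = new String(Arrays.copyOfRange(record, offsetStart, offsetEnd),
                StandardCharsets.US_ASCII).trim(); // strip the space padding

        System.out.println(tag + "/" + label); // SAV /Finch
    }
}
```
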
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java
index 2627bc76fd9..9d61663d034 100644
--- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java
@@ -120,21 +120,21 @@ public void reRegisterHandle(DvObject dvObject) {
             
             try {
 
-                AdminRecord admin = new AdminRecord(authHandle.getBytes("UTF8"), handlenetIndex,
+                AdminRecord admin = new AdminRecord(authHandle.getBytes(StandardCharsets.UTF_8), handlenetIndex,
                         true, true, true, true, true, true,
                         true, true, true, true, true, true);
 
                 int timestamp = (int) (System.currentTimeMillis() / 1000);
 
-                HandleValue[] val = {new HandleValue(100, "HS_ADMIN".getBytes("UTF8"),
+                HandleValue[] val = {new HandleValue(100, "HS_ADMIN".getBytes(StandardCharsets.UTF_8),
                     Encoder.encodeAdminRecord(admin),
                     HandleValue.TTL_TYPE_RELATIVE, 86400,
-                    timestamp, null, true, true, true, false), new HandleValue(1, "URL".getBytes("UTF8"),
+                    timestamp, null, true, true, true, false), new HandleValue(1, "URL".getBytes(StandardCharsets.UTF_8),
-                    datasetUrl.getBytes(),
+                    datasetUrl.getBytes(StandardCharsets.UTF_8),
                     HandleValue.TTL_TYPE_RELATIVE, 86400,
                     timestamp, null, true, true, true, false)};
 
-                ModifyValueRequest req = new ModifyValueRequest(handle.getBytes("UTF8"), val, auth);
+                ModifyValueRequest req = new ModifyValueRequest(handle.getBytes(StandardCharsets.UTF_8), val, auth);
 
                 resolver.traceMessages = true;
                 AbstractResponse response = resolver.processRequest(req);
@@ -168,22 +168,22 @@ public Throwable registerNewHandle(DvObject dvObject) {
 
         try {
 
-            AdminRecord admin = new AdminRecord(authHandle.getBytes("UTF8"), handlenetIndex,
+            AdminRecord admin = new AdminRecord(authHandle.getBytes(StandardCharsets.UTF_8), handlenetIndex,
                     true, true, true, true, true, true,
                     true, true, true, true, true, true);
 
             int timestamp = (int) (System.currentTimeMillis() / 1000);
 
-            HandleValue[] val = {new HandleValue(100, "HS_ADMIN".getBytes("UTF8"),
+            HandleValue[] val = {new HandleValue(100, "HS_ADMIN".getBytes(StandardCharsets.UTF_8),
                 Encoder.encodeAdminRecord(admin),
                 HandleValue.TTL_TYPE_RELATIVE, 86400,
-                timestamp, null, true, true, true, false), new HandleValue(1, "URL".getBytes("UTF8"),
+                timestamp, null, true, true, true, false), new HandleValue(1, "URL".getBytes(StandardCharsets.UTF_8),
-                datasetUrl.getBytes(),
+                datasetUrl.getBytes(StandardCharsets.UTF_8),
                 HandleValue.TTL_TYPE_RELATIVE, 86400,
                 timestamp, null, true, true, true, false)};
 
             CreateHandleRequest req
-                    = new CreateHandleRequest(handle.getBytes("UTF8"), val, auth);
+                    = new CreateHandleRequest(handle.getBytes(StandardCharsets.UTF_8), val, auth);
 
             resolver.traceMessages = true;
             AbstractResponse response = resolver.processRequest(req);
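
The Handle hunks also show all three `getBytes` variants side by side: by charset name (checked exception, runtime lookup; `"UTF8"` only works because it is a JDK alias of UTF-8), by `Charset` constant, and with no argument at all, which silently uses the platform default charset, so `datasetUrl.getBytes()` is converted above as well. A small standalone sketch, unrelated to the Handle.net API:

```java
import java.nio.charset.StandardCharsets;

public class GetBytesSketch {
    public static void main(String[] args) throws Exception {
        String handle = "20.500.12345/ABC";

        // By name: resolved at runtime ("UTF8" is a JDK alias of UTF-8) and
        // declares the checked UnsupportedEncodingException.
        byte[] byName = handle.getBytes("UTF8");

        // By constant: no lookup, no checked exception.
        byte[] byCharset = handle.getBytes(StandardCharsets.UTF_8);

        // No argument: encodes with the platform default charset, so the
        // result can vary between machines; the variant to avoid.
        byte[] byDefault = handle.getBytes();

        System.out.println(byName.length + " " + byCharset.length + " " + byDefault.length);
    }
}
```
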
diff --git a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java
index 6e8a512902a..a8b28d2d79d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java
@@ -21,6 +21,7 @@
 import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Level;
@@ -499,7 +500,7 @@ public void showJsonPreviewNewWindow() throws IOException, WrappedResponse {
 
         OutputStream output = ec.getResponseOutputStream();
         
-        OutputStreamWriter osw = new OutputStreamWriter(output, "UTF-8");
+        OutputStreamWriter osw = new OutputStreamWriter(output, StandardCharsets.UTF_8);
         osw.write(provJsonState); //the button calling this will only be rendered if provJsonState exists (e.g. a file is uploaded)
         osw.close();
         fc.responseComplete();
diff --git a/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java b/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java
index df2e44ecb27..dbcfc039fa1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java
+++ b/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java
@@ -33,6 +33,7 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -758,7 +759,7 @@ private static String readLocalResource(String path) {
 
         // Try opening a buffered reader stream
         try {
-            resourceAsString = IOUtils.toString(resourceStream, "UTF-8");
+            resourceAsString = IOUtils.toString(resourceStream, StandardCharsets.UTF_8);
             resourceStream.close();
         } catch (IOException ex) {
             logger.warning(String.format("RDATAFileReader: (readLocalResource) resource stream from path \"%s\" was invalid", path));
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
index 244eafc899d..07b44cef3bf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
@@ -536,6 +536,9 @@ public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, bool
 				nullSafeJsonBuilder.add("entity_id", this.entityId);
 			}
 		}
+		if (!getPublicationStatuses().isEmpty()) {
+			nullSafeJsonBuilder.add("publicationStatuses", getPublicationStatusesAsJSON());
+		}
 
 		if (this.entity == null) {
 
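
`getPublicationStatusesAsJSON()` is an existing helper on `SolrSearchResult`; the hunk above only guards it behind a non-empty check so the Search API response omits the field when there is nothing to report. A plausible minimal shape for such a helper, shown purely to illustrate the array that ends up in the response (an assumption, not the actual method body):

```java
import java.util.List;

import jakarta.json.Json;
import jakarta.json.JsonArrayBuilder;

public class PublicationStatusesSketch {
    // Assumed shape of the helper called above: turn the list of status
    // strings into a JSON array builder, e.g. ["Unpublished","Draft"].
    static JsonArrayBuilder getPublicationStatusesAsJSON(List<String> publicationStatuses) {
        JsonArrayBuilder statuses = Json.createArrayBuilder();
        for (String status : publicationStatuses) {
            statuses.add(status);
        }
        return statuses;
    }

    public static void main(String[] args) {
        System.out.println(getPublicationStatusesAsJSON(
                List.of("Unpublished", "Draft")).build()); // ["Unpublished","Draft"]
    }
}
```
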
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java
index 137ae21d793..56f85436773 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.util;
 
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2LoginBackingBean;
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.nio.ByteBuffer;
 import java.security.InvalidAlgorithmParameterException;
 import java.security.InvalidKeyException;
@@ -152,7 +152,7 @@ public static String encrypt(String value, String password ) {
                     .replaceAll("/", "_");
             
         } catch (  InvalidKeyException | NoSuchAlgorithmException | BadPaddingException
-                  | IllegalBlockSizeException | NoSuchPaddingException | UnsupportedEncodingException | InvalidAlgorithmParameterException ex) {
+                  | IllegalBlockSizeException | NoSuchPaddingException | InvalidAlgorithmParameterException ex) {
             Logger.getLogger(OAuth2LoginBackingBean.class.getName()).log(Level.SEVERE, null, ex);
             throw new RuntimeException(ex);
         }
@@ -173,7 +173,7 @@ public static String decrypt(String value, String password ) {
             return new String(decrypted);
             
         } catch ( InvalidKeyException | NoSuchAlgorithmException | BadPaddingException
-                  | IllegalBlockSizeException | NoSuchPaddingException | UnsupportedEncodingException | InvalidAlgorithmParameterException ex) {
+                  | IllegalBlockSizeException | NoSuchPaddingException | InvalidAlgorithmParameterException ex) {
             Logger.getLogger(OAuth2LoginBackingBean.class.getName()).log(Level.SEVERE, null, ex);
             throw new RuntimeException(ex);
         }
@@ -209,8 +209,8 @@ public static String sanitizeFileDirectory(String value, boolean aggressively){
     }
     
     
-    private static SecretKeySpec generateKeyFromString(final String secKey) throws UnsupportedEncodingException, NoSuchAlgorithmException {
-        byte[] key = (secKey).getBytes("UTF-8");
+    private static SecretKeySpec generateKeyFromString(final String secKey) throws NoSuchAlgorithmException {
+        byte[] key = secKey.getBytes(StandardCharsets.UTF_8);
         MessageDigest sha = MessageDigest.getInstance("SHA-1");
         key = sha.digest(key);
         key = Arrays.copyOf(key, 16); // use only first 128 bits
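
`generateKeyFromString` derives an AES-128 key by hashing the passphrase with SHA-1 and keeping the first 16 bytes of the 20-byte digest. A self-contained, runnable version of that derivation (the cipher setup in `encrypt`/`decrypt` is omitted):

```java
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import javax.crypto.spec.SecretKeySpec;

public class KeyDerivationSketch {
    // Same derivation as generateKeyFromString above: SHA-1 the passphrase
    // bytes and truncate the 20-byte digest to 16 bytes for an AES-128 key.
    static SecretKeySpec generateKeyFromString(final String secKey) throws NoSuchAlgorithmException {
        byte[] key = secKey.getBytes(StandardCharsets.UTF_8);
        MessageDigest sha = MessageDigest.getInstance("SHA-1");
        key = sha.digest(key);        // 20 bytes
        key = Arrays.copyOf(key, 16); // keep only the first 128 bits
        return new SecretKeySpec(key, "AES");
    }

    public static void main(String[] args) throws NoSuchAlgorithmException {
        System.out.println(generateKeyFromString("s3kr1t").getAlgorithm()); // AES
    }
}
```
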
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java
index 29c4e8a6fb9..18ea3771301 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java
@@ -2,7 +2,7 @@
 
 import java.net.MalformedURLException;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -96,7 +96,7 @@ public static boolean isValidUrl(String signedUrl, String user, String method, S
         boolean valid = true;
         try {
             URL url = new URL(signedUrl);
-            List<NameValuePair> params = URLEncodedUtils.parse(url.getQuery(), Charset.forName("UTF-8"));
+            List<NameValuePair> params = URLEncodedUtils.parse(url.getQuery(), StandardCharsets.UTF_8);
             String hash = null;
             String dateString = null;
             String allowedMethod = null;
@@ -156,7 +156,7 @@ public static boolean isValidUrl(String signedUrl, String user, String method, S
     public static boolean hasToken(String urlString) {
         try {
             URL url = new URL(urlString);
-            List<NameValuePair> params = URLEncodedUtils.parse(url.getQuery(), Charset.forName("UTF-8"));
+            List<NameValuePair> params = URLEncodedUtils.parse(url.getQuery(), StandardCharsets.UTF_8);
             for (NameValuePair nvp : params) {
                 if (nvp.getName().equals(SIGNED_URL_TOKEN)) {
                     return true;
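
Both call sites above parse the query string with Apache's `URLEncodedUtils` and an explicit charset. A standalone sketch of the `hasToken` scan, assuming the token parameter is named `token` (the real name lives in the `SIGNED_URL_TOKEN` constant of `UrlSignerUtil`):

```java
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.List;

import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;

public class SignedUrlTokenSketch {

    static final String SIGNED_URL_TOKEN = "token"; // assumed name for this sketch

    // Same scan as hasToken above: parse the query string with an explicit
    // charset and look for the signed-URL token parameter.
    static boolean hasToken(String urlString) {
        try {
            URL url = new URL(urlString);
            List<NameValuePair> params = URLEncodedUtils.parse(url.getQuery(), StandardCharsets.UTF_8);
            for (NameValuePair nvp : params) {
                if (nvp.getName().equals(SIGNED_URL_TOKEN)) {
                    return true;
                }
            }
        } catch (MalformedURLException e) {
            // fall through: a URL we cannot parse is treated as unsigned
        }
        return false;
    }

    public static void main(String[] args) {
        System.out.println(hasToken("https://demo.example.org/api/files?user=alice&token=abc123")); // true
    }
}
```
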
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java
index b7c44014b80..e47426149f9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java
@@ -9,10 +9,10 @@
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.PrintWriter;
-import java.io.UnsupportedEncodingException;
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Paths;
 import java.security.KeyManagementException;
 import java.security.KeyStoreException;
@@ -686,12 +686,7 @@ private void createFileFromString(final String relPath, final String content)
         archiveEntry.setMethod(ZipEntry.DEFLATED);
         InputStreamSupplier supp = new InputStreamSupplier() {
             public InputStream get() {
-                try {
-                    return new ByteArrayInputStream(content.getBytes("UTF-8"));
-                } catch (UnsupportedEncodingException e) {
-                    e.printStackTrace();
-                }
-                return null;
+                return new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8));
             }
         };
 
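
With the checked `UnsupportedEncodingException` gone, the anonymous `InputStreamSupplier` above no longer needs a try/catch, and since commons-compress declares `InputStreamSupplier` with a single `get()` method, it could shrink further to a lambda. A sketch:

```java
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.compress.parallel.InputStreamSupplier;

public class SupplierSketch {
    public static void main(String[] args) {
        String content = "bag-info placeholder";

        // Equivalent to the anonymous class in the hunk above: now that
        // getBytes(StandardCharsets.UTF_8) throws no checked exception,
        // the supplier collapses to a one-line lambda.
        InputStreamSupplier supp = () -> new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8));

        System.out.println(supp.get() != null); // true
    }
}
```
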
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
index 84bc7834ab9..60ab9407269 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
@@ -11,6 +11,7 @@
 import edu.harvard.iq.dataverse.util.json.JsonPrinter;
 
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.time.LocalDate;
 import java.util.List;
 import java.util.Map;
@@ -68,7 +69,7 @@ public OREMap(DatasetVersion dv, boolean exclude) {
     }
 
     public void writeOREMap(OutputStream outputStream) throws Exception {
-        outputStream.write(getOREMap().toString().getBytes("UTF8"));
+        outputStream.write(getOREMap().toString().getBytes(StandardCharsets.UTF_8));
         outputStream.flush();
     }
 
diff --git a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties
index 97b2eed111c..630539d912e 100644
--- a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties
+++ b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties
@@ -38,3 +38,4 @@ nf=text/x-nextflow
 Rmd=text/x-r-notebook
 rb=text/x-ruby-script
 dag=text/x-dagman
+glb=model/gltf-binary
diff --git a/src/main/java/propertyFiles/MimeTypeDisplay.properties b/src/main/java/propertyFiles/MimeTypeDisplay.properties
index 8e5a251abbf..549b2b13442 100644
--- a/src/main/java/propertyFiles/MimeTypeDisplay.properties
+++ b/src/main/java/propertyFiles/MimeTypeDisplay.properties
@@ -219,6 +219,8 @@ video/quicktime=Quicktime Video
 video/webm=WebM Video
 # Network Data
 text/xml-graphml=GraphML Network Data
+# 3D Data
+model/gltf-binary=3D Model
 # Other
 application/octet-stream=Unknown
 application/x-docker-file=Docker Image File
diff --git a/src/main/java/propertyFiles/MimeTypeFacets.properties b/src/main/java/propertyFiles/MimeTypeFacets.properties
index 0dad8daff4c..0b0fde89cbd 100644
--- a/src/main/java/propertyFiles/MimeTypeFacets.properties
+++ b/src/main/java/propertyFiles/MimeTypeFacets.properties
@@ -223,6 +223,8 @@ video/webm=Video
-# (anything else that looks like image/* will also be indexed as facet type "Video")
+# (anything else that looks like video/* will also be indexed as facet type "Video")
 # Network Data
 text/xml-graphml=Network Data
+# 3D Data
+model/gltf-binary=3D Data
 # Other
 application/octet-stream=Unknown
 application/ld+json;\u0020profile\u003d\u0022http\u003a//www.w3.org/ns/json-ld#flattened\u0020http\u003a//www.w3.org/ns/json-ld#compacted\u0020https\u003a//w3id.org/ro/crate\u0022=Metadata
diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml
index 427615f2f0b..732c634205f 100644
--- a/src/main/webapp/WEB-INF/web.xml
+++ b/src/main/webapp/WEB-INF/web.xml
@@ -182,6 +182,11 @@
         <extension>webmanifest</extension>
         <mime-type>application/manifest+json</mime-type>
     </mime-mapping>
+    <mime-mapping>
+        <extension>xhtml</extension>
+        <mime-type>text/html</mime-type>
+    </mime-mapping>
+
     <!-- BEGIN Data Deposit API (SWORD v2) -->
     <!-- See also SwordConfigurationImpl for how deprecation warnings are configured -->
     <context-param>
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
index 09b60e46e7e..c0b762df2ab 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
@@ -1045,4 +1045,42 @@ public void testAddDataverse() {
                 .statusCode(BAD_REQUEST.getStatusCode())
                 .body("message", equalTo("Invalid metadata block name: \"" + invalidMetadataBlockName + "\""));
     }
+
+    @Test
+    public void testGetUserPermissionsOnDataverse() {
+        Response createUserResponse = UtilIT.createRandomUser();
+        String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        // Call for a dataverse created by the user: the creator holds every permission
+        Response getUserPermissionsOnDataverseResponse = UtilIT.getUserPermissionsOnDataverse(dataverseAlias, apiToken);
+        getUserPermissionsOnDataverseResponse.then().assertThat().statusCode(OK.getStatusCode());
+        JsonPath createdDataversePermissions = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString());
+        assertTrue(createdDataversePermissions.getBoolean("data.canAddDataverse"));
+        assertTrue(createdDataversePermissions.getBoolean("data.canAddDataset"));
+        assertTrue(createdDataversePermissions.getBoolean("data.canViewUnpublishedDataverse"));
+        assertTrue(createdDataversePermissions.getBoolean("data.canEditDataverse"));
+        assertTrue(createdDataversePermissions.getBoolean("data.canManageDataversePermissions"));
+        assertTrue(createdDataversePermissions.getBoolean("data.canPublishDataverse"));
+        assertTrue(createdDataversePermissions.getBoolean("data.canDeleteDataverse"));
+
+        // Call for the root dataverse: a random user can only add dataverses and datasets
+        getUserPermissionsOnDataverseResponse = UtilIT.getUserPermissionsOnDataverse("root", apiToken);
+        getUserPermissionsOnDataverseResponse.then().assertThat().statusCode(OK.getStatusCode());
+        JsonPath rootDataversePermissions = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString());
+        assertTrue(rootDataversePermissions.getBoolean("data.canAddDataverse"));
+        assertTrue(rootDataversePermissions.getBoolean("data.canAddDataset"));
+        assertFalse(rootDataversePermissions.getBoolean("data.canViewUnpublishedDataverse"));
+        assertFalse(rootDataversePermissions.getBoolean("data.canEditDataverse"));
+        assertFalse(rootDataversePermissions.getBoolean("data.canManageDataversePermissions"));
+        assertFalse(rootDataversePermissions.getBoolean("data.canPublishDataverse"));
+        assertFalse(rootDataversePermissions.getBoolean("data.canDeleteDataverse"));
+
+        // Call with an invalid dataverse alias
+        Response getUserPermissionsOnDataverseInvalidIdResponse = UtilIT.getUserPermissionsOnDataverse("testInvalidAlias", apiToken);
+        getUserPermissionsOnDataverseInvalidIdResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+    }
 }
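
Outside the test harness, the endpoint this test exercises is a plain GET with the usual API token header. A hypothetical client call with `java.net.http.HttpClient` (invented server URL and token, for illustration only):

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class UserPermissionsCall {
    public static void main(String[] args) throws Exception {
        // Hypothetical server and token, for illustration only.
        String serverUrl = "https://demo.dataverse.org";
        String apiToken = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx";
        String alias = "root";

        // Same call the integration test above makes through UtilIT:
        // GET /api/dataverses/{identifier}/userPermissions with the API token header.
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(serverUrl + "/api/dataverses/" + alias + "/userPermissions"))
                .header("X-Dataverse-key", apiToken)
                .GET()
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // Expect JSON booleans such as data.canAddDataverse and data.canAddDataset.
        System.out.println(response.statusCode() + " " + response.body());
    }
}
```
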
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
index 6e4fd5b0bb3..9fa13bb2939 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
@@ -25,7 +25,6 @@
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
 import java.awt.image.BufferedImage;
 import java.io.IOException;
-import static java.lang.Thread.sleep;
 import javax.imageio.ImageIO;
 import static jakarta.ws.rs.core.Response.Status.CREATED;
 import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
@@ -113,6 +112,7 @@ public void testSearchPermisions() throws InterruptedException {
                 .body("data.total_count", CoreMatchers.is(1))
                 .body("data.count_in_response", CoreMatchers.is(1))
                 .body("data.items[0].name", CoreMatchers.is("Darwin's Finches"))
+                .body("data.items[0].publicationStatuses", CoreMatchers.hasItems("Unpublished", "Draft"))
                 .statusCode(OK.getStatusCode());
 
         Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken1);
@@ -599,7 +599,7 @@ public void testDatasetThumbnail() {
 
         String datasetLogo = "src/main/webapp/resources/images/cc0.png";
         File datasetLogoFile = new File(datasetLogo);
-        String datasetLogoAsBase64 = datasetLogoAsBase64 = ImageThumbConverter.generateImageThumbnailFromFileAsBase64(datasetLogoFile, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
+        String datasetLogoAsBase64 = ImageThumbConverter.generateImageThumbnailFromFileAsBase64(datasetLogoFile, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
 
         if (datasetLogoAsBase64 == null) {
             Logger.getLogger(SearchIT.class.getName()).log(Level.SEVERE, "Failed to generate a base64 thumbnail from the file dataverseproject.png");
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index c24ff599d9c..42f590a8adb 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -3821,6 +3821,12 @@ static Response getUserPermissionsOnDataset(String datasetId, String apiToken) {
                 .get("/api/datasets/" + datasetId + "/userPermissions");
     }
 
+    static Response getUserPermissionsOnDataverse(String dataverseAlias, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/dataverses/" + dataverseAlias + "/userPermissions");
+    }
+
     static Response getCanDownloadAtLeastOneFile(String datasetId, String versionId, String apiToken) {
         return given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java
index 4fb29869db7..d7deaa2dbc1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java
@@ -225,6 +225,29 @@ public void testSetPublicationStatuses14() {
         assertTrue(this.solrSearchResult.isDeaccessionedState());
     }
 
+    @Test
+    public void testSetPublicationStatusesJson() {
+
+        boolean showRelevance = false;
+        boolean showEntityIds = false;
+        boolean showApiUrls = false;
+
+        SolrSearchResult result01 = new SolrSearchResult("myQuery", "myName");
+        result01.setType(SearchConstants.DATAVERSES);
+        result01.setPublicationStatuses(List.of("Unpublished", "Draft"));
+        JsonObjectBuilder actual01 = result01.json(showRelevance, showEntityIds, showApiUrls);
+        JsonObject actual = actual01.build();
+        System.out.println("actual: " + actual);
+
+        JsonObjectBuilder expResult = Json.createObjectBuilder();
+        expResult.add("type", SearchConstants.DATAVERSE);
+        expResult.add("publicationStatuses", Json.createArrayBuilder().add("Unpublished").add("Draft").build());
+        JsonObject expected = expResult.build();
+        System.out.println("expect: " + expected);
+
+        assertEquals(expected, actual);
+    }
+
     @Test
     public void testJson() {