diff --git a/.env b/.env
index d5cffcec0aa..9d604630073 100644
--- a/.env
+++ b/.env
@@ -1,5 +1,5 @@
APP_IMAGE=gdcc/dataverse:unstable
-POSTGRES_VERSION=16
+POSTGRES_VERSION=17
DATAVERSE_DB_USER=dataverse
SOLR_VERSION=9.3.0
-SKIP_DEPLOY=0
\ No newline at end of file
+SKIP_DEPLOY=0
diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml
index 028f0140cc9..eca8416732a 100644
--- a/.github/workflows/deploy_beta_testing.yml
+++ b/.github/workflows/deploy_beta_testing.yml
@@ -5,14 +5,18 @@ on:
branches:
- develop
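+# Serialize beta deployments: only one runs at a time, and in-progress runs finish rather than being cancelled.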
+concurrency:
+ group: deploy-beta-testing
+ cancel-in-progress: false
+
jobs:
build:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- - uses: actions/setup-java@v3
+ - uses: actions/setup-java@v4
with:
distribution: 'zulu'
java-version: '17'
@@ -32,7 +36,7 @@ jobs:
run: echo "war_file=$(ls *.war | head -1)">> $GITHUB_ENV
- name: Upload war artifact
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: built-app
path: ./target/${{ env.war_file }}
@@ -42,10 +46,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Download war artifact
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
with:
name: built-app
path: ./
diff --git a/.github/workflows/guides_build_sphinx.yml b/.github/workflows/guides_build_sphinx.yml
index 992f30f2872..86b59b11d35 100644
--- a/.github/workflows/guides_build_sphinx.yml
+++ b/.github/workflows/guides_build_sphinx.yml
@@ -11,6 +11,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- - uses: OdumInstitute/sphinx-action@master
+ - uses: uncch-rdmc/sphinx-action@master
with:
docs-folder: "doc/sphinx-guides/"
diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml
index a94b17a67ba..45180ea7aec 100644
--- a/.github/workflows/maven_unit_test.yml
+++ b/.github/workflows/maven_unit_test.yml
@@ -32,9 +32,9 @@ jobs:
steps:
# TODO: As part of #10618 change to setup-maven custom action
# Basic setup chores
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Set up JDK ${{ matrix.jdk }}
- uses: actions/setup-java@v3
+ uses: actions/setup-java@v4
with:
java-version: ${{ matrix.jdk }}
distribution: temurin
@@ -57,7 +57,7 @@ jobs:
# Upload the built war file. For download, it will be wrapped in a ZIP by GitHub.
# See also https://github.com/actions/upload-artifact#zipped-artifact-downloads
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: dataverse-java${{ matrix.jdk }}.war
path: target/dataverse*.war
@@ -67,7 +67,7 @@ jobs:
- run: |
tar -cvf java-builddir.tar target
tar -cvf java-m2-selection.tar ~/.m2/repository/io/gdcc/dataverse-*
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: java-artifacts
path: |
@@ -98,16 +98,16 @@ jobs:
steps:
# TODO: As part of #10618 change to setup-maven custom action
# Basic setup chores
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Set up JDK ${{ matrix.jdk }}
- uses: actions/setup-java@v3
+ uses: actions/setup-java@v4
with:
java-version: ${{ matrix.jdk }}
distribution: temurin
cache: maven
# Get the build output from the unit test job
- - uses: actions/download-artifact@v3
+ - uses: actions/download-artifact@v4
with:
name: java-artifacts
- run: |
@@ -119,7 +119,7 @@ jobs:
# Wrap up and send to coverage job
- run: tar -cvf java-reportdir.tar target/site
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: java-reportdir
path: java-reportdir.tar
@@ -132,15 +132,15 @@ jobs:
steps:
# TODO: As part of #10618 change to setup-maven custom action
# Basic setup chores
- - uses: actions/checkout@v3
- - uses: actions/setup-java@v3
+ - uses: actions/checkout@v4
+ - uses: actions/setup-java@v4
with:
java-version: '17'
distribution: temurin
cache: maven
# Get the build output from the integration test job
- - uses: actions/download-artifact@v3
+ - uses: actions/download-artifact@v4
with:
name: java-reportdir
- run: tar -xvf java-reportdir.tar
diff --git a/conf/solr/schema.xml b/conf/solr/schema.xml
index 2aed50e9998..d5c789c7189 100644
--- a/conf/solr/schema.xml
+++ b/conf/solr/schema.xml
@@ -234,6 +234,7 @@
+
- 6.4
+ 6.5
17
UTF-8
@@ -149,7 +149,7 @@
6.2024.6
- 42.7.2
+ 42.7.4
9.4.1
1.12.748
26.30.0
diff --git a/pom.xml b/pom.xml
index b2344989569..5ecbd7059c1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -27,7 +27,7 @@
war
1.2.18.4
- 9.22.1
+ 10.19.0
1.20.1
5.2.1
2.9.1
@@ -188,6 +188,11 @@
flyway-core
${flyway.version}
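+ <!-- Flyway 10 moved database-specific support into separate artifacts; PostgreSQL requires this companion module -->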
+
+ org.flywaydb
+ flyway-database-postgresql
+ ${flyway.version}
+
org.eclipse.persistence
@@ -1004,7 +1009,7 @@
true
docker-build
- 16
+ 17
gdcc/dataverse:${app.image.tag}
unstable
diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java
index 2cb6f27c3e4..d880da5b4a8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java
+++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java
@@ -2,7 +2,7 @@
import java.io.Serializable;
import java.util.List;
import jakarta.persistence.*;
-import org.hibernate.validator.constraints.NotBlank;
+import jakarta.validation.constraints.NotBlank;
/**
*
@@ -41,7 +41,7 @@ public void setId(Long id) {
private String questionType;
@NotBlank(message = "{custom.questiontext}")
- @Column( nullable = false )
+ @Column( nullable = false, columnDefinition = "TEXT")
private String questionString;
private boolean required;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
index 3977023fc4b..02fb59751fb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
@@ -792,6 +792,7 @@ private GlobalId getPIDFrom(DatasetVersion dsv, DvObject dv) {
if (!dsv.getDataset().isHarvested()
|| HarvestingClient.HARVEST_STYLE_VDC.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle())
|| HarvestingClient.HARVEST_STYLE_ICPSR.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle())
+ || HarvestingClient.HARVEST_STYLE_DEFAULT.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle())
|| HarvestingClient.HARVEST_STYLE_DATAVERSE
.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle())) {
if(!isDirect()) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index 98ac8ff387f..937f5693511 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -1407,8 +1407,7 @@ public UploadSessionQuotaLimit getUploadSessionQuotaLimit(DvObjectContainer parent) {
}
public boolean isInReleasedVersion(Long id) {
- Query query = em.createQuery("SELECT fm.id FROM FileMetadata fm, DvObject dvo WHERE fm.datasetVersion.id=(SELECT dv.id FROM DatasetVersion dv WHERE dv.dataset.id=dvo.owner.id and dv.versionState=edu.harvard.iq.dataverse.DatasetVersion.VersionState.RELEASED ORDER BY dv.versionNumber DESC, dv.minorVersionNumber DESC LIMIT 1) AND dvo.id=fm.dataFile.id AND fm.dataFile.id=:fid");
- query.setParameter("fid", id);
+ Query query = em.createNativeQuery("SELECT fm.id FROM filemetadata fm WHERE fm.datasetversion_id=(SELECT dv.id FROM datasetversion dv, dvobject dvo WHERE dv.dataset_id=dvo.owner_id AND dv.versionState='RELEASED' and dvo.id=" + id + " ORDER BY dv.versionNumber DESC, dv.minorVersionNumber DESC LIMIT 1) AND fm.datafile_id=" + id);
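+ // Native replacement for the JPQL above: finds the filemetadata row linking this datafile to the most recent RELEASED version of its dataset; getSingleResult() throws if none exists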
try {
query.getSingleResult();
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
index 40ed491a302..78579b1de21 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
@@ -483,8 +483,17 @@ public Date getMostRecentMajorVersionReleaseDate() {
if (this.isHarvested()) {
return getVersions().get(0).getReleaseTime();
} else {
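+ // Remember the major version number of the newest released version so that a deaccessioned x.0 release of the same series can still supply its release date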
+ Long majorVersion = null;
for (DatasetVersion version : this.getVersions()) {
- if (version.isReleased() && version.getMinorVersionNumber().equals((long) 0)) {
+ if (version.isReleased()) {
+ if (version.getMinorVersionNumber().equals((long) 0)) {
+ return version.getReleaseTime();
+ } else if (majorVersion == null) {
+ majorVersion = version.getVersionNumber();
+ }
+ } else if (version.isDeaccessioned() && majorVersion != null
+ && majorVersion.longValue() == version.getVersionNumber().longValue()
+ && version.getMinorVersionNumber().equals((long) 0)) {
return version.getReleaseTime();
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
index 91150b79505..ded7c83de62 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
@@ -891,6 +891,10 @@ public List<DatasetFieldType> findAllDisplayedOnCreateInMetadataBlock(MetadataBlock metadataBlock) {
}
public List<DatasetFieldType> findAllInMetadataBlockAndDataverse(MetadataBlock metadataBlock, Dataverse dataverse, boolean onlyDisplayedOnCreate) {
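+ // A dataverse that does not define its own metadata blocks inherits them from its owner, so delegate upward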
+ if (!dataverse.isMetadataBlockRoot() && dataverse.getOwner() != null) {
+ return findAllInMetadataBlockAndDataverse(metadataBlock, dataverse.getOwner(), onlyDisplayedOnCreate);
+ }
+
CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
CriteriaQuery<DatasetFieldType> criteriaQuery = criteriaBuilder.createQuery(DatasetFieldType.class);
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 8522f2733c7..33a093c8044 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -101,6 +101,7 @@
import jakarta.faces.view.ViewScoped;
import jakarta.inject.Inject;
import jakarta.inject.Named;
+import jakarta.persistence.OptimisticLockException;
import org.apache.commons.lang3.StringUtils;
import org.primefaces.event.FileUploadEvent;
@@ -1935,13 +1936,13 @@ public void updateOwnerDataverse() {
if (selectedHostDataverse != null && selectedHostDataverse.getId() != null) {
ownerId = selectedHostDataverse.getId();
dataset.setOwner(selectedHostDataverse);
- logger.info("New host dataverse id: "+ownerId);
+ logger.info("New host dataverse id: " + ownerId);
// discard the dataset already created
//If a global ID was already assigned, as is true for direct upload, keep it (if files were already uploaded, they are at the path corresponding to the existing global id)
GlobalId gid = dataset.getGlobalId();
dataset = new Dataset();
- if(gid!=null) {
- dataset.setGlobalId(gid);
+ if (gid != null) {
+ dataset.setGlobalId(gid);
}
// initiate from scratch: (isolate the creation of a new dataset in its own method?)
@@ -2287,8 +2288,17 @@ private String init(boolean initFull) {
JsfHelper.addWarningMessage(message);
}
}
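+ // Anonymized (preview URL) access: clear the breadcrumbs so the parent collection is not revealed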
+ if(isAnonymizedAccess()){
+ dataverseHeaderFragment.setBreadcrumbs(new ArrayList<>());
+ }
return null;
}
+
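+ // Called on page load; initializes breadcrumbs only when access is not anonymized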
+ public void viewActionInitBreadcrumbs(){
+ if(!isAnonymizedAccess()){
+ dataverseHeaderFragment.initBreadcrumbs(dataset);
+ }
+ }
private void displayWorkflowComments() {
List<WorkflowComment> comments = workingVersion.getWorkflowComments();
@@ -2888,6 +2898,9 @@ private String releaseDataset(boolean minor) {
// the lock info system.
JsfHelper.addErrorMessage(ex.getLocalizedMessage());
}
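+ // A concurrent publish attempt surfaces as an OptimisticLockException in the cause chain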
+ if (ex.getCause() instanceof OptimisticLockException) {
+ JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelPublishError"));
+ }
logger.severe(ex.getMessage());
}
@@ -4002,6 +4015,10 @@ public String save() {
Throwable cause = ex;
while (cause.getCause()!= null) {
cause = cause.getCause();
+ if (cause instanceof OptimisticLockException) {
+ JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelUpdateError"));
+ return null;
+ }
error.append(cause).append(" ");
error.append(cause.getMessage()).append(" ");
}
@@ -4011,6 +4028,15 @@ public String save() {
} catch (CommandException ex) {
//FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString()));
logger.log(Level.SEVERE, "CommandException, when attempting to update the dataset: " + ex.getMessage(), ex);
+ Throwable cause = ex;
+ while (cause.getCause()!= null) {
+ cause = cause.getCause();
+ logger.info("Cause is: " + cause.getClass().getName() + ", Message: " + cause.getMessage());
+ if (cause instanceof OptimisticLockException) {
+ JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelUpdateError"));
+ return null;
+ }
+ }
populateDatasetUpdateFailureMessage();
return returnToDraftVersion();
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
index 0433c425fd2..a7bbc7c3ad4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
@@ -102,6 +102,10 @@ public int compare(DatasetVersion o1, DatasetVersion o2) {
}
}
};
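+ /**
+  * Convenience wrapper that computes the difference between the two versions
+  * and returns it as JSON via DatasetVersionDifference.compareVersionsAsJson().
+  */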
+ public static final JsonObjectBuilder compareVersions(DatasetVersion originalVersion, DatasetVersion newVersion) {
+ DatasetVersionDifference diff = new DatasetVersionDifference(newVersion, originalVersion);
+ return diff.compareVersionsAsJson();
+ }
// TODO: Determine the UI implications of various version states
//IMPORTANT: If you add a new value to this enum, you will also have to modify the
@@ -1390,7 +1394,10 @@ public List<DatasetRelPublication> getRelatedPublications() {
relatedPublication.setIdNumber(subField.getDisplayValue());
break;
case DatasetFieldConstant.publicationRelationType:
- relatedPublication.setRelationType(subField.getDisplayValue());
+ List<String> values = subField.getValues_nondisplay();
+ if (!values.isEmpty()) {
+ relatedPublication.setRelationType(values.get(0)); //only one value allowed
+ }
break;
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java
index eca0c84ae84..c5d6c31386c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java
@@ -5,16 +5,24 @@
import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil;
import edu.harvard.iq.dataverse.util.StringUtil;
+import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
import java.util.Set;
import java.util.logging.Logger;
+import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
import org.apache.commons.lang3.StringUtils;
import edu.harvard.iq.dataverse.util.BundleUtil;
import java.util.Arrays;
import java.util.Date;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
@@ -34,14 +42,13 @@ public final class DatasetVersionDifference {
private List<FileMetadata> addedFiles = new ArrayList<>();
private List<FileMetadata> removedFiles = new ArrayList<>();
private List<FileMetadata> changedFileMetadata = new ArrayList<>();
+ private Map<FileMetadata, Map<String, List<String>>> changedFileMetadataDiff = new HashMap<>();
private List<FileMetadata> changedVariableMetadata = new ArrayList<>();
private List<FileMetadata[]> replacedFiles = new ArrayList<>();
private List<String[]> changedTermsAccess = new ArrayList<>();
private List
+
+
+
+
+
+ #{privateUrlLink}
+
+
+
+
+
+
#{bundle['dataset.privateurl.absent']}
@@ -1200,17 +1273,11 @@
-
+
#{bundle['dataset.privateurl.cannotCreate']}
@@ -1224,7 +1291,10 @@
#{bundle['dataset.privateurl.disableConfirmationText']}
-
-
\ No newline at end of file
+
diff --git a/src/main/webapp/previewurl.xhtml b/src/main/webapp/previewurl.xhtml
new file mode 100644
index 00000000000..980d775506b
--- /dev/null
+++ b/src/main/webapp/previewurl.xhtml
@@ -0,0 +1,24 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/main/webapp/resources/iqbs/messages.xhtml b/src/main/webapp/resources/iqbs/messages.xhtml
index bd17cf34d21..f8e1f5e8e9d 100644
--- a/src/main/webapp/resources/iqbs/messages.xhtml
+++ b/src/main/webapp/resources/iqbs/messages.xhtml
@@ -63,7 +63,7 @@
Server:
- #{systemConfig.dataverseServer}
+ #{systemConfig.dataverseSiteUrl}
#{msg.rendered()}
diff --git a/src/main/webapp/resources/js/mydata.js b/src/main/webapp/resources/js/mydata.js
index 899ba6637e2..c731d6772ac 100644
--- a/src/main/webapp/resources/js/mydata.js
+++ b/src/main/webapp/resources/js/mydata.js
@@ -391,7 +391,7 @@ function submit_my_data_search(){
// --------------------------------
// ah, but with the horribly coded xhtml page, we can't use form tags...
//var formData = $('#mydata_filter_form').serialize();
- var formData = $("#my_data_filter_column :input").serialize() + '&' + $("#my_data_filter_column2 :input").serialize() ;
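+ // include the third filter column, previously omitted from the serialized search params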
+ var formData = $("#my_data_filter_column :input").serialize() + '&' + $("#my_data_filter_column3 :input").serialize()+ '&' + $("#my_data_filter_column2 :input").serialize() ;
// For debugging, show the search params
if (MYDATA_DEBUG_ON){
diff --git a/src/main/webapp/search-include-fragment.xhtml b/src/main/webapp/search-include-fragment.xhtml
index 505fe681363..fc224443a8e 100644
--- a/src/main/webapp/search-include-fragment.xhtml
+++ b/src/main/webapp/search-include-fragment.xhtml
@@ -582,7 +582,7 @@
-
+
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java
index a235c9b0061..588bf5294e5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java
@@ -63,17 +63,17 @@ public void testIsSanitizeHtml() {
//if textbox then sanitize - allow tags
instance.setFieldType(DatasetFieldType.FieldType.TEXTBOX);
result = instance.isSanitizeHtml();
- assertEquals(true, result);
+ assertTrue(result);
//if textbox then don't sanitize - allow tags
instance.setFieldType(DatasetFieldType.FieldType.EMAIL);
result = instance.isSanitizeHtml();
- assertEquals(false, result);
+ assertFalse(result);
//URL, too
instance.setFieldType(DatasetFieldType.FieldType.URL);
result = instance.isSanitizeHtml();
- assertEquals(true, result);
+ assertTrue(result);
}
@Test
@@ -102,7 +102,7 @@ public void testIsEscapeOutputText(){
//URL, too
instance.setFieldType(DatasetFieldType.FieldType.URL);
result = instance.isEscapeOutputText();
- assertEquals(false, result);
+ assertFalse(result);
}
@@ -121,7 +121,7 @@ public void testGetSolrField(){
parent.setAllowMultiples(true);
instance.setParentDatasetFieldType(parent);
solrField = instance.getSolrField();
- assertEquals(true, solrField.isAllowedToBeMultivalued());
+ assertTrue(solrField.isAllowedToBeMultivalued());
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java
index 3f85acc1f87..b753f534c6b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java
@@ -52,63 +52,63 @@ public void testIsValid() {
//Make string too long - should fail.
value.setValue("asdfgX");
result = instance.isValid(value, ctx);
- assertEquals(false, result);
+ assertFalse(result);
//Make string too long - should fail.
value.setValue("asdf");
result = instance.isValid(value, ctx);
- assertEquals(false, result);
+ assertFalse(result);
//Now lets try Dates
dft.setFieldType(DatasetFieldType.FieldType.DATE);
dft.setValidationFormat(null);
value.setValue("1999AD");
result = instance.isValid(value, ctx);
- assertEquals(true, result);
+ assertTrue(result);
value.setValue("44BCE");
result = instance.isValid(value, ctx);
- assertEquals(true, result);
+ assertTrue(result);
value.setValue("2004-10-27");
result = instance.isValid(value, ctx);
- assertEquals(true, result);
+ assertTrue(result);
value.setValue("2002-08");
result = instance.isValid(value, ctx);
- assertEquals(true, result);
+ assertTrue(result);
value.setValue("[1999?]");
result = instance.isValid(value, ctx);
- assertEquals(true, result);
+ assertTrue(result);
value.setValue("Blergh");
result = instance.isValid(value, ctx);
- assertEquals(false, result);
+ assertFalse(result);
//Float
dft.setFieldType(DatasetFieldType.FieldType.FLOAT);
value.setValue("44");
result = instance.isValid(value, ctx);
- assertEquals(true, result);
+ assertTrue(result);
value.setValue("44 1/2");
result = instance.isValid(value, ctx);
- assertEquals(false, result);
+ assertFalse(result);
//Integer
dft.setFieldType(DatasetFieldType.FieldType.INT);
value.setValue("44");
result = instance.isValid(value, ctx);
- assertEquals(true, result);
+ assertTrue(result);
value.setValue("-44");
result = instance.isValid(value, ctx);
- assertEquals(true, result);
+ assertTrue(result);
value.setValue("12.14");
result = instance.isValid(value, ctx);
- assertEquals(false, result);
+ assertFalse(result);
}
@Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java
index 2153a336303..687e0af5b81 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java
@@ -10,6 +10,7 @@
import static org.junit.jupiter.api.Assertions.*;
import java.util.ArrayList;
+import java.util.Date;
import java.util.List;
/**
@@ -240,5 +241,41 @@ public void datasetShouldBeDeaccessionedWithDeaccessionedAndDeaccessionedVersion
assertTrue(dataset.isDeaccessioned());
}
-
+
+ @Test
+ public void testGetMostRecentMajorVersionReleaseDateWithDeaccessionedVersions() {
+ List<DatasetVersion> versionList = new ArrayList<>();
+
+ long ver = 5;
+ // 5.2
+ DatasetVersion relVersion = new DatasetVersion();
+ relVersion.setVersionState(VersionState.RELEASED);
+ relVersion.setMinorVersionNumber(2L);
+ relVersion.setVersionNumber(ver);
+ versionList.add(relVersion);
+
+ // 5.1
+ relVersion = new DatasetVersion();
+ relVersion.setVersionState(VersionState.DEACCESSIONED);
+ relVersion.setMinorVersionNumber(1L);
+ relVersion.setVersionNumber(ver);
+ versionList.add(relVersion);
+
+ // 5.0, 4.0, 3.0, 2.0, 1.0
+ while (ver > 0) {
+ DatasetVersion deaccessionedVersion = new DatasetVersion();
+ deaccessionedVersion.setVersionState(VersionState.DEACCESSIONED);
+ // only add an actual date to v5.0 so the assertNotNull will only pass if this version's date is returned
+ deaccessionedVersion.setReleaseTime((ver == 5) ? new Date() : null);
+ deaccessionedVersion.setMinorVersionNumber(0L);
+ deaccessionedVersion.setVersionNumber(ver--);
+ versionList.add(deaccessionedVersion);
+ }
+
+ Dataset dataset = new Dataset();
+ dataset.setVersions(versionList);
+
+ Date releaseDate = dataset.getMostRecentMajorVersionReleaseDate();
+ assertNotNull(releaseDate);
+ }
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java
new file mode 100644
index 00000000000..0ba8dde8aa0
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java
@@ -0,0 +1,460 @@
+package edu.harvard.iq.dataverse;
+
+import edu.harvard.iq.dataverse.branding.BrandingUtilTest;
+import edu.harvard.iq.dataverse.datavariable.VariableMetadata;
+import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil;
+import edu.harvard.iq.dataverse.license.License;
+import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+
+import java.net.URI;
+import java.sql.Timestamp;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+import java.util.logging.Logger;
+
+import static org.assertj.core.util.DateUtil.now;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+import io.restassured.path.json.JsonPath;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Test;
+
+public class DatasetVersionDifferenceTest {
+
+ private static final Logger logger = Logger.getLogger(DatasetVersionDifferenceTest.class.getCanonicalName());
+
+ private static List<FileMetadata> addedFiles;
+ private static List<FileMetadata> removedFiles;
+ private static List<FileMetadata> changedFileMetadata;
+ private static List<FileMetadata> changedVariableMetadata;
+ private static List<FileMetadata[]> replacedFiles;
+ private static Long fileId = Long.valueOf(0);
+
+ @BeforeAll
+ public static void setUp() {
+ BrandingUtilTest.setupMocks();
+ }
+
+ @AfterAll
+ public static void tearDown() {
+ BrandingUtilTest.tearDownMocks();
+ }
+
+ @Test
+ public void testDifferencing() {
+ Dataset dataset = new Dataset();
+ License license = new License("CC0 1.0",
+ "You can copy, modify, distribute and perform the work, even for commercial purposes, all without asking permission.",
+ URI.create("http://creativecommons.org/publicdomain/zero/1.0"), URI.create("/resources/images/cc0.png"),
+ true, 1L);
+ license.setDefault(true);
+ dataset.setProtocol("doi");
+ dataset.setAuthority("10.5072/FK2");
+ dataset.setIdentifier("LK0D1H");
+ DatasetVersion datasetVersion = new DatasetVersion();
+ datasetVersion.setDataset(dataset);
+ datasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED);
+ datasetVersion.setVersionNumber(1L);
+ datasetVersion.setTermsOfUseAndAccess(new TermsOfUseAndAccess());
+ DatasetVersion datasetVersion2 = new DatasetVersion();
+ datasetVersion2.setDataset(dataset);
+ datasetVersion2.setVersionState(DatasetVersion.VersionState.DRAFT);
+
+ // Published version's two files
+ DataFile dataFile = new DataFile();
+ dataFile.setId(1L);
+ DataFile dataFile2 = new DataFile();
+ dataFile2.setId(2L);
+
+ FileMetadata fileMetadata1 = createFileMetadata(10L, datasetVersion, dataFile, "file1.txt");
+ fileMetadata1.setLabel("file1.txt");
+
+ FileMetadata fileMetadata2 = createFileMetadata(20L, datasetVersion, dataFile2, "file2.txt");
+
+ // Draft version - same two files with one label change
+ FileMetadata fileMetadata3 = fileMetadata1.createCopy();
+ fileMetadata3.setId(30L);
+
+ FileMetadata fileMetadata4 = fileMetadata2.createCopy();
+ fileMetadata4.setLabel("file3.txt");
+ fileMetadata4.setId(40L);
+
+ List<FileMetadata> fileMetadatas = new ArrayList<>(Arrays.asList(fileMetadata1, fileMetadata2));
+ datasetVersion.setFileMetadatas(fileMetadatas);
+ List<FileMetadata> fileMetadatas2 = new ArrayList<>(Arrays.asList(fileMetadata3, fileMetadata4));
+ datasetVersion2.setFileMetadatas(fileMetadatas2);
+
+ SimpleDateFormat dateFmt = new SimpleDateFormat("yyyyMMdd");
+ Date publicationDate;
+ try {
+ publicationDate = dateFmt.parse("19551105");
+ datasetVersion.setReleaseTime(publicationDate);
+ dataset.setPublicationDate(new Timestamp(publicationDate.getTime()));
+ } catch (ParseException e) {
+ // cannot happen: the hard-coded date literal always parses
+ e.printStackTrace();
+ }
+ List<DatasetVersion> versionList = new ArrayList<>(Arrays.asList(datasetVersion, datasetVersion2));
+ dataset.setVersions(versionList);
+
+ // One file has a changed label
+ List<FileMetadata> expectedAddedFiles = new ArrayList<>();
+ List<FileMetadata> expectedRemovedFiles = new ArrayList<>();
+ List<FileMetadata> expectedChangedFileMetadata = Arrays.asList(fileMetadata2, fileMetadata4);
+ List<FileMetadata> expectedChangedVariableMetadata = new ArrayList<>();
+ List<FileMetadata[]> expectedReplacedFiles = new ArrayList<>();
+ List<String[]> changedTerms = new ArrayList<>();
+
+ compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles,
+ expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms);
+ // change label for first file as well
+ fileMetadata3.setLabel("file1_updated.txt");
+ expectedChangedFileMetadata = Arrays.asList(fileMetadata1, fileMetadata3, fileMetadata2, fileMetadata4);
+ compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles,
+ expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms);
+ // Add one change to variable metadata
+ fileMetadata3.setVariableMetadatas(Arrays.asList(new VariableMetadata()));
+ expectedChangedVariableMetadata = Arrays.asList(fileMetadata1, fileMetadata3);
+ compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles,
+ expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms);
+ // Replaced File
+ DataFile replacingFile = new DataFile();
+ replacingFile.setId(3L);
+ replacingFile.setPreviousDataFileId(1L);
+ fileMetadata3.setDataFile(replacingFile);
+ expectedChangedFileMetadata = Arrays.asList(fileMetadata2, fileMetadata4);
+ expectedChangedVariableMetadata = new ArrayList<>();
+
+ FileMetadata[] filePair = new FileMetadata[2];
+ filePair[0] = fileMetadata1;
+ filePair[1] = fileMetadata3;
+ expectedReplacedFiles = new ArrayList<>();
+ expectedReplacedFiles.add(filePair);
+ compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles,
+ expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms);
+
+ // Add a new file
+ DataFile newFile = new DataFile();
+ newFile.setId(3L);
+ FileMetadata fileMetadata5 = createFileMetadata(50L, datasetVersion2, newFile, "newFile.txt");
+ datasetVersion2.getFileMetadatas().add(fileMetadata5);
+ expectedAddedFiles = Arrays.asList(fileMetadata5);
+ compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles,
+ expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms);
+
+ // Remove a file
+ datasetVersion2.getFileMetadatas().remove(fileMetadata4);
+ expectedRemovedFiles = Arrays.asList(fileMetadata2);
+ expectedChangedFileMetadata = new ArrayList<>();
+ compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles,
+ expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms);
+
+ // Set the published version's TermsOfUseAndAccess to a non-null value
+ TermsOfUseAndAccess termsOfUseAndAccess = new TermsOfUseAndAccess();
+ datasetVersion.setTermsOfUseAndAccess(termsOfUseAndAccess);
+
+ compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles,
+ expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms);
+
+ // Set the draft version's TermsOfUseAndAccess to a non-null value
+
+ datasetVersion2.setTermsOfUseAndAccess(new TermsOfUseAndAccess());
+
+ compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles,
+ expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms);
+
+ // Set a term field
+
+ datasetVersion2.getTermsOfUseAndAccess().setTermsOfUse("Terms o' Use");
+ String[] termField = new String[] {
+ BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"), "", "Terms o' Use" };
+ changedTerms.add(termField);
+
+ compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles,
+ expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms);
+
+ // Set a term field in the original version
+
+ datasetVersion.getTermsOfUseAndAccess().setDisclaimer("Not our fault");
+ String[] termField2 = new String[] {
+ BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"),
+ "Not our fault", "" };
+ changedTerms.add(termField2);
+
+ compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles,
+ expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms);
+
+ }
+
+ private FileMetadata createFileMetadata(long id, DatasetVersion datasetVersion, DataFile dataFile, String label) {
+ FileMetadata fileMetadata = new FileMetadata();
+ fileMetadata.setId(id);
+ fileMetadata.setDatasetVersion(datasetVersion);
+ fileMetadata.setDataFile(dataFile);
+ fileMetadata.setLabel(label);
+ fileMetadata.setCategories(new ArrayList<>());
+ return fileMetadata;
+ }
+
+ /**
+  * compareResults currently checks the output of the DatasetVersionDifference
+  * class against both the manually created expected results passed in as
+  * parameters and the results of the less efficient algorithm it replaces.
+  * Once we're collectively convinced that the tests here are correct (i.e. the
+  * manually created expected* parameters are set correctly for each use case),
+  * we could drop running the originalCalculateDifference method and just
+  * compare with the expected* results.
+  */
+ private void compareResults(DatasetVersion datasetVersion, DatasetVersion datasetVersion2,
+         List<FileMetadata> expectedAddedFiles, List<FileMetadata> expectedRemovedFiles,
+         List<FileMetadata> expectedChangedFileMetadata, List<FileMetadata> expectedChangedVariableMetadata,
+         List<FileMetadata[]> expectedReplacedFiles, List<String[]> changedTerms) {
+ DatasetVersionDifference diff = new DatasetVersionDifference(datasetVersion2, datasetVersion);
+ // Run the original algorithm
+ originalCalculateDifference(datasetVersion2, datasetVersion);
+ // Compare the old and new algorithms
+ assertEquals(addedFiles, diff.getAddedFiles());
+ assertEquals(removedFiles, diff.getRemovedFiles());
+ assertEquals(changedFileMetadata, diff.getChangedFileMetadata());
+ assertEquals(changedVariableMetadata, diff.getChangedVariableMetadata());
+ assertEquals(replacedFiles.size(), diff.getReplacedFiles().size());
+ for (int i = 0; i < replacedFiles.size(); i++) {
+ assertEquals(replacedFiles.get(i)[0], diff.getReplacedFiles().get(i)[0]);
+ assertEquals(replacedFiles.get(i)[1], diff.getReplacedFiles().get(i)[1]);
+ }
+
+ // Also compare the new algorithm with the manually created expected* values for
+ // the test cases
+ assertEquals(expectedAddedFiles, diff.getAddedFiles());
+ assertEquals(expectedRemovedFiles, diff.getRemovedFiles());
+ assertEquals(expectedChangedFileMetadata, diff.getChangedFileMetadata());
+ assertEquals(expectedChangedVariableMetadata, diff.getChangedVariableMetadata());
+ assertEquals(expectedReplacedFiles.size(), diff.getReplacedFiles().size());
+ for (int i = 0; i < expectedReplacedFiles.size(); i++) {
+ assertEquals(expectedReplacedFiles.get(i)[0], diff.getReplacedFiles().get(i)[0]);
+ assertEquals(expectedReplacedFiles.get(i)[1], diff.getReplacedFiles().get(i)[1]);
+ }
+
+ assertEquals(changedTerms.size(), diff.getChangedTermsAccess().size());
+ for (int i = 0; i < changedTerms.size(); i++) {
+ String[] diffArray = diff.getChangedTermsAccess().get(i);
+ assertEquals(changedTerms.get(i)[0], diffArray[0]);
+ assertEquals(changedTerms.get(i)[1], diffArray[1]);
+ assertEquals(changedTerms.get(i)[2], diffArray[2]);
+ }
+ }
+
+ @Deprecated
+ // This is the "Original" difference calculation from DatasetVersionDifference
+ // It is included here to help verify that the new implementation is the same as
+ // the original
+ private static void originalCalculateDifference(DatasetVersion newVersion, DatasetVersion originalVersion) {
+
+ addedFiles = new ArrayList<>();
+ removedFiles = new ArrayList<>();
+ changedFileMetadata = new ArrayList<>();
+ changedVariableMetadata = new ArrayList<>();
+ replacedFiles = new ArrayList<>();
+ long startTime = System.currentTimeMillis();
+ // TODO: ?
+ // It looks like we are going through the filemetadatas in both versions,
+ // *sequentially* (i.e. at the cost of O(N*M)), to select the lists of
+ // changed, deleted and added files between the 2 versions... But why
+ // are we doing it, if we are doing virtually the same thing inside
+ // the initDatasetFilesDifferenceList(), below - but in a more efficient
+ // way (sorting both lists, then going through them in parallel, at the
+ // cost of (N+M) max.?
+ // -- 4.6 Nov. 2016
+
+ for (FileMetadata fmdo : originalVersion.getFileMetadatas()) {
+ boolean deleted = true;
+ for (FileMetadata fmdn : newVersion.getFileMetadatas()) {
+ if (fmdo.getDataFile().equals(fmdn.getDataFile())) {
+ deleted = false;
+ if (!DatasetVersionDifference.compareFileMetadatas(fmdo, fmdn).isEmpty()) {
+ changedFileMetadata.add(fmdo);
+ changedFileMetadata.add(fmdn);
+ }
+ if (!VariableMetadataUtil.compareVariableMetadata(fmdo, fmdn)
+ || !DatasetVersionDifference.compareVarGroup(fmdo, fmdn)) {
+ changedVariableMetadata.add(fmdo);
+ changedVariableMetadata.add(fmdn);
+ }
+ break;
+ }
+ }
+ if (deleted) {
+ removedFiles.add(fmdo);
+ }
+ }
+ for (FileMetadata fmdn : newVersion.getFileMetadatas()) {
+ boolean added = true;
+ for (FileMetadata fmdo : originalVersion.getFileMetadatas()) {
+ if (fmdo.getDataFile().equals(fmdn.getDataFile())) {
+ added = false;
+ break;
+ }
+ }
+ if (added) {
+ addedFiles.add(fmdn);
+ }
+ }
+
+ getReplacedFiles();
+ logger.info("Difference Loop Execution time: " + (System.currentTimeMillis() - startTime) + " ms");
+
+ }
+
+ @Deprecated
+ // This is used only in the original algorithm and was removed from
+ // DatasetVersionDifference
+ private static void getReplacedFiles() {
+ if (addedFiles.isEmpty() || removedFiles.isEmpty()) {
+ return;
+ }
+ List<FileMetadata> addedToReplaced = new ArrayList<>();
+ List<FileMetadata> removedToReplaced = new ArrayList<>();
+ for (FileMetadata added : addedFiles) {
+ DataFile addedDF = added.getDataFile();
+ Long replacedId = addedDF.getPreviousDataFileId();
+ if (added.getDataFile().getPreviousDataFileId() != null) {
+ }
+ for (FileMetadata removed : removedFiles) {
+ DataFile test = removed.getDataFile();
+ if (test.getId().equals(replacedId)) {
+ addedToReplaced.add(added);
+ removedToReplaced.add(removed);
+ FileMetadata[] replacedArray = new FileMetadata[2];
+ replacedArray[0] = removed;
+ replacedArray[1] = added;
+ replacedFiles.add(replacedArray);
+ }
+ }
+ }
+ if (addedToReplaced.isEmpty()) {
+ } else {
+ addedToReplaced.stream().forEach((delete) -> {
+ addedFiles.remove(delete);
+ });
+ removedToReplaced.stream().forEach((delete) -> {
+ removedFiles.remove(delete);
+ });
+ }
+ }
+
+ @Test
+ public void testCompareVersionsAsJson() {
+
+ Dataverse dv = new Dataverse();
+ Dataset ds = new Dataset();
+ ds.setOwner(dv);
+ ds.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL,"10.5072","FK2/BYM3IW", "/", AbstractDOIProvider.DOI_RESOLVER_URL, null));
+
+ DatasetVersion dv1 = initDatasetVersion(0L, ds, DatasetVersion.VersionState.RELEASED);
+ DatasetVersion dv2 = initDatasetVersion(1L, ds, DatasetVersion.VersionState.DRAFT);
+ ds.setVersions(List.of(dv1, dv2));
+
+ TermsOfUseAndAccess toa = new TermsOfUseAndAccess();
+ toa.setDisclaimer("disclaimer");
+ dv2.setTermsOfUseAndAccess(toa);
+ DatasetField dsf = new DatasetField();
+ dsf.setDatasetFieldType(new DatasetFieldType("Author", DatasetFieldType.FieldType.TEXT, true));
+ MetadataBlock mb = new MetadataBlock();
+ mb.setDisplayName("testMetadataBlock");
+ dsf.getDatasetFieldType().setMetadataBlock(mb);
+ dsf.setSingleValue("TEST");
+ dv2.getDatasetFields().add(dsf);
+ // modify file at index 0
+ dv2.getFileMetadatas().get(0).setRestricted(!dv2.getFileMetadatas().get(0).isRestricted());
+
+ FileMetadata addedFile = initFile(dv2); // add a new file
+ FileMetadata removedFile = dv2.getFileMetadatas().get(1); // remove the second file
+ dv2.getFileMetadatas().remove(1);
+ FileMetadata replacedFile = dv2.getFileMetadatas().get(1); // the third file is now at index 1 since the second file was removed
+ FileMetadata replacementFile = initFile(dv2, replacedFile.getDataFile().getId()); // replace the third file with a new file
+ dv2.getFileMetadatas().remove(1);
+
+ DatasetVersionDifference dvd = new DatasetVersionDifference(dv2, dv1);
+
+ JsonObjectBuilder json = dvd.compareVersionsAsJson();
+ JsonObject obj = json.build();
+ System.out.println(JsonUtil.prettyPrint(obj));
+
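+ // spot-check one entry from each change category in the JSON diff output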
+ JsonPath dataFile = JsonPath.from(JsonUtil.prettyPrint(obj));
+ assertTrue("TEST".equalsIgnoreCase(dataFile.getString("metadataChanges[0].changed[0].newValue")));
+ assertTrue(addedFile.getLabel().equalsIgnoreCase(dataFile.getString("filesAdded[0].fileName")));
+ assertTrue(removedFile.getLabel().equalsIgnoreCase(dataFile.getString("filesRemoved[0].fileName")));
+ assertTrue(replacedFile.getLabel().equalsIgnoreCase(dataFile.getString("filesReplaced[0].oldFile.fileName")));
+ assertTrue(replacementFile.getLabel().equalsIgnoreCase(dataFile.getString("filesReplaced[0].newFile.fileName")));
+ assertTrue("true".equalsIgnoreCase(dataFile.getString("fileChanges[0].changed[0].newValue")));
+ assertTrue("disclaimer".equalsIgnoreCase(dataFile.getString("TermsOfAccess.changed[0].newValue")));
+ }
+ private DatasetVersion initDatasetVersion(Long id, Dataset ds, DatasetVersion.VersionState vs) {
+ DatasetVersion dv = new DatasetVersion();
+ dv.setDataset(ds);
+ dv.setVersion(1L);
+ dv.setVersionState(vs);
+ dv.setMinorVersionNumber(0L);
+ if (vs == DatasetVersion.VersionState.RELEASED) {
+ dv.setVersionNumber(1L);
+ dv.setVersion(1L);
+ dv.setReleaseTime(now());
+ }
+ dv.setId(id);
+ dv.setCreateTime(now());
+ dv.setLastUpdateTime(now());
+ dv.setTermsOfUseAndAccess(new TermsOfUseAndAccess());
+ dv.setFileMetadatas(initFiles(dv));
+ return dv;
+ }
+ private List<FileMetadata> initFiles(DatasetVersion dsv) {
+     List<FileMetadata> fileMetadatas = new ArrayList<>();
+ fileId = 0L;
+ for (int i=0; i < 10; i++) {
+ FileMetadata fm = initFile(dsv);
+ fileMetadatas.add(fm);
+ }
+ return fileMetadatas;
+ }
+ private FileMetadata initFile(DatasetVersion dsv) {
+ return initFile(dsv, null);
+ }
+ private FileMetadata initFile(DatasetVersion dsv, Long prevId) {
+ Long id = fileId++;
+ FileMetadata fm = new FileMetadata();
+ DataFile df = new DataFile();
+ fm.setDatasetVersion(dsv);
+ DataTable dt = new DataTable();
+ dt.setOriginalFileName("filename"+id+".txt");
+ df.setId(id);
+ df.setDescription("Desc"+id);
+ df.setRestricted(false);
+ df.setFilesize(100 + id);
+ df.setChecksumType(DataFile.ChecksumType.MD5);
+ df.setChecksumValue("value"+id);
+ df.setDataTable(dt);
+ df.setOwner(dsv.getDataset());
+ df.getFileMetadatas().add(fm);
+ df.setPreviousDataFileId(prevId);
+ fm.setId(id);
+ fm.setDataFile(df);
+ fm.setLabel("Label"+id);
+ fm.setDirectoryLabel("/myFilePath/");
+ fm.setDescription("Desc"+id);
+ dsv.getFileMetadatas().add(fm);
+ return fm;
+ }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java
index 4cd6c4dfaa7..b36d8907472 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java
@@ -80,7 +80,7 @@ public void testIsInReview() {
DatasetVersion nonDraft = new DatasetVersion();
nonDraft.setVersionState(DatasetVersion.VersionState.RELEASED);
- assertEquals(false, nonDraft.isInReview());
+ assertFalse(nonDraft.isInReview());
ds.addLock(null);
assertFalse(nonDraft.isInReview());
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java
index 6d7dd2eae29..94aece95861 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java
@@ -16,6 +16,8 @@
import java.util.HashMap;
import java.util.List;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.BeforeAll;
@@ -26,13 +28,11 @@
import java.util.Map;
import java.util.UUID;
-import java.util.logging.Level;
import java.util.logging.Logger;
import static jakarta.ws.rs.core.Response.Status.*;
+import static org.hamcrest.CoreMatchers.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class AdminIT {
@@ -901,6 +901,50 @@ public void testDownloadTmpFile() throws IOException {
.body("message", equalTo("Path must begin with '/tmp' but after normalization was '/etc/passwd'."));
}
+ @Test
+ public void testFindMissingFiles() {
+ Response createUserResponse = UtilIT.createRandomUser();
+ createUserResponse.then().assertThat().statusCode(OK.getStatusCode());
+ String username = UtilIT.getUsernameFromResponse(createUserResponse);
+ String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse);
+ UtilIT.setSuperuserStatus(username, true);
+
+ String dataverseAlias = ":root";
+ Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+ createDatasetResponse.prettyPrint();
+ createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+ int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+ String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId");
+
+ // Upload file
+ Response uploadResponse = UtilIT.uploadRandomFile(datasetPersistentId, apiToken);
+ uploadResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+
+ // Audit files
+ Response resp = UtilIT.auditFiles(apiToken, null, 100L, null);
+ resp.prettyPrint();
+ JsonArray emptyArray = Json.createArrayBuilder().build();
+ resp.then().assertThat()
+ .statusCode(OK.getStatusCode())
+ .body("data.lastId", equalTo(100));
+
+ // Audit files with invalid parameters
+ resp = UtilIT.auditFiles(apiToken, 100L, 0L, null);
+ resp.prettyPrint();
+ resp.then().assertThat()
+ .statusCode(BAD_REQUEST.getStatusCode())
+ .body("status", equalTo("ERROR"))
+ .body("message", equalTo("Invalid Parameters: lastId must be equal to or greater than firstId"));
+
+ // Audit files with list of dataset identifiers parameter
+ resp = UtilIT.auditFiles(apiToken, 1L, null, "bad/id, " + datasetPersistentId);
+ resp.prettyPrint();
+ resp.then().assertThat()
+ .statusCode(OK.getStatusCode())
+ .body("data.failures[0].datasetIdentifier", equalTo("bad/id"))
+ .body("data.failures[0].reason", equalTo("Not Found"));
+ }
+
private String createTestNonSuperuserApiToken() {
Response createUserResponse = UtilIT.createRandomUser();
createUserResponse.then().assertThat().statusCode(OK.getStatusCode());
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java
index ca99960f240..5f00d34b276 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java
@@ -1,15 +1,61 @@
package edu.harvard.iq.dataverse.api;
+import edu.harvard.iq.dataverse.ControlledVocabularyValueServiceBean;
+import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
+import edu.harvard.iq.dataverse.DataverseServiceBean;
+import edu.harvard.iq.dataverse.MetadataBlockServiceBean;
+import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean;
import edu.harvard.iq.dataverse.util.BundleUtil;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.ws.rs.core.Response;
+import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+import java.io.File;
+import java.io.StringReader;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
+@ExtendWith(MockitoExtension.class)
public class DatasetFieldServiceApiTest {
+ @Mock
+ private ActionLogServiceBean actionLogSvc;
+
+ @Mock
+ private MetadataBlockServiceBean metadataBlockService;
+
+ @Mock
+ private DataverseServiceBean dataverseService;
+
+ @Mock
+ private DatasetFieldServiceBean datasetFieldService;
+
+ @Mock
+ private ControlledVocabularyValueServiceBean controlledVocabularyValueService;
+
+ private DatasetFieldServiceApi api;
+
+ @BeforeEach
+ public void setup(){
+ api = new DatasetFieldServiceApi();
+ api.actionLogSvc = actionLogSvc;
+ api.metadataBlockService = metadataBlockService;
+ api.dataverseService = dataverseService;
+ api.datasetFieldService = datasetFieldService;
+ api.controlledVocabularyValueService = controlledVocabularyValueService;
+ }
+
@Test
public void testArrayIndexOutOfBoundMessageBundle() {
List<String> arguments = new ArrayList<>();
@@ -59,4 +105,41 @@ public void testGetGeneralErrorMessage() {
message
);
}
+
+ @Test
+ public void testGetGeneralErrorMessageEmptyHeader() {
+ DatasetFieldServiceApi api = new DatasetFieldServiceApi();
+ String message = api.getGeneralErrorMessage(null, 5, "some error");
+ assertEquals(
+ "Error parsing metadata block in unknown part, line #5: some error",
+ message
+ );
+ }
+
+ @Test
+ public void testLoadDatasetFieldsWhitespaceTrimming() {
+
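+ // load a TSV whose field names include stray leading/trailing whitespace and verify the API trims them on import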
+ Path resourceDirectory = Paths.get("src/test/resources/tsv/whitespace-test.tsv");
+ File testfile = resourceDirectory.toFile();
+ JsonReader jsonReader;
+ try (Response response = api.loadDatasetFields(testfile)) {
+ assertEquals(200, response.getStatus());
+ jsonReader = Json.createReader(new StringReader(response.getEntity().toString()));
+ }
+ JsonObject jsonObject = jsonReader.readObject();
+
+ final List<String> metadataNames = jsonObject.getJsonObject("data").getJsonArray("added")
+ .getValuesAs(e -> e.asJsonObject().getString("name"));
+ assertThat(metadataNames).contains("whitespaceDemo")
+ .contains("whitespaceDemoOne")
+ .contains("whitespaceDemoTwo")
+ .contains("whitespaceDemoThree")
+ .contains("CV1")
+ .contains("CV2")
+ .contains("CV3");
+ assertThat(metadataNames).doesNotContain(" whitespaceDemo")
+ .doesNotContain("whitespaceDemoOne ")
+ .doesNotContain("CV1 ")
+ .doesNotContain(" CV2");
+ }
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index 93f1024ae7a..34afbb404f0 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -1676,7 +1676,7 @@ public void testPrivateUrl() {
List<JsonObject> assignments = with(roleAssignments.body().asString()).param("member", "member").getJsonObject("data.findAll { data -> data._roleAlias == member }");
assertEquals(1, assignments.size());
PrivateUrlUser privateUrlUser = new PrivateUrlUser(datasetId);
- assertEquals("Private URL Enabled", privateUrlUser.getDisplayInfo().getTitle());
+ assertEquals("Preview URL Enabled", privateUrlUser.getDisplayInfo().getTitle());
List<JsonObject> assigneeShouldExistForPrivateUrlUser = with(roleAssignments.body().asString()).param("assigneeString", privateUrlUser.getIdentifier()).getJsonObject("data.findAll { data -> data.assignee == assigneeString }");
logger.info(assigneeShouldExistForPrivateUrlUser + " found for " + privateUrlUser.getIdentifier());
assertEquals(1, assigneeShouldExistForPrivateUrlUser.size());
@@ -1757,7 +1757,7 @@ public void testPrivateUrl() {
Response privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted = UtilIT.getRoleAssignmentsOnDataset(datasetId.toString(), null, apiToken);
privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted.prettyPrint();
- assertEquals(false, privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted.body().asString().contains(privateUrlUser.getIdentifier()));
+ assertFalse(privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted.body().asString().contains(privateUrlUser.getIdentifier()));
String newTitleAgain = "I am changing the title again";
Response draftCreatedAgainPostPub = UtilIT.updateDatasetTitleViaSword(dataset1PersistentId, newTitleAgain, apiToken);
@@ -4242,7 +4242,7 @@ public void testCitationDate() throws IOException {
.statusCode(OK.getStatusCode())
.body("data.message", is(expectedCitation));
- Response exportDatasetAsDublinCore = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken);
+ Response exportDatasetAsDublinCore = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken, true);
exportDatasetAsDublinCore.prettyPrint();
exportDatasetAsDublinCore.then().assertThat()
.body("oai_dc.type", equalTo("Dataset"))
@@ -4259,7 +4259,7 @@ public void testCitationDate() throws IOException {
rexport.then().assertThat().statusCode(OK.getStatusCode());
String todayDate = LocalDate.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd"));
- Response exportPostClear = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken);
+ Response exportPostClear = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken, true);
exportPostClear.prettyPrint();
exportPostClear.then().assertThat()
.body("oai_dc.type", equalTo("Dataset"))
@@ -5168,4 +5168,134 @@ public void testGetCanDownloadAtLeastOneFile() {
Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getCanDownloadAtLeastOneFile("testInvalidId", DS_VERSION_LATEST, secondUserApiToken);
getUserPermissionsOnDatasetInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
}
+
+ @Test
+ public void testCompareDatasetVersionsAPI() throws InterruptedException {
+
+ Response createUser = UtilIT.createRandomUser();
+ assertEquals(200, createUser.getStatusCode());
+ String username = UtilIT.getUsernameFromResponse(createUser);
+ String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+ Response makeSuperUser = UtilIT.makeSuperUser(username);
+ assertEquals(200, makeSuperUser.getStatusCode());
+
+ Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+ String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+ Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+ Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+ Response getDatasetJsonBeforePublishing = UtilIT.nativeGet(datasetId, apiToken);
+ String protocol = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.protocol");
+ String authority = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.authority");
+ String identifier = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.identifier");
+ String datasetPersistentId = protocol + ":" + authority + "/" + identifier;
+ // used for all added files
+ JsonObjectBuilder json = Json.createObjectBuilder()
+ .add("description", "my description")
+ .add("directoryLabel", "/data/subdir1/")
+ .add("categories", Json.createArrayBuilder()
+ .add("Data")
+ );
+ JsonObject jsonObj = json.build();
+ String pathToFile = "src/main/webapp/resources/images/dataverse-icon-1200.png";
+ Response uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, jsonObj, apiToken);
+ uploadResponse.prettyPrint();
+ uploadResponse.then().assertThat()
+ .statusCode(OK.getStatusCode());
+ Integer modifyFileId = UtilIT.getDataFileIdFromResponse(uploadResponse);
+ pathToFile = "src/main/webapp/resources/images/dataverseproject_logo.jpg";
+ uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, jsonObj, apiToken);
+ uploadResponse.then().assertThat()
+ .statusCode(OK.getStatusCode());
+ Integer deleteFileId = UtilIT.getDataFileIdFromResponse(uploadResponse);
+
+ pathToFile = "src/main/webapp/resources/images/fav/favicon-16x16.png";
+ uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, jsonObj, apiToken);
+ uploadResponse.then().assertThat()
+ .statusCode(OK.getStatusCode());
+ Integer replaceFileId = UtilIT.getDataFileIdFromResponse(uploadResponse);
+
+ Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken);
+ assertEquals(200, publishDataverse.getStatusCode());
+
+ Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken);
+ assertEquals(200, publishDataset.getStatusCode());
+
+ // post publish update to create DRAFT version
+ String pathToJsonFilePostPub = "doc/sphinx-guides/source/_static/api/dataset-add-metadata-after-pub.json";
+ Response addDataToPublishedVersion = UtilIT.addDatasetMetadataViaNative(datasetPersistentId, pathToJsonFilePostPub, apiToken);
+ addDataToPublishedVersion.then().assertThat().statusCode(OK.getStatusCode());
+
+ // Test adding a file
+ pathToFile = "src/test/resources/tab/test.tab";
+ Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToFile, jsonObj, apiToken);
+ uploadTabularFileResponse.prettyPrint();
+ uploadTabularFileResponse.then().assertThat()
+ .statusCode(OK.getStatusCode());
+ Integer addedFileId = UtilIT.getDataFileIdFromResponse(uploadTabularFileResponse);
+
+ // Wait briefly for the tabular file ingest to finish
+ sleep(2000);
+
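+ // Tag the ingested tabular file; the tag should appear in the compare output below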
+ String tabularTagName = "Survey";
+ Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(String.valueOf(addedFileId), apiToken, List.of(tabularTagName));
+ setFileTabularTagsResponse.prettyPrint();
+ setFileTabularTagsResponse.then().assertThat()
+ .statusCode(OK.getStatusCode());
+
+ // Test removing a file
+ uploadResponse = UtilIT.deleteFile(deleteFileId, apiToken);
+ uploadResponse.prettyPrint();
+ uploadResponse.then().assertThat()
+ .statusCode(NO_CONTENT.getStatusCode());
+
+ // Test Replacing a file
+ Response replaceResponse = UtilIT.replaceFile(String.valueOf(replaceFileId), "src/main/webapp/resources/images/fav/favicon-32x32.png", jsonObj, apiToken);
+ replaceResponse.prettyPrint();
+ replaceResponse.then().assertThat()
+ .statusCode(OK.getStatusCode());
+
+ // Test modify by restricting the file
+ Response restrictResponse = UtilIT.restrictFile(modifyFileId.toString(), true, apiToken);
+ restrictResponse.prettyPrint();
+ restrictResponse.then().assertThat()
+ .statusCode(OK.getStatusCode());
+
+ // Also test a terms of access change
+ String jsonLDTerms = "{\"https://dataverse.org/schema/core#fileTermsOfAccess\":{\"https://dataverse.org/schema/core#dataAccessPlace\":\"Somewhere\"}}";
+ Response updateTerms = UtilIT.updateDatasetJsonLDMetadata(datasetId, apiToken, jsonLDTerms, true);
+ updateTerms.then().assertThat()
+ .statusCode(OK.getStatusCode());
+
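+ // Compare v1.0 against the draft; the response should itemize metadata, file, and terms-of-access changes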
+ Response compareResponse = UtilIT.compareDatasetVersions(datasetPersistentId, ":latest-published", ":draft", apiToken);
+ compareResponse.prettyPrint();
+ compareResponse.then().assertThat()
+ .body("data.oldVersion.versionNumber", CoreMatchers.equalTo("1.0"))
+ .body("data.newVersion.versionNumber", CoreMatchers.equalTo("DRAFT"))
+ .body("data.metadataChanges[0].blockName", CoreMatchers.equalTo("Citation Metadata"))
+ .body("data.metadataChanges[0].changed[0].fieldName", CoreMatchers.equalTo("Author"))
+ .body("data.metadataChanges[0].changed[0].oldValue", CoreMatchers.containsString("Finch, Fiona; (Birds Inc.)"))
+ .body("data.metadataChanges[1].blockName", CoreMatchers.equalTo("Life Sciences Metadata"))
+ .body("data.metadataChanges[1].changed[0].fieldName", CoreMatchers.equalTo("Design Type"))
+ .body("data.metadataChanges[1].changed[0].oldValue", CoreMatchers.containsString(""))
+ .body("data.metadataChanges[1].changed[0].newValue", CoreMatchers.containsString("Parallel Group Design; Nested Case Control Design"))
+ .body("data.filesAdded[0].fileName", CoreMatchers.equalTo("test.tab"))
+ .body("data.filesAdded[0].filePath", CoreMatchers.equalTo("data/subdir1"))
+ .body("data.filesAdded[0].description", CoreMatchers.equalTo("my description"))
+ .body("data.filesAdded[0].tags[0]", CoreMatchers.equalTo("Survey"))
+ .body("data.filesRemoved[0].fileName", CoreMatchers.equalTo("dataverseproject_logo.jpg"))
+ .body("data.fileChanges[0].fileName", CoreMatchers.equalTo("dataverse-icon-1200.png"))
+ .body("data.fileChanges[0].changed[0].newValue", CoreMatchers.equalTo("true"))
+ .body("data.filesReplaced[0].oldFile.fileName", CoreMatchers.equalTo("favicon-16x16.png"))
+ .body("data.filesReplaced[0].newFile.fileName", CoreMatchers.equalTo("favicon-32x32.png"))
+ .body("data.TermsOfAccess", CoreMatchers.notNullValue())
+ .statusCode(OK.getStatusCode());
+
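+ // Comparing in the wrong order (draft before the published version) should be rejected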
+ compareResponse = UtilIT.compareDatasetVersions(datasetPersistentId, ":draft", ":latest-published", apiToken);
+ compareResponse.prettyPrint();
+ compareResponse.then().assertThat()
+ .body("message", CoreMatchers.equalTo(BundleUtil.getStringFromBundle("dataset.version.compare.incorrect.order")))
+ .statusCode(BAD_REQUEST.getStatusCode());
+ }
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
index 2983b7b59d1..0c5ac8f4260 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
@@ -1,12 +1,15 @@
package edu.harvard.iq.dataverse.api;
import io.restassured.RestAssured;
+
import static io.restassured.RestAssured.given;
import static io.restassured.path.json.JsonPath.with;
+
import io.restassured.response.Response;
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.BundleUtil;
+
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
@@ -14,6 +17,7 @@
import java.util.Arrays;
import java.util.List;
import java.util.logging.Logger;
+
import jakarta.json.Json;
import jakarta.json.JsonObject;
import jakarta.json.JsonObjectBuilder;
@@ -31,6 +35,7 @@
import static org.junit.jupiter.api.Assertions.*;
import java.nio.file.Files;
+
import io.restassured.path.json.JsonPath;
import org.hamcrest.CoreMatchers;
import org.hamcrest.Matchers;
@@ -43,7 +48,7 @@ public class DataversesIT {
public static void setUpClass() {
RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
}
-
+
@AfterAll
public static void afterClass() {
Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport);
@@ -130,14 +135,16 @@ public void testDataverseCategory() {
public void testMinimalDataverse() throws FileNotFoundException {
Response createUser = UtilIT.createRandomUser();
createUser.prettyPrint();
- String username = UtilIT.getUsernameFromResponse(createUser);
String apiToken = UtilIT.getApiTokenFromResponse(createUser);
JsonObject dvJson;
FileReader reader = new FileReader("doc/sphinx-guides/source/_static/api/dataverse-minimal.json");
dvJson = Json.createReader(reader).readObject();
Response create = UtilIT.createDataverse(dvJson, apiToken);
create.prettyPrint();
- create.then().assertThat().statusCode(CREATED.getStatusCode());
+ create.then().assertThat()
+ .body("data.isMetadataBlockRoot", equalTo(false))
+ .body("data.isFacetRoot", equalTo(false))
+ .statusCode(CREATED.getStatusCode());
Response deleteDataverse = UtilIT.deleteDataverse("science", apiToken);
deleteDataverse.prettyPrint();
deleteDataverse.then().assertThat().statusCode(OK.getStatusCode());
@@ -819,10 +826,9 @@ public void testImport() throws IOException, InterruptedException {
Response deleteUserResponse = UtilIT.deleteUser(username);
assertEquals(200, deleteUserResponse.getStatusCode());
}
-
- @Test
- public void testAttributesApi() throws Exception {
+ @Test
+ public void testAttributesApi() {
Response createUser = UtilIT.createRandomUser();
String apiToken = UtilIT.getApiTokenFromResponse(createUser);
@@ -837,30 +843,70 @@ public void testAttributesApi() throws Exception {
String collectionAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
String newCollectionAlias = collectionAlias + "RENAMED";
-
- // Change the alias of the collection:
-
- Response changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "alias", newCollectionAlias, apiToken);
- changeAttributeResp.prettyPrint();
-
+
+ // Change the name of the collection:
+
+ String newCollectionName = "Renamed Name";
+ Response changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "name", newCollectionName, apiToken);
changeAttributeResp.then().assertThat()
.statusCode(OK.getStatusCode())
.body("message.message", equalTo("Update successful"));
-
- // Check on the collection, under the new alias:
-
+
+ // Change the description of the collection:
+
+ String newDescription = "Renamed Description";
+ changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "description", newDescription, apiToken);
+ changeAttributeResp.then().assertThat()
+ .statusCode(OK.getStatusCode())
+ .body("message.message", equalTo("Update successful"));
+
+ // Change the affiliation of the collection:
+
+ String newAffiliation = "Renamed Affiliation";
+ changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "affiliation", newAffiliation, apiToken);
+ changeAttributeResp.then().assertThat()
+ .statusCode(OK.getStatusCode())
+ .body("message.message", equalTo("Update successful"));
+
+ // Cannot update filePIDsEnabled as a regular user:
+
+ changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "filePIDsEnabled", "true", apiToken);
+ changeAttributeResp.then().assertThat()
+ .statusCode(UNAUTHORIZED.getStatusCode());
+
+ // Change the alias of the collection:
+
+ changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "alias", newCollectionAlias, apiToken);
+ changeAttributeResp.then().assertThat()
+ .statusCode(OK.getStatusCode())
+ .body("message.message", equalTo("Update successful"));
+
+ // Check on the collection, under the new alias:
+
Response collectionInfoResponse = UtilIT.exportDataverse(newCollectionAlias, apiToken);
- collectionInfoResponse.prettyPrint();
-
collectionInfoResponse.then().assertThat()
.statusCode(OK.getStatusCode())
- .body("data.alias", equalTo(newCollectionAlias));
-
+ .body("data.alias", equalTo(newCollectionAlias))
+ .body("data.name", equalTo(newCollectionName))
+ .body("data.description", equalTo(newDescription))
+ .body("data.affiliation", equalTo(newAffiliation));
+
// Delete the collection (again, using its new alias):
-
+
Response deleteCollectionResponse = UtilIT.deleteDataverse(newCollectionAlias, apiToken);
- deleteCollectionResponse.prettyPrint();
assertEquals(OK.getStatusCode(), deleteCollectionResponse.getStatusCode());
+
+ // Cannot update the root collection as a regular user:
+
+ changeAttributeResp = UtilIT.setCollectionAttribute("root", "name", newCollectionName, apiToken);
+ changeAttributeResp.then().assertThat()
+ .statusCode(UNAUTHORIZED.getStatusCode());
+
+ collectionInfoResponse = UtilIT.exportDataverse("root", apiToken);
+
+ collectionInfoResponse.then().assertThat()
+ .statusCode(OK.getStatusCode())
+ .body("data.name", equalTo("Root"));
}
@Test
@@ -872,6 +918,17 @@ public void testListMetadataBlocks() {
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+ // New Dataverse should return just the citation block and its displayOnCreate fields when onlyDisplayedOnCreate=true and returnDatasetFieldTypes=true
+ Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken);
+ listMetadataBlocks.prettyPrint();
+ listMetadataBlocks.then().assertThat()
+ .statusCode(OK.getStatusCode())
+ .body("data.size()", equalTo(1))
+ .body("data[0].name", is("citation"))
+ .body("data[0].fields.title.displayOnCreate", equalTo(true))
+ .body("data[0].fields.size()", is(28));
+
Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken);
setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode());
@@ -1269,6 +1326,153 @@ public void testAddDataverse() {
.body("message", equalTo("Invalid metadata block name: \"" + invalidMetadataBlockName + "\""));
}
+ @Test
+ public void testUpdateDataverse() {
+ Response createUser = UtilIT.createRandomUser();
+ String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+ String testAliasSuffix = "-update-dataverse";
+
+ String testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix;
+ Response createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root");
+ createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+
+ String newAlias = UtilIT.getRandomDvAlias() + testAliasSuffix;
+ String newName = "New Test Dataverse Name";
+ String newAffiliation = "New Test Dataverse Affiliation";
+ String newDataverseType = Dataverse.DataverseType.TEACHING_COURSES.toString();
+ String[] newContactEmails = new String[]{"new_email@dataverse.com"};
+ String[] newInputLevelNames = new String[]{"geographicCoverage"};
+ String[] newFacetIds = new String[]{"contributorName"};
+ String[] newMetadataBlockNames = new String[]{"citation", "geospatial", "biomedical"};
+
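+ // Exercise a full update: alias, name, affiliation, type, contacts, input levels, facets, and metadata blocks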
+ Response updateDataverseResponse = UtilIT.updateDataverse(
+ testDataverseAlias,
+ newAlias,
+ newName,
+ newAffiliation,
+ newDataverseType,
+ newContactEmails,
+ newInputLevelNames,
+ newFacetIds,
+ newMetadataBlockNames,
+ apiToken
+ );
+
+ // Assert dataverse properties are updated
+ updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode());
+ String actualDataverseAlias = updateDataverseResponse.then().extract().path("data.alias");
+ assertEquals(newAlias, actualDataverseAlias);
+ String actualDataverseName = updateDataverseResponse.then().extract().path("data.name");
+ assertEquals(newName, actualDataverseName);
+ String actualDataverseAffiliation = updateDataverseResponse.then().extract().path("data.affiliation");
+ assertEquals(newAffiliation, actualDataverseAffiliation);
+ String actualDataverseType = updateDataverseResponse.then().extract().path("data.dataverseType");
+ assertEquals(newDataverseType, actualDataverseType);
+ String actualContactEmail = updateDataverseResponse.then().extract().path("data.dataverseContacts[0].contactEmail");
+ assertEquals("new_email@dataverse.com", actualContactEmail);
+
+ // Assert metadata blocks are updated
+ Response listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken);
+ String actualDataverseMetadataBlock1 = listMetadataBlocksResponse.then().extract().path("data[0].name");
+ String actualDataverseMetadataBlock2 = listMetadataBlocksResponse.then().extract().path("data[1].name");
+ String actualDataverseMetadataBlock3 = listMetadataBlocksResponse.then().extract().path("data[2].name");
+ assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock1));
+ assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock2));
+ assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock3));
+
+ // Assert custom facets are updated
+ Response listDataverseFacetsResponse = UtilIT.listDataverseFacets(newAlias, apiToken);
+ String actualFacetName = listDataverseFacetsResponse.then().extract().path("data[0]");
+ assertThat(newFacetIds, hasItemInArray(actualFacetName));
+
+ // Assert input levels are updated
+ Response listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(newAlias, apiToken);
+ String actualInputLevelName = listDataverseInputLevelsResponse.then().extract().path("data[0].datasetFieldTypeName");
+ assertThat(newInputLevelNames, hasItemInArray(actualInputLevelName));
+
+ // The alias has been changed, so we should not be able to do any operation using the old one
+ String oldDataverseAlias = testDataverseAlias;
+ Response getDataverseResponse = UtilIT.listDataverseFacets(oldDataverseAlias, apiToken);
+ getDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+
+ // Update the dataverse without setting metadata blocks, facets, or input levels
+ updateDataverseResponse = UtilIT.updateDataverse(
+ newAlias,
+ newAlias,
+ newName,
+ newAffiliation,
+ newDataverseType,
+ newContactEmails,
+ null,
+ null,
+ null,
+ apiToken
+ );
+ updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+ // Assert that the metadata blocks are inherited from the parent
+ listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken);
+ listMetadataBlocksResponse
+ .then().assertThat()
+ .statusCode(OK.getStatusCode())
+ .body("data.size()", equalTo(1))
+ .body("data[0].name", equalTo("citation"));
+
+ // Assert that the facets are inherited from the parent
+ String[] rootFacetIds = new String[]{"authorName", "subject", "keywordValue", "dateOfDeposit"};
+ listDataverseFacetsResponse = UtilIT.listDataverseFacets(newAlias, apiToken);
+ String actualFacetName1 = listDataverseFacetsResponse.then().extract().path("data[0]");
+ String actualFacetName2 = listDataverseFacetsResponse.then().extract().path("data[1]");
+ String actualFacetName3 = listDataverseFacetsResponse.then().extract().path("data[2]");
+ String actualFacetName4 = listDataverseFacetsResponse.then().extract().path("data[3]");
+ assertThat(rootFacetIds, hasItemInArray(actualFacetName1));
+ assertThat(rootFacetIds, hasItemInArray(actualFacetName2));
+ assertThat(rootFacetIds, hasItemInArray(actualFacetName3));
+ assertThat(rootFacetIds, hasItemInArray(actualFacetName4));
+
+ // Assert that the dataverse should not have any input level
+ listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(newAlias, apiToken);
+ listDataverseInputLevelsResponse
+ .then().assertThat()
+ .statusCode(OK.getStatusCode())
+ .body("data.size()", equalTo(0));
+
+ // Should return error when the dataverse to edit does not exist
+ updateDataverseResponse = UtilIT.updateDataverse(
+ "unexistingDataverseAlias",
+ newAlias,
+ newName,
+ newAffiliation,
+ newDataverseType,
+ newContactEmails,
+ newInputLevelNames,
+ newFacetIds,
+ newMetadataBlockNames,
+ apiToken
+ );
+ updateDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+
+ // User with unprivileged API token cannot update Root dataverse
+ updateDataverseResponse = UtilIT.updateDataverse(
+ "root",
+ newAlias,
+ newName,
+ newAffiliation,
+ newDataverseType,
+ newContactEmails,
+ newInputLevelNames,
+ newFacetIds,
+ newMetadataBlockNames,
+ apiToken
+ );
+ updateDataverseResponse.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode());
+
+ Response rootCollectionInfoResponse = UtilIT.exportDataverse("root", apiToken);
+ rootCollectionInfoResponse.then().assertThat()
+ .statusCode(OK.getStatusCode())
+ .body("data.name", equalTo("Root"));
+ }
+
@Test
public void testListFacets() {
Response createUserResponse = UtilIT.createRandomUser();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index e3c26284d55..98107eca33a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -2275,7 +2275,6 @@ public void testDeleteFile() {
Response v1 = UtilIT.getDatasetVersion(datasetPid, "1.0", apiToken);
v1.prettyPrint();
v1.then().assertThat()
- .body("data.files[0].dataFile.filename", equalTo("cc0.png"))
.statusCode(OK.getStatusCode());
Map v1files1 = with(v1.body().asString()).param("fileToFind", "cc0.png")
@@ -2290,7 +2289,6 @@ public void testDeleteFile() {
Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken);
postv1draft2.prettyPrint();
postv1draft2.then().assertThat()
- .body("data.files[0].dataFile.filename", equalTo("orcid_16x16.png"))
.statusCode(OK.getStatusCode());
Map v1files2 = with(postv1draft2.body().asString()).param("fileToFind", "orcid_16x16.png")
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java
index 5e436dd0e98..b198d2769a0 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java
@@ -6,11 +6,14 @@
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
-import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
import static jakarta.ws.rs.core.Response.Status.OK;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Paths;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
+import org.skyscreamer.jsonassert.JSONAssert;
public class InfoIT {
@@ -81,6 +84,22 @@ public void testGetZipDownloadLimit() {
.body("data", notNullValue());
}
+ @Test
+ public void testGetExportFormats() throws IOException {
+ Response response = given().urlEncodingEnabled(false)
+ .get("/api/info/exportFormats");
+ response.prettyPrint();
+ response.then().assertThat().statusCode(OK.getStatusCode());
+
+ String actual = response.getBody().asString();
+ String expected =
+ java.nio.file.Files.readString(
+ Paths.get("src/test/resources/json/export-formats.json"),
+ StandardCharsets.UTF_8);
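+ // strict mode (third argument true): the response must match the expected JSON exactly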
+ JSONAssert.assertEquals(expected, actual, true);
+
+ }
+
private void testSettingEndpoint(SettingsServiceBean.Key settingKey, String testSettingValue) {
String endpoint = "/api/info/settings/" + settingKey;
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java
index 90357596c25..08ebec31cd6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java
@@ -22,7 +22,7 @@ public class SavedSearchIT {
@BeforeAll
public static void setUpClass() {
-
+ RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
}
@AfterAll
@@ -53,81 +53,55 @@ public void testSavedSearches() {
Integer datasetId2 = UtilIT.getDatasetIdFromResponse(createDatasetResponse2);
// missing body
- Response resp = RestAssured.given()
- .contentType("application/json")
- .post("/api/admin/savedsearches");
+ Response resp = UtilIT.setSavedSearch();
resp.prettyPrint();
resp.then().assertThat()
.statusCode(INTERNAL_SERVER_ERROR.getStatusCode());
// creatorId null
- resp = RestAssured.given()
- .body(createSavedSearchJson("*", null, dataverseId, "subject_ss:Medicine, Health and Life Sciences"))
- .contentType("application/json")
- .post("/api/admin/savedsearches");
+ resp = UtilIT.setSavedSearch(createSavedSearchJson("*", null, dataverseId, "subject_ss:Medicine, Health and Life Sciences"));
resp.prettyPrint();
resp.then().assertThat()
.statusCode(BAD_REQUEST.getStatusCode());
// creatorId string
- resp = RestAssured.given()
- .body(createSavedSearchJson("*", "1", dataverseId.toString(), "subject_ss:Medicine, Health and Life Sciences"))
- .contentType("application/json")
- .post("/api/admin/savedsearches");
+ resp = UtilIT.setSavedSearch(createSavedSearchJson("*", "1", dataverseId.toString(), "subject_ss:Medicine, Health and Life Sciences"));
resp.prettyPrint();
resp.then().assertThat()
.statusCode(BAD_REQUEST.getStatusCode());
// creatorId not found
- resp = RestAssured.given()
- .body(createSavedSearchJson("*", 9999, dataverseId, "subject_ss:Medicine, Health and Life Sciences"))
- .contentType("application/json")
- .post("/api/admin/savedsearches");
+ resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 9999, dataverseId, "subject_ss:Medicine, Health and Life Sciences"));
resp.prettyPrint();
resp.then().assertThat()
.statusCode(NOT_FOUND.getStatusCode());
// definitionPointId null
- resp = RestAssured.given()
- .body(createSavedSearchJson("*", 1, null, "subject_ss:Medicine, Health and Life Sciences"))
- .contentType("application/json")
- .post("/api/admin/savedsearches");
+ resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, null, "subject_ss:Medicine, Health and Life Sciences"));
resp.prettyPrint();
resp.then().assertThat()
.statusCode(BAD_REQUEST.getStatusCode());
// definitionPointId string
- resp = RestAssured.given()
- .body(createSavedSearchJson("*", "1", "9999", "subject_ss:Medicine, Health and Life Sciences"))
- .contentType("application/json")
- .post("/api/admin/savedsearches");
+ resp = UtilIT.setSavedSearch(createSavedSearchJson("*", "1", "9999", "subject_ss:Medicine, Health and Life Sciences"));
resp.prettyPrint();
resp.then().assertThat()
.statusCode(BAD_REQUEST.getStatusCode());
// definitionPointId not found
- resp = RestAssured.given()
- .body(createSavedSearchJson("*", 1, 9999, "subject_ss:Medicine, Health and Life Sciences"))
- .contentType("application/json")
- .post("/api/admin/savedsearches");
+ resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, 9999, "subject_ss:Medicine, Health and Life Sciences"));
resp.prettyPrint();
resp.then().assertThat()
.statusCode(NOT_FOUND.getStatusCode());
// missing filter
- resp = RestAssured.given()
- .body(createSavedSearchJson("*", 1, dataverseId))
- .contentType("application/json")
- .post("/api/admin/savedsearches");
+ resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, dataverseId));
resp.prettyPrint();
resp.then().assertThat()
.statusCode(OK.getStatusCode());
// create a saved search as superuser : OK
- resp = RestAssured.given()
- .body(createSavedSearchJson("*", 1, dataverseId, "subject_ss:Medicine, Health and Life Sciences"))
- .contentType("application/json")
- .post("/api/admin/savedsearches");
+ resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, dataverseId, "subject_ss:Medicine, Health and Life Sciences"));
resp.prettyPrint();
resp.then().assertThat()
.statusCode(OK.getStatusCode());
@@ -136,8 +110,7 @@ public void testSavedSearches() {
Integer createdSavedSearchId = path.getInt("data.id");
// get list as non superuser : OK
- Response getListReponse = RestAssured.given()
- .get("/api/admin/savedsearches/list");
+ Response getListReponse = UtilIT.getSavedSearchList();
getListReponse.prettyPrint();
getListReponse.then().assertThat()
.statusCode(OK.getStatusCode());
@@ -146,22 +119,19 @@ public void testSavedSearches() {
List listBeforeDelete = path2.getList("data.savedSearches");
// makelinks/all as non superuser : OK
- Response makelinksAll = RestAssured.given()
- .put("/api/admin/savedsearches/makelinks/all");
+ Response makelinksAll = UtilIT.setSavedSearchMakelinksAll();
makelinksAll.prettyPrint();
makelinksAll.then().assertThat()
.statusCode(OK.getStatusCode());
//delete a saved search as non superuser : OK
- Response deleteReponse = RestAssured.given()
- .delete("/api/admin/savedsearches/" + createdSavedSearchId);
+ Response deleteReponse = UtilIT.deleteSavedSearchById(createdSavedSearchId);
deleteReponse.prettyPrint();
deleteReponse.then().assertThat()
.statusCode(OK.getStatusCode());
// check list count minus 1
- getListReponse = RestAssured.given()
- .get("/api/admin/savedsearches/list");
+ getListReponse = UtilIT.getSavedSearchList();
getListReponse.prettyPrint();
JsonPath path3 = JsonPath.from(getListReponse.body().asString());
List listAfterDelete = path3.getList("data.savedSearches");
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
index 3a2b684c421..b03c23cd1e2 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
@@ -1308,8 +1308,8 @@ public void testSearchFilesAndUrlImages() {
.statusCode(200);
pathToFile = "src/main/webapp/resources/js/mydata.js";
Response uploadFile = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
- uploadImage.prettyPrint();
- uploadImage.then().assertThat()
+ uploadFile.prettyPrint();
+ uploadFile.then().assertThat()
.statusCode(200);
Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken);
@@ -1337,7 +1337,7 @@ public void testSearchFilesAndUrlImages() {
.statusCode(OK.getStatusCode())
.body("data.items[0].type", CoreMatchers.is("dataverse"))
.body("data.items[0].url", CoreMatchers.containsString("/dataverse/"))
- .body("data.items[0]", CoreMatchers.not(CoreMatchers.hasItem("url_image")));
+ .body("data.items[0]", CoreMatchers.not(CoreMatchers.hasItem("image_url")));
searchResp = UtilIT.search("mydata", apiToken);
searchResp.prettyPrint();
@@ -1345,6 +1345,6 @@ public void testSearchFilesAndUrlImages() {
.statusCode(OK.getStatusCode())
.body("data.items[0].type", CoreMatchers.is("file"))
.body("data.items[0].url", CoreMatchers.containsString("/datafile/"))
- .body("data.items[0]", CoreMatchers.not(CoreMatchers.hasItem("url_image")));
+ .body("data.items[0]", CoreMatchers.not(CoreMatchers.hasItem("image_url")));
}
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java
index 518431bfa2d..709908ac6eb 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java
@@ -850,7 +850,7 @@ public void testDeleteFiles() {
String citation = atomEntryDraftV2.body().xmlPath().getString("entry.bibliographicCitation");
logger.info("citation (should contain 'DRAFT'): " + citation);
boolean draftStringFoundInCitation = citation.matches(".*DRAFT.*");
- assertEquals(true, draftStringFoundInCitation);
+ assertTrue(draftStringFoundInCitation);
List oneFileLeftInV2Draft = statement3.getBody().xmlPath().getList("feed.entry.id");
logger.info("Number of files remaining in this post version 1 draft:" + oneFileLeftInV2Draft.size());
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
index 1003c1a990c..ce3b8bf75ff 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
@@ -405,7 +405,6 @@ public void testAPITokenEndpoints() {
*/
createUser = UtilIT.createRandomUser();
- String username = UtilIT.getUsernameFromResponse(createUser);
String apiToken = UtilIT.getApiTokenFromResponse(createUser);
Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
createDataverseResponse.prettyPrint();
@@ -428,7 +427,7 @@ public void testAPITokenEndpoints() {
getExpiration = UtilIT.getTokenExpiration(tokenForPrivateUrlUser);
getExpiration.prettyPrint();
getExpiration.then().assertThat()
- .statusCode(NOT_FOUND.getStatusCode());
+ .statusCode(UNAUTHORIZED.getStatusCode());
createUser = UtilIT.createRandomUser();
assertEquals(OK.getStatusCode(), createUser.getStatusCode());
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 70f49d81b35..1930610532a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -1,5 +1,6 @@
package edu.harvard.iq.dataverse.api;
+import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
import io.restassured.http.ContentType;
import io.restassured.path.json.JsonPath;
import io.restassured.response.Response;
@@ -12,6 +13,7 @@
import jakarta.json.JsonArrayBuilder;
import jakarta.json.JsonObject;
+import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
import static jakarta.ws.rs.core.Response.Status.CREATED;
import java.nio.charset.StandardCharsets;
@@ -239,6 +241,22 @@ public static Response clearThumbnailFailureFlag(long fileId) {
return response;
}
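+ /** GET on /api/admin/datafiles/auditFiles with optional firstId, lastId, and datasetIdentifierList query parameters */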
+ public static Response auditFiles(String apiToken, Long firstId, Long lastId, String csvList) {
+ String params = "";
+ if (firstId != null) {
+ params = "?firstId=" + firstId;
+ }
+ if (lastId != null) {
+ params = params + (params.isEmpty() ? "?" : "&") + "lastId=" + lastId;
+ }
+ if (csvList != null) {
+ params = params + (params.isEmpty() ? "?" : "&") + "datasetIdentifierList=" + csvList;
+ }
+ return given()
+ .header(API_TOKEN_HTTP_HEADER, apiToken)
+ .get("/api/admin/datafiles/auditFiles" + params);
+ }
+
private static String getAuthenticatedUserAsJsonString(String persistentUserId, String firstName, String lastName, String authenticationProviderId, String identifier) {
JsonObjectBuilder builder = Json.createObjectBuilder();
builder.add("authenticationProviderId", authenticationProviderId);
@@ -323,7 +341,14 @@ static Integer getDatasetIdFromResponse(Response createDatasetResponse) {
logger.info("Id found in create dataset response: " + datasetId);
return datasetId;
}
-
+
+ static Integer getDataFileIdFromResponse(Response uploadDataFileResponse) {
+ JsonPath dataFile = JsonPath.from(uploadDataFileResponse.body().asString());
+ int dataFileId = dataFile.getInt("data.files[0].dataFile.id");
+ logger.info("Id found in upload DataFile response: " + dataFileId);
+ return dataFileId;
+ }
+
static Integer getSearchCountFromResponse(Response searchResponse) {
JsonPath createdDataset = JsonPath.from(searchResponse.body().asString());
int searchCount = createdDataset.getInt("data.total_count");
@@ -389,6 +414,48 @@ static Response createSubDataverse(String alias, String category, String apiToke
objectBuilder.add("affiliation", affiliation);
}
+ updateDataverseRequestJsonWithMetadataBlocksConfiguration(inputLevelNames, facetIds, metadataBlockNames, objectBuilder);
+
+ JsonObject dvData = objectBuilder.build();
+ return given()
+ .body(dvData.toString()).contentType(ContentType.JSON)
+ .when().post("/api/dataverses/" + parentDV + "?key=" + apiToken);
+ }
+
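+ /** PUT on /api/dataverses/{alias}, updating the collection's attributes and metadata blocks configuration */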
+ static Response updateDataverse(String alias,
+ String newAlias,
+ String newName,
+ String newAffiliation,
+ String newDataverseType,
+ String[] newContactEmails,
+ String[] newInputLevelNames,
+ String[] newFacetIds,
+ String[] newMetadataBlockNames,
+ String apiToken) {
+ JsonArrayBuilder contactArrayBuilder = Json.createArrayBuilder();
+ for(String contactEmail : newContactEmails) {
+ contactArrayBuilder.add(Json.createObjectBuilder().add("contactEmail", contactEmail));
+ }
+ NullSafeJsonBuilder jsonBuilder = jsonObjectBuilder()
+ .add("alias", newAlias)
+ .add("name", newName)
+ .add("affiliation", newAffiliation)
+ .add("dataverseContacts", contactArrayBuilder)
+ .add("dataverseType", newDataverseType);
+
+ updateDataverseRequestJsonWithMetadataBlocksConfiguration(newInputLevelNames, newFacetIds, newMetadataBlockNames, jsonBuilder);
+
+ JsonObject dvData = jsonBuilder.build();
+ return given()
+ .body(dvData.toString()).contentType(ContentType.JSON)
+ .when().put("/api/dataverses/" + alias + "?key=" + apiToken);
+ }
+
+ private static void updateDataverseRequestJsonWithMetadataBlocksConfiguration(String[] inputLevelNames,
+ String[] facetIds,
+ String[] metadataBlockNames,
+ JsonObjectBuilder objectBuilder) {
JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder();
if (inputLevelNames != null) {
@@ -420,12 +487,6 @@ static Response createSubDataverse(String alias, String category, String apiToke
}
objectBuilder.add("metadataBlocks", metadataBlocksObjectBuilder);
-
- JsonObject dvData = objectBuilder.build();
- Response createDataverseResponse = given()
- .body(dvData.toString()).contentType(ContentType.JSON)
- .when().post("/api/dataverses/" + parentDV + "?key=" + apiToken);
- return createDataverseResponse;
}
static Response createDataverse(JsonObject dvData, String apiToken) {
@@ -1570,7 +1631,16 @@ static Response getDatasetVersion(String persistentId, String versionNumber, Str
+ persistentId
+ (excludeFiles ? "&excludeFiles=true" : ""));
}
-
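+ /** GET on /api/datasets/:persistentId/versions/{versionNumber1}/compare/{versionNumber2} */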
+ static Response compareDatasetVersions(String persistentId, String versionNumber1, String versionNumber2, String apiToken) {
+ return given()
+ .header(API_TOKEN_HTTP_HEADER, apiToken)
+ .get("/api/datasets/:persistentId/versions/"
+ + versionNumber1
+ + "/compare/"
+ + versionNumber2
+ + "?persistentId="
+ + persistentId);
+ }
static Response getDatasetWithOwners(String persistentId, String apiToken, boolean returnOwners) {
return given()
.header(API_TOKEN_HTTP_HEADER, apiToken)
@@ -2123,19 +2193,22 @@ static Response uploadProvFreeForm(String idOrPersistentId, JsonObject jsonObjec
// return requestSpecification.delete("/api/files/" + idInPath + "/prov-freeform" + optionalQueryParam);
// }
static Response exportDataset(String datasetPersistentId, String exporter) {
- return exportDataset(datasetPersistentId, exporter, null);
+ return exportDataset(datasetPersistentId, exporter, null, false);
}
-
static Response exportDataset(String datasetPersistentId, String exporter, String apiToken) {
-// http://localhost:8080/api/datasets/export?exporter=dataverse_json&persistentId=doi%3A10.5072/FK2/W6WIMQ
+ return exportDataset(datasetPersistentId, exporter, apiToken, false);
+ }
+ static Response exportDataset(String datasetPersistentId, String exporter, String apiToken, boolean wait) {
+ // Wait for the async reexport to finish so the updated export is returned
+ if (wait) {
+ sleepForReexport(datasetPersistentId, apiToken, 10);
+ }
RequestSpecification requestSpecification = given();
if (apiToken != null) {
requestSpecification = given()
.header(UtilIT.API_TOKEN_HTTP_HEADER, apiToken);
}
return requestSpecification
- // .header(API_TOKEN_HTTP_HEADER, apiToken)
- // .get("/api/datasets/:persistentId/export" + "?persistentId=" + datasetPersistentId + "&exporter=" + exporter);
.get("/api/datasets/export" + "?persistentId=" + datasetPersistentId + "&exporter=" + exporter);
}
@@ -4093,8 +4166,37 @@ static Response setDatasetStorageDriver(Integer datasetId, String driverLabel, S
.body(driverLabel)
.put("/api/datasets/" + datasetId + "/storageDriver");
}
-
-
+
+ /** GET on /api/admin/savedsearches/list */
+ static Response getSavedSearchList() {
+ return given().get("/api/admin/savedsearches/list");
+ }
+
+ /** POST on /api/admin/savedsearches without body */
+ static Response setSavedSearch() {
+ return given()
+ .contentType("application/json")
+ .post("/api/admin/savedsearches");
+ }
+
+ /** POST on /api/admin/savedsearches with body */
+ static Response setSavedSearch(String body) {
+ return given()
+ .body(body)
+ .contentType("application/json")
+ .post("/api/admin/savedsearches");
+ }
+
+ /** PUT on /api/admin/savedsearches/makelinks/all */
+ static Response setSavedSearchMakelinksAll() {
+ return given().put("/api/admin/savedsearches/makelinks/all");
+ }
+
+ /** DELETE on /api/admin/savedsearches/{id} with identifier */
+ static Response deleteSavedSearchById(Integer id) {
+ return given().delete("/api/admin/savedsearches/" + id);
+ }
+
//Globus Store related - not currently used
static Response getDatasetGlobusUploadParameters(Integer datasetId, String locale, String apiToken) {
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java
index 44739f3f62a..acf5d970358 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java
@@ -1,6 +1,13 @@
package edu.harvard.iq.dataverse.api.imports;
import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
+import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO;
+
+import org.apache.commons.io.FileUtils;
+import com.google.gson.Gson;
+import java.io.File;
+import java.io.IOException;
+
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
@@ -8,6 +15,8 @@
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
+import java.nio.charset.StandardCharsets;
+
@ExtendWith(MockitoExtension.class)
public class ImportGenericServiceBeanTest {
@@ -15,7 +24,47 @@ public class ImportGenericServiceBeanTest {
private ImportGenericServiceBean importGenericService;
@Test
- public void testReassignIdentifierAsGlobalId() {
+ void testIdentifierHarvestableWithOtherID() throws IOException {
+ // "otherIdValue" containing the value : doi:10.7910/DVN/TJCLKP
+ File file = new File("src/test/resources/json/importGenericWithOtherId.json");
+ String text = FileUtils.readFileToString(file, StandardCharsets.UTF_8);
+ DatasetVersionDTO dto = new Gson().fromJson(text, DatasetVersionDTO.class);
+
+ assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://doi.org/10.7910/DVN/TJCLKP"));
+ // junk or null
+ assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "junk"));
+ assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, null));
+ assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://www.example.com"));
+ assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://dataverse.org"));
+ }
+
+ @Test
+ void testIdentifierHarvestableWithoutOtherID() throws IOException {
+ // Does not contain data of type "otherIdValue"
+ File file = new File("src/test/resources/json/importGenericWithoutOtherId.json");
+ String text = FileUtils.readFileToString(file, StandardCharsets.UTF_8);
+ DatasetVersionDTO dto = new Gson().fromJson(text, DatasetVersionDTO.class);
+
+ // non-URL
+ assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "doi:10.7910/DVN/TJCLKP"));
+ assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "hdl:10.7910/DVN/TJCLKP"));
+ // HTTPS
+ assertEquals("https://doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://doi.org/10.7910/DVN/TJCLKP"));
+ assertEquals("https://dx.doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://dx.doi.org/10.7910/DVN/TJCLKP"));
+ assertEquals("https://hdl.handle.net/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://hdl.handle.net/10.7910/DVN/TJCLKP"));
+ // HTTP (no S)
+ assertEquals("http://doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://doi.org/10.7910/DVN/TJCLKP"));
+ assertEquals("http://dx.doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://dx.doi.org/10.7910/DVN/TJCLKP"));
+ assertEquals("http://hdl.handle.net/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://hdl.handle.net/10.7910/DVN/TJCLKP"));
+ // junk or null
+ assertNull(importGenericService.selectIdentifier(dto, "junk"));
+ assertNull(importGenericService.selectIdentifier(dto, null));
+ assertNull(importGenericService.selectIdentifier(dto, "http://www.example.com"));
+ assertNull(importGenericService.selectIdentifier(dto, "https://dataverse.org"));
+ }
+
+ @Test
+ void testReassignIdentifierAsGlobalId() {
// non-URL
assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("doi:10.7910/DVN/TJCLKP", new DatasetDTO()));
assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("hdl:10.7910/DVN/TJCLKP", new DatasetDTO()));
@@ -29,6 +78,8 @@ public void testReassignIdentifierAsGlobalId() {
assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://hdl.handle.net/10.7910/DVN/TJCLKP", new DatasetDTO()));
// junk
assertNull(importGenericService.reassignIdentifierAsGlobalId("junk", new DatasetDTO()));
+ assertNull(importGenericService.reassignIdentifierAsGlobalId("http://www.example.com", new DatasetDTO()));
+ assertNull(importGenericService.reassignIdentifierAsGlobalId("https://dataverse.org", new DatasetDTO()));
}
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java
index 7bd802b3b02..bd3bfcc1a60 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java
@@ -378,20 +378,20 @@ public void testMutingInJson() {
public void testHasEmailMuted() {
testUser.setMutedEmails(mutedTypes);
System.out.println("hasEmailMuted");
- assertEquals(true, testUser.hasEmailMuted(Type.ASSIGNROLE));
- assertEquals(true, testUser.hasEmailMuted(Type.REVOKEROLE));
- assertEquals(false, testUser.hasEmailMuted(Type.CREATEDV));
- assertEquals(false, testUser.hasEmailMuted(null));
+ assertTrue(testUser.hasEmailMuted(Type.ASSIGNROLE));
+ assertTrue(testUser.hasEmailMuted(Type.REVOKEROLE));
+ assertFalse(testUser.hasEmailMuted(Type.CREATEDV));
+ assertFalse(testUser.hasEmailMuted(null));
}
@Test
public void testHasNotificationsMutedMuted() {
testUser.setMutedNotifications(mutedTypes);
System.out.println("hasNotificationMuted");
- assertEquals(true, testUser.hasNotificationMuted(Type.ASSIGNROLE));
- assertEquals(true, testUser.hasNotificationMuted(Type.REVOKEROLE));
- assertEquals(false, testUser.hasNotificationMuted(Type.CREATEDV));
- assertEquals(false, testUser.hasNotificationMuted(null));
+ assertTrue(testUser.hasNotificationMuted(Type.ASSIGNROLE));
+ assertTrue(testUser.hasNotificationMuted(Type.REVOKEROLE));
+ assertFalse(testUser.hasNotificationMuted(Type.CREATEDV));
+ assertFalse(testUser.hasNotificationMuted(null));
}
@Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java
index a8dda2f6a7e..d3c5cdca470 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java
@@ -38,7 +38,7 @@ void getIdentifier() {
@Test
void testGetDisplayInfo() {
RoleAssigneeDisplayInfo displayInfo = privateUrlUser.getDisplayInfo();
- assertEquals("Private URL Enabled", displayInfo.getTitle());
+ assertEquals("Preview URL Enabled", displayInfo.getTitle());
assertNull(displayInfo.getEmailAddress());
}
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java
index 552d76b74e8..ea5cc4b66a8 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java
@@ -89,20 +89,20 @@ public void tearDownClass() throws IOException {
*/
@Test
public void testOpen() throws IOException {
- assertEquals(false, datasetAccess.canRead());
- assertEquals(false, datasetAccess.canWrite());
+ assertFalse(datasetAccess.canRead());
+ assertFalse(datasetAccess.canWrite());
datasetAccess.open(DataAccessOption.READ_ACCESS);
- assertEquals(true, datasetAccess.canRead());
- assertEquals(false, datasetAccess.canWrite());
+ assertTrue(datasetAccess.canRead());
+ assertFalse(datasetAccess.canWrite());
datasetAccess.open(DataAccessOption.WRITE_ACCESS);
- assertEquals(false, datasetAccess.canRead());
- assertEquals(true, datasetAccess.canWrite());
+ assertFalse(datasetAccess.canRead());
+ assertTrue(datasetAccess.canWrite());
dataFileAccess.open(DataAccessOption.READ_ACCESS);
- assertEquals(true, dataFileAccess.canRead());
- assertEquals(false, dataFileAccess.canWrite());
+ assertTrue(dataFileAccess.canRead());
+ assertFalse(dataFileAccess.canWrite());
}
/**
@@ -133,7 +133,7 @@ public void testOpenAuxChannel() throws IOException {
*/
@Test
public void testIsAuxObjectCached() throws IOException {
- assertEquals(true, datasetAccess.isAuxObjectCached("Dataset"));
+ assertTrue(datasetAccess.isAuxObjectCached("Dataset"));
}
/**
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java
index 84a241b90f6..3aab66dc63b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java
@@ -189,48 +189,48 @@ public void testResponseHeaders() {
@Test
public void testFileLocation() {
- assertEquals(true, instance.isLocalFile());
+ assertTrue(instance.isLocalFile());
instance.setIsLocalFile(false);
- assertEquals(false, instance.isLocalFile());
+ assertFalse(instance.isLocalFile());
- assertEquals(false, instance.isRemoteAccess());
+ assertFalse(instance.isRemoteAccess());
instance.setIsRemoteAccess(true);
- assertEquals(true, instance.isRemoteAccess());
+ assertTrue(instance.isRemoteAccess());
}
@Test
public void testHttpAccess() {
- assertEquals(false, instance.isHttpAccess());
+ assertFalse(instance.isHttpAccess());
instance.setIsHttpAccess(true);
- assertEquals(true, instance.isHttpAccess());
+ assertTrue(instance.isHttpAccess());
}*/
@Test
public void testDownloadSupported() {
- assertEquals(true, instance.isDownloadSupported());
+ assertTrue(instance.isDownloadSupported());
instance.setIsDownloadSupported(false);
- assertEquals(false, instance.isDownloadSupported());
+ assertFalse(instance.isDownloadSupported());
}
@Test
public void testSubsetSupported() {
- assertEquals(false, instance.isSubsetSupported());
+ assertFalse(instance.isSubsetSupported());
instance.setIsSubsetSupported(true);
- assertEquals(true, instance.isSubsetSupported());
+ assertTrue(instance.isSubsetSupported());
}
@Test
public void testZippedStream() {
- assertEquals(false, instance.isZippedStream());
+ assertFalse(instance.isZippedStream());
instance.setIsZippedStream(true);
- assertEquals(true, instance.isZippedStream());
+ assertTrue(instance.isZippedStream());
}
@Test
public void testNoVarHeader() {
- assertEquals(false, instance.noVarHeader());
+ assertFalse(instance.noVarHeader());
instance.setNoVarHeader(true);
- assertEquals(true, instance.noVarHeader());
+ assertTrue(instance.noVarHeader());
}
@Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java
index 942e4329384..27e0ac758e0 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java
@@ -51,14 +51,14 @@ public void setUpClass() throws IOException {
*/
@Test
public void testPerms() throws IOException {
- assertEquals(false, datasetAccess.canRead());
- assertEquals(false, datasetAccess.canWrite());
+ assertFalse(datasetAccess.canRead());
+ assertFalse(datasetAccess.canWrite());
}
@Test
public void testIsExpiryExpired() {
long currentTime = 1502221467;
- assertEquals(false, swiftAccess.isExpiryExpired(60, 1502281, currentTime));
+ assertFalse(swiftAccess.isExpiryExpired(60, 1502281, currentTime));
}
@Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java
index eb19f22df63..148d34dc5f7 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java
@@ -18,6 +18,7 @@
import org.apache.http.message.BasicStatusLine;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
import org.junit.jupiter.api.Test;
public class DataCaptureModuleUtilTest {
@@ -25,13 +26,13 @@ public class DataCaptureModuleUtilTest {
@Test
public void testRsyncSupportEnabled() {
System.out.println("rsyncSupportEnabled");
- assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled(null));
- assertEquals(true, DataCaptureModuleUtil.rsyncSupportEnabled("dcm/rsync+ssh"));
+ assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled(null));
+ assertTrue(DataCaptureModuleUtil.rsyncSupportEnabled("dcm/rsync+ssh"));
// Comma sepratated lists of upload methods are supported.
- assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("native/http:dcm/rsync+ssh"));
- assertEquals(true, DataCaptureModuleUtil.rsyncSupportEnabled("native/http,dcm/rsync+ssh"));
- assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("native/http"));
- assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("junk"));
+ assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled("native/http:dcm/rsync+ssh"));
+ assertTrue(DataCaptureModuleUtil.rsyncSupportEnabled("native/http,dcm/rsync+ssh"));
+ assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled("native/http"));
+ assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled("junk"));
}
@Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java
index 8eed2a33c5a..2db8851c48a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java
@@ -70,8 +70,8 @@ public void testGetThumbnailRestricted() {
*/
@Test
public void testDeleteDatasetLogo() {
- assertEquals(false, DatasetUtil.deleteDatasetLogo(null));
- assertEquals(false, DatasetUtil.deleteDatasetLogo(new Dataset()));
+ assertFalse(DatasetUtil.deleteDatasetLogo(null));
+ assertFalse(DatasetUtil.deleteDatasetLogo(new Dataset()));
}
/**
@@ -106,7 +106,7 @@ public void testGetThumbnailAsInputStream() {
@Test
public void testIsDatasetLogoPresent() {
Dataset dataset = MocksFactory.makeDataset();
- assertEquals(false, DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE));
+ assertFalse(DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE));
}
@Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java b/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java
index bfb9134cfca..475b4c1cff5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java
@@ -66,16 +66,16 @@ void variableTest(Map vmMap) {
assertEquals(vm.getLiteralquestion(), "This is a literal question.");
assertEquals(vm.getNotes(), "These are notes.\nA lot of them.");
assertEquals(vm.getUniverse(),"Our universe");
- assertEquals(false, vm.isIsweightvar());
- assertEquals(false, vm.isWeighted());
+ assertFalse(vm.isIsweightvar());
+ assertFalse(vm.isWeighted());
testCategoriesVar1(vm);
vm = vmMap.get(1169L);
assertNotNull(vm);
- assertEquals(false, vm.isIsweightvar());
- assertEquals(true, vm.isWeighted());
+ assertFalse(vm.isIsweightvar());
+ assertTrue(vm.isWeighted());
assertEquals(vm.getLabel(), "age_rollup" );
assertEquals(vm.getInterviewinstruction(), null);
@@ -90,8 +90,8 @@ void variableTest(Map vmMap) {
vm = vmMap.get(1168L);
assertNotNull(vm);
- assertEquals(true, vm.isIsweightvar());
- assertEquals(false, vm.isWeighted());
+ assertTrue(vm.isIsweightvar());
+ assertFalse(vm.isWeighted());
assertEquals(vm.getLabel(), "weight" );
assertEquals(vm.getInterviewinstruction(), null);
assertEquals(vm.getLiteralquestion(), "Literal question for weight");
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java
new file mode 100644
index 00000000000..f49ebcea39c
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java
@@ -0,0 +1,264 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;
+import edu.harvard.iq.dataverse.util.JhoveFileType;
+import edu.harvard.iq.dataverse.util.SystemConfig;
+import edu.harvard.iq.dataverse.util.testing.JvmSetting;
+import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
+import org.jetbrains.annotations.NotNull;
+import org.joda.time.DateTime;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import org.mockito.MockedStatic;
+import org.mockito.Mockito;
+
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.nio.file.Files;
+import java.nio.file.NoSuchFileException;
+import java.nio.file.Path;
+import java.security.SecureRandom;
+import java.text.MessageFormat;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
+import static edu.harvard.iq.dataverse.DataFile.ChecksumType.MD5;
+import static org.apache.commons.io.file.FilesUncheck.createDirectories;
+import static org.apache.commons.io.file.PathUtils.deleteDirectory;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.mockito.ArgumentMatchers.any;
+
+
+@LocalJvmSettings
+public class CreateNewDataFilesTest {
+ // TODO keep constants for annotations in sync with class name
+ Path testDir = Path.of("target/test/").resolve(getClass().getSimpleName());
+ PrintStream original_stderr;
+
+ @BeforeEach
+ public void cleanTmpDir() throws IOException {
+ original_stderr = System.err;
+ if (testDir.toFile().exists())
+ deleteDirectory(testDir);
+ }
+
+ @AfterEach void restoreStderr() {
+ System.setErr(original_stderr);
+ }
+
+ @Test
+ @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "target/test/CreateNewDataFilesTest/tmp")
+ public void execute_fails_to_upload_when_tmp_does_not_exist() throws FileNotFoundException {
+
+ mockTmpLookup();
+ var cmd = createCmd("scripts/search/data/shape/shapefile.zip", mockDatasetVersion(), 1000L, 500L);
+ var ctxt = mockCommandContext(mockSysConfig(true, 0L, MD5, 10));
+
+ assertThatThrownBy(() -> cmd.execute(ctxt))
+ .isInstanceOf(CommandException.class)
+ .hasMessageContaining("Failed to save the upload as a temp file (temp disk space?)")
+ .hasRootCauseInstanceOf(NoSuchFileException.class)
+ .getRootCause()
+ .hasMessageStartingWith("target/test/CreateNewDataFilesTest/tmp/temp/tmp");
+ }
+
+ @Test
+ @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "target/test/CreateNewDataFilesTest/tmp")
+ public void execute_fails_on_size_limit() throws Exception {
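+ // unlike the previous test, the tmp directory must exist so the upload gets far enough to hit the size check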
+ createDirectories(Path.of("target/test/CreateNewDataFilesTest/tmp/temp"));
+
+ mockTmpLookup();
+ var cmd = createCmd("scripts/search/data/binary/3files.zip", mockDatasetVersion(), 1000L, 500L);
+ var ctxt = mockCommandContext(mockSysConfig(true, 50L, MD5, 0));
+ try (var mockedStatic = Mockito.mockStatic(JhoveFileType.class)) {
+ mockedStatic.when(JhoveFileType::getJhoveConfigFile).thenReturn("conf/jhove/jhove.conf");
+
+ assertThatThrownBy(() -> cmd.execute(ctxt))
+ .isInstanceOf(CommandException.class)
+ .hasMessage("This file size (462 B) exceeds the size limit of 50 B.");
+ }
+ }
+
+ @Test
+ @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "target/test/CreateNewDataFilesTest/tmp")
+ public void execute_loads_individual_files_from_uploaded_zip() throws Exception {
+ var tempDir = testDir.resolve("tmp/temp");
+ createDirectories(tempDir);
+
+ mockTmpLookup();
+ var cmd = createCmd("src/test/resources/own-cloud-downloads/greetings.zip", mockDatasetVersion(), 1000L, 500L);
+ var ctxt = mockCommandContext(mockSysConfig(false, 1000000L, MD5, 10));
+ try (MockedStatic<JhoveFileType> mockedStatic = Mockito.mockStatic(JhoveFileType.class)) {
+ mockedStatic.when(JhoveFileType::getJhoveConfigFile).thenReturn("conf/jhove/jhove.conf");
+
+ // the test
+ var result = cmd.execute(ctxt);
+
+ assertThat(result.getErrors()).hasSize(0);
+ assertThat(result.getDataFiles().stream().map(dataFile ->
+ dataFile.getFileMetadata().getDirectoryLabel() + "/" + dataFile.getDisplayName()
+ )).containsExactlyInAnyOrder(
+ "DD-1576/goodbye.txt", "DD-1576/hello.txt"
+ );
+ var storageIds = result.getDataFiles().stream().map(DataFile::getStorageIdentifier).toList();
+ assertThat(tempDir.toFile().list())
+ .containsExactlyInAnyOrderElementsOf(storageIds);
+ }
+ }
+
+ @Test
+ @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "target/test/CreateNewDataFilesTest/tmp")
+ public void execute_rezips_sets_of_shape_files_from_uploaded_zip() throws Exception {
+ var tempDir = testDir.resolve("tmp/temp");
+ createDirectories(tempDir);
+
+ mockTmpLookup();
+ var cmd = createCmd("src/test/resources/own-cloud-downloads/shapes.zip", mockDatasetVersion(), 1000L, 500L);
+ var ctxt = mockCommandContext(mockSysConfig(false, 100000000L, MD5, 10));
+ try (var mockedJHoveFileType = Mockito.mockStatic(JhoveFileType.class)) {
+ mockedJHoveFileType.when(JhoveFileType::getJhoveConfigFile).thenReturn("conf/jhove/jhove.conf");
+
+ // the test
+ var result = cmd.execute(ctxt);
+
+ assertThat(result.getErrors()).hasSize(0);
+ assertThat(result.getDataFiles().stream().map(dataFile ->
+ (dataFile.getFileMetadata().getDirectoryLabel() + "/" + dataFile.getDisplayName())
+ .replaceAll(".*/dataDir/", "")
+ )).containsExactlyInAnyOrder(
+ "shape1.zip",
+ "shape2/shape2",
+ "shape2/shape2.pdf",
+ "shape2/shape2.txt",
+ "shape2/shape2.zip",
+ "extra/shp_dictionary.xls",
+ "extra/notes",
+ "extra/README.MD"
+ );
+ var storageIds = result.getDataFiles().stream().map(DataFile::getStorageIdentifier).toList();
+ assertThat(tempDir.toFile().list())
+ .containsExactlyInAnyOrderElementsOf(storageIds);
+ }
+ }
+
+ @Disabled("Too slow. Intended for manual execution.")
+ @Test
+ @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "/tmp/test/CreateNewDataFilesTest/tmp")
+ public void extract_zip_performance() throws Exception {
+ /*
+ Developed to measure the performance difference between the old ZipInputStream implementation and the new ZipFile implementation.
+ Adjust the numbers depending on:
+ - how much time you want to spend on this test
+ - how much system stress you want to examine
+ */
+ var nrOfZipFiles = 20;
+ var avgNrOfFilesPerZip = 300;
+ var avgFileLength = 5000;
+
+ var tmpUploadStorage = Path.of("/tmp/test/CreateNewDataFilesTest/tmp/temp");
+ if (tmpUploadStorage.toFile().exists()) {
+ deleteDirectory(tmpUploadStorage);
+ }
+ createDirectories(tmpUploadStorage); // a temp dir under target/ would choke IntelliJ
+
+ var chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
+ var random = new SecureRandom();
+ var totalNrOfFiles = 0;
+ var totalFileSize = 0;
+ var totalTime = 0L;
+ var tmp = Path.of(Files.createTempDirectory(null).toString());
+ var ctxt = mockCommandContext(mockSysConfig(false, 100000000L, MD5, 10000));
+ try (var mockedJHoveFileType = Mockito.mockStatic(JhoveFileType.class)) {
+ mockedJHoveFileType.when(JhoveFileType::getJhoveConfigFile).thenReturn("conf/jhove/jhove.conf");
+ for (var zipNr = 1; zipNr <= nrOfZipFiles; zipNr++) {
+ // build the zip
+ var zip = tmp.resolve(zipNr + "-data.zip");
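+ // nextInt(2 * avg) is uniform on [0, 2 * avg), so the number of files per zip averages avgNrOfFilesPerZip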
+ var nrOfFilesInZip = random.nextInt(avgNrOfFilesPerZip * 2);
+ try (var zipStream = new ZipOutputStream(new FileOutputStream(zip.toFile()))) {
+ for (var fileInZipNr = 1; fileInZipNr <= nrOfFilesInZip; fileInZipNr++) {
+ // build content for a file
+ var stringLength = random.nextInt(avgFileLength * 2 - 5);
+ StringBuilder sb = new StringBuilder(stringLength);
+ for (int i = 1; i <= stringLength; i++) { // zero length causes buffer underflow
+ sb.append(chars.charAt(random.nextInt(chars.length())));
+ }
+ // add the file to the zip
+ zipStream.putNextEntry(new ZipEntry(fileInZipNr + ".txt"));
+ zipStream.write((sb.toString()).getBytes());
+ zipStream.closeEntry();
+ totalFileSize += stringLength;
+ }
+ }
+
+ // upload the zip
+ var before = DateTime.now();
+ var result = createCmd(zip.toString(), mockDatasetVersion(), 1000L, 500L)
+ .execute(ctxt);
+ totalTime += DateTime.now().getMillis() - before.getMillis();
+
+ assertThat(result.getErrors()).hasSize(0);
+ assertThat(result.getDataFiles()).hasSize(nrOfFilesInZip);
+ totalNrOfFiles += nrOfFilesInZip;
+
+ // report after each zip to have some data even when aborting a test that takes too long
+ System.out.println(MessageFormat.format(
+ "Total time: {0}ms; nr of zips {1} total nr of files {2}; total file size {3}",
+ totalTime, zipNr, totalNrOfFiles, totalFileSize
+ ));
+ }
+ assertThat(tmpUploadStorage.toFile().list()).hasSize(totalNrOfFiles);
+ }
+ }
+
+ private static @NotNull CreateNewDataFilesCommand createCmd(String name, DatasetVersion dsVersion, long allocatedQuotaLimit, long usedQuotaLimit) throws FileNotFoundException {
+ return new CreateNewDataFilesCommand(
+ Mockito.mock(DataverseRequest.class),
+ dsVersion,
+ new FileInputStream(name),
+ "example.zip",
+ "application/zip",
+ null,
+ new UploadSessionQuotaLimit(allocatedQuotaLimit, usedQuotaLimit),
+ "sha");
+ }
+
+ private static @NotNull CommandContext mockCommandContext(SystemConfig sysCfg) {
+ var ctxt = Mockito.mock(CommandContext.class);
+ Mockito.when(ctxt.systemConfig()).thenReturn(sysCfg);
+ return ctxt;
+ }
+
+ private static @NotNull SystemConfig mockSysConfig(boolean isStorageQuotaEnforced, long maxFileUploadSizeForStore, DataFile.ChecksumType checksumType, int zipUploadFilesLimit) {
+ var sysCfg = Mockito.mock(SystemConfig.class);
+ Mockito.when(sysCfg.isStorageQuotasEnforced()).thenReturn(isStorageQuotaEnforced);
+ Mockito.when(sysCfg.getMaxFileUploadSizeForStore(any())).thenReturn(maxFileUploadSizeForStore);
+ Mockito.when(sysCfg.getFileFixityChecksumAlgorithm()).thenReturn(checksumType);
+ Mockito.when(sysCfg.getZipUploadFilesLimit()).thenReturn(zipUploadFilesLimit);
+ return sysCfg;
+ }
+
+ private static void mockTmpLookup() {
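+ // stubs a JvmSettings lookup on a local mock; the files directory the tests actually use is set via the @JvmSetting annotations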
+ JvmSettings mockFilesDirectory = Mockito.mock(JvmSettings.class);
+ Mockito.when(mockFilesDirectory.lookup()).thenReturn("/mocked/path");
+ }
+
+ private static @NotNull DatasetVersion mockDatasetVersion() {
+ var dsVersion = Mockito.mock(DatasetVersion.class);
+ Mockito.when(dsVersion.getDataset()).thenReturn(Mockito.mock(Dataset.class));
+ return dsVersion;
+ }
+
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java
index 508eac46cb4..0ba29f74774 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java
@@ -18,7 +18,9 @@
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.search.SolrIndexServiceBean;
import edu.harvard.iq.dataverse.util.SystemConfig;
+import java.sql.Timestamp;
import java.util.ArrayList;
+import java.util.Date;
import java.util.List;
import java.util.concurrent.Future;
@@ -171,9 +173,9 @@ public void testCreatePrivateUrlSuccessfully() throws CommandException {
assertEquals(expectedUser.getIdentifier(), privateUrl.getRoleAssignment().getAssigneeIdentifier());
assertEquals(expectedUser.isSuperuser(), false);
assertEquals(expectedUser.isAuthenticated(), false);
- assertEquals(expectedUser.getDisplayInfo().getTitle(), "Private URL Enabled");
+ assertEquals(expectedUser.getDisplayInfo().getTitle(), "Preview URL Enabled");
assertNotNull(privateUrl.getToken());
- assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink());
+ assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink());
}
@Test
@@ -188,22 +190,24 @@ public void testCreateAnonymizedAccessPrivateUrlSuccessfully() throws CommandExc
assertEquals(expectedUser.getIdentifier(), privateUrl.getRoleAssignment().getAssigneeIdentifier());
assertEquals(expectedUser.isSuperuser(), false);
assertEquals(expectedUser.isAuthenticated(), false);
- assertEquals(expectedUser.getDisplayInfo().getTitle(), "Private URL Enabled");
+ assertEquals(expectedUser.getDisplayInfo().getTitle(), "Preview URL Enabled");
assertNotNull(privateUrl.getToken());
assertTrue(privateUrl.isAnonymizedAccess());
- assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink());
+ assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink());
}
@Test
- public void testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() {
+ public void testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() throws CommandException {
dataset = new Dataset();
List<DatasetVersion> versions = new ArrayList<>();
+ dataset.setPublicationDate(new Timestamp(new Date().getTime()));
DatasetVersion datasetVersion = new DatasetVersion();
datasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED);
DatasetVersion datasetVersion2 = new DatasetVersion();
-
- versions.add(datasetVersion);
+ datasetVersion2.setVersionState(DatasetVersion.VersionState.DRAFT);
+
versions.add(datasetVersion2);
+ versions.add(datasetVersion);
dataset.setVersions(versions);
dataset.setId(versionIsReleased);
PrivateUrl privateUrl = null;
@@ -211,6 +215,7 @@ public void testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() {
privateUrl = testEngine.submit(new CreatePrivateUrlCommand(null, dataset, true));
assertTrue(false);
} catch (CommandException ex) {
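+ // expected: an anonymized-access preview URL cannot be created once the dataset has a released version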
+
}
assertNull(privateUrl);
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java
index 9850e9d80e9..2121aa4d9f9 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java
@@ -253,7 +253,7 @@ public void testGetDisplayName() {
*/
@Test
public void testIsXMLFormat() {
- assertEquals(false, schemaDotOrgExporter instanceof XMLExporter);
+ assertFalse(schemaDotOrgExporter instanceof XMLExporter);
}
/**
@@ -261,7 +261,7 @@ public void testIsXMLFormat() {
*/
@Test
public void testIsHarvestable() {
- assertEquals(false, schemaDotOrgExporter.isHarvestable());
+ assertFalse(schemaDotOrgExporter.isHarvestable());
}
/**
@@ -269,7 +269,7 @@ public void testIsHarvestable() {
*/
@Test
public void testIsAvailableToUsers() {
- assertEquals(true, schemaDotOrgExporter.isAvailableToUsers());
+ assertTrue(schemaDotOrgExporter.isAvailableToUsers());
}
/**
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
index 4dfedf5aa17..955070a662a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
@@ -112,8 +112,8 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception {
}
// check filenames are unique and altered
- assertEquals(true, file1NameAltered);
- assertEquals(true, file2NameAltered);
+ assertTrue(file1NameAltered);
+ assertTrue(file2NameAltered);
// try to add data files with "-1" duplicates and see if it gets incremented to "-2"
IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null);
@@ -128,8 +128,8 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception {
}
// check filenames are unique and altered
- assertEquals(true, file1NameAltered);
- assertEquals(true, file2NameAltered);
+ assertTrue(file1NameAltered);
+ assertTrue(file2NameAltered);
}
@Test
@@ -218,8 +218,8 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce
}
// check filenames are unique and altered
- assertEquals(true, file1NameAltered);
- assertEquals(true, file2NameAltered);
+ assertTrue(file1NameAltered);
+ assertTrue(file2NameAltered);
// try to add data files with "-1" duplicates and see if it gets incremented to "-2"
IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null);
@@ -234,8 +234,8 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce
}
// check filenames are unique and altered
- assertEquals(true, file1NameAltered);
- assertEquals(true, file2NameAltered);
+ assertTrue(file1NameAltered);
+ assertTrue(file2NameAltered);
}
@Test
@@ -347,9 +347,9 @@ public void testCheckForDuplicateFileNamesWithDirectories() throws Exception {
}
// check filenames are unique
- assertEquals(true, file1NameAltered);
- assertEquals(true, file2NameAltered);
- assertEquals(false, file3NameAltered);
+ assertTrue(file1NameAltered);
+ assertTrue(file2NameAltered);
+ assertFalse(file3NameAltered);
// add duplicate file in root
datasetVersion.getFileMetadatas().add(fmd3);
@@ -371,9 +371,9 @@ public void testCheckForDuplicateFileNamesWithDirectories() throws Exception {
}
// check filenames are unique
- assertEquals(true, file1NameAltered);
- assertEquals(true, file2NameAltered);
- assertEquals(true, file3NameAltered);
+ assertTrue(file1NameAltered);
+ assertTrue(file2NameAltered);
+ assertTrue(file3NameAltered);
}
@Test
@@ -457,7 +457,7 @@ public void testCheckForDuplicateFileNamesTabular() throws Exception {
}
// check filename is altered since tabular and will change to .tab after ingest
- assertEquals(true, file2NameAltered);
+ assertTrue(file2NameAltered);
}
@@ -553,8 +553,8 @@ public void testCheckForDuplicateFileNamesWhenReplacing() throws Exception {
}
// check filenames are unique and unaltered
- assertEquals(true, file1NameAltered);
- assertEquals(false, file2NameAltered);
+ assertTrue(file1NameAltered);
+ assertFalse(file2NameAltered);
}
@Test
@@ -657,7 +657,7 @@ public void testRecalculateDatasetVersionUNF() {
DataTable dataTable = new DataTable();
dataTable.setUnf("unfOnDataTable");
datafile1.setDataTable(dataTable);
- assertEquals(true, datafile1.isTabularData());
+ assertTrue(datafile1.isTabularData());
FileMetadata fmd1 = new FileMetadata();
fmd1.setId(1L);
@@ -692,7 +692,7 @@ public void testGetUnfValuesOfFiles() {
@Test
public void testshouldHaveUnf() {
- assertEquals(false, IngestUtil.shouldHaveUnf(null));
+ assertFalse(IngestUtil.shouldHaveUnf(null));
}
@Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java b/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java
index a0ac22f99f3..a2729ce7514 100644
--- a/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java
@@ -2,6 +2,8 @@
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.Arrays;
@@ -171,13 +173,13 @@ public void testBasics() {
pager1 = new Pager(102, 10, 1);
msgt("Test: 102 results, 10 per page, page 1");
- assertEquals(true, pager1.isPagerNecessary());
+ assertTrue(pager1.isPagerNecessary());
assertEquals(102, pager1.getNumResults());
assertEquals(1, pager1.getPreviousPageNumber());
assertEquals(2, pager1.getNextPageNumber());
- assertEquals(false, pager1.hasPreviousPageNumber());
- assertEquals(true, pager1.hasNextPageNumber());
+ assertFalse(pager1.hasPreviousPageNumber());
+ assertTrue(pager1.hasNextPageNumber());
msg("page list: " + Arrays.toString(pager1.getPageNumberList()));
//assertEquals(new int[]{1, 2, 3, 4, 5}, pager1.getPageNumberList());
@@ -232,13 +234,13 @@ public void testNoResults() {
System.out.println("getNumResults");
Pager pager1 = new Pager(0, 10, 1);
- assertEquals(false, pager1.isPagerNecessary());
+ assertFalse(pager1.isPagerNecessary());
assertEquals(0, pager1.getNumResults());
assertEquals(0, pager1.getPreviousPageNumber());
assertEquals(0, pager1.getNextPageNumber());
- assertEquals(false, pager1.hasPreviousPageNumber());
- assertEquals(false, pager1.hasNextPageNumber());
+ assertFalse(pager1.hasPreviousPageNumber());
+ assertFalse(pager1.hasNextPageNumber());
msgt("page list: " + Arrays.toString(pager1.getPageNumberList()));
//assertEquals(null, pager1.getPageNumberList());
diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java
index 58d69da743b..bacb231b4d5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java
@@ -99,7 +99,7 @@
@JvmSetting(key = JvmSettings.PID_PROVIDER_LABEL, value = "FAKE 1", varArgs = "fake1")
@JvmSetting(key = JvmSettings.PID_PROVIDER_TYPE, value = FakeDOIProvider.TYPE, varArgs = "fake1")
@JvmSetting(key = JvmSettings.PID_PROVIDER_AUTHORITY, value = "10.5074", varArgs = "fake1")
-@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "FK", varArgs = "fake1")
+@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "fk", varArgs = "fake1")
@JvmSetting(key = JvmSettings.PID_PROVIDER_MANAGED_LIST, value = "doi:10.5073/FK3ABCDEF", varArgs ="fake1")
//HANDLE 1
@@ -250,9 +250,12 @@ public void testDOIParsing() throws IOException {
assertEquals(pid1String, pid3.asString());
assertEquals("dc1", pid3.getProviderId());
- String pid4String = "doi:10.5072/FK3ABCDEF";
+ // Also test case-insensitive parsing
+ String pid4String = "doi:10.5072/fk3ABCDEF";
GlobalId pid4 = PidUtil.parseAsGlobalID(pid4String);
- assertEquals(pid4String, pid4.asString());
+ // Lower case identifiers are normalized to upper case internally, so we test against the upper case identifier.
+ // I.e. the lower case string parses to the same internal upper case PID representation as the upper case string does.
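+ // e.g. "doi:10.5072/fk3ABCDEF" is expected to come back as "doi:10.5072/FK3ABCDEF"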
+ assertEquals("doi:10.5072/FK3ABCDEF", pid4.asString());
assertEquals("dc2", pid4.getProviderId());
String pid5String = "doi:10.5072/FK2ABCDEF";
@@ -312,6 +315,13 @@ public void testUnmanagedParsing() throws IOException {
GlobalId pid6 = PidUtil.parseAsGlobalID(pid6String);
assertEquals(pid6String, pid6.asString());
assertEquals(UnmanagedPermaLinkPidProvider.ID, pid6.getProviderId());
+
+ // Lowercase test for unmanaged DOIs
+ String pid7String = "doi:10.5281/zenodo.6381129";
+ GlobalId pid7 = PidUtil.parseAsGlobalID(pid7String);
+ assertEquals(UnmanagedDOIProvider.ID, pid7.getProviderId());
+ assertEquals(pid7String.toUpperCase().replace("DOI", "doi"), pid7.asString());
+
}
@@ -350,15 +360,15 @@ public void testExcludedSetParsing() throws IOException {
@Test
public void testManagedSetParsing() throws IOException {
- String pid1String = "doi:10.5073/FK3ABCDEF";
+ String pid1String = "doi:10.5073/fk3ABCDEF";
GlobalId pid2 = PidUtil.parseAsGlobalID(pid1String);
- assertEquals(pid1String, pid2.asString());
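+ // toUpperCase().replace("DOI", "doi") upper-cases the identifier while keeping the "doi:" protocol prefix lower case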
+ assertEquals(pid1String.toUpperCase().replace("DOI", "doi"), pid2.asString());
assertEquals("fake1", pid2.getProviderId());
assertEquals("https://doi.org/" + pid2.getAuthority() + PidUtil.getPidProvider(pid2.getProviderId()).getSeparator() + pid2.getIdentifier(),pid2.asURL());
assertEquals("10.5073", pid2.getAuthority());
assertEquals(AbstractDOIProvider.DOI_PROTOCOL, pid2.getProtocol());
GlobalId pid3 = PidUtil.parseAsGlobalID(pid2.asURL());
- assertEquals(pid1String, pid3.asString());
+ assertEquals(pid1String.toUpperCase().replace("DOI", "doi"), pid3.asString());
assertEquals("fake1", pid3.getProviderId());
assertFalse(PidUtil.getPidProvider(pid3.getProviderId()).canCreatePidsLike(pid3));
diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java
index c03146904de..2bd6818821d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java
@@ -1,15 +1,21 @@
package edu.harvard.iq.dataverse.pidproviders.doi.datacite;
+import edu.harvard.iq.dataverse.ControlledVocabularyValue;
+import edu.harvard.iq.dataverse.DataCitation;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetAuthor;
import edu.harvard.iq.dataverse.DatasetField;
+import edu.harvard.iq.dataverse.DatasetFieldCompoundValue;
import edu.harvard.iq.dataverse.DatasetFieldConstant;
import edu.harvard.iq.dataverse.DatasetFieldType;
+import edu.harvard.iq.dataverse.DatasetFieldValue;
import edu.harvard.iq.dataverse.DatasetFieldType.FieldType;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.DatasetVersion.VersionState;
+import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.DataverseServiceBean;
import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.MetadataBlock;
import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
import edu.harvard.iq.dataverse.branding.BrandingUtil;
import edu.harvard.iq.dataverse.dataset.DatasetType;
@@ -20,16 +26,30 @@
import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.SystemConfig;
+import edu.harvard.iq.dataverse.util.json.CompoundVocabularyException;
+import edu.harvard.iq.dataverse.util.json.ControlledVocabularyException;
+import edu.harvard.iq.dataverse.util.json.JsonParseException;
+import edu.harvard.iq.dataverse.util.json.JsonParser;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
import edu.harvard.iq.dataverse.util.testing.JvmSetting;
import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
import edu.harvard.iq.dataverse.util.xml.XmlValidator;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonString;
+import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Paths;
import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import javax.xml.transform.stream.StreamSource;
@@ -73,6 +93,8 @@ public static void setupMocks() {
}
/**
+ * A minimal example to verify that the XmlMetadataTemplate generates output
+ * consistent with the DataCite XML v4.5 schema.
*/
@Test
public void testDataCiteXMLCreation() throws IOException {
@@ -106,7 +128,7 @@ public void testDataCiteXMLCreation() throws IOException {
doiMetadata.setAuthors(authors);
doiMetadata.setPublisher("Dataverse");
XmlMetadataTemplate template = new XmlMetadataTemplate(doiMetadata);
-
+
Dataset d = new Dataset();
GlobalId doi = new GlobalId("doi", "10.5072", "FK2/ABCDEF", null, null, null);
d.setGlobalId(doi);
@@ -135,15 +157,291 @@ public void testDataCiteXMLCreation() throws IOException {
d.setDatasetType(dType);
String xml = template.generateXML(d);
- System.out.println("Output is " + xml);
+ System.out.println("Output from minimal example is " + xml);
try {
StreamSource source = new StreamSource(new StringReader(xml));
source.setSystemId("DataCite XML for test dataset");
- assertTrue(XmlValidator.validateXmlSchema(source, new URL("https://schema.datacite.org/meta/kernel-4/metadata.xsd")));
+ assertTrue(XmlValidator.validateXmlSchema(source,
+ new URL("https://schema.datacite.org/meta/kernel-4/metadata.xsd")));
} catch (SAXException e) {
System.out.println("Invalid schema: " + e.getMessage());
}
-
+
+ }
+
+ /**
+ * This tests a more complete example based on the dataset-all-defaults
+ * file, again checking that the result conforms to the DataCite XML v4.5
+ * schema.
+ */
+ @Test
+ public void testDataCiteXMLCreationAllFields() throws IOException {
+ Dataverse collection = new Dataverse();
+ collection.setCitationDatasetFieldTypes(new ArrayList<>());
+ Dataset d = new Dataset();
+ d.setOwner(collection);
+ DatasetVersion dv = new DatasetVersion();
+ TermsOfUseAndAccess toa = new TermsOfUseAndAccess();
+ toa.setTermsOfUse("Some terms");
+ dv.setTermsOfUseAndAccess(toa);
+ dv.setDataset(d);
+ DatasetFieldType primitiveDSFType = new DatasetFieldType(DatasetFieldConstant.title,
+ DatasetFieldType.FieldType.TEXT, false);
+ DatasetField testDatasetField = new DatasetField();
+
+ dv.setVersionState(VersionState.DRAFT);
+
+ testDatasetField.setDatasetVersion(dv);
+
+ File datasetVersionJson = new File("src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt");
+ String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath())));
+ JsonObject datasetJson = JsonUtil.getJsonObject(datasetVersionAsJson);
+
+ GlobalId doi = new GlobalId("doi", datasetJson.getString("authority"), datasetJson.getString("identifier"),
+ null, null, null);
+ d.setGlobalId(doi);
+
+ List<DatasetField> fields = assertDoesNotThrow(() -> XmlMetadataTemplateTest
+ .parseMetadataBlocks(datasetJson.getJsonObject("datasetVersion").getJsonObject("metadataBlocks")));
+ dv.setDatasetFields(fields);
+
+ ArrayList<DatasetVersion> dsvs = new ArrayList<>();
+ dsvs.add(0, dv);
+ d.setVersions(dsvs);
+ DatasetType dType = new DatasetType();
+ dType.setName(DatasetType.DATASET_TYPE_DATASET);
+ d.setDatasetType(dType);
+ String xml = DOIDataCiteRegisterService.getMetadataFromDvObject(dv.getDataset().getGlobalId().asString(),
+ new DataCitation(dv).getDataCiteMetadata(), dv.getDataset());
+ System.out.println("Output from dataset-all-defaults is " + xml);
+ try {
+ StreamSource source = new StreamSource(new StringReader(xml));
+ source.setSystemId("DataCite XML for test dataset");
+ assertTrue(XmlValidator.validateXmlSchema(source,
+ new URL("https://schema.datacite.org/meta/kernel-4/metadata.xsd")));
+ } catch (SAXException e) {
+ System.out.println("Invalid schema: " + e.getMessage());
+ }
+
+ }
+
+ /**
+ * Mock utility methods - these support importing DatasetFields from the
+ * Dataverse JSON export format. They assume that any referenced DatasetFieldType
+ * exists, that any controlled vocabulary value exists, etc., which avoids
+ * having to do database lookups or read metadata block tsv files. They are
+ * derived from the JsonParser methods of the same names, with any database
+ * references and DatasetFieldType-related error checking removed.
+ */
+ public static List<DatasetField> parseMetadataBlocks(JsonObject json) throws JsonParseException {
+
+ Map<String, DatasetFieldType> existingTypes = new HashMap<>();
+
+ Set<String> keys = json.keySet();
+ List<DatasetField> fields = new LinkedList<>();
+
+ for (String blockName : keys) {
+ MetadataBlock block = new MetadataBlock();
+ block.setName(blockName);
+ JsonObject blockJson = json.getJsonObject(blockName);
+ JsonArray fieldsJson = blockJson.getJsonArray("fields");
+ fields.addAll(parseFieldsFromArray(fieldsJson, true, block, existingTypes));
+ }
+ return fields;
+ }
+
+ private static List<DatasetField> parseFieldsFromArray(JsonArray fieldsArray, Boolean testType, MetadataBlock block,
+ Map<String, DatasetFieldType> existingTypes) throws JsonParseException {
+ List<DatasetField> fields = new LinkedList<>();
+ for (JsonObject fieldJson : fieldsArray.getValuesAs(JsonObject.class)) {
+
+ DatasetField field = parseField(fieldJson, testType, block, existingTypes);
+ if (field != null) {
+ fields.add(field);
+ }
+
+ }
+ return fields;
+
+ }
+
+ public static DatasetField parseField(JsonObject json, Boolean testType, MetadataBlock block,
+ Map<String, DatasetFieldType> existingTypes) throws JsonParseException {
+ if (json == null) {
+ return null;
+ }
+
+ DatasetField ret = new DatasetField();
+ String fieldName = json.getString("typeName", "");
+ String typeClass = json.getString("typeClass", "");
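+ // create a minimal DatasetFieldType the first time each typeName is seen, instead of looking it up in the database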
+ if (!existingTypes.containsKey(fieldName)) {
+ boolean multiple = json.getBoolean("multiple");
+ DatasetFieldType fieldType = new DatasetFieldType();
+ fieldType.setName(fieldName);
+ fieldType.setAllowMultiples(multiple);
+ fieldType.setAllowControlledVocabulary(typeClass.equals("controlledVocabulary"));
+ fieldType.setFieldType(FieldType.TEXT);
+ fieldType.setMetadataBlock(block);
+ fieldType.setChildDatasetFieldTypes(new ArrayList<>());
+ existingTypes.put(fieldName, fieldType);
+ }
+ DatasetFieldType type = existingTypes.get(fieldName);
+ ret.setDatasetFieldType(type);
+
+ if (typeClass.equals("compound")) {
+ parseCompoundValue(ret, type, json, testType, block, existingTypes);
+ } else if (type.isControlledVocabulary()) {
+ parseControlledVocabularyValue(ret, type, json);
+ } else {
+ parsePrimitiveValue(ret, type, json);
+ }
+
+ return ret;
+ }
+
+ public static void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType, JsonObject json,
+ Boolean testType, MetadataBlock block, Map<String, DatasetFieldType> existingTypes)
+ throws JsonParseException {
+ List<ControlledVocabularyException> vocabExceptions = new ArrayList<>();
+ List<DatasetFieldCompoundValue> vals = new LinkedList<>();
+ if (compoundType.isAllowMultiples()) {
+ int order = 0;
+ try {
+ json.getJsonArray("value").getValuesAs(JsonObject.class);
+ } catch (ClassCastException cce) {
+ throw new JsonParseException("Invalid values submitted for " + compoundType.getName()
+ + ". It should be an array of values.");
+ }
+ for (JsonObject obj : json.getJsonArray("value").getValuesAs(JsonObject.class)) {
+ DatasetFieldCompoundValue cv = new DatasetFieldCompoundValue();
+ List<DatasetField> fields = new LinkedList<>();
+ for (String fieldName : obj.keySet()) {
+ JsonObject childFieldJson = obj.getJsonObject(fieldName);
+ DatasetField f = null;
+ try {
+ f = parseField(childFieldJson, testType, block, existingTypes);
+ } catch (ControlledVocabularyException ex) {
+ vocabExceptions.add(ex);
+ }
+
+ if (f != null) {
+ f.setParentDatasetFieldCompoundValue(cv);
+ fields.add(f);
+ }
+ }
+ if (!fields.isEmpty()) {
+ cv.setChildDatasetFields(fields);
+ cv.setDisplayOrder(order);
+ vals.add(cv);
+ }
+ order++;
+ }
+
+ } else {
+
+ DatasetFieldCompoundValue cv = new DatasetFieldCompoundValue();
+ List<DatasetField> fields = new LinkedList<>();
+ JsonObject value = json.getJsonObject("value");
+ for (String key : value.keySet()) {
+ JsonObject childFieldJson = value.getJsonObject(key);
+ DatasetField f = null;
+ try {
+ f = parseField(childFieldJson, testType, block, existingTypes);
+ } catch (ControlledVocabularyException ex) {
+ vocabExceptions.add(ex);
+ }
+ if (f != null) {
+ f.setParentDatasetFieldCompoundValue(cv);
+ fields.add(f);
+ }
+ }
+ if (!fields.isEmpty()) {
+ cv.setChildDatasetFields(fields);
+ vals.add(cv);
+ }
+
+ }
+ if (!vocabExceptions.isEmpty()) {
+ throw new CompoundVocabularyException("Invalid controlled vocabulary in compound field ", vocabExceptions,
+ vals);
+ }
+
+ for (DatasetFieldCompoundValue dsfcv : vals) {
+ dsfcv.setParentDatasetField(dsf);
+ }
+ dsf.setDatasetFieldCompoundValues(vals);
+ }
+
+ public static void parsePrimitiveValue(DatasetField dsf, DatasetFieldType dft, JsonObject json)
+ throws JsonParseException {
+ List<DatasetFieldValue> vals = new LinkedList<>();
+ if (dft.isAllowMultiples()) {
+ try {
+ json.getJsonArray("value").getValuesAs(JsonObject.class);
+ } catch (ClassCastException cce) {
+ throw new JsonParseException(
+ "Invalid values submitted for " + dft.getName() + ". It should be an array of values.");
+ }
+ for (JsonString val : json.getJsonArray("value").getValuesAs(JsonString.class)) {
+ DatasetFieldValue datasetFieldValue = new DatasetFieldValue(dsf);
+ datasetFieldValue.setDisplayOrder(vals.size() - 1);
+ datasetFieldValue.setValue(val.getString().trim());
+ vals.add(datasetFieldValue);
+ }
+
+ } else {
+ try {
+ json.getString("value");
+ } catch (ClassCastException cce) {
+ throw new JsonParseException(
+ "Invalid value submitted for " + dft.getName() + ". It should be a single value.");
+ }
+ DatasetFieldValue datasetFieldValue = new DatasetFieldValue();
+ datasetFieldValue.setValue(json.getString("value", "").trim());
+ datasetFieldValue.setDatasetField(dsf);
+ vals.add(datasetFieldValue);
+ }
+
+ dsf.setDatasetFieldValues(vals);
+ }
+
+ public static void parseControlledVocabularyValue(DatasetField dsf, DatasetFieldType cvvType, JsonObject json)
+ throws JsonParseException {
+ List<ControlledVocabularyValue> vals = new LinkedList<>();
+ try {
+ if (cvvType.isAllowMultiples()) {
+ try {
+ json.getJsonArray("value").getValuesAs(JsonObject.class);
+ } catch (ClassCastException cce) {
+ throw new JsonParseException(
+ "Invalid values submitted for " + cvvType.getName() + ". It should be an array of values.");
+ }
+ for (JsonString strVal : json.getJsonArray("value").getValuesAs(JsonString.class)) {
+ String strValue = strVal.getString();
+ ControlledVocabularyValue cvv = new ControlledVocabularyValue();
+ cvv.setDatasetFieldType(cvvType);
+ cvv.setStrValue(strValue);
+ vals.add(cvv);
+ }
+
+ } else {
+ try {
+ json.getString("value");
+ } catch (ClassCastException cce) {
+ throw new JsonParseException(
+ "Invalid value submitted for " + cvvType.getName() + ". It should be a single value.");
+ }
+ String strValue = json.getString("value", "");
+ ControlledVocabularyValue cvv = new ControlledVocabularyValue();
+ cvv.setDatasetFieldType(cvvType);
+ cvv.setStrValue(strValue);
+ vals.add(cvv);
+ }
+ } catch (ClassCastException cce) {
+ throw new JsonParseException("Invalid values submitted for " + cvvType.getName());
+ }
+
+ dsf.setControlledVocabularyValues(vals);
}
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java
index da94b288bee..f06be37578d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java
@@ -277,7 +277,7 @@ public void testGetPrivateUrlFromRoleAssignmentSuccess() {
PrivateUrl privateUrl = PrivateUrlUtil.getPrivateUrlFromRoleAssignment(ra, dataverseSiteUrl);
assertNotNull(privateUrl);
assertEquals(new Long(42), privateUrl.getDataset().getId());
- assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=cd71e9d7-73a7-4ec8-b890-3d00499e8693", privateUrl.getLink());
+ assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=cd71e9d7-73a7-4ec8-b890-3d00499e8693", privateUrl.getLink());
}
@Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java
index 124ce19369c..8e24c546556 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java
@@ -53,6 +53,7 @@ public void setUp() {
indexService.dataverseService = Mockito.mock(DataverseServiceBean.class);
indexService.datasetFieldService = Mockito.mock(DatasetFieldServiceBean.class);
indexService.datasetVersionService = Mockito.mock(DatasetVersionServiceBean.class);
+ indexService.datasetVersionFilesServiceBean = Mockito.mock(DatasetVersionFilesServiceBean.class);
BrandingUtil.injectServices(indexService.dataverseService, indexService.settingsService);
Mockito.when(indexService.dataverseService.findRootDataverse()).thenReturn(dataverse);
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java
index 59e175f30c1..d1cb30e2bc3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java
@@ -4,18 +4,11 @@
package edu.harvard.iq.dataverse.util.json;
-import edu.harvard.iq.dataverse.ControlledVocabularyValue;
-import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DatasetField;
-import edu.harvard.iq.dataverse.DatasetFieldCompoundValue;
-import edu.harvard.iq.dataverse.DatasetFieldType;
+import edu.harvard.iq.dataverse.*;
import edu.harvard.iq.dataverse.DatasetFieldType.FieldType;
-import edu.harvard.iq.dataverse.DatasetFieldValue;
-import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.DataverseTheme.Alignment;
-import edu.harvard.iq.dataverse.FileMetadata;
import edu.harvard.iq.dataverse.UserNotification.Type;
+import edu.harvard.iq.dataverse.api.dto.DataverseDTO;
import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup;
import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroupProvider;
import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress;
@@ -50,16 +43,7 @@
import java.io.StringReader;
import java.math.BigDecimal;
import java.text.ParseException;
-import java.util.Arrays;
-import java.util.Calendar;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Set;
-import java.util.TimeZone;
+import java.util.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.*;
@@ -281,6 +265,33 @@ public void testParseCompleteDataverse() throws JsonParseException {
throw new JsonParseException("Couldn't read test file", ioe);
}
}
+
+ /**
+ * Test that a JSON object representing a Dataverse is correctly parsed into a DataverseDTO.
+ * This checks that all properties are mapped to the corresponding DataverseDTO properties.
+ * @throws JsonParseException when this test is broken.
+ */
+ @Test
+ public void parseDataverseDTO() throws JsonParseException {
+ JsonObject dvJson;
+ try (FileReader reader = new FileReader("doc/sphinx-guides/source/_static/api/dataverse-complete.json")) {
+ dvJson = Json.createReader(reader).readObject();
+ DataverseDTO actual = sut.parseDataverseDTO(dvJson);
+ List<DataverseContact> actualDataverseContacts = actual.getDataverseContacts();
+ assertEquals("Scientific Research", actual.getName());
+ assertEquals("science", actual.getAlias());
+ assertEquals("Scientific Research University", actual.getAffiliation());
+ assertEquals("We do all the science.", actual.getDescription());
+ assertEquals("LABORATORY", actual.getDataverseType().toString());
+ assertEquals(2, actualDataverseContacts.size());
+ assertEquals("pi@example.edu", actualDataverseContacts.get(0).getContactEmail());
+ assertEquals("student@example.edu", actualDataverseContacts.get(1).getContactEmail());
+ assertEquals(0, actualDataverseContacts.get(0).getDisplayOrder());
+ assertEquals(1, actualDataverseContacts.get(1).getDisplayOrder());
+ } catch (IOException ioe) {
+ throw new JsonParseException("Couldn't read test file", ioe);
+ }
+ }
@Test
public void testParseThemeDataverse() throws JsonParseException {
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
index 11da71e1980..7ec8e0b25f3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
@@ -133,7 +133,7 @@ public void testJson_PrivateUrl() {
assertNotNull(job);
JsonObject jsonObject = job.build();
assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("token"));
- assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("link"));
+ assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("link"));
assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getJsonObject("roleAssignment").getString("privateUrlToken"));
assertEquals(PrivateUrlUser.PREFIX + "42", jsonObject.getJsonObject("roleAssignment").getString("assignee"));
}
@@ -290,7 +290,7 @@ public void testDataversePrinter() {
assertEquals("42 Inc.", jsonObject.getString("affiliation"));
assertEquals(0, jsonObject.getJsonArray("dataverseContacts").getJsonObject(0).getInt("displayOrder"));
assertEquals("dv42@mailinator.com", jsonObject.getJsonArray("dataverseContacts").getJsonObject(0).getString("contactEmail"));
- assertEquals(false, jsonObject.getBoolean("permissionRoot"));
+ assertFalse(jsonObject.getBoolean("permissionRoot"));
assertEquals("Description for Dataverse 42.", jsonObject.getString("description"));
assertEquals("UNCATEGORIZED", jsonObject.getString("dataverseType"));
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java
index 3c5b4797b0a..c4ee4547ed7 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java
@@ -63,22 +63,22 @@ private File createBlankFile(String filename) throws IOException {
}
return Files.createFile(tempFolder.resolve(filename)).toFile();
}
-
+
private FileInputStream createZipReturnFilestream(List<String> file_names, String zipfile_name) throws IOException{
-
+
File zip_file_obj = this.createAndZipFiles(file_names, zipfile_name);
if (zip_file_obj == null){
return null;
}
-
+
FileInputStream file_input_stream = new FileInputStream(zip_file_obj);
return file_input_stream;
-
+
}
-
+
/*
- Convenience class to create .zip file and return a FileInputStream
+ Convenience method to create a .zip file and return a File
@param List file_names - List of filenames to add to .zip. These names will be used to create 0 length files
@param String zipfile_name - Name of .zip file to create
@@ -98,13 +98,13 @@ private File createAndZipFiles(List file_names, String zipfile_name) thr
}
Path zip_file_obj = this.tempFolder.resolve(zipfile_name);
- ZipOutputStream zip_stream = new ZipOutputStream(new FileOutputStream(zip_file_obj.toFile()));
+ try (ZipOutputStream zip_stream = new ZipOutputStream(new FileOutputStream(zip_file_obj.toFile()))) {
- // Iterate through File objects and add them to the ZipOutputStream
- for (File file_obj : fileCollection) {
- this.addToZipFile(file_obj.getName(), file_obj, zip_stream);
+ // Iterate through File objects and add them to the ZipOutputStream
+ for (File file_obj : fileCollection) {
+ this.addToZipFile(file_obj.getName(), file_obj, zip_stream);
+ }
}
-
/* -----------------------------------
Cleanup: Delete single files that were added to .zip
----------------------------------- */
@@ -126,7 +126,7 @@ public void testCreateZippedNonShapefile() throws IOException{
File zipfile_obj = createAndZipFiles(file_names, "not-quite-a-shape.zip");
// Pass the .zip to the ShapefileHandler
- ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(zipfile_obj));
+ ShapefileHandler shp_handler = new ShapefileHandler(zipfile_obj);
shp_handler.DEBUG= true;
// Contains shapefile?
@@ -157,7 +157,7 @@ public void testShapefileWithQpjAndQmd() throws IOException {
File zipFile = createAndZipFiles(fileNames, "testShapeWithNewExtensions.zip");
// Pass the zip to the ShapefileHandler
- ShapefileHandler shpHandler = new ShapefileHandler(new FileInputStream(zipFile));
+ ShapefileHandler shpHandler = new ShapefileHandler(zipFile);
shpHandler.DEBUG = true;
// Check if it is recognized as a shapefile
@@ -191,7 +191,7 @@ public void testZippedTwoShapefiles() throws IOException{
File zipfile_obj = createAndZipFiles(file_names, "two-shapes.zip");
// Pass the .zip to the ShapefileHandler
- ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(zipfile_obj));
+ ShapefileHandler shp_handler = new ShapefileHandler(zipfile_obj);
shp_handler.DEBUG= true;
assertTrue(shp_handler.containsShapefile(), "verify shapefile existance");
@@ -217,7 +217,7 @@ public void testZippedTwoShapefiles() throws IOException{
// Rezip/Reorder the files
File test_unzip_folder = Files.createDirectory(this.tempFolder.resolve("test_unzip")).toFile();
//File test_unzip_folder = new File("/Users/rmp553/Desktop/blah");
- shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), test_unzip_folder );
+ shp_handler.rezipShapefileSets(test_unzip_folder );
// Does the re-ordering do what we wanted?
@@ -244,7 +244,7 @@ public void testZippedShapefileWithExtraFiles() throws IOException{
File zipfile_obj = createAndZipFiles(file_names, "shape-plus.zip");
// Pass the .zip to the ShapefileHandler
- ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(zipfile_obj));
+ ShapefileHandler shp_handler = new ShapefileHandler(zipfile_obj);
shp_handler.DEBUG= true;
assertTrue(shp_handler.containsShapefile(), "verify shapefile existance");
@@ -264,7 +264,7 @@ public void testZippedShapefileWithExtraFiles() throws IOException{
File unzip2Folder = Files.createDirectory(this.tempFolder.resolve("test_unzip2")).toFile();
// Rezip/Reorder the files
- shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), unzip2Folder);
+ shp_handler.rezipShapefileSets(unzip2Folder);
//shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), new File("/Users/rmp553/Desktop/blah"));
@@ -284,9 +284,9 @@ public void testZippedShapefileWithExtraFiles() throws IOException{
}
@Test
- public void testHiddenFiles() {
+ public void testHiddenFiles() throws IOException {
// test with shapefiles in hidden directory
- ShapefileHandler shp_handler = new ShapefileHandler("src/test/resources/hiddenShapefiles.zip");
+ ShapefileHandler shp_handler = new ShapefileHandler(new File("src/test/resources/hiddenShapefiles.zip"));
shp_handler.DEBUG= true;
assertFalse(shp_handler.containsShapefile());
}
diff --git a/src/test/resources/json/export-formats.json b/src/test/resources/json/export-formats.json
new file mode 100644
index 00000000000..b4dc0168629
--- /dev/null
+++ b/src/test/resources/json/export-formats.json
@@ -0,0 +1,83 @@
+{
+ "status": "OK",
+ "data": {
+ "OAI_ORE": {
+ "displayName": "OAI_ORE",
+ "mediaType": "application/json",
+ "isHarvestable": false,
+ "isVisibleInUserInterface": true
+ },
+ "Datacite": {
+ "displayName": "DataCite",
+ "mediaType": "application/xml",
+ "isHarvestable": true,
+ "isVisibleInUserInterface": true,
+ "XMLNameSpace": "http://datacite.org/schema/kernel-4",
+ "XMLSchemaLocation": "http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4.5/metadata.xsd",
+ "XMLSchemaVersion": "4.5"
+ },
+ "oai_dc": {
+ "displayName": "Dublin Core",
+ "mediaType": "application/xml",
+ "isHarvestable": true,
+ "isVisibleInUserInterface": false,
+ "XMLNameSpace": "http://www.openarchives.org/OAI/2.0/oai_dc/",
+ "XMLSchemaLocation": "http://www.openarchives.org/OAI/2.0/oai_dc.xsd",
+ "XMLSchemaVersion": "2.0"
+ },
+ "oai_datacite": {
+ "displayName": "OpenAIRE",
+ "mediaType": "application/xml",
+ "isHarvestable": true,
+ "isVisibleInUserInterface": true,
+ "XMLNameSpace": "http://datacite.org/schema/kernel-4",
+ "XMLSchemaLocation": "http://schema.datacite.org/meta/kernel-4.1/metadata.xsd",
+ "XMLSchemaVersion": "4.1"
+ },
+ "schema.org": {
+ "displayName": "Schema.org JSON-LD",
+ "mediaType": "application/json",
+ "isHarvestable": false,
+ "isVisibleInUserInterface": true
+ },
+ "ddi": {
+ "displayName": "DDI Codebook v2",
+ "mediaType": "application/xml",
+ "isHarvestable": false,
+ "isVisibleInUserInterface": true,
+ "XMLNameSpace": "ddi:codebook:2_5",
+ "XMLSchemaLocation": "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd",
+ "XMLSchemaVersion": "2.5"
+ },
+ "dcterms": {
+ "displayName": "Dublin Core",
+ "mediaType": "application/xml",
+ "isHarvestable": false,
+ "isVisibleInUserInterface": true,
+ "XMLNameSpace": "http://purl.org/dc/terms/",
+ "XMLSchemaLocation": "http://dublincore.org/schemas/xmls/qdc/dcterms.xsd",
+ "XMLSchemaVersion": "2.0"
+ },
+ "html": {
+ "displayName": "DDI HTML Codebook",
+ "mediaType": "text/html",
+ "isHarvestable": false,
+ "isVisibleInUserInterface": true
+ },
+ "dataverse_json": {
+ "displayName": "JSON",
+ "mediaType": "application/json",
+ "isHarvestable": true,
+ "isVisibleInUserInterface": true
+ },
+ "oai_ddi": {
+ "displayName": "DDI Codebook v2",
+ "mediaType": "application/xml",
+ "isHarvestable": true,
+ "isVisibleInUserInterface": false,
+ "XMLNameSpace": "ddi:codebook:2_5",
+ "XMLSchemaLocation": "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd",
+ "XMLSchemaVersion": "2.5"
+ }
+ }
+}
diff --git a/src/test/resources/json/importGenericWithOtherId.json b/src/test/resources/json/importGenericWithOtherId.json
new file mode 100644
index 00000000000..af9241393e9
--- /dev/null
+++ b/src/test/resources/json/importGenericWithOtherId.json
@@ -0,0 +1,307 @@
+{
+ "UNF": "UNF",
+ "createTime": "2014-11-12 12:17:55 -05",
+ "distributionDate": "Distribution Date",
+ "id": 2,
+ "lastUpdateTime": "2014-11-12 12:20:32 -05",
+ "metadataBlocks": {
+ "astrophysics": {
+ "displayName": "Astronomy and Astrophysics Metadata",
+ "fields": [
+ {
+ "multiple": true,
+ "typeClass": "controlledVocabulary",
+ "typeName": "astroType",
+ "value": [
+ "Image",
+ "Mosaic",
+ "EventList"
+ ]
+ }
+ ]
+ },
+ "citation": {
+ "displayName": "Citation Metadata",
+ "fields": [
+ {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "title",
+ "value": "My Dataset"
+ },
+ {
+ "multiple": true,
+ "typeClass": "compound",
+ "typeName": "author",
+ "value": [
+ {
+ "authorAffiliation": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "authorAffiliation",
+ "value": "Top"
+ },
+ "authorIdentifier": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "authorIdentifier",
+ "value": "ellenid"
+ },
+ "authorIdentifierScheme": {
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "typeName": "authorIdentifierScheme",
+ "value": "ORCID"
+ },
+ "authorName": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "authorName",
+ "value": "Privileged, Pete"
+ }
+ },
+ {
+ "authorAffiliation": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "authorAffiliation",
+ "value": "Bottom"
+ },
+ "authorIdentifier": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "authorIdentifier",
+ "value": "audreyId"
+ },
+ "authorIdentifierScheme": {
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "typeName": "authorIdentifierScheme",
+ "value": "DAISY"
+ },
+ "authorName": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "authorName",
+ "value": "Awesome, Audrey"
+ }
+ }
+ ]
+ },
+ {
+ "multiple": true,
+ "typeClass": "primitive",
+ "typeName": "datasetContact",
+ "value": [
+ "pete@malinator.com"
+ ]
+ },
+ {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "dsDescription",
+ "value": "Here is my description"
+ },
+ {
+ "multiple": true,
+ "typeClass": "controlledVocabulary",
+ "typeName": "subject",
+ "value": [
+ "Arts and Humanities",
+ "Astronomy and Astrophysics",
+ "Business and Management"
+ ]
+ },
+ {
+ "multiple": true,
+ "typeClass": "primitive",
+ "typeName": "keyword",
+ "value": [
+ "keyword1",
+ "keyword2"
+ ]
+ },
+ {
+ "multiple": true,
+ "typeClass": "compound",
+ "typeName": "otherId",
+ "value": [
+ {
+ "otherIdAgency": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "otherIdAgency",
+ "value": "my agency"
+ },
+ "otherIdValue": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "otherIdValue",
+ "value": "otherId"
+ }
+ },
+ {
+ "otherIdAgency": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "otherIdAgency",
+ "value": "another agency"
+ },
+ "otherIdValue": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "otherIdValue",
+ "value": "otherId2"
+ }
+ },
+ {
+ "otherIdAgency": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "otherIdAgency",
+ "value": "another agency"
+ },
+ "otherIdValue": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "otherIdValue",
+ "value": "doi:10.7910/DVN/TJCLKP"
+ }
+ }
+ ]
+ },
+ {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "depositor",
+ "value": "Ellen K"
+ },
+ {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "dateOfDeposit",
+ "value": "2014-11-12"
+ }
+ ]
+ },
+ "geospatial": {
+ "displayName": "Geospatial Metadata",
+ "fields": [
+ {
+ "multiple": true,
+ "typeClass": "compound",
+ "typeName": "geographicCoverage",
+ "value": [
+ {
+ "city": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "city",
+ "value": "Arlington"
+ },
+ "country": {
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "typeName": "country",
+ "value": "United States"
+ },
+ "state": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "state",
+ "value": "MA"
+ }
+ },
+ {
+ "city": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "city",
+ "value": "beachcity"
+ },
+ "country": {
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "typeName": "country",
+ "value": "Aruba"
+ },
+ "state": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "state",
+ "value": "beach"
+ }
+ }
+ ]
+ },
+ {
+ "multiple": false,
+ "typeClass": "compound",
+ "typeName": "geographicBoundingBox",
+ "value":
+ {
+ "eastLongitude": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "eastLongitude",
+ "value": "23"
+ },
+ "northLatitude": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "northLatitude",
+ "value": "786"
+ },
+ "southLatitude": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "southLatitude",
+ "value": "34"
+ },
+ "westLongitude": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "westLongitude",
+ "value": "45"
+ }
+ }
+
+ }
+ ]
+ },
+ "socialscience": {
+ "displayName": "Social Science and Humanities Metadata",
+ "fields": [
+ {
+ "multiple": true,
+ "typeClass": "compound",
+ "typeName": "software",
+ "value": [
+ {
+ "softwareName": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "softwareName",
+ "value": "softwareName"
+ },
+ "softwareVersion": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "softwareVersion",
+ "value": "software version"
+ }
+ }
+ ]
+ },
+ {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "unitOfAnalysis",
+ "value": "unit of analysis"
+ }
+ ]
+ }
+ },
+ "productionDate": "Production Date",
+ "versionState": "DRAFT"
+ }
diff --git a/src/test/resources/json/importGenericWithoutOtherId.json b/src/test/resources/json/importGenericWithoutOtherId.json
new file mode 100644
index 00000000000..ceb2263c2cf
--- /dev/null
+++ b/src/test/resources/json/importGenericWithoutOtherId.json
@@ -0,0 +1,258 @@
+{
+ "UNF": "UNF",
+ "createTime": "2014-11-12 12:17:55 -05",
+ "distributionDate": "Distribution Date",
+ "id": 2,
+ "lastUpdateTime": "2014-11-12 12:20:32 -05",
+ "metadataBlocks": {
+ "astrophysics": {
+ "displayName": "Astronomy and Astrophysics Metadata",
+ "fields": [
+ {
+ "multiple": true,
+ "typeClass": "controlledVocabulary",
+ "typeName": "astroType",
+ "value": [
+ "Image",
+ "Mosaic",
+ "EventList"
+ ]
+ }
+ ]
+ },
+ "citation": {
+ "displayName": "Citation Metadata",
+ "fields": [
+ {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "title",
+ "value": "My Dataset"
+ },
+ {
+ "multiple": true,
+ "typeClass": "compound",
+ "typeName": "author",
+ "value": [
+ {
+ "authorAffiliation": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "authorAffiliation",
+ "value": "Top"
+ },
+ "authorIdentifier": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "authorIdentifier",
+ "value": "ellenid"
+ },
+ "authorIdentifierScheme": {
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "typeName": "authorIdentifierScheme",
+ "value": "ORCID"
+ },
+ "authorName": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "authorName",
+ "value": "Privileged, Pete"
+ }
+ },
+ {
+ "authorAffiliation": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "authorAffiliation",
+ "value": "Bottom"
+ },
+ "authorIdentifier": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "authorIdentifier",
+ "value": "audreyId"
+ },
+ "authorIdentifierScheme": {
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "typeName": "authorIdentifierScheme",
+ "value": "DAISY"
+ },
+ "authorName": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "authorName",
+ "value": "Awesome, Audrey"
+ }
+ }
+ ]
+ },
+ {
+ "multiple": true,
+ "typeClass": "primitive",
+ "typeName": "datasetContact",
+ "value": [
+ "pete@malinator.com"
+ ]
+ },
+ {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "dsDescription",
+ "value": "Here is my description"
+ },
+ {
+ "multiple": true,
+ "typeClass": "controlledVocabulary",
+ "typeName": "subject",
+ "value": [
+ "Arts and Humanities",
+ "Astronomy and Astrophysics",
+ "Business and Management"
+ ]
+ },
+ {
+ "multiple": true,
+ "typeClass": "primitive",
+ "typeName": "keyword",
+ "value": [
+ "keyword1",
+ "keyword2"
+ ]
+ },
+ {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "depositor",
+ "value": "Ellen K"
+ },
+ {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "dateOfDeposit",
+ "value": "2014-11-12"
+ }
+ ]
+ },
+ "geospatial": {
+ "displayName": "Geospatial Metadata",
+ "fields": [
+ {
+ "multiple": true,
+ "typeClass": "compound",
+ "typeName": "geographicCoverage",
+ "value": [
+ {
+ "city": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "city",
+ "value": "Arlington"
+ },
+ "country": {
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "typeName": "country",
+ "value": "United States"
+ },
+ "state": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "state",
+ "value": "MA"
+ }
+ },
+ {
+ "city": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "city",
+ "value": "beachcity"
+ },
+ "country": {
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "typeName": "country",
+ "value": "Aruba"
+ },
+ "state": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "state",
+ "value": "beach"
+ }
+ }
+ ]
+ },
+ {
+ "multiple": false,
+ "typeClass": "compound",
+ "typeName": "geographicBoundingBox",
+ "value":
+ {
+ "eastLongitude": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "eastLongitude",
+ "value": "23"
+ },
+ "northLatitude": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "northLatitude",
+ "value": "786"
+ },
+ "southLatitude": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "southLatitude",
+ "value": "34"
+ },
+ "westLongitude": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "westLongitude",
+ "value": "45"
+ }
+ }
+
+ }
+ ]
+ },
+ "socialscience": {
+ "displayName": "Social Science and Humanities Metadata",
+ "fields": [
+ {
+ "multiple": true,
+ "typeClass": "compound",
+ "typeName": "software",
+ "value": [
+ {
+ "softwareName": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "softwareName",
+ "value": "softwareName"
+ },
+ "softwareVersion": {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "softwareVersion",
+ "value": "software version"
+ }
+ }
+ ]
+ },
+ {
+ "multiple": false,
+ "typeClass": "primitive",
+ "typeName": "unitOfAnalysis",
+ "value": "unit of analysis"
+ }
+ ]
+ }
+ },
+ "productionDate": "Production Date",
+ "versionState": "DRAFT"
+ }
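Reviewer note: this fixture appears to mirror the WithOtherId fixture above minus the otherId compound, so tests can compare import behavior with and without that field. For orientation, a minimal sketch of how a test might load and walk such a fixture with jakarta.json — the class name and the printing are illustrative, not part of this patch:

```java
import jakarta.json.Json;
import jakarta.json.JsonObject;
import jakarta.json.JsonReader;
import java.io.FileReader;

// Minimal sketch: load the fixture and list the citation block's field names.
public class ImportFixturePeek {
    public static void main(String[] args) throws Exception {
        try (JsonReader reader = Json.createReader(
                new FileReader("src/test/resources/json/importGenericWithoutOtherId.json"))) {
            JsonObject dataset = reader.readObject();
            JsonObject citation = dataset.getJsonObject("metadataBlocks")
                                         .getJsonObject("citation");
            // Each field carries typeName/typeClass/multiple/value, as in the hunk above.
            citation.getJsonArray("fields").forEach(field ->
                    System.out.println(field.asJsonObject().getString("typeName")));
        }
    }
}
```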
diff --git a/src/test/resources/own-cloud-downloads/greetings.zip b/src/test/resources/own-cloud-downloads/greetings.zip
new file mode 100644
index 00000000000..6e166d385d1
Binary files /dev/null and b/src/test/resources/own-cloud-downloads/greetings.zip differ
diff --git a/src/test/resources/own-cloud-downloads/shapes.zip b/src/test/resources/own-cloud-downloads/shapes.zip
new file mode 100644
index 00000000000..99d5f36c895
Binary files /dev/null and b/src/test/resources/own-cloud-downloads/shapes.zip differ
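The two archives are binary, so the patch can only record that they differ; for reviewers who want to eyeball their contents, a throwaway sketch with java.util.zip — only the file path comes from this patch, the rest is illustrative:

```java
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

// Throwaway sketch: print each entry name in one of the new zip fixtures.
public class ZipFixturePeek {
    public static void main(String[] args) throws Exception {
        try (ZipFile zip = new ZipFile("src/test/resources/own-cloud-downloads/shapes.zip")) {
            zip.stream()
               .map(ZipEntry::getName)
               .forEach(System.out::println);
        }
    }
}
```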
diff --git a/src/test/resources/tsv/whitespace-test.tsv b/src/test/resources/tsv/whitespace-test.tsv
new file mode 100644
index 00000000000..5485c948825
--- /dev/null
+++ b/src/test/resources/tsv/whitespace-test.tsv
@@ -0,0 +1,10 @@
+#metadataBlock name dataverseAlias displayName
+ whitespaceDemo Whitespace Demo
+#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id
+ whitespaceDemoOne One Trailing Space text 0 TRUE TRUE TRUE FALSE TRUE FALSE whitespaceDemo
+ whitespaceDemoTwo Two Leading Space text 1 TRUE TRUE TRUE FALSE TRUE FALSE whitespaceDemo
+ whitespaceDemoThree Three CV with errors text 2 TRUE TRUE TRUE FALSE TRUE FALSE whitespaceDemo
+#controlledVocabulary DatasetField Value identifier displayOrder
+ whitespaceDemoThree CV1 0
+ whitespaceDemoThree CV2 1
+ whitespaceDemoThree CV3 2
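The rows above deliberately carry stray leading and trailing whitespace in field names, titles, and controlled-vocabulary values (the "One Trailing Space" / "Two Leading Space" descriptions label the cases). The fixture presumably backs a test that the TSV parser trims such whitespace; a minimal sketch of that behavior, under the assumption that per-cell trimming is what is under test:

```java
import java.nio.file.Files;
import java.nio.file.Path;

// Minimal sketch: read the fixture and show each cell before and after trimming.
public class WhitespaceTsvDemo {
    public static void main(String[] args) throws Exception {
        for (String line : Files.readAllLines(
                Path.of("src/test/resources/tsv/whitespace-test.tsv"))) {
            if (line.startsWith("#")) continue; // section headers name the columns
            for (String cell : line.split("\t", -1)) {
                System.out.printf("raw=[%s] trimmed=[%s]%n", cell, cell.trim());
            }
        }
    }
}
```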