diff --git a/.env b/.env index d5cffcec0aa..9d604630073 100644 --- a/.env +++ b/.env @@ -1,5 +1,5 @@ APP_IMAGE=gdcc/dataverse:unstable -POSTGRES_VERSION=16 +POSTGRES_VERSION=17 DATAVERSE_DB_USER=dataverse SOLR_VERSION=9.3.0 -SKIP_DEPLOY=0 \ No newline at end of file +SKIP_DEPLOY=0 diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index b57aa23fc0f..f2a779bbf21 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -2,7 +2,7 @@ **Which issue(s) this PR closes**: -Closes # +- Closes # **Special notes for your reviewer**: diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index 028f0140cc9..eca8416732a 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -5,14 +5,18 @@ on: branches: - develop +concurrency: + group: deploy-beta-testing + cancel-in-progress: false + jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - uses: actions/setup-java@v3 + - uses: actions/setup-java@v4 with: distribution: 'zulu' java-version: '17' @@ -32,7 +36,7 @@ jobs: run: echo "war_file=$(ls *.war | head -1)">> $GITHUB_ENV - name: Upload war artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: built-app path: ./target/${{ env.war_file }} @@ -42,10 +46,10 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Download war artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: built-app path: ./ diff --git a/.github/workflows/guides_build_sphinx.yml b/.github/workflows/guides_build_sphinx.yml index 992f30f2872..86b59b11d35 100644 --- a/.github/workflows/guides_build_sphinx.yml +++ b/.github/workflows/guides_build_sphinx.yml @@ -11,6 +11,6 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - uses: OdumInstitute/sphinx-action@master + - uses: uncch-rdmc/sphinx-action@master with: docs-folder: "doc/sphinx-guides/" diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index a94b17a67ba..45180ea7aec 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -32,9 +32,9 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: ${{ matrix.jdk }} distribution: temurin @@ -57,7 +57,7 @@ jobs: # Upload the built war file. For download, it will be wrapped in a ZIP by GitHub. 
# See also https://github.com/actions/upload-artifact#zipped-artifact-downloads - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: dataverse-java${{ matrix.jdk }}.war path: target/dataverse*.war @@ -67,7 +67,7 @@ jobs: - run: | tar -cvf java-builddir.tar target tar -cvf java-m2-selection.tar ~/.m2/repository/io/gdcc/dataverse-* - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: java-artifacts path: | @@ -98,16 +98,16 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: ${{ matrix.jdk }} distribution: temurin cache: maven # Get the build output from the unit test job - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: java-artifacts - run: | @@ -119,7 +119,7 @@ jobs: # Wrap up and send to coverage job - run: tar -cvf java-reportdir.tar target/site - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: java-reportdir path: java-reportdir.tar @@ -132,15 +132,15 @@ jobs: steps: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - - uses: actions/checkout@v3 - - uses: actions/setup-java@v3 + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 with: java-version: '17' distribution: temurin cache: maven # Get the build output from the integration test job - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: java-reportdir - run: tar -xvf java-reportdir.tar diff --git a/conf/solr/schema.xml b/conf/solr/schema.xml index 2aed50e9998..d5c789c7189 100644 --- a/conf/solr/schema.xml +++ b/conf/solr/schema.xml @@ -234,6 +234,7 @@ + - 6.4 + 6.5 17 UTF-8 @@ -149,7 +149,7 @@ 6.2024.6 - 42.7.2 + 42.7.4 9.4.1 1.12.748 26.30.0 diff --git a/pom.xml b/pom.xml index edf72067976..5ecbd7059c1 100644 --- a/pom.xml +++ b/pom.xml @@ -27,10 +27,10 @@ war 1.2.18.4 - 9.22.1 + 10.19.0 1.20.1 5.2.1 - 2.4.1 + 2.9.1 5.5.3 Dataverse API @@ -68,6 +68,17 @@ + + + org.apache.james + apache-mime4j-core + 0.8.10 + + + org.apache.james + apache-mime4j-dom + 0.8.7 + org.eclipse.persistence @@ -553,7 +569,7 @@ org.xmlunit xmlunit-core - 2.9.1 + 2.10.0 com.google.cloud @@ -615,7 +631,7 @@ org.xmlunit xmlunit-assertj3 - 2.8.2 + 2.10.0 test @@ -993,7 +1009,7 @@ true docker-build - 16 + 17 gdcc/dataverse:${app.image.tag} unstable @@ -1127,4 +1143,4 @@ - \ No newline at end of file + diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java index 2cb6f27c3e4..d880da5b4a8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java +++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java @@ -2,7 +2,7 @@ import java.io.Serializable; import java.util.List; import jakarta.persistence.*; -import org.hibernate.validator.constraints.NotBlank; +import jakarta.validation.constraints.NotBlank; /** * @@ -41,7 +41,7 @@ public void setId(Long id) { private String questionType; @NotBlank(message = "{custom.questiontext}") - @Column( nullable = false ) + @Column( nullable = false, columnDefinition = "TEXT") private String questionString; private boolean required; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java index 3977023fc4b..02fb59751fb 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java @@ -792,6 +792,7 @@ private GlobalId getPIDFrom(DatasetVersion dsv, DvObject dv) { if (!dsv.getDataset().isHarvested() || HarvestingClient.HARVEST_STYLE_VDC.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle()) || HarvestingClient.HARVEST_STYLE_ICPSR.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle()) + || HarvestingClient.HARVEST_STYLE_DEFAULT.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle()) || HarvestingClient.HARVEST_STYLE_DATAVERSE .equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle())) { if(!isDirect()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 98ac8ff387f..937f5693511 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1407,8 +1407,7 @@ public UploadSessionQuotaLimit getUploadSessionQuotaLimit(DvObjectContainer pare } public boolean isInReleasedVersion(Long id) { - Query query = em.createQuery("SELECT fm.id FROM FileMetadata fm, DvObject dvo WHERE fm.datasetVersion.id=(SELECT dv.id FROM DatasetVersion dv WHERE dv.dataset.id=dvo.owner.id and dv.versionState=edu.harvard.iq.dataverse.DatasetVersion.VersionState.RELEASED ORDER BY dv.versionNumber DESC, dv.minorVersionNumber DESC LIMIT 1) AND dvo.id=fm.dataFile.id AND fm.dataFile.id=:fid"); - query.setParameter("fid", id); + Query query = em.createNativeQuery("SELECT fm.id FROM filemetadata fm WHERE fm.datasetversion_id=(SELECT dv.id FROM datasetversion dv, dvobject dvo WHERE dv.dataset_id=dvo.owner_id AND dv.versionState='RELEASED' and dvo.id=" + id + " ORDER BY dv.versionNumber DESC, dv.minorVersionNumber DESC LIMIT 1) AND fm.datafile_id=" + id); try { query.getSingleResult(); diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 52cb7d6f2dc..78579b1de21 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -333,15 +333,20 @@ public DatasetVersion getLatestVersion() { return getVersions().get(0); } - public DatasetVersion getLatestVersionForCopy() { + public DatasetVersion getLatestVersionForCopy(boolean includeDeaccessioned) { for (DatasetVersion testDsv : getVersions()) { - if (testDsv.isReleased() || testDsv.isArchived()) { + if (testDsv.isReleased() || testDsv.isArchived() + || (testDsv.isDeaccessioned() && includeDeaccessioned)) { return testDsv; } } return getVersions().get(0); } + public DatasetVersion getLatestVersionForCopy(){ + return getLatestVersionForCopy(false); + } + public List getVersions() { return versions; } @@ -478,8 +483,17 @@ public Date getMostRecentMajorVersionReleaseDate() { if (this.isHarvested()) { return getVersions().get(0).getReleaseTime(); } else { + Long majorVersion = null; for (DatasetVersion version : this.getVersions()) { - if (version.isReleased() && version.getMinorVersionNumber().equals((long) 0)) { + if (version.isReleased()) { + if (version.getMinorVersionNumber().equals((long) 0)) { + return version.getReleaseTime(); + } else if (majorVersion == null) { + majorVersion = version.getVersionNumber(); + } + } else if (version.isDeaccessioned() && majorVersion != null + && majorVersion.longValue() == version.getVersionNumber().longValue() + && version.getMinorVersionNumber().equals((long) 0)) { 
return version.getReleaseTime(); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java index abb812d1ba3..71e339a6fca 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java @@ -16,165 +16,164 @@ @Named("dfc") @Dependent public class DatasetFieldConstant implements java.io.Serializable { - - public final static String publication = "publication"; - public final static String otherId = "otherId"; - public final static String author = "author"; - public final static String authorFirstName = "authorFirstName"; - public final static String authorLastName = "authorLastName"; - public final static String producer = "producer"; - public final static String software = "software"; - public final static String grantNumber = "grantNumber"; - public final static String distributor = "distributor"; - public final static String datasetContact = "datasetContact"; - public final static String datasetContactEmail = "datasetContactEmail"; - public final static String datasetContactName = "datasetContactName"; - public final static String datasetContactAffiliation = "datasetContactAffiliation"; - public final static String series = "series"; - public final static String datasetVersion = "datasetVersion"; - - public final static String description = "dsDescription"; - public final static String keyword = "keyword"; - public final static String topicClassification = "topicClassification"; - public final static String geographicBoundingBox = "geographicBoundingBox"; - - public final static String note = "note"; - - public final static String publicationRelationType = "publicationRelationType"; - public final static String publicationCitation = "publicationCitation"; - public final static String publicationIDType = "publicationIDType"; - public final static String publicationIDNumber = "publicationIDNumber"; - public final static String publicationURL = "publicationURL"; - public final static String publicationReplicationData = "publicationReplicationData"; - - - public final static String title = "title"; - public final static String subTitle="subtitle"; //SEK 6-7-2016 to match what is in DB - public final static String alternativeTitle="alternativeTitle"; //missing from class - public final static String datasetId = "datasetId"; - public final static String authorName ="authorName"; - public final static String authorAffiliation = "authorAffiliation"; - public final static String authorIdType = "authorIdentifierScheme"; - public final static String authorIdValue = "authorIdentifier"; - public final static String otherIdValue="otherIdValue"; - public final static String otherIdAgency= "otherIdAgency"; - - public final static String producerName="producerName"; - public final static String producerURL="producerURL"; - public final static String producerLogo="producerLogoURL"; - public final static String producerAffiliation="producerAffiliation"; - public final static String producerAbbreviation= "producerAbbreviation"; - public final static String productionDate="productionDate"; - public final static String productionPlace="productionPlace"; - public final static String softwareName="softwareName"; - public final static String softwareVersion="softwareVersion"; - public final static String fundingAgency="fundingAgency"; - public final static String grantNumberValue="grantNumberValue"; - public final static String 
grantNumberAgency="grantNumberAgency"; - public final static String distributorName="distributorName"; - public final static String distributorURL="distributorURL"; - public final static String distributorLogo="distributorLogoURL"; - public final static String distributionDate="distributionDate"; - public final static String distributorContactName="distributorContactName"; - public final static String distributorContactAffiliation="distributorContactAffiliation"; - public final static String distributorContactEmail="distributorContactEmail"; - public final static String distributorAffiliation="distributorAffiliation"; - public final static String distributorAbbreviation="distributorAbbreviation"; - - public final static String contributor="contributor"; //SEK added for Dublin Core 6/22 - public final static String contributorType="contributorType"; - public final static String contributorName="contributorName"; - - public final static String depositor="depositor"; - public final static String dateOfDeposit="dateOfDeposit"; - public final static String seriesName="seriesName"; - public final static String seriesInformation="seriesInformation"; - public final static String datasetVersionValue="datasetVersionValue"; - public final static String versionDate="versionDate"; - public final static String keywordValue="keywordValue"; - public final static String keywordTermURI="keywordTermURI"; - public final static String keywordVocab="keywordVocabulary"; - public final static String keywordVocabURI="keywordVocabularyURI"; - public final static String topicClassValue="topicClassValue"; - public final static String topicClassVocab="topicClassVocab"; - public final static String topicClassVocabURI="topicClassVocabURI"; - public final static String descriptionText="dsDescriptionValue"; - public final static String descriptionDate="dsDescriptionDate"; - public final static String timePeriodCovered="timePeriodCovered"; // SEK added 6/13/2016 - public final static String timePeriodCoveredStart="timePeriodCoveredStart"; - public final static String timePeriodCoveredEnd="timePeriodCoveredEnd"; - public final static String dateOfCollection="dateOfCollection"; // SEK added 6/13/2016 - public final static String dateOfCollectionStart="dateOfCollectionStart"; - public final static String dateOfCollectionEnd="dateOfCollectionEnd"; - public final static String country="country"; - public final static String geographicCoverage="geographicCoverage"; - public final static String otherGeographicCoverage="otherGeographicCoverage"; - public final static String city="city"; // SEK added 6/13/2016 - public final static String state="state"; // SEK added 6/13/2016 - public final static String geographicUnit="geographicUnit"; - public final static String westLongitude="westLongitude"; - public final static String eastLongitude="eastLongitude"; - public final static String northLatitude="northLatitude"; - public final static String southLatitude="southLatitude"; - public final static String unitOfAnalysis="unitOfAnalysis"; - public final static String universe="universe"; - public final static String kindOfData="kindOfData"; - public final static String timeMethod="timeMethod"; - public final static String dataCollector="dataCollector"; - public final static String collectorTraining="collectorTraining"; - public final static String frequencyOfDataCollection="frequencyOfDataCollection"; - public final static String samplingProcedure="samplingProcedure"; - public final static String targetSampleSize = "targetSampleSize"; - 
public final static String targetSampleActualSize = "targetSampleActualSize"; - public final static String targetSampleSizeFormula = "targetSampleSizeFormula"; - public final static String deviationsFromSampleDesign="deviationsFromSampleDesign"; - public final static String collectionMode="collectionMode"; - public final static String researchInstrument="researchInstrument"; - public final static String dataSources="dataSources"; - public final static String originOfSources="originOfSources"; - public final static String characteristicOfSources="characteristicOfSources"; - public final static String accessToSources="accessToSources"; - public final static String dataCollectionSituation="dataCollectionSituation"; - public final static String actionsToMinimizeLoss="actionsToMinimizeLoss"; - public final static String controlOperations="controlOperations"; - public final static String weighting="weighting"; - public final static String cleaningOperations="cleaningOperations"; - public final static String datasetLevelErrorNotes="datasetLevelErrorNotes"; - public final static String responseRate="responseRate"; - public final static String samplingErrorEstimates="samplingErrorEstimates"; - - public final static String socialScienceNotes = "socialScienceNotes"; - public final static String socialScienceNotesType = "socialScienceNotesType"; - public final static String socialScienceNotesSubject = "socialScienceNotesSubject"; - public final static String socialScienceNotesText = "socialScienceNotesText"; - - public final static String otherDataAppraisal="otherDataAppraisal"; - public final static String placeOfAccess="placeOfAccess"; - public final static String originalArchive="originalArchive"; - public final static String availabilityStatus="availabilityStatus"; - public final static String collectionSize="collectionSize"; - public final static String datasetCompletion="datasetCompletion"; - public final static String numberOfFiles="numberOfFiles"; - public final static String confidentialityDeclaration="confidentialityDeclaration"; - public final static String specialPermissions="specialPermissions"; - public final static String restrictions="restrictions"; + + public static final String publication = "publication"; + public static final String otherId = "otherId"; + public static final String author = "author"; + public static final String authorFirstName = "authorFirstName"; + public static final String authorLastName = "authorLastName"; + public static final String producer = "producer"; + public static final String software = "software"; + public static final String grantNumber = "grantNumber"; + public static final String distributor = "distributor"; + public static final String datasetContact = "datasetContact"; + public static final String datasetContactEmail = "datasetContactEmail"; + public static final String datasetContactName = "datasetContactName"; + public static final String datasetContactAffiliation = "datasetContactAffiliation"; + public static final String series = "series"; + public static final String datasetVersion = "datasetVersion"; + + public static final String description = "dsDescription"; + public static final String keyword = "keyword"; + public static final String topicClassification = "topicClassification"; + public static final String geographicBoundingBox = "geographicBoundingBox"; + + public static final String note = "note"; + + public static final String publicationRelationType = "publicationRelationType"; + public static final String publicationCitation = 
"publicationCitation"; + public static final String publicationIDType = "publicationIDType"; + public static final String publicationIDNumber = "publicationIDNumber"; + public static final String publicationURL = "publicationURL"; + public static final String publicationReplicationData = "publicationReplicationData"; + + public static final String title = "title"; + public static final String subTitle="subtitle"; //SEK 6-7-2016 to match what is in DB + public static final String alternativeTitle="alternativeTitle"; //missing from class + public static final String datasetId = "datasetId"; + public static final String authorName ="authorName"; + public static final String authorAffiliation = "authorAffiliation"; + public static final String authorIdType = "authorIdentifierScheme"; + public static final String authorIdValue = "authorIdentifier"; + public static final String otherIdValue="otherIdValue"; + public static final String otherIdAgency= "otherIdAgency"; + + public static final String producerName="producerName"; + public static final String producerURL="producerURL"; + public static final String producerLogo="producerLogoURL"; + public static final String producerAffiliation="producerAffiliation"; + public static final String producerAbbreviation= "producerAbbreviation"; + public static final String productionDate="productionDate"; + public static final String productionPlace="productionPlace"; + public static final String softwareName="softwareName"; + public static final String softwareVersion="softwareVersion"; + public static final String fundingAgency="fundingAgency"; + public static final String grantNumberValue="grantNumberValue"; + public static final String grantNumberAgency="grantNumberAgency"; + public static final String distributorName="distributorName"; + public static final String distributorURL="distributorURL"; + public static final String distributorLogo="distributorLogoURL"; + public static final String distributionDate="distributionDate"; + public static final String distributorContactName="distributorContactName"; + public static final String distributorContactAffiliation="distributorContactAffiliation"; + public static final String distributorContactEmail="distributorContactEmail"; + public static final String distributorAffiliation="distributorAffiliation"; + public static final String distributorAbbreviation="distributorAbbreviation"; + + public static final String contributor="contributor"; //SEK added for Dublin Core 6/22 + public static final String contributorType="contributorType"; + public static final String contributorName="contributorName"; + + public static final String depositor="depositor"; + public static final String dateOfDeposit="dateOfDeposit"; + public static final String seriesName="seriesName"; + public static final String seriesInformation="seriesInformation"; + public static final String datasetVersionValue="datasetVersionValue"; + public static final String versionDate="versionDate"; + public static final String keywordValue="keywordValue"; + public static final String keywordTermURI="keywordTermURI"; + public static final String keywordVocab="keywordVocabulary"; + public static final String keywordVocabURI="keywordVocabularyURI"; + public static final String topicClassValue="topicClassValue"; + public static final String topicClassVocab="topicClassVocab"; + public static final String topicClassVocabURI="topicClassVocabURI"; + public static final String descriptionText="dsDescriptionValue"; + public static final String 
descriptionDate="dsDescriptionDate"; + public static final String timePeriodCovered="timePeriodCovered"; // SEK added 6/13/2016 + public static final String timePeriodCoveredStart="timePeriodCoveredStart"; + public static final String timePeriodCoveredEnd="timePeriodCoveredEnd"; + public static final String dateOfCollection="dateOfCollection"; // SEK added 6/13/2016 + public static final String dateOfCollectionStart="dateOfCollectionStart"; + public static final String dateOfCollectionEnd="dateOfCollectionEnd"; + public static final String country="country"; + public static final String geographicCoverage="geographicCoverage"; + public static final String otherGeographicCoverage="otherGeographicCoverage"; + public static final String city="city"; // SEK added 6/13/2016 + public static final String state="state"; // SEK added 6/13/2016 + public static final String geographicUnit="geographicUnit"; + public static final String westLongitude="westLongitude"; + public static final String eastLongitude="eastLongitude"; + public static final String northLatitude="northLatitude"; + public static final String southLatitude="southLatitude"; + public static final String unitOfAnalysis="unitOfAnalysis"; + public static final String universe="universe"; + public static final String kindOfData="kindOfData"; + public static final String timeMethod="timeMethod"; + public static final String dataCollector="dataCollector"; + public static final String collectorTraining="collectorTraining"; + public static final String frequencyOfDataCollection="frequencyOfDataCollection"; + public static final String samplingProcedure="samplingProcedure"; + public static final String targetSampleSize = "targetSampleSize"; + public static final String targetSampleActualSize = "targetSampleActualSize"; + public static final String targetSampleSizeFormula = "targetSampleSizeFormula"; + public static final String deviationsFromSampleDesign="deviationsFromSampleDesign"; + public static final String collectionMode="collectionMode"; + public static final String researchInstrument="researchInstrument"; + public static final String dataSources="dataSources"; + public static final String originOfSources="originOfSources"; + public static final String characteristicOfSources="characteristicOfSources"; + public static final String accessToSources="accessToSources"; + public static final String dataCollectionSituation="dataCollectionSituation"; + public static final String actionsToMinimizeLoss="actionsToMinimizeLoss"; + public static final String controlOperations="controlOperations"; + public static final String weighting="weighting"; + public static final String cleaningOperations="cleaningOperations"; + public static final String datasetLevelErrorNotes="datasetLevelErrorNotes"; + public static final String responseRate="responseRate"; + public static final String samplingErrorEstimates="samplingErrorEstimates"; + + public static final String socialScienceNotes = "socialScienceNotes"; + public static final String socialScienceNotesType = "socialScienceNotesType"; + public static final String socialScienceNotesSubject = "socialScienceNotesSubject"; + public static final String socialScienceNotesText = "socialScienceNotesText"; + + public static final String otherDataAppraisal="otherDataAppraisal"; + public static final String placeOfAccess="placeOfAccess"; + public static final String originalArchive="originalArchive"; + public static final String availabilityStatus="availabilityStatus"; + public static final String 
collectionSize="collectionSize"; + public static final String datasetCompletion="datasetCompletion"; + public static final String numberOfFiles="numberOfFiles"; + public static final String confidentialityDeclaration="confidentialityDeclaration"; + public static final String specialPermissions="specialPermissions"; + public static final String restrictions="restrictions"; @Deprecated //Doesn't appear to be used and is not datasetContact - public final static String contact="contact"; - public final static String citationRequirements="citationRequirements"; - public final static String depositorRequirements="depositorRequirements"; - public final static String conditions="conditions"; - public final static String disclaimer="disclaimer"; - public final static String relatedMaterial="relatedMaterial"; - //public final static String replicationFor="replicationFor"; - //public final static String relatedPublications="relatedPublications"; - public final static String relatedDatasets="relatedDatasets"; - public final static String otherReferences="otherReferences"; - public final static String notesText="notesText"; - public final static String language="language"; - public final static String noteInformationType="noteInformationType"; - public final static String notesInformationSubject="notesInformationSubject"; - public final static String subject="subject"; + public static final String contact="contact"; + public static final String citationRequirements="citationRequirements"; + public static final String depositorRequirements="depositorRequirements"; + public static final String conditions="conditions"; + public static final String disclaimer="disclaimer"; + public static final String relatedMaterial="relatedMaterial"; + //public static final String replicationFor="replicationFor"; + //public static final String relatedPublications="relatedPublications"; + public static final String relatedDatasets="relatedDatasets"; + public static final String otherReferences="otherReferences"; + public static final String notesText="notesText"; + public static final String language="language"; + public static final String noteInformationType="noteInformationType"; + public static final String notesInformationSubject="notesInformationSubject"; + public static final String subject="subject"; /* * The following getters are needed so we can use them as properties in JSP */ diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index ff78b0c83ec..ded7c83de62 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -424,7 +424,7 @@ public Set getIndexableStringsByTermUri(String termUri, JsonObject cvocE for (int i = 0; i < jarr.size(); i++) { if (jarr.get(i).getValueType().equals(JsonValue.ValueType.STRING)) { strings.add(jarr.getString(i)); - } else if (jarr.get(i).getValueType().equals(ValueType.OBJECT)) { // This condition handles SKOMOS format like [{"lang": "en","value": "non-apis bee"},{"lang": "fr","value": "abeille non apis"}] + } else if (jarr.get(i).getValueType().equals(ValueType.OBJECT)) { // This condition handles SKOSMOS format like [{"lang": "en","value": "non-apis bee"},{"lang": "fr","value": "abeille non apis"}] JsonObject entry = jarr.getJsonObject(i); if (entry.containsKey("value")) { logger.fine("adding " + entry.getString("value") + " for " + termUri); @@ -891,6 +891,10 @@ public List 
findAllDisplayedOnCreateInMetadataBlock(MetadataBl } public List findAllInMetadataBlockAndDataverse(MetadataBlock metadataBlock, Dataverse dataverse, boolean onlyDisplayedOnCreate) { + if (!dataverse.isMetadataBlockRoot() && dataverse.getOwner() != null) { + return findAllInMetadataBlockAndDataverse(metadataBlock, dataverse.getOwner(), onlyDisplayedOnCreate); + } + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(DatasetFieldType.class); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 8522f2733c7..33a093c8044 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -101,6 +101,7 @@ import jakarta.faces.view.ViewScoped; import jakarta.inject.Inject; import jakarta.inject.Named; +import jakarta.persistence.OptimisticLockException; import org.apache.commons.lang3.StringUtils; import org.primefaces.event.FileUploadEvent; @@ -1935,13 +1936,13 @@ public void updateOwnerDataverse() { if (selectedHostDataverse != null && selectedHostDataverse.getId() != null) { ownerId = selectedHostDataverse.getId(); dataset.setOwner(selectedHostDataverse); - logger.info("New host dataverse id: "+ownerId); + logger.info("New host dataverse id: " + ownerId); // discard the dataset already created //If a global ID was already assigned, as is true for direct upload, keep it (if files were already uploaded, they are at the path corresponding to the existing global id) GlobalId gid = dataset.getGlobalId(); dataset = new Dataset(); - if(gid!=null) { - dataset.setGlobalId(gid); + if (gid != null) { + dataset.setGlobalId(gid); } // initiate from scratch: (isolate the creation of a new dataset in its own method?) @@ -2287,8 +2288,17 @@ private String init(boolean initFull) { JsfHelper.addWarningMessage(message); } } + if(isAnonymizedAccess()){ + dataverseHeaderFragment.setBreadcrumbs(new ArrayList<>()); + } return null; } + + public void viewActionInitBreadcrumbs(){ + if(!isAnonymizedAccess()){ + dataverseHeaderFragment.initBreadcrumbs(dataset); + } + } private void displayWorkflowComments() { List comments = workingVersion.getWorkflowComments(); @@ -2888,6 +2898,9 @@ private String releaseDataset(boolean minor) { // the lock info system. 
JsfHelper.addErrorMessage(ex.getLocalizedMessage()); } + if(ex.getCause()!=null && ex.getCause() instanceof OptimisticLockException) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelPublishError")); + } logger.severe(ex.getMessage()); } @@ -4002,6 +4015,10 @@ public String save() { Throwable cause = ex; while (cause.getCause()!= null) { cause = cause.getCause(); + if (cause != null && cause instanceof OptimisticLockException) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelUpdateError")); + return null; + } error.append(cause).append(" "); error.append(cause.getMessage()).append(" "); } @@ -4011,6 +4028,15 @@ public String save() { } catch (CommandException ex) { //FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString())); logger.log(Level.SEVERE, "CommandException, when attempting to update the dataset: " + ex.getMessage(), ex); + Throwable cause = ex; + while (cause.getCause()!= null) { + cause = cause.getCause(); + logger.info("Cause is: " + cause.getClass().getName() + ", Message: " + cause.getMessage()); + if (cause != null && cause instanceof OptimisticLockException) { + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataset.message.parallelUpdateError")); + return null; + } + } populateDatasetUpdateFailureMessage(); return returnToDraftVersion(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 0433c425fd2..a7bbc7c3ad4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -102,6 +102,10 @@ public int compare(DatasetVersion o1, DatasetVersion o2) { } } }; + public static final JsonObjectBuilder compareVersions(DatasetVersion originalVersion, DatasetVersion newVersion) { + DatasetVersionDifference diff = new DatasetVersionDifference(newVersion, originalVersion); + return diff.compareVersionsAsJson(); + } // TODO: Determine the UI implications of various version states //IMPORTANT: If you add a new value to this enum, you will also have to modify the @@ -1390,7 +1394,10 @@ public List getRelatedPublications() { relatedPublication.setIdNumber(subField.getDisplayValue()); break; case DatasetFieldConstant.publicationRelationType: - relatedPublication.setRelationType(subField.getDisplayValue()); + List values = subField.getValues_nondisplay(); + if (!values.isEmpty()) { + relatedPublication.setRelationType(values.get(0)); //only one value allowed + } break; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java index eca0c84ae84..c5d6c31386c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java @@ -5,16 +5,24 @@ import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil; import edu.harvard.iq.dataverse.util.StringUtil; +import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import java.util.logging.Logger; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; +import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; +import jakarta.json.JsonObjectBuilder; import 
org.apache.commons.lang3.StringUtils; import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.Arrays; import java.util.Date; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; @@ -34,14 +42,13 @@ public final class DatasetVersionDifference { private List addedFiles = new ArrayList<>(); private List removedFiles = new ArrayList<>(); private List changedFileMetadata = new ArrayList<>(); + private Map>> changedFileMetadataDiff = new HashMap<>(); private List changedVariableMetadata = new ArrayList<>(); private List replacedFiles = new ArrayList<>(); private List changedTermsAccess = new ArrayList<>(); private List summaryDataForNote = new ArrayList<>(); private List blockDataForNote = new ArrayList<>(); - private VariableMetadataUtil variableMetadataUtil; - private List differenceSummaryGroups = new ArrayList<>(); public List getDifferenceSummaryGroups() { @@ -106,54 +113,73 @@ public DatasetVersionDifference(DatasetVersion newVersion, DatasetVersion origin addToSummary(null, dsfn); } } - - // TODO: ? - // It looks like we are going through the filemetadatas in both versions, - // *sequentially* (i.e. at the cost of O(N*M)), to select the lists of - // changed, deleted and added files between the 2 versions... But why - // are we doing it, if we are doing virtually the same thing inside - // the initDatasetFilesDifferenceList(), below - but in a more efficient - // way (sorting both lists, then goint through them in parallel, at the - // cost of (N+M) max.? - // -- 4.6 Nov. 2016 - + long startTime = System.currentTimeMillis(); + Map originalFileMetadataMap = new HashMap<>(); + Map previousIDtoFileMetadataMap = new HashMap<>(); for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { - boolean deleted = true; - for (FileMetadata fmdn : newVersion.getFileMetadatas()) { - if (fmdo.getDataFile().equals(fmdn.getDataFile())) { - deleted = false; - if (!compareFileMetadatas(fmdo, fmdn)) { - changedFileMetadata.add(fmdo); - changedFileMetadata.add(fmdn); - } - if (!variableMetadataUtil.compareVariableMetadata(fmdo,fmdn) || !compareVarGroup(fmdo, fmdn)) { - changedVariableMetadata.add(fmdo); - changedVariableMetadata.add(fmdn); - } - break; - } - } - if (deleted) { - removedFiles.add(fmdo); - } + originalFileMetadataMap.put(fmdo.getDataFile().getId(), fmdo); } + for (FileMetadata fmdn : newVersion.getFileMetadatas()) { - boolean added = true; - for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { - if (fmdo.getDataFile().equals(fmdn.getDataFile())) { - added = false; - break; + DataFile ndf = fmdn.getDataFile(); + Long id = ndf.getId(); + FileMetadata fmdo = originalFileMetadataMap.get(id); + //If this file was in the original version + if(fmdo!= null) { + //Check for differences + Map> fileMetadataDiff = compareFileMetadatas(fmdo, fmdn); + if (!fileMetadataDiff.isEmpty()) { + changedFileMetadata.add(fmdo); + changedFileMetadata.add(fmdn); + // TODO: find a better key for the map. 
needs to be something that doesn't change + changedFileMetadataDiff.put(fmdo, fileMetadataDiff); + } + if (!VariableMetadataUtil.compareVariableMetadata(fmdo,fmdn) || !compareVarGroup(fmdo, fmdn)) { + changedVariableMetadata.add(fmdo); + changedVariableMetadata.add(fmdn); + } + // And drop it from the list since it can't be a deleted file + originalFileMetadataMap.remove(id); + } else { + //It wasn't in the original version + Long prevID = ndf.getPreviousDataFileId(); + //It might be a replacement file or an added file + if(prevID != null) { + //Add it to a map so we can check later to see if it's a replacement + previousIDtoFileMetadataMap.put(prevID, fmdn); + } else { + //Otherwise make it an added file now + addedFiles.add(fmdn); } } - if (added) { - addedFiles.add(fmdn); + } + //Finally check any remaining files from the original version that weren't in the new version + for (Long removedId : originalFileMetadataMap.keySet()) { + //See if it has been replaced + FileMetadata replacingFmd = previousIDtoFileMetadataMap.get(removedId); + FileMetadata fmdRemoved = originalFileMetadataMap.get(removedId); + if (replacingFmd != null) { + //This is a replacement + replacedFiles.add(new FileMetadata[] { fmdRemoved, replacingFmd }); + //Drop it from the map + previousIDtoFileMetadataMap.remove(removedId); + } else { + //This is a removed file + removedFiles.add(fmdRemoved); } - } - getReplacedFiles(); + } + // Any fms left are not updating existing files and aren't replacing a file, but + // they are claiming a previous file id. That shouldn't be possible, but this will + // make sure they get listed in the difference if they do + for (Entry entry : previousIDtoFileMetadataMap.entrySet()) { + logger.warning("Previous file id claimed for a new file: fmd id: " + entry.getValue() + ", previous file id: " + entry.getKey()); + addedFiles.add(entry.getValue()); + } + + logger.fine("Main difference loop execution time: " + (System.currentTimeMillis() - startTime) + " ms"); initDatasetFilesDifferencesList(); - //Sort within blocks by datasetfieldtype dispaly order then.... - //sort via metadatablock order - citation first...
+ //Sort within blocks by datasetfieldtype display order for (List blockList : detailDataByBlock) { Collections.sort(blockList, (DatasetField[] l1, DatasetField[] l2) -> { DatasetField dsfa = l1[0]; //(DatasetField[]) l1.get(0); @@ -163,6 +189,17 @@ public DatasetVersionDifference(DatasetVersion newVersion, DatasetVersion origin return Integer.valueOf(a).compareTo(b); }); } + //Sort existing compoundValues by datasetfieldtype display order + for (List blockList : detailDataByBlock) { + for (DatasetField[] dfarr : blockList) { + for (DatasetField df : dfarr) { + for (DatasetFieldCompoundValue dfcv : df.getDatasetFieldCompoundValues()) { + Collections.sort(dfcv.getChildDatasetFields(), DatasetField.DisplayOrder); + } + } + } + } + //Sort via metadatablock order Collections.sort(detailDataByBlock, (List l1, List l2) -> { DatasetField dsfa[] = (DatasetField[]) l1.get(0); DatasetField dsfb[] = (DatasetField[]) l2.get(0); @@ -173,294 +210,62 @@ public DatasetVersionDifference(DatasetVersion newVersion, DatasetVersion origin getTermsDifferences(); } - private void getReplacedFiles() { - if (addedFiles.isEmpty() || removedFiles.isEmpty()) { - return; - } - List addedToReplaced = new ArrayList<>(); - List removedToReplaced = new ArrayList<>(); - for (FileMetadata added : addedFiles) { - DataFile addedDF = added.getDataFile(); - Long replacedId = addedDF.getPreviousDataFileId(); - if (added.getDataFile().getPreviousDataFileId() != null){ - } - for (FileMetadata removed : removedFiles) { - DataFile test = removed.getDataFile(); - if (test.getId().equals(replacedId)) { - addedToReplaced.add(added); - removedToReplaced.add(removed); - FileMetadata[] replacedArray = new FileMetadata[2]; - replacedArray[0] = removed; - replacedArray[1] = added; - replacedFiles.add(replacedArray); - } - } - } - if(addedToReplaced.isEmpty()){ - } else{ - addedToReplaced.stream().forEach((delete) -> { - addedFiles.remove(delete); - }); - removedToReplaced.stream().forEach((delete) -> { - removedFiles.remove(delete); - }); - } - } + private void getTermsDifferences() { - changedTermsAccess = new ArrayList<>(); - if (newVersion.getTermsOfUseAndAccess() != null && originalVersion.getTermsOfUseAndAccess() != null) { - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()))) { - String diffLabel = 
BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions())); - - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()), StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer())); - } - - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, 
StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion()).equals(StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()))) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()), - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion())); - 
} + TermsOfUseAndAccess originalTerms = originalVersion.getTermsOfUseAndAccess(); + if(originalTerms == null) { + originalTerms = new TermsOfUseAndAccess(); + } + // newTerms should never be null + TermsOfUseAndAccess newTerms = newVersion.getTermsOfUseAndAccess(); + if(newTerms == null) { + logger.warning("New version does not have TermsOfUseAndAccess"); + newTerms = new TermsOfUseAndAccess(); } - - if (newVersion.getTermsOfUseAndAccess() != null && originalVersion.getTermsOfUseAndAccess() == null) { - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse()).isEmpty()) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfUse())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSpecialPermissions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getRestrictions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getCitationRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDepositorRequirements())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getConditions())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", 
StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDisclaimer())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getTermsOfAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getDataAccessPlace())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getOriginalArchive())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getAvailabilityStatus())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getContactForAccess())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getSizeOfCollection())); - } - if (!StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, "", - StringUtil.nullToEmpty(newVersion.getTermsOfUseAndAccess().getStudyCompletion())); - } - } - - if (newVersion.getTermsOfUseAndAccess() == null && originalVersion.getTermsOfUseAndAccess() != null) { - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()).isEmpty()) { - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfUse()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration"); - 
changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConfidentialityDeclaration()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSpecialPermissions()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getRestrictions()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getCitationRequirements()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDepositorRequirements()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getConditions()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDisclaimer()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getTermsOfAccess()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getDataAccessPlace()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()).isEmpty()){ - String diffLabel = 
BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getOriginalArchive()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getAvailabilityStatus()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getContactForAccess()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getSizeOfCollection()), ""); - } - if (!StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()).isEmpty()){ - String diffLabel = BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion"); - changedTermsAccess = addToTermsChangedList(changedTermsAccess, diffLabel, - StringUtil.nullToEmpty(originalVersion.getTermsOfUseAndAccess().getStudyCompletion()), ""); - } - } - } - - private DifferenceSummaryItem createSummaryItem(){ - return null; - } - - private List addToSummaryGroup(String displayName, DifferenceSummaryItem differenceSummaryItem){ - return null; + checkAndAddToChangeList(originalTerms.getTermsOfUse(), newTerms.getTermsOfUse(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header")); + checkAndAddToChangeList(originalTerms.getConfidentialityDeclaration(), newTerms.getConfidentialityDeclaration(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration")); + checkAndAddToChangeList(originalTerms.getSpecialPermissions(), newTerms.getSpecialPermissions(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions")); + checkAndAddToChangeList(originalTerms.getRestrictions(), newTerms.getRestrictions(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions")); + checkAndAddToChangeList(originalTerms.getCitationRequirements(), newTerms.getCitationRequirements(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements")); + checkAndAddToChangeList(originalTerms.getDepositorRequirements(), newTerms.getDepositorRequirements(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements")); + checkAndAddToChangeList(originalTerms.getConditions(), newTerms.getConditions(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions")); + checkAndAddToChangeList(originalTerms.getDisclaimer(), newTerms.getDisclaimer(), + 
BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer")); + checkAndAddToChangeList(originalTerms.getTermsOfAccess(), newTerms.getTermsOfAccess(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess")); + checkAndAddToChangeList(originalTerms.getDataAccessPlace(), newTerms.getDataAccessPlace(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace")); + checkAndAddToChangeList(originalTerms.getOriginalArchive(), newTerms.getOriginalArchive(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive")); + checkAndAddToChangeList(originalTerms.getAvailabilityStatus(), newTerms.getAvailabilityStatus(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus")); + checkAndAddToChangeList(originalTerms.getContactForAccess(), newTerms.getContactForAccess(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess")); + checkAndAddToChangeList(originalTerms.getSizeOfCollection(), newTerms.getSizeOfCollection(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection")); + checkAndAddToChangeList(originalTerms.getStudyCompletion(), newTerms.getStudyCompletion(), + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion")); } - - private List addToTermsChangedList(List listIn, String label, String origVal, String newVal) { - String[] diffArray; - diffArray = new String[3]; - diffArray[0] = label; - diffArray[1] = origVal; - diffArray[2] = newVal; - listIn.add(diffArray); - return listIn; + + private void checkAndAddToChangeList(String originalTerm, String newTerm, + String termLabel) { + originalTerm = StringUtil.nullToEmpty(originalTerm); + newTerm = StringUtil.nullToEmpty(newTerm); + if(!originalTerm.equals(newTerm)) { + changedTermsAccess.add(new String[]{termLabel, originalTerm, newTerm}); + } } - private void addToList(List listIn, DatasetField dsfo, DatasetField dsfn) { DatasetField[] dsfArray; dsfArray = new DatasetField[2]; @@ -523,7 +328,7 @@ private void addToNoteSummary(DatasetField dsfo, int added, int deleted, int cha summaryDataForNote.add(noteArray); } - private boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { + static boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { List vglo = fmdo.getVarGroups(); List vgln = fmdn.getVarGroups(); @@ -533,7 +338,7 @@ private boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { int count = 0; for (VarGroup vgo : vglo) { for (VarGroup vgn : vgln) { - if (!variableMetadataUtil.checkDiff(vgo.getLabel(), vgn.getLabel())) { + if (!VariableMetadataUtil.checkDiff(vgo.getLabel(), vgn.getLabel())) { Set dvo = vgo.getVarsInGroup(); Set dvn = vgn.getVarsInGroup(); if (dvo.equals(dvn)) { @@ -551,25 +356,34 @@ private boolean compareVarGroup(FileMetadata fmdo, FileMetadata fmdn) { } } - public static boolean compareFileMetadatas(FileMetadata fmdo, FileMetadata fmdn) { - + public static Map> compareFileMetadatas(FileMetadata fmdo, FileMetadata fmdn) { + Map> fileMetadataChanged = new HashMap<>(); if (!StringUtils.equals(StringUtil.nullToEmpty(fmdo.getDescription()), StringUtil.nullToEmpty(fmdn.getDescription()))) { - return false; + fileMetadataChanged.put("Description", + List.of(StringUtil.nullToEmpty(fmdo.getDescription()), StringUtil.nullToEmpty(fmdn.getDescription()))); } if 
(!StringUtils.equals(fmdo.getCategoriesByName().toString(), fmdn.getCategoriesByName().toString())) { - return false; + fileMetadataChanged.put("Categories", + List.of(fmdo.getCategoriesByName().toString(), fmdn.getCategoriesByName().toString())); } if (!StringUtils.equals(fmdo.getLabel(), fmdn.getLabel())) { - return false; + fileMetadataChanged.put("Label", + List.of(fmdo.getLabel(), fmdn.getLabel())); } if (!StringUtils.equals(fmdo.getProvFreeForm(), fmdn.getProvFreeForm())) { - return false; + fileMetadataChanged.put("ProvFreeForm", + List.of(fmdo.getProvFreeForm(), fmdn.getProvFreeForm())); } - - return fmdo.isRestricted() == fmdn.isRestricted(); + + if (fmdo.isRestricted() != fmdn.isRestricted()) { + fileMetadataChanged.put("isRestricted", + List.of(String.valueOf(fmdo.isRestricted()), String.valueOf(fmdn.isRestricted()))); + } + + return fileMetadataChanged; } private void compareValues(DatasetField originalField, DatasetField newField, boolean compound) { @@ -1819,4 +1633,138 @@ private static boolean fieldsAreDifferent(DatasetField originalField, DatasetFie } return false; } + + List getChangedVariableMetadata() { + return changedVariableMetadata; + } + + List getReplacedFiles() { + return replacedFiles; + } + public JsonObjectBuilder compareVersionsAsJson() { + JsonObjectBuilder job = new NullSafeJsonBuilder(); + JsonObjectBuilder jobVersion = new NullSafeJsonBuilder(); + jobVersion.add("versionNumber", originalVersion.getFriendlyVersionNumber()); + jobVersion.add("lastUpdatedDate", new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").format(originalVersion.getLastUpdateTime())); + job.add("oldVersion", jobVersion); + jobVersion = new NullSafeJsonBuilder(); + jobVersion.add("versionNumber", newVersion.getFriendlyVersionNumber()); + jobVersion.add("lastUpdatedDate", new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").format(newVersion.getLastUpdateTime())); + job.add("newVersion", jobVersion); + + if (!this.detailDataByBlock.isEmpty()) { + JsonArrayBuilder jabMetadata = Json.createArrayBuilder(); + for (List blocks : detailDataByBlock) { + JsonObjectBuilder jobMetadata = new NullSafeJsonBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + String blockDisplay = blocks.get(0)[0].getDatasetFieldType().getMetadataBlock().getDisplayName(); + for (DatasetField[] dsfArray : blocks) { + JsonObjectBuilder jb = new NullSafeJsonBuilder(); + jb.add("fieldName", dsfArray[0].getDatasetFieldType().getTitle()); + if (dsfArray[0].getDatasetFieldType().isPrimitive()) { + jb.add("oldValue", dsfArray[0].getRawValue()); + } else { + jb.add("oldValue", dsfArray[0].getCompoundRawValue()); + } + if (dsfArray[1].getDatasetFieldType().isPrimitive()) { + jb.add("newValue", dsfArray[1].getRawValue()); + } else { + jb.add("newValue", dsfArray[1].getCompoundRawValue()); + } + jab.add(jb); + } + jobMetadata.add("blockName", blockDisplay); + jobMetadata.add("changed", jab); + jabMetadata.add(jobMetadata); + } + job.add("metadataChanges", jabMetadata); + } + + // Format added, removed, and modified files + JsonArrayBuilder jabDiffFiles = Json.createArrayBuilder(); + if (!addedFiles.isEmpty()) { + JsonArrayBuilder jab = Json.createArrayBuilder(); + addedFiles.forEach(f -> { + jab.add(filesDiffJson(f)); + }); + job.add("filesAdded", jab); + } + if (!removedFiles.isEmpty()) { + JsonArrayBuilder jab = Json.createArrayBuilder(); + removedFiles.forEach(f -> { + jab.add(filesDiffJson(f)); + }); + job.add("filesRemoved", jab); + } + if (!replacedFiles.isEmpty()) { + JsonArrayBuilder jabReplaced = 
Json.createArrayBuilder(); + replacedFiles.forEach(fm -> { + if (fm.length == 2) { + JsonObjectBuilder jobReplaced = new NullSafeJsonBuilder(); + jobReplaced.add("oldFile", filesDiffJson(fm[0])); + jobReplaced.add("newFile", filesDiffJson(fm[1])); + jabReplaced.add(jobReplaced); + } + }); + job.add("filesReplaced", jabReplaced); + } + if (!changedFileMetadata.isEmpty()) { + changedFileMetadataDiff.entrySet().forEach(entry -> { + JsonArrayBuilder jab = Json.createArrayBuilder(); + JsonObjectBuilder jobChanged = new NullSafeJsonBuilder(); + jobChanged.add("fileName", entry.getKey().getDataFile().getDisplayName()); + jobChanged.add(entry.getKey().getDataFile().getChecksumType().name(), entry.getKey().getDataFile().getChecksumValue()); + jobChanged.add("fileId", entry.getKey().getDataFile().getId()); + entry.getValue().entrySet().forEach(e -> { + JsonObjectBuilder jobDiffField = new NullSafeJsonBuilder(); + jobDiffField.add("fieldName",e.getKey()); + jobDiffField.add("oldValue",e.getValue().get(0)); + jobDiffField.add("newValue",e.getValue().get(1)); + jab.add(jobDiffField); + }); + jobChanged.add("changed", jab); + jabDiffFiles.add(jobChanged); + }); + job.add("fileChanges", jabDiffFiles); + } + + // Format Terms Of Access changes + if (!changedTermsAccess.isEmpty()) { + JsonObjectBuilder jobTOA = new NullSafeJsonBuilder(); + JsonArrayBuilder jab = Json.createArrayBuilder(); + changedTermsAccess.forEach(toa -> { + JsonObjectBuilder jobValue = new NullSafeJsonBuilder(); + jobValue.add("fieldName",toa[0]); + jobValue.add("oldValue",toa[1]); + jobValue.add("newValue",toa[2]); + jab.add(jobValue); + }); + jobTOA.add("changed", jab); + job.add("TermsOfAccess", jobTOA); + } + + return job; + } + private JsonObjectBuilder filesDiffJson(FileMetadata fileMetadata) { + NullSafeJsonBuilder job = new NullSafeJsonBuilder(); + DataFile df = fileMetadata.getDataFile(); + job.add("fileName", df.getDisplayName()) + .add("filePath", fileMetadata.getDirectoryLabel()) + .add(df.getChecksumType().name(), df.getChecksumValue()) + .add("type",df.getContentType()) + .add("fileId", df.getId()) + .add("description", fileMetadata.getDescription()) + .add("isRestricted", df.isRestricted()); + if (fileMetadata.getCategories() != null && !fileMetadata.getCategories().isEmpty()) { + JsonArrayBuilder jabCategories = Json.createArrayBuilder(); + fileMetadata.getCategories().forEach(c -> jabCategories.add(c.getName())); + job.add("categories", jabCategories); + } + if (df.getTags() != null && !df.getTags().isEmpty()) { + JsonArrayBuilder jabTags = Json.createArrayBuilder(); + df.getTags().forEach(t -> jabTags.add(t.getTypeLabel())); + job.add("tags", jabTags); + } + return job; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index 86e2e0207c1..1f11725e581 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -595,6 +595,10 @@ public void setMetadataBlocks(List metadataBlocks) { this.metadataBlocks = new ArrayList<>(metadataBlocks); } + public void clearMetadataBlocks() { + this.metadataBlocks.clear(); + } + public List getCitationDatasetFieldTypes() { return citationDatasetFieldTypes; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java index 5c77989f6d6..56f522fa816 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java @@ -42,23 +42,24 @@ public void delete(DataverseFacet dataverseFacet) { cache.invalidate(); } - public void deleteFacetsFor( Dataverse d ) { - em.createNamedQuery("DataverseFacet.removeByOwnerId") - .setParameter("ownerId", d.getId()) - .executeUpdate(); + public void deleteFacetsFor(Dataverse d) { + em.createNamedQuery("DataverseFacet.removeByOwnerId") + .setParameter("ownerId", d.getId()) + .executeUpdate(); cache.invalidate(d.getId()); - - } - + + } + public DataverseFacet create(int displayOrder, DatasetFieldType fieldType, Dataverse ownerDv) { DataverseFacet dataverseFacet = new DataverseFacet(); - + dataverseFacet.setDisplayOrder(displayOrder); dataverseFacet.setDatasetFieldType(fieldType); dataverseFacet.setDataverse(ownerDv); - ownerDv.getDataverseFacets().add(dataverseFacet); em.persist(dataverseFacet); + + cache.invalidate(ownerDv.getId()); return dataverseFacet; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index a4882f772d6..5dab43fbdbd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -27,9 +27,9 @@ @NamedQuery(name = "DvObject.ownedObjectsById", query="SELECT COUNT(obj) FROM DvObject obj WHERE obj.owner.id=:id"), @NamedQuery(name = "DvObject.findByGlobalId", - query = "SELECT o FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), + query = "SELECT o FROM DvObject o WHERE UPPER(o.identifier)=UPPER(:identifier) and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), @NamedQuery(name = "DvObject.findIdByGlobalId", - query = "SELECT o.id FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), + query = "SELECT o.id FROM DvObject o WHERE UPPER(o.identifier)=UPPER(:identifier) and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"), @NamedQuery(name = "DvObject.findByAlternativeGlobalId", query = "SELECT o FROM DvObject o, AlternativePersistentIdentifier a WHERE o.id = a.dvObject.id and a.identifier=:identifier and a.authority=:authority and a.protocol=:protocol and o.dtype=:dtype"), @@ -37,7 +37,7 @@ query = "SELECT o.id FROM DvObject o, AlternativePersistentIdentifier a WHERE o.id = a.dvObject.id and a.identifier=:identifier and a.authority=:authority and a.protocol=:protocol and o.dtype=:dtype"), @NamedQuery(name = "DvObject.findByProtocolIdentifierAuthority", - query = "SELECT o FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol"), + query = "SELECT o FROM DvObject o WHERE UPPER(o.identifier)=UPPER(:identifier) and o.authority=:authority and o.protocol=:protocol"), @NamedQuery(name = "DvObject.findByOwnerId", query = "SELECT o FROM DvObject o WHERE o.owner.id=:ownerId order by o.dtype desc, o.id"), @NamedQuery(name = "DvObject.findByAuthenticatedUserId", @@ -53,7 +53,8 @@ @Table(indexes = {@Index(columnList="dtype") , @Index(columnList="owner_id") , @Index(columnList="creator_id") - , @Index(columnList="releaseuser_id")}, + , @Index(columnList="releaseuser_id") + , @Index(columnList="authority,protocol, UPPER(identifier)", name="INDEX_DVOBJECT_authority_protocol_upper_identifier")}, uniqueConstraints = {@UniqueConstraint(columnNames = {"authority,protocol,identifier"}),@UniqueConstraint(columnNames = {"owner_id,storageidentifier"})}) public 
abstract class DvObject extends DataverseEntity implements java.io.Serializable { diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java index f54b1fb6117..0922af007fb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java @@ -322,7 +322,7 @@ public String save() { logger.info("Guestbook Page Command Exception. Dataverse: " + dataverse.getName()); logger.info(ex.toString()); FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("guestbook.save.fail"), " - " + ex.toString())); - //logger.severe(ex.getMessage()); + return null; } editMode = null; String msg = (create)? BundleUtil.getStringFromBundle("guestbook.create"): BundleUtil.getStringFromBundle("guestbook.save"); diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 1ea7d02791d..830c7740e34 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -15,6 +15,8 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; + +import edu.harvard.iq.dataverse.validation.ValidateEmail; import jakarta.persistence.*; import jakarta.validation.constraints.Size; import java.util.Collections; @@ -80,8 +82,8 @@ public class GuestbookResponse implements Serializable { @Size(max = 255, message = "{guestbook.response.nameLength}") private String name; - // TODO: Consider using EMailValidator as well. @Size(max = 255, message = "{guestbook.response.nameLength}") + @ValidateEmail(message = "{user.invalidEmail}") private String email; @Size(max = 255, message = "{guestbook.response.nameLength}") @@ -198,7 +200,8 @@ public String getEmail() { } public void setEmail(String email) { - this.email = email; + // ValidateEmail requires NULL or valid email. Empty String will fail validation + this.email = (email == null || email.trim().isEmpty()) ? 
null : email; } public Guestbook getGuestbook() { diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index 2995c0c5f47..c67a0293847 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -283,7 +283,7 @@ public Boolean sendNotificationEmail(UserNotification notification, String comme if (objectOfNotification != null){ String messageText = getMessageTextBasedOnNotification(notification, objectOfNotification, comment, requestor); String subjectText = MailUtil.getSubjectTextBasedOnNotification(notification, objectOfNotification); - if (!(messageText.isEmpty() || subjectText.isEmpty())){ + if (!(StringUtils.isEmpty(messageText) || StringUtils.isEmpty(subjectText))){ retval = sendSystemEmail(emailAddress, subjectText, messageText, isHtmlContent); } else { logger.warning("Skipping " + notification.getType() + " notification, because couldn't get valid message"); diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java index a3dfbf81512..121d03ef0c7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Shib.java +++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java @@ -421,6 +421,9 @@ private String getValueFromAssertion(String key) { Object attribute = request.getAttribute(key); if (attribute != null) { String attributeValue = attribute.toString(); + if(systemConfig.isShibAttributeCharacterSetConversionEnabled()) { + attributeValue = new String(attributeValue.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8); + } String trimmedValue = attributeValue.trim(); if (!trimmedValue.isEmpty()) { logger.fine("The SAML assertion for \"" + key + "\" (optional) was \"" + attributeValue + "\" and was trimmed to \"" + trimmedValue + "\"."); diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 542cf39cfbe..46736da73d4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -5,6 +5,7 @@ */ package edu.harvard.iq.dataverse; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.dataaccess.StorageIO; @@ -20,7 +21,6 @@ import jakarta.ejb.EJB; import jakarta.enterprise.context.RequestScoped; -import jakarta.inject.Inject; import jakarta.inject.Named; /** @@ -33,9 +33,8 @@ public class ThumbnailServiceWrapper implements java.io.Serializable { private static final Logger logger = Logger.getLogger(ThumbnailServiceWrapper.class.getCanonicalName()); - - @Inject - PermissionsWrapper permissionsWrapper; + @EJB + PermissionServiceBean permissionService; @EJB DataverseServiceBean dataverseService; @EJB @@ -49,12 +48,15 @@ public class ThumbnailServiceWrapper implements java.io.Serializable { private Map dvobjectViewMap = new HashMap<>(); private Map hasThumbMap = new HashMap<>(); + private boolean hasDownloadFilePermission(DvObject dvo) { + return permissionService.on(dvo).has(Permission.DownloadFile) ; + } public String getFileCardImageAsUrl(SolrSearchResult result) { DataFile dataFile = result != null && result.getEntity() != null ? 
((DataFile) result.getEntity()) : null; - if (dataFile == null || result.isHarvested() + if (dataFile == null + || result.isHarvested() || !isThumbnailAvailable(dataFile) - || dataFile.isRestricted() - || !dataFile.isReleased() + || (dataFile.isRestricted() && !hasDownloadFilePermission(dataFile)) || FileUtil.isActivelyEmbargoed(dataFile) || FileUtil.isRetentionExpired(dataFile)) { return null; @@ -105,7 +107,7 @@ public String getFileCardImageAsBase64Url(SolrSearchResult result) { } if ((!((DataFile)result.getEntity()).isRestricted() - || permissionsWrapper.hasDownloadFilePermission(result.getEntity())) + || hasDownloadFilePermission(result.getEntity())) && isThumbnailAvailable((DataFile) result.getEntity())) { cardImageUrl = ImageThumbConverter.getImageThumbnailAsBase64( diff --git a/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java b/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java index a8ea5fabde4..c51903e2ed4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java @@ -18,8 +18,8 @@ @Named public class WidgetWrapper implements java.io.Serializable { - private final static String WIDGET_PARAMETER = "widget"; - private final static char WIDGET_SEPARATOR = '@'; + private static final String WIDGET_PARAMETER = "widget"; + private static final char WIDGET_SEPARATOR = '@'; private Boolean widgetView; private String widgetHome; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 54e5eaf7b84..152bcf5066e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1,28 +1,11 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.BannerMessage; -import edu.harvard.iq.dataverse.BannerMessageServiceBean; -import edu.harvard.iq.dataverse.BannerMessageText; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.DataFileServiceBean; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetServiceBean; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.DatasetVersionServiceBean; -import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DataverseRequestServiceBean; -import edu.harvard.iq.dataverse.DataverseServiceBean; -import edu.harvard.iq.dataverse.DataverseSession; -import edu.harvard.iq.dataverse.DvObject; -import edu.harvard.iq.dataverse.DvObjectServiceBean; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.StringUtil; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.validation.EMailValidator; -import edu.harvard.iq.dataverse.EjbDataverseEngine; -import edu.harvard.iq.dataverse.Template; -import edu.harvard.iq.dataverse.TemplateServiceBean; -import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.api.dto.RoleDTO; import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo; @@ -66,8 +49,9 @@ import java.io.InputStream; import java.io.StringReader; import java.nio.charset.StandardCharsets; -import java.util.Map; +import java.util.*; import java.util.Map.Entry; +import java.util.function.Predicate; import java.util.logging.Level; import java.util.logging.Logger; import 
jakarta.ejb.EJB; @@ -81,7 +65,6 @@ import org.apache.commons.io.IOUtils; -import java.util.List; import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationProvidersRegistrationServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; @@ -118,9 +101,7 @@ import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.rolesToJson; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; + import jakarta.inject.Inject; import jakarta.json.JsonArray; import jakarta.persistence.Query; @@ -128,7 +109,6 @@ import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.StreamingOutput; import java.nio.file.Paths; -import java.util.TreeMap; /** * Where the secure, setup API calls live. @@ -139,7 +119,7 @@ @Path("admin") public class Admin extends AbstractApiBean { - private static final Logger logger = Logger.getLogger(Admin.class.getName()); + private static final Logger logger = Logger.getLogger(Admin.class.getName()); @EJB AuthenticationProvidersRegistrationServiceBean authProvidersRegistrationSvc; @@ -184,53 +164,53 @@ public class Admin extends AbstractApiBean { @Inject DataverseSession session; - public static final String listUsersPartialAPIPath = "list-users"; - public static final String listUsersFullAPIPath = "/api/admin/" + listUsersPartialAPIPath; - - @Path("settings") - @GET - public Response listAllSettings() { - JsonObjectBuilder bld = jsonObjectBuilder(); - settingsSvc.listAll().forEach(s -> bld.add(s.getName(), s.getContent())); - return ok(bld); - } - - @Path("settings/{name}") - @PUT - public Response putSetting(@PathParam("name") String name, String content) { - Setting s = settingsSvc.set(name, content); - return ok(jsonObjectBuilder().add(s.getName(), s.getContent())); - } - - @Path("settings/{name}/lang/{lang}") - @PUT - public Response putSettingLang(@PathParam("name") String name, @PathParam("lang") String lang, String content) { - Setting s = settingsSvc.set(name, lang, content); - return ok("Setting " + name + " - " + lang + " - added."); - } - - @Path("settings/{name}") - @GET - public Response getSetting(@PathParam("name") String name) { - String s = settingsSvc.get(name); - - return (s != null) ? 
ok(s) : notFound("Setting " + name + " not found"); - } - - @Path("settings/{name}") - @DELETE - public Response deleteSetting(@PathParam("name") String name) { - settingsSvc.delete(name); - - return ok("Setting " + name + " deleted."); - } - - @Path("settings/{name}/lang/{lang}") - @DELETE - public Response deleteSettingLang(@PathParam("name") String name, @PathParam("lang") String lang) { - settingsSvc.delete(name, lang); - return ok("Setting " + name + " - " + lang + " deleted."); - } + public static final String listUsersPartialAPIPath = "list-users"; + public static final String listUsersFullAPIPath = "/api/admin/" + listUsersPartialAPIPath; + + @Path("settings") + @GET + public Response listAllSettings() { + JsonObjectBuilder bld = jsonObjectBuilder(); + settingsSvc.listAll().forEach(s -> bld.add(s.getName(), s.getContent())); + return ok(bld); + } + + @Path("settings/{name}") + @PUT + public Response putSetting(@PathParam("name") String name, String content) { + Setting s = settingsSvc.set(name, content); + return ok(jsonObjectBuilder().add(s.getName(), s.getContent())); + } + + @Path("settings/{name}/lang/{lang}") + @PUT + public Response putSettingLang(@PathParam("name") String name, @PathParam("lang") String lang, String content) { + Setting s = settingsSvc.set(name, lang, content); + return ok("Setting " + name + " - " + lang + " - added."); + } + + @Path("settings/{name}") + @GET + public Response getSetting(@PathParam("name") String name) { + String s = settingsSvc.get(name); + + return (s != null) ? ok(s) : notFound("Setting " + name + " not found"); + } + + @Path("settings/{name}") + @DELETE + public Response deleteSetting(@PathParam("name") String name) { + settingsSvc.delete(name); + + return ok("Setting " + name + " deleted."); + } + + @Path("settings/{name}/lang/{lang}") + @DELETE + public Response deleteSettingLang(@PathParam("name") String name, @PathParam("lang") String lang) { + settingsSvc.delete(name, lang); + return ok("Setting " + name + " - " + lang + " deleted."); + } @Path("template/{id}") @DELETE @@ -301,130 +281,130 @@ public Response findTemplates(@PathParam("alias") String alias) { } - @Path("authenticationProviderFactories") - @GET - public Response listAuthProviderFactories() { - return ok(authSvc.listProviderFactories().stream() - .map(f -> jsonObjectBuilder().add("alias", f.getAlias()).add("info", f.getInfo())) - .collect(toJsonArray())); - } - - @Path("authenticationProviders") - @GET - public Response listAuthProviders() { - return ok(em.createNamedQuery("AuthenticationProviderRow.findAll", AuthenticationProviderRow.class) - .getResultList().stream().map(r -> json(r)).collect(toJsonArray())); - } - - @Path("authenticationProviders") - @POST - public Response addProvider(AuthenticationProviderRow row) { - try { - AuthenticationProviderRow managed = em.find(AuthenticationProviderRow.class, row.getId()); - if (managed != null) { - managed = em.merge(row); - } else { - em.persist(row); - managed = row; - } - if (managed.isEnabled()) { - AuthenticationProvider provider = authProvidersRegistrationSvc.loadProvider(managed); - authProvidersRegistrationSvc.deregisterProvider(provider.getId()); - authProvidersRegistrationSvc.registerProvider(provider); - } - return created("/api/admin/authenticationProviders/" + managed.getId(), json(managed)); - } catch (AuthorizationSetupException e) { - return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); - } - } - - @Path("authenticationProviders/{id}") - @GET - public Response showProvider(@PathParam("id") 
String id) { - AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); - return (row != null) ? ok(json(row)) - : error(Status.NOT_FOUND, "Can't find authetication provider with id '" + id + "'"); - } - - @POST - @Path("authenticationProviders/{id}/:enabled") - public Response enableAuthenticationProvider_deprecated(@PathParam("id") String id, String body) { - return enableAuthenticationProvider(id, body); - } - - @PUT - @Path("authenticationProviders/{id}/enabled") - @Produces("application/json") - public Response enableAuthenticationProvider(@PathParam("id") String id, String body) { - body = body.trim(); - if (!Util.isBoolean(body)) { - return error(Response.Status.BAD_REQUEST, "Illegal value '" + body + "'. Use 'true' or 'false'"); - } - boolean enable = Util.isTrue(body); - - AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); - if (row == null) { - return notFound("Can't find authentication provider with id '" + id + "'"); - } - - row.setEnabled(enable); - em.merge(row); - - if (enable) { - // enable a provider - if (authSvc.getAuthenticationProvider(id) != null) { - return ok(String.format("Authentication provider '%s' already enabled", id)); - } - try { - authProvidersRegistrationSvc.registerProvider(authProvidersRegistrationSvc.loadProvider(row)); - return ok(String.format("Authentication Provider %s enabled", row.getId())); - - } catch (AuthenticationProviderFactoryNotFoundException ex) { - return notFound(String.format("Can't instantiate provider, as there's no factory with alias %s", - row.getFactoryAlias())); - } catch (AuthorizationSetupException ex) { - logger.log(Level.WARNING, "Error instantiating authentication provider: " + ex.getMessage(), ex); - return error(Status.INTERNAL_SERVER_ERROR, - String.format("Can't instantiate provider: %s", ex.getMessage())); - } - - } else { - // disable a provider - authProvidersRegistrationSvc.deregisterProvider(id); - return ok("Authentication Provider '" + id + "' disabled. " - + (authSvc.getAuthenticationProviderIds().isEmpty() - ? "WARNING: no enabled authentication providers left." - : "")); - } - } - - @GET - @Path("authenticationProviders/{id}/enabled") - public Response checkAuthenticationProviderEnabled(@PathParam("id") String id) { - List prvs = em - .createNamedQuery("AuthenticationProviderRow.findById", AuthenticationProviderRow.class) - .setParameter("id", id).getResultList(); - if (prvs.isEmpty()) { - return notFound("Can't find a provider with id '" + id + "'."); - } else { - return ok(Boolean.toString(prvs.get(0).isEnabled())); - } - } - - @DELETE - @Path("authenticationProviders/{id}/") - public Response deleteAuthenticationProvider(@PathParam("id") String id) { - authProvidersRegistrationSvc.deregisterProvider(id); - AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); - if (row != null) { - em.remove(row); - } - - return ok("AuthenticationProvider " + id + " deleted. " - + (authSvc.getAuthenticationProviderIds().isEmpty() - ? "WARNING: no enabled authentication providers left." 
- : "")); - } + @Path("authenticationProviderFactories") + @GET + public Response listAuthProviderFactories() { + return ok(authSvc.listProviderFactories().stream() + .map(f -> jsonObjectBuilder().add("alias", f.getAlias()).add("info", f.getInfo())) + .collect(toJsonArray())); + } + + @Path("authenticationProviders") + @GET + public Response listAuthProviders() { + return ok(em.createNamedQuery("AuthenticationProviderRow.findAll", AuthenticationProviderRow.class) + .getResultList().stream().map(r -> json(r)).collect(toJsonArray())); + } + + @Path("authenticationProviders") + @POST + public Response addProvider(AuthenticationProviderRow row) { + try { + AuthenticationProviderRow managed = em.find(AuthenticationProviderRow.class, row.getId()); + if (managed != null) { + managed = em.merge(row); + } else { + em.persist(row); + managed = row; + } + if (managed.isEnabled()) { + AuthenticationProvider provider = authProvidersRegistrationSvc.loadProvider(managed); + authProvidersRegistrationSvc.deregisterProvider(provider.getId()); + authProvidersRegistrationSvc.registerProvider(provider); + } + return created("/api/admin/authenticationProviders/" + managed.getId(), json(managed)); + } catch (AuthorizationSetupException e) { + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } + } + + @Path("authenticationProviders/{id}") + @GET + public Response showProvider(@PathParam("id") String id) { + AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); + return (row != null) ? ok(json(row)) + : error(Status.NOT_FOUND, "Can't find authetication provider with id '" + id + "'"); + } + + @POST + @Path("authenticationProviders/{id}/:enabled") + public Response enableAuthenticationProvider_deprecated(@PathParam("id") String id, String body) { + return enableAuthenticationProvider(id, body); + } + + @PUT + @Path("authenticationProviders/{id}/enabled") + @Produces("application/json") + public Response enableAuthenticationProvider(@PathParam("id") String id, String body) { + body = body.trim(); + if (!Util.isBoolean(body)) { + return error(Response.Status.BAD_REQUEST, "Illegal value '" + body + "'. Use 'true' or 'false'"); + } + boolean enable = Util.isTrue(body); + + AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); + if (row == null) { + return notFound("Can't find authentication provider with id '" + id + "'"); + } + + row.setEnabled(enable); + em.merge(row); + + if (enable) { + // enable a provider + if (authSvc.getAuthenticationProvider(id) != null) { + return ok(String.format("Authentication provider '%s' already enabled", id)); + } + try { + authProvidersRegistrationSvc.registerProvider(authProvidersRegistrationSvc.loadProvider(row)); + return ok(String.format("Authentication Provider %s enabled", row.getId())); + + } catch (AuthenticationProviderFactoryNotFoundException ex) { + return notFound(String.format("Can't instantiate provider, as there's no factory with alias %s", + row.getFactoryAlias())); + } catch (AuthorizationSetupException ex) { + logger.log(Level.WARNING, "Error instantiating authentication provider: " + ex.getMessage(), ex); + return error(Status.INTERNAL_SERVER_ERROR, + String.format("Can't instantiate provider: %s", ex.getMessage())); + } + + } else { + // disable a provider + authProvidersRegistrationSvc.deregisterProvider(id); + return ok("Authentication Provider '" + id + "' disabled. " + + (authSvc.getAuthenticationProviderIds().isEmpty() + ? "WARNING: no enabled authentication providers left." 
+ : "")); + } + } + + @GET + @Path("authenticationProviders/{id}/enabled") + public Response checkAuthenticationProviderEnabled(@PathParam("id") String id) { + List prvs = em + .createNamedQuery("AuthenticationProviderRow.findById", AuthenticationProviderRow.class) + .setParameter("id", id).getResultList(); + if (prvs.isEmpty()) { + return notFound("Can't find a provider with id '" + id + "'."); + } else { + return ok(Boolean.toString(prvs.get(0).isEnabled())); + } + } + + @DELETE + @Path("authenticationProviders/{id}/") + public Response deleteAuthenticationProvider(@PathParam("id") String id) { + authProvidersRegistrationSvc.deregisterProvider(id); + AuthenticationProviderRow row = em.find(AuthenticationProviderRow.class, id); + if (row != null) { + em.remove(row); + } + + return ok("AuthenticationProvider " + id + " deleted. " + + (authSvc.getAuthenticationProviderIds().isEmpty() + ? "WARNING: no enabled authentication providers left." + : "")); + } @GET @Path("authenticatedUsers/{identifier}/") @@ -509,520 +489,520 @@ private Response deactivateAuthenticatedUser(AuthenticatedUser userToDisable) { } } - @POST - @Path("publishDataverseAsCreator/{id}") - public Response publishDataverseAsCreator(@PathParam("id") long id) { - try { - Dataverse dataverse = dataverseSvc.find(id); - if (dataverse != null) { - AuthenticatedUser authenticatedUser = dataverse.getCreator(); - return ok(json(execCommand( - new PublishDataverseCommand(createDataverseRequest(authenticatedUser), dataverse)))); - } else { - return error(Status.BAD_REQUEST, "Could not find dataverse with id " + id); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - } - - @Deprecated - @GET - @AuthRequired - @Path("authenticatedUsers") - public Response listAuthenticatedUsers(@Context ContainerRequestContext crc) { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - JsonArrayBuilder userArray = Json.createArrayBuilder(); - authSvc.findAllAuthenticatedUsers().stream().forEach((user) -> { - userArray.add(json(user)); - }); - return ok(userArray); - } - - @GET - @AuthRequired - @Path(listUsersPartialAPIPath) - @Produces({ "application/json" }) - public Response filterAuthenticatedUsers( - @Context ContainerRequestContext crc, - @QueryParam("searchTerm") String searchTerm, - @QueryParam("selectedPage") Integer selectedPage, - @QueryParam("itemsPerPage") Integer itemsPerPage, - @QueryParam("sortKey") String sortKey - ) { - - User authUser = getRequestUser(crc); - - if (!authUser.isSuperuser()) { - return error(Response.Status.FORBIDDEN, - BundleUtil.getStringFromBundle("dashboard.list_users.api.auth.not_superuser")); - } - - UserListMaker userListMaker = new UserListMaker(userService); - - // String sortKey = null; - UserListResult userListResult = userListMaker.runUserSearch(searchTerm, itemsPerPage, selectedPage, sortKey); - - return ok(userListResult.toJSON()); - } - - /** - * @todo Make this support creation of BuiltInUsers. - * - * @todo Add way more error checking. Only the happy path is tested by AdminIT. 
- */ - @POST - @Path("authenticatedUsers") - public Response createAuthenicatedUser(JsonObject jsonObject) { - logger.fine("JSON in: " + jsonObject); - String persistentUserId = jsonObject.getString("persistentUserId"); - String identifier = jsonObject.getString("identifier"); - String proposedAuthenticatedUserIdentifier = identifier.replaceFirst("@", ""); - String firstName = jsonObject.getString("firstName"); - String lastName = jsonObject.getString("lastName"); - String emailAddress = jsonObject.getString("email"); - String position = null; - String affiliation = null; - UserRecordIdentifier userRecordId = new UserRecordIdentifier(jsonObject.getString("authenticationProviderId"), - persistentUserId); - AuthenticatedUserDisplayInfo userDisplayInfo = new AuthenticatedUserDisplayInfo(firstName, lastName, - emailAddress, affiliation, position); - boolean generateUniqueIdentifier = true; - AuthenticatedUser authenticatedUser = authSvc.createAuthenticatedUser(userRecordId, - proposedAuthenticatedUserIdentifier, userDisplayInfo, true); - return ok(json(authenticatedUser)); - } + @POST + @Path("publishDataverseAsCreator/{id}") + public Response publishDataverseAsCreator(@PathParam("id") long id) { + try { + Dataverse dataverse = dataverseSvc.find(id); + if (dataverse != null) { + AuthenticatedUser authenticatedUser = dataverse.getCreator(); + return ok(json(execCommand( + new PublishDataverseCommand(createDataverseRequest(authenticatedUser), dataverse)))); + } else { + return error(Status.BAD_REQUEST, "Could not find dataverse with id " + id); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + + @Deprecated + @GET + @AuthRequired + @Path("authenticatedUsers") + public Response listAuthenticatedUsers(@Context ContainerRequestContext crc) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + JsonArrayBuilder userArray = Json.createArrayBuilder(); + authSvc.findAllAuthenticatedUsers().stream().forEach((user) -> { + userArray.add(json(user)); + }); + return ok(userArray); + } + + @GET + @AuthRequired + @Path(listUsersPartialAPIPath) + @Produces({ "application/json" }) + public Response filterAuthenticatedUsers( + @Context ContainerRequestContext crc, + @QueryParam("searchTerm") String searchTerm, + @QueryParam("selectedPage") Integer selectedPage, + @QueryParam("itemsPerPage") Integer itemsPerPage, + @QueryParam("sortKey") String sortKey + ) { + + User authUser = getRequestUser(crc); + + if (!authUser.isSuperuser()) { + return error(Response.Status.FORBIDDEN, + BundleUtil.getStringFromBundle("dashboard.list_users.api.auth.not_superuser")); + } + + UserListMaker userListMaker = new UserListMaker(userService); + + // String sortKey = null; + UserListResult userListResult = userListMaker.runUserSearch(searchTerm, itemsPerPage, selectedPage, sortKey); + + return ok(userListResult.toJSON()); + } + + /** + * @todo Make this support creation of BuiltInUsers. + * + * @todo Add way more error checking. Only the happy path is tested by AdminIT. 
+ */ + @POST + @Path("authenticatedUsers") + public Response createAuthenicatedUser(JsonObject jsonObject) { + logger.fine("JSON in: " + jsonObject); + String persistentUserId = jsonObject.getString("persistentUserId"); + String identifier = jsonObject.getString("identifier"); + String proposedAuthenticatedUserIdentifier = identifier.replaceFirst("@", ""); + String firstName = jsonObject.getString("firstName"); + String lastName = jsonObject.getString("lastName"); + String emailAddress = jsonObject.getString("email"); + String position = null; + String affiliation = null; + UserRecordIdentifier userRecordId = new UserRecordIdentifier(jsonObject.getString("authenticationProviderId"), + persistentUserId); + AuthenticatedUserDisplayInfo userDisplayInfo = new AuthenticatedUserDisplayInfo(firstName, lastName, + emailAddress, affiliation, position); + boolean generateUniqueIdentifier = true; + AuthenticatedUser authenticatedUser = authSvc.createAuthenticatedUser(userRecordId, + proposedAuthenticatedUserIdentifier, userDisplayInfo, true); + return ok(json(authenticatedUser)); + } //TODO: Delete this endpoint after 4.9.3. Was updated with change in docs. --MAD - /** - * curl -X PUT -d "shib@mailinator.com" - * http://localhost:8080/api/admin/authenticatedUsers/id/11/convertShibToBuiltIn - * - * @deprecated We have documented this API endpoint so we'll keep in around for - * a while but we should encourage everyone to switch to the - * "convertRemoteToBuiltIn" endpoint and then remove this - * Shib-specfic one. - */ - @PUT - @AuthRequired - @Path("authenticatedUsers/id/{id}/convertShibToBuiltIn") - @Deprecated - public Response convertShibUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - try { - BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); - if (builtinUser == null) { - return error(Response.Status.BAD_REQUEST, "User id " + id - + " could not be converted from Shibboleth to BuiltIn. An Exception was not thrown."); - } + /** + * curl -X PUT -d "shib@mailinator.com" + * http://localhost:8080/api/admin/authenticatedUsers/id/11/convertShibToBuiltIn + * + * @deprecated We have documented this API endpoint so we'll keep in around for + * a while but we should encourage everyone to switch to the + * "convertRemoteToBuiltIn" endpoint and then remove this + * Shib-specfic one. + */ + @PUT + @AuthRequired + @Path("authenticatedUsers/id/{id}/convertShibToBuiltIn") + @Deprecated + public Response convertShibUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + try { + BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); + if (builtinUser == null) { + return error(Response.Status.BAD_REQUEST, "User id " + id + + " could not be converted from Shibboleth to BuiltIn. 
An Exception was not thrown."); + } AuthenticatedUser authUser = authSvc.getAuthenticatedUser(builtinUser.getUserName()); - JsonObjectBuilder output = Json.createObjectBuilder(); - output.add("email", authUser.getEmail()); - output.add("username", builtinUser.getUserName()); - return ok(output); - } catch (Throwable ex) { - StringBuilder sb = new StringBuilder(); - sb.append(ex + " "); - while (ex.getCause() != null) { - ex = ex.getCause(); - sb.append(ex + " "); - } - String msg = "User id " + id - + " could not be converted from Shibboleth to BuiltIn. Details from Exception: " + sb; - logger.info(msg); - return error(Response.Status.BAD_REQUEST, msg); - } - } - - @PUT - @AuthRequired - @Path("authenticatedUsers/id/{id}/convertRemoteToBuiltIn") - public Response convertOAuthUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - try { - BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); + JsonObjectBuilder output = Json.createObjectBuilder(); + output.add("email", authUser.getEmail()); + output.add("username", builtinUser.getUserName()); + return ok(output); + } catch (Throwable ex) { + StringBuilder sb = new StringBuilder(); + sb.append(ex + " "); + while (ex.getCause() != null) { + ex = ex.getCause(); + sb.append(ex + " "); + } + String msg = "User id " + id + + " could not be converted from Shibboleth to BuiltIn. Details from Exception: " + sb; + logger.info(msg); + return error(Response.Status.BAD_REQUEST, msg); + } + } + + @PUT + @AuthRequired + @Path("authenticatedUsers/id/{id}/convertRemoteToBuiltIn") + public Response convertOAuthUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + try { + BuiltinUser builtinUser = authSvc.convertRemoteToBuiltIn(id, newEmailAddress); //AuthenticatedUser authUser = authService.getAuthenticatedUser(aUser.getUserName()); - if (builtinUser == null) { - return error(Response.Status.BAD_REQUEST, "User id " + id - + " could not be converted from remote to BuiltIn. An Exception was not thrown."); - } + if (builtinUser == null) { + return error(Response.Status.BAD_REQUEST, "User id " + id + + " could not be converted from remote to BuiltIn. An Exception was not thrown."); + } AuthenticatedUser authUser = authSvc.getAuthenticatedUser(builtinUser.getUserName()); - JsonObjectBuilder output = Json.createObjectBuilder(); - output.add("email", authUser.getEmail()); - output.add("username", builtinUser.getUserName()); - return ok(output); - } catch (Throwable ex) { - StringBuilder sb = new StringBuilder(); - sb.append(ex + " "); - while (ex.getCause() != null) { - ex = ex.getCause(); - sb.append(ex + " "); - } - String msg = "User id " + id + " could not be converted from remote to BuiltIn. Details from Exception: " - + sb; - logger.info(msg); - return error(Response.Status.BAD_REQUEST, msg); - } - } - - /** - * This is used in testing via AdminIT.java but we don't expect sysadmins to use - * this. 
- */ - @PUT - @AuthRequired - @Path("authenticatedUsers/convert/builtin2shib") - public Response builtin2shib(@Context ContainerRequestContext crc, String content) { - logger.info("entering builtin2shib..."); - try { - AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); - if (!userToRunThisMethod.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - boolean disabled = false; - if (disabled) { - return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); - } - AuthenticatedUser builtInUserToConvert = null; - String emailToFind; - String password; - String authuserId = "0"; // could let people specify id on authuser table. probably better to let them - // tell us their - String newEmailAddressToUse; - try { - String[] args = content.split(":"); - emailToFind = args[0]; - password = args[1]; - newEmailAddressToUse = args[2]; - // authuserId = args[666]; - } catch (ArrayIndexOutOfBoundsException ex) { - return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); - } - AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); - String existing = "NOT FOUND"; - if (existingAuthUserFoundByEmail != null) { - builtInUserToConvert = existingAuthUserFoundByEmail; - existing = existingAuthUserFoundByEmail.getIdentifier(); - } else { - long longToLookup = Long.parseLong(authuserId); - AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); - if (specifiedUserToConvert != null) { - builtInUserToConvert = specifiedUserToConvert; - } else { - return error(Response.Status.BAD_REQUEST, - "No user to convert. We couldn't find a *single* existing user account based on " + emailToFind - + " and no user was found using specified id " + longToLookup); - } - } - String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; - Map randomUser = authTestDataService.getRandomUser(); - // String eppn = UUID.randomUUID().toString().substring(0, 8); - String eppn = randomUser.get("eppn"); - String idPEntityId = randomUser.get("idp"); - String notUsed = null; - String separator = "|"; - UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(idPEntityId + separator + eppn, notUsed); - String overwriteFirstName = randomUser.get("firstName"); - String overwriteLastName = randomUser.get("lastName"); - String overwriteEmail = randomUser.get("email"); - overwriteEmail = newEmailAddressToUse; - logger.info("overwriteEmail: " + overwriteEmail); - boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); - if (!validEmail) { - // See https://github.com/IQSS/dataverse/issues/2998 - return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); - } - /** - * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo - * constructor. - */ - /** - * Here we are exercising (via an API test) shibService.getAffiliation with the - * TestShib IdP and a non-production DevShibAccountType. 
- */ - idPEntityId = ShibUtil.testShibIdpEntityId; - String overwriteAffiliation = shibService.getAffiliation(idPEntityId, - ShibServiceBean.DevShibAccountType.RANDOM); - logger.info("overwriteAffiliation: " + overwriteAffiliation); - /** - * @todo Find a place to put "position" in the authenticateduser table: - * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 - */ - String overwritePosition = "staff;student"; - AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, - overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); - JsonObjectBuilder response = Json.createObjectBuilder(); - JsonArrayBuilder problems = Json.createArrayBuilder(); - if (password != null) { - response.add("password supplied", password); - boolean knowsExistingPassword = false; - BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); - if (oldBuiltInUser != null) { + JsonObjectBuilder output = Json.createObjectBuilder(); + output.add("email", authUser.getEmail()); + output.add("username", builtinUser.getUserName()); + return ok(output); + } catch (Throwable ex) { + StringBuilder sb = new StringBuilder(); + sb.append(ex + " "); + while (ex.getCause() != null) { + ex = ex.getCause(); + sb.append(ex + " "); + } + String msg = "User id " + id + " could not be converted from remote to BuiltIn. Details from Exception: " + + sb; + logger.info(msg); + return error(Response.Status.BAD_REQUEST, msg); + } + } + + /** + * This is used in testing via AdminIT.java but we don't expect sysadmins to use + * this. + */ + @PUT + @AuthRequired + @Path("authenticatedUsers/convert/builtin2shib") + public Response builtin2shib(@Context ContainerRequestContext crc, String content) { + logger.info("entering builtin2shib..."); + try { + AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); + if (!userToRunThisMethod.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + boolean disabled = false; + if (disabled) { + return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); + } + AuthenticatedUser builtInUserToConvert = null; + String emailToFind; + String password; + String authuserId = "0"; // could let people specify id on authuser table. probably better to let them + // tell us their + String newEmailAddressToUse; + try { + String[] args = content.split(":"); + emailToFind = args[0]; + password = args[1]; + newEmailAddressToUse = args[2]; + // authuserId = args[666]; + } catch (ArrayIndexOutOfBoundsException ex) { + return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); + } + AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); + String existing = "NOT FOUND"; + if (existingAuthUserFoundByEmail != null) { + builtInUserToConvert = existingAuthUserFoundByEmail; + existing = existingAuthUserFoundByEmail.getIdentifier(); + } else { + long longToLookup = Long.parseLong(authuserId); + AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); + if (specifiedUserToConvert != null) { + builtInUserToConvert = specifiedUserToConvert; + } else { + return error(Response.Status.BAD_REQUEST, + "No user to convert. 
We couldn't find a *single* existing user account based on " + emailToFind + + " and no user was found using specified id " + longToLookup); + } + } + String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; + Map randomUser = authTestDataService.getRandomUser(); + // String eppn = UUID.randomUUID().toString().substring(0, 8); + String eppn = randomUser.get("eppn"); + String idPEntityId = randomUser.get("idp"); + String notUsed = null; + String separator = "|"; + UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(idPEntityId + separator + eppn, notUsed); + String overwriteFirstName = randomUser.get("firstName"); + String overwriteLastName = randomUser.get("lastName"); + String overwriteEmail = randomUser.get("email"); + overwriteEmail = newEmailAddressToUse; + logger.info("overwriteEmail: " + overwriteEmail); + boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); + if (!validEmail) { + // See https://github.com/IQSS/dataverse/issues/2998 + return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); + } + /** + * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo + * constructor. + */ + /** + * Here we are exercising (via an API test) shibService.getAffiliation with the + * TestShib IdP and a non-production DevShibAccountType. + */ + idPEntityId = ShibUtil.testShibIdpEntityId; + String overwriteAffiliation = shibService.getAffiliation(idPEntityId, + ShibServiceBean.DevShibAccountType.RANDOM); + logger.info("overwriteAffiliation: " + overwriteAffiliation); + /** + * @todo Find a place to put "position" in the authenticateduser table: + * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 + */ + String overwritePosition = "staff;student"; + AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, + overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); + JsonObjectBuilder response = Json.createObjectBuilder(); + JsonArrayBuilder problems = Json.createArrayBuilder(); + if (password != null) { + response.add("password supplied", password); + boolean knowsExistingPassword = false; + BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); + if (oldBuiltInUser != null) { if (builtInUserToConvert.isDeactivated()) { problems.add("builtin account has been deactivated"); return error(Status.BAD_REQUEST, problems.build().toString()); } - String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); - response.add("old username", usernameOfBuiltinAccountToConvert); - AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, - password); - if (authenticatedUser != null) { - knowsExistingPassword = true; - AuthenticatedUser convertedUser = authSvc.convertBuiltInToShib(builtInUserToConvert, shibProviderId, - newUserIdentifierInLookupTable); - if (convertedUser != null) { - /** - * @todo Display name is not being overwritten. 
Logic must be in Shib backing - * bean - */ - AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); - if (updatedInfoUser != null) { - response.add("display name overwritten with", updatedInfoUser.getName()); - } else { - problems.add("couldn't update display info"); - } - } else { - problems.add("unable to convert user"); - } - } - } else { - problems.add("couldn't find old username"); - } - if (!knowsExistingPassword) { - String message = "User doesn't know password."; - problems.add(message); - /** - * @todo Someday we should make a errorResponse method that takes JSON arrays - * and objects. - */ - return error(Status.BAD_REQUEST, problems.build().toString()); - } - // response.add("knows existing password", knowsExistingPassword); - } - - response.add("user to convert", builtInUserToConvert.getIdentifier()); - response.add("existing user found by email (prompt to convert)", existing); - response.add("changing to this provider", shibProviderId); - response.add("value to overwrite old first name", overwriteFirstName); - response.add("value to overwrite old last name", overwriteLastName); - response.add("value to overwrite old email address", overwriteEmail); - if (overwriteAffiliation != null) { - response.add("affiliation", overwriteAffiliation); - } - response.add("problems", problems); - return ok(response); - } - - /** - * This is used in testing via AdminIT.java but we don't expect sysadmins to use - * this. - */ - @PUT - @AuthRequired - @Path("authenticatedUsers/convert/builtin2oauth") - public Response builtin2oauth(@Context ContainerRequestContext crc, String content) { - logger.info("entering builtin2oauth..."); - try { - AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); - if (!userToRunThisMethod.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - boolean disabled = false; - if (disabled) { - return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); - } - AuthenticatedUser builtInUserToConvert = null; - String emailToFind; - String password; - String authuserId = "0"; // could let people specify id on authuser table. probably better to let them - // tell us their - String newEmailAddressToUse; - String newProviderId; - String newPersistentUserIdInLookupTable; - logger.info("content: " + content); - try { - String[] args = content.split(":"); - emailToFind = args[0]; - password = args[1]; - newEmailAddressToUse = args[2]; - newProviderId = args[3]; - newPersistentUserIdInLookupTable = args[4]; - // authuserId = args[666]; - } catch (ArrayIndexOutOfBoundsException ex) { - return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); - } - AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); - String existing = "NOT FOUND"; - if (existingAuthUserFoundByEmail != null) { - builtInUserToConvert = existingAuthUserFoundByEmail; - existing = existingAuthUserFoundByEmail.getIdentifier(); - } else { - long longToLookup = Long.parseLong(authuserId); - AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); - if (specifiedUserToConvert != null) { - builtInUserToConvert = specifiedUserToConvert; - } else { - return error(Response.Status.BAD_REQUEST, - "No user to convert. 
We couldn't find a *single* existing user account based on " + emailToFind - + " and no user was found using specified id " + longToLookup); - } - } - // String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; - Map randomUser = authTestDataService.getRandomUser(); - // String eppn = UUID.randomUUID().toString().substring(0, 8); - String eppn = randomUser.get("eppn"); - String idPEntityId = randomUser.get("idp"); - String notUsed = null; - String separator = "|"; - // UserIdentifier newUserIdentifierInLookupTable = new - // UserIdentifier(idPEntityId + separator + eppn, notUsed); - UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(newPersistentUserIdInLookupTable, notUsed); - String overwriteFirstName = randomUser.get("firstName"); - String overwriteLastName = randomUser.get("lastName"); - String overwriteEmail = randomUser.get("email"); - overwriteEmail = newEmailAddressToUse; - logger.info("overwriteEmail: " + overwriteEmail); - boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); - if (!validEmail) { - // See https://github.com/IQSS/dataverse/issues/2998 - return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); - } - /** - * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo - * constructor. - */ - /** - * Here we are exercising (via an API test) shibService.getAffiliation with the - * TestShib IdP and a non-production DevShibAccountType. - */ - // idPEntityId = ShibUtil.testShibIdpEntityId; - // String overwriteAffiliation = shibService.getAffiliation(idPEntityId, - // ShibServiceBean.DevShibAccountType.RANDOM); - String overwriteAffiliation = null; - logger.info("overwriteAffiliation: " + overwriteAffiliation); - /** - * @todo Find a place to put "position" in the authenticateduser table: - * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 - */ - String overwritePosition = "staff;student"; - AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, - overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); - JsonObjectBuilder response = Json.createObjectBuilder(); - JsonArrayBuilder problems = Json.createArrayBuilder(); - if (password != null) { - response.add("password supplied", password); - boolean knowsExistingPassword = false; - BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); - if (oldBuiltInUser != null) { - String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); - response.add("old username", usernameOfBuiltinAccountToConvert); - AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, - password); - if (authenticatedUser != null) { - knowsExistingPassword = true; - AuthenticatedUser convertedUser = authSvc.convertBuiltInUserToRemoteUser(builtInUserToConvert, - newProviderId, newUserIdentifierInLookupTable); - if (convertedUser != null) { - /** - * @todo Display name is not being overwritten. 
Logic must be in Shib backing - * bean - */ - AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); - if (updatedInfoUser != null) { - response.add("display name overwritten with", updatedInfoUser.getName()); - } else { - problems.add("couldn't update display info"); - } - } else { - problems.add("unable to convert user"); - } - } - } else { - problems.add("couldn't find old username"); - } - if (!knowsExistingPassword) { - String message = "User doesn't know password."; - problems.add(message); - /** - * @todo Someday we should make a errorResponse method that takes JSON arrays - * and objects. - */ - return error(Status.BAD_REQUEST, problems.build().toString()); - } - // response.add("knows existing password", knowsExistingPassword); - } - - response.add("user to convert", builtInUserToConvert.getIdentifier()); - response.add("existing user found by email (prompt to convert)", existing); - response.add("changing to this provider", newProviderId); - response.add("value to overwrite old first name", overwriteFirstName); - response.add("value to overwrite old last name", overwriteLastName); - response.add("value to overwrite old email address", overwriteEmail); - if (overwriteAffiliation != null) { - response.add("affiliation", overwriteAffiliation); - } - response.add("problems", problems); - return ok(response); - } - - - - - @Path("roles") - @POST - public Response createNewBuiltinRole(RoleDTO roleDto) { - ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "createBuiltInRole") - .setInfo(roleDto.getAlias() + ":" + roleDto.getDescription()); - try { - return ok(json(rolesSvc.save(roleDto.asRole()))); - } catch (Exception e) { - alr.setActionResult(ActionLogRecord.Result.InternalError); - alr.setInfo(alr.getInfo() + "// " + e.getMessage()); - return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); - } finally { - actionLogSvc.log(alr); - } - } - - @Path("roles") - @GET - public Response listBuiltinRoles() { - try { - return ok(rolesToJson(rolesSvc.findBuiltinRoles())); - } catch (Exception e) { - return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); - } - } + String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); + response.add("old username", usernameOfBuiltinAccountToConvert); + AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, + password); + if (authenticatedUser != null) { + knowsExistingPassword = true; + AuthenticatedUser convertedUser = authSvc.convertBuiltInToShib(builtInUserToConvert, shibProviderId, + newUserIdentifierInLookupTable); + if (convertedUser != null) { + /** + * @todo Display name is not being overwritten. Logic must be in Shib backing + * bean + */ + AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); + if (updatedInfoUser != null) { + response.add("display name overwritten with", updatedInfoUser.getName()); + } else { + problems.add("couldn't update display info"); + } + } else { + problems.add("unable to convert user"); + } + } + } else { + problems.add("couldn't find old username"); + } + if (!knowsExistingPassword) { + String message = "User doesn't know password."; + problems.add(message); + /** + * @todo Someday we should make a errorResponse method that takes JSON arrays + * and objects. 
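 * (Until such a method exists, the accumulated problems are flattened with
 * problems.build().toString(), as in the return statement below.)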
+ */ + return error(Status.BAD_REQUEST, problems.build().toString()); + } + // response.add("knows existing password", knowsExistingPassword); + } + + response.add("user to convert", builtInUserToConvert.getIdentifier()); + response.add("existing user found by email (prompt to convert)", existing); + response.add("changing to this provider", shibProviderId); + response.add("value to overwrite old first name", overwriteFirstName); + response.add("value to overwrite old last name", overwriteLastName); + response.add("value to overwrite old email address", overwriteEmail); + if (overwriteAffiliation != null) { + response.add("affiliation", overwriteAffiliation); + } + response.add("problems", problems); + return ok(response); + } + + /** + * This is used in testing via AdminIT.java but we don't expect sysadmins to use + * this. + */ + @PUT + @AuthRequired + @Path("authenticatedUsers/convert/builtin2oauth") + public Response builtin2oauth(@Context ContainerRequestContext crc, String content) { + logger.info("entering builtin2oauth..."); + try { + AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc); + if (!userToRunThisMethod.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + boolean disabled = false; + if (disabled) { + return error(Response.Status.BAD_REQUEST, "API endpoint disabled."); + } + AuthenticatedUser builtInUserToConvert = null; + String emailToFind; + String password; + String authuserId = "0"; // could let people specify id on authuser table. probably better to let them + // tell us their + String newEmailAddressToUse; + String newProviderId; + String newPersistentUserIdInLookupTable; + logger.info("content: " + content); + try { + String[] args = content.split(":"); + emailToFind = args[0]; + password = args[1]; + newEmailAddressToUse = args[2]; + newProviderId = args[3]; + newPersistentUserIdInLookupTable = args[4]; + // authuserId = args[666]; + } catch (ArrayIndexOutOfBoundsException ex) { + return error(Response.Status.BAD_REQUEST, "Problem with content <<<" + content + ">>>: " + ex.toString()); + } + AuthenticatedUser existingAuthUserFoundByEmail = shibService.findAuthUserByEmail(emailToFind); + String existing = "NOT FOUND"; + if (existingAuthUserFoundByEmail != null) { + builtInUserToConvert = existingAuthUserFoundByEmail; + existing = existingAuthUserFoundByEmail.getIdentifier(); + } else { + long longToLookup = Long.parseLong(authuserId); + AuthenticatedUser specifiedUserToConvert = authSvc.findByID(longToLookup); + if (specifiedUserToConvert != null) { + builtInUserToConvert = specifiedUserToConvert; + } else { + return error(Response.Status.BAD_REQUEST, + "No user to convert. 
We couldn't find a *single* existing user account based on " + emailToFind + + " and no user was found using specified id " + longToLookup); + } + } + // String shibProviderId = ShibAuthenticationProvider.PROVIDER_ID; + Map randomUser = authTestDataService.getRandomUser(); + // String eppn = UUID.randomUUID().toString().substring(0, 8); + String eppn = randomUser.get("eppn"); + String idPEntityId = randomUser.get("idp"); + String notUsed = null; + String separator = "|"; + // UserIdentifier newUserIdentifierInLookupTable = new + // UserIdentifier(idPEntityId + separator + eppn, notUsed); + UserIdentifier newUserIdentifierInLookupTable = new UserIdentifier(newPersistentUserIdInLookupTable, notUsed); + String overwriteFirstName = randomUser.get("firstName"); + String overwriteLastName = randomUser.get("lastName"); + String overwriteEmail = randomUser.get("email"); + overwriteEmail = newEmailAddressToUse; + logger.info("overwriteEmail: " + overwriteEmail); + boolean validEmail = EMailValidator.isEmailValid(overwriteEmail); + if (!validEmail) { + // See https://github.com/IQSS/dataverse/issues/2998 + return error(Response.Status.BAD_REQUEST, "invalid email: " + overwriteEmail); + } + /** + * @todo If affiliation is not null, put it in RoleAssigneeDisplayInfo + * constructor. + */ + /** + * Here we are exercising (via an API test) shibService.getAffiliation with the + * TestShib IdP and a non-production DevShibAccountType. + */ + // idPEntityId = ShibUtil.testShibIdpEntityId; + // String overwriteAffiliation = shibService.getAffiliation(idPEntityId, + // ShibServiceBean.DevShibAccountType.RANDOM); + String overwriteAffiliation = null; + logger.info("overwriteAffiliation: " + overwriteAffiliation); + /** + * @todo Find a place to put "position" in the authenticateduser table: + * https://github.com/IQSS/dataverse/issues/1444#issuecomment-74134694 + */ + String overwritePosition = "staff;student"; + AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(overwriteFirstName, + overwriteLastName, overwriteEmail, overwriteAffiliation, overwritePosition); + JsonObjectBuilder response = Json.createObjectBuilder(); + JsonArrayBuilder problems = Json.createArrayBuilder(); + if (password != null) { + response.add("password supplied", password); + boolean knowsExistingPassword = false; + BuiltinUser oldBuiltInUser = builtinUserService.findByUserName(builtInUserToConvert.getUserIdentifier()); + if (oldBuiltInUser != null) { + String usernameOfBuiltinAccountToConvert = oldBuiltInUser.getUserName(); + response.add("old username", usernameOfBuiltinAccountToConvert); + AuthenticatedUser authenticatedUser = authSvc.canLogInAsBuiltinUser(usernameOfBuiltinAccountToConvert, + password); + if (authenticatedUser != null) { + knowsExistingPassword = true; + AuthenticatedUser convertedUser = authSvc.convertBuiltInUserToRemoteUser(builtInUserToConvert, + newProviderId, newUserIdentifierInLookupTable); + if (convertedUser != null) { + /** + * @todo Display name is not being overwritten. 
Logic must be in Shib backing + * bean + */ + AuthenticatedUser updatedInfoUser = authSvc.updateAuthenticatedUser(convertedUser, displayInfo); + if (updatedInfoUser != null) { + response.add("display name overwritten with", updatedInfoUser.getName()); + } else { + problems.add("couldn't update display info"); + } + } else { + problems.add("unable to convert user"); + } + } + } else { + problems.add("couldn't find old username"); + } + if (!knowsExistingPassword) { + String message = "User doesn't know password."; + problems.add(message); + /** + * @todo Someday we should make a errorResponse method that takes JSON arrays + * and objects. + */ + return error(Status.BAD_REQUEST, problems.build().toString()); + } + // response.add("knows existing password", knowsExistingPassword); + } + + response.add("user to convert", builtInUserToConvert.getIdentifier()); + response.add("existing user found by email (prompt to convert)", existing); + response.add("changing to this provider", newProviderId); + response.add("value to overwrite old first name", overwriteFirstName); + response.add("value to overwrite old last name", overwriteLastName); + response.add("value to overwrite old email address", overwriteEmail); + if (overwriteAffiliation != null) { + response.add("affiliation", overwriteAffiliation); + } + response.add("problems", problems); + return ok(response); + } + + + + + @Path("roles") + @POST + public Response createNewBuiltinRole(RoleDTO roleDto) { + ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "createBuiltInRole") + .setInfo(roleDto.getAlias() + ":" + roleDto.getDescription()); + try { + return ok(json(rolesSvc.save(roleDto.asRole()))); + } catch (Exception e) { + alr.setActionResult(ActionLogRecord.Result.InternalError); + alr.setInfo(alr.getInfo() + "// " + e.getMessage()); + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } finally { + actionLogSvc.log(alr); + } + } + + @Path("roles") + @GET + public Response listBuiltinRoles() { + try { + return ok(rolesToJson(rolesSvc.findBuiltinRoles())); + } catch (Exception e) { + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } + } @DELETE - @AuthRequired + @AuthRequired @Path("roles/{id}") public Response deleteRole(@Context ContainerRequestContext crc, @PathParam("id") String id) { @@ -1284,77 +1264,77 @@ public void write(OutputStream os) throws IOException, return Response.ok(stream).build(); } - @Path("assignments/assignees/{raIdtf: .*}") - @GET - public Response getAssignmentsFor(@PathParam("raIdtf") String raIdtf) { - - JsonArrayBuilder arr = Json.createArrayBuilder(); - roleAssigneeSvc.getAssignmentsFor(raIdtf).forEach(a -> arr.add(json(a))); - - return ok(arr); - } - - /** - * This method is used in integration tests. - * - * @param userId - * The database id of an AuthenticatedUser. - * @return The confirm email token. - */ - @Path("confirmEmail/{userId}") - @GET - public Response getConfirmEmailToken(@PathParam("userId") long userId) { - AuthenticatedUser user = authSvc.findByID(userId); - if (user != null) { - ConfirmEmailData confirmEmailData = confirmEmailSvc.findSingleConfirmEmailDataByUser(user); - if (confirmEmailData != null) { - return ok(Json.createObjectBuilder().add("token", confirmEmailData.getToken())); - } - } - return error(Status.BAD_REQUEST, "Could not find confirm email token for user " + userId); - } - - /** - * This method is used in integration tests. - * - * @param userId - * The database id of an AuthenticatedUser. 
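 * On success the response includes the token creation time ("tokenCreated") and the user's
 * identifier; an unknown id, or a ConfirmEmailException while beginning the process, yields a
 * 400 response.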
- */ - @Path("confirmEmail/{userId}") - @POST - public Response startConfirmEmailProcess(@PathParam("userId") long userId) { - AuthenticatedUser user = authSvc.findByID(userId); - if (user != null) { - try { - ConfirmEmailInitResponse confirmEmailInitResponse = confirmEmailSvc.beginConfirm(user); - ConfirmEmailData confirmEmailData = confirmEmailInitResponse.getConfirmEmailData(); - return ok(Json.createObjectBuilder().add("tokenCreated", confirmEmailData.getCreated().toString()) - .add("identifier", user.getUserIdentifier())); - } catch (ConfirmEmailException ex) { - return error(Status.BAD_REQUEST, - "Could not start confirm email process for user " + userId + ": " + ex.getLocalizedMessage()); - } - } - return error(Status.BAD_REQUEST, "Could not find user based on " + userId); - } - - /** - * This method is used by an integration test in UsersIT.java to exercise bug - * https://github.com/IQSS/dataverse/issues/3287 . Not for use by users! - */ - @Path("convertUserFromBcryptToSha1") - @POST - public Response convertUserFromBcryptToSha1(String json) { - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject object = jsonReader.readObject(); - jsonReader.close(); - BuiltinUser builtinUser = builtinUserService.find(new Long(object.getInt("builtinUserId"))); - builtinUser.updateEncryptedPassword("4G7xxL9z11/JKN4jHPn4g9iIQck=", 0); // password is "sha-1Pass", 0 means - // SHA-1 - BuiltinUser savedUser = builtinUserService.save(builtinUser); - return ok("foo: " + savedUser); - - } + @Path("assignments/assignees/{raIdtf: .*}") + @GET + public Response getAssignmentsFor(@PathParam("raIdtf") String raIdtf) { + + JsonArrayBuilder arr = Json.createArrayBuilder(); + roleAssigneeSvc.getAssignmentsFor(raIdtf).forEach(a -> arr.add(json(a))); + + return ok(arr); + } + + /** + * This method is used in integration tests. + * + * @param userId + * The database id of an AuthenticatedUser. + * @return The confirm email token. + */ + @Path("confirmEmail/{userId}") + @GET + public Response getConfirmEmailToken(@PathParam("userId") long userId) { + AuthenticatedUser user = authSvc.findByID(userId); + if (user != null) { + ConfirmEmailData confirmEmailData = confirmEmailSvc.findSingleConfirmEmailDataByUser(user); + if (confirmEmailData != null) { + return ok(Json.createObjectBuilder().add("token", confirmEmailData.getToken())); + } + } + return error(Status.BAD_REQUEST, "Could not find confirm email token for user " + userId); + } + + /** + * This method is used in integration tests. + * + * @param userId + * The database id of an AuthenticatedUser. + */ + @Path("confirmEmail/{userId}") + @POST + public Response startConfirmEmailProcess(@PathParam("userId") long userId) { + AuthenticatedUser user = authSvc.findByID(userId); + if (user != null) { + try { + ConfirmEmailInitResponse confirmEmailInitResponse = confirmEmailSvc.beginConfirm(user); + ConfirmEmailData confirmEmailData = confirmEmailInitResponse.getConfirmEmailData(); + return ok(Json.createObjectBuilder().add("tokenCreated", confirmEmailData.getCreated().toString()) + .add("identifier", user.getUserIdentifier())); + } catch (ConfirmEmailException ex) { + return error(Status.BAD_REQUEST, + "Could not start confirm email process for user " + userId + ": " + ex.getLocalizedMessage()); + } + } + return error(Status.BAD_REQUEST, "Could not find user based on " + userId); + } + + /** + * This method is used by an integration test in UsersIT.java to exercise bug + * https://github.com/IQSS/dataverse/issues/3287 . Not for use by users! 
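 * It expects a JSON body with a "builtinUserId" field and overwrites that builtin user's stored
 * password hash with a fixed SHA-1 value (the hash of the test password "sha-1Pass").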
+ */ + @Path("convertUserFromBcryptToSha1") + @POST + public Response convertUserFromBcryptToSha1(String json) { + JsonReader jsonReader = Json.createReader(new StringReader(json)); + JsonObject object = jsonReader.readObject(); + jsonReader.close(); + BuiltinUser builtinUser = builtinUserService.find(new Long(object.getInt("builtinUserId"))); + builtinUser.updateEncryptedPassword("4G7xxL9z11/JKN4jHPn4g9iIQck=", 0); // password is "sha-1Pass", 0 means + // SHA-1 + BuiltinUser savedUser = builtinUserService.save(builtinUser); + return ok("foo: " + savedUser); + + } @Path("permissions/{dvo}") @AuthRequired @@ -1375,43 +1355,43 @@ public Response findPermissonsOn(@Context final ContainerRequestContext crc, @Pa } } - @Path("assignee/{idtf}") - @GET - public Response findRoleAssignee(@PathParam("idtf") String idtf) { - RoleAssignee ra = roleAssigneeSvc.getRoleAssignee(idtf); - return (ra == null) ? notFound("Role Assignee '" + idtf + "' not found.") : ok(json(ra.getDisplayInfo())); - } - - @Path("datasets/integrity/{datasetVersionId}/fixmissingunf") - @POST - public Response fixUnf(@PathParam("datasetVersionId") String datasetVersionId, - @QueryParam("forceRecalculate") boolean forceRecalculate) { - JsonObjectBuilder info = datasetVersionSvc.fixMissingUnf(datasetVersionId, forceRecalculate); - return ok(info); - } - - @Path("datafiles/integrity/fixmissingoriginaltypes") - @GET - public Response fixMissingOriginalTypes() { - JsonObjectBuilder info = Json.createObjectBuilder(); - - List affectedFileIds = fileService.selectFilesWithMissingOriginalTypes(); - - if (affectedFileIds.isEmpty()) { - info.add("message", - "All the tabular files in the database already have the original types set correctly; exiting."); - } else { - for (Long fileid : affectedFileIds) { - logger.fine("found file id: " + fileid); - } - info.add("message", "Found " + affectedFileIds.size() - + " tabular files with missing original types. Kicking off an async job that will repair the files in the background."); - } - - ingestService.fixMissingOriginalTypes(affectedFileIds); - - return ok(info); - } + @Path("assignee/{idtf}") + @GET + public Response findRoleAssignee(@PathParam("idtf") String idtf) { + RoleAssignee ra = roleAssigneeSvc.getRoleAssignee(idtf); + return (ra == null) ? notFound("Role Assignee '" + idtf + "' not found.") : ok(json(ra.getDisplayInfo())); + } + + @Path("datasets/integrity/{datasetVersionId}/fixmissingunf") + @POST + public Response fixUnf(@PathParam("datasetVersionId") String datasetVersionId, + @QueryParam("forceRecalculate") boolean forceRecalculate) { + JsonObjectBuilder info = datasetVersionSvc.fixMissingUnf(datasetVersionId, forceRecalculate); + return ok(info); + } + + @Path("datafiles/integrity/fixmissingoriginaltypes") + @GET + public Response fixMissingOriginalTypes() { + JsonObjectBuilder info = Json.createObjectBuilder(); + + List affectedFileIds = fileService.selectFilesWithMissingOriginalTypes(); + + if (affectedFileIds.isEmpty()) { + info.add("message", + "All the tabular files in the database already have the original types set correctly; exiting."); + } else { + for (Long fileid : affectedFileIds) { + logger.fine("found file id: " + fileid); + } + info.add("message", "Found " + affectedFileIds.size() + + " tabular files with missing original types. 
Kicking off an async job that will repair the files in the background."); + } + + ingestService.fixMissingOriginalTypes(affectedFileIds); + + return ok(info); + } @Path("datafiles/integrity/fixmissingoriginalsizes") @GET @@ -1441,60 +1421,60 @@ public Response fixMissingOriginalSizes(@QueryParam("limit") Integer limit) { return ok(info); } - /** - * This method is used in API tests, called from UtilIt.java. - */ - @GET - @Path("datasets/thumbnailMetadata/{id}") - public Response getDatasetThumbnailMetadata(@PathParam("id") Long idSupplied) { - Dataset dataset = datasetSvc.find(idSupplied); - if (dataset == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + "."); - } - JsonObjectBuilder data = Json.createObjectBuilder(); - DatasetThumbnail datasetThumbnail = dataset.getDatasetThumbnail(ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); - data.add("isUseGenericThumbnail", dataset.isUseGenericThumbnail()); - data.add("datasetLogoPresent", DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)); - if (datasetThumbnail != null) { - data.add("datasetThumbnailBase64image", datasetThumbnail.getBase64image()); - DataFile dataFile = datasetThumbnail.getDataFile(); - if (dataFile != null) { - /** - * @todo Change this from a String to a long. - */ - data.add("dataFileId", dataFile.getId().toString()); - } - } - return ok(data); - } - - /** - * validatePassword - *

- * Validate a password with an API call - * - * @param password - * The password - * @return A response with the validation result. - */ - @Path("validatePassword") - @POST - public Response validatePassword(String password) { - - final List errors = passwordValidatorService.validate(password, new Date(), false); - final JsonArrayBuilder errorArray = Json.createArrayBuilder(); - errors.forEach(errorArray::add); - return ok(Json.createObjectBuilder().add("password", password).add("errors", errorArray)); - } - - @GET - @Path("/isOrcid") - public Response isOrcidEnabled() { - return authSvc.isOrcidEnabled() ? ok("Orcid is enabled") : ok("no orcid for you."); - } + /** + * This method is used in API tests, called from UtilIt.java. + */ + @GET + @Path("datasets/thumbnailMetadata/{id}") + public Response getDatasetThumbnailMetadata(@PathParam("id") Long idSupplied) { + Dataset dataset = datasetSvc.find(idSupplied); + if (dataset == null) { + return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + "."); + } + JsonObjectBuilder data = Json.createObjectBuilder(); + DatasetThumbnail datasetThumbnail = dataset.getDatasetThumbnail(ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); + data.add("isUseGenericThumbnail", dataset.isUseGenericThumbnail()); + data.add("datasetLogoPresent", DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)); + if (datasetThumbnail != null) { + data.add("datasetThumbnailBase64image", datasetThumbnail.getBase64image()); + DataFile dataFile = datasetThumbnail.getDataFile(); + if (dataFile != null) { + /** + * @todo Change this from a String to a long. + */ + data.add("dataFileId", dataFile.getId().toString()); + } + } + return ok(data); + } + + /** + * validatePassword + *

+ * Validate a password with an API call + * + * @param password + * The password + * @return A response with the validation result. + */ + @Path("validatePassword") + @POST + public Response validatePassword(String password) { + + final List errors = passwordValidatorService.validate(password, new Date(), false); + final JsonArrayBuilder errorArray = Json.createArrayBuilder(); + errors.forEach(errorArray::add); + return ok(Json.createObjectBuilder().add("password", password).add("errors", errorArray)); + } + + @GET + @Path("/isOrcid") + public Response isOrcidEnabled() { + return authSvc.isOrcidEnabled() ? ok("Orcid is enabled") : ok("no orcid for you."); + } @POST - @AuthRequired + @AuthRequired @Path("{id}/reregisterHDLToPID") public Response reregisterHdlToPID(@Context ContainerRequestContext crc, @PathParam("id") String id) { logger.info("Starting to reregister " + id + " Dataset Id. (from hdl to doi)" + new Date()); @@ -1825,7 +1805,7 @@ public Response updateHashValues(@Context ContainerRequestContext crc, @PathPara } @POST - @AuthRequired + @AuthRequired @Path("/computeDataFileHashValue/{fileId}/algorithm/{alg}") public Response computeDataFileHashValue(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId, @PathParam("alg") String alg) { @@ -1887,7 +1867,7 @@ public Response computeDataFileHashValue(@Context ContainerRequestContext crc, @ } @POST - @AuthRequired + @AuthRequired @Path("/validateDataFileHashValue/{fileId}") public Response validateDataFileHashValue(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId) { @@ -1954,7 +1934,7 @@ public Response validateDataFileHashValue(@Context ContainerRequestContext crc, } @POST - @AuthRequired + @AuthRequired @Path("/submitDatasetVersionToArchive/{id}/{version}") public Response submitDatasetVersionToArchive(@Context ContainerRequestContext crc, @PathParam("id") String dsid, @PathParam("version") String versionNumber) { @@ -2027,7 +2007,7 @@ public void run() { * @return */ @POST - @AuthRequired + @AuthRequired @Path("/archiveAllUnarchivedDatasetVersions") public Response archiveAllUnarchivedDatasetVersions(@Context ContainerRequestContext crc, @QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit, @QueryParam("latestonly") boolean latestonly) { @@ -2126,7 +2106,7 @@ public Response clearMetricsCacheByName(@PathParam("name") String name) { } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/addRoleAssignmentsToChildren") public Response addRoleAssignementsToChildren(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { Dataverse owner = dataverseSvc.findByAlias(alias); @@ -2157,90 +2137,90 @@ public Response addRoleAssignementsToChildren(@Context ContainerRequestContext c } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/storageDriver") public Response getStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { - Dataverse dataverse = dataverseSvc.findByAlias(alias); - if (dataverse == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); - } - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - //Note that this returns what's set directly on this dataverse. 
If null/DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER, the user would have to recurse the chain of parents to find the effective storageDriver - return ok(dataverse.getStorageDriverId()); + Dataverse dataverse = dataverseSvc.findByAlias(alias); + if (dataverse == null) { + return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); + } + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + //Note that this returns what's set directly on this dataverse. If null/DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER, the user would have to recurse the chain of parents to find the effective storageDriver + return ok(dataverse.getStorageDriverId()); } @PUT - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/storageDriver") public Response setStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias, String label) throws WrappedResponse { - Dataverse dataverse = dataverseSvc.findByAlias(alias); - if (dataverse == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); - } - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - for (Entry store: DataAccess.getStorageDriverLabels().entrySet()) { - if(store.getKey().equals(label)) { - dataverse.setStorageDriverId(store.getValue()); - return ok("Storage set to: " + store.getKey() + "/" + store.getValue()); - } - } - return error(Response.Status.BAD_REQUEST, - "No Storage Driver found for : " + label); + Dataverse dataverse = dataverseSvc.findByAlias(alias); + if (dataverse == null) { + return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); + } + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + for (Entry store: DataAccess.getStorageDriverLabels().entrySet()) { + if(store.getKey().equals(label)) { + dataverse.setStorageDriverId(store.getValue()); + return ok("Storage set to: " + store.getKey() + "/" + store.getValue()); + } + } + return error(Response.Status.BAD_REQUEST, + "No Storage Driver found for : " + label); } @DELETE - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/storageDriver") public Response resetStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { - Dataverse dataverse = dataverseSvc.findByAlias(alias); - if (dataverse == null) { - return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); - } - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - dataverse.setStorageDriverId(""); - return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + Dataverse dataverse = dataverseSvc.findByAlias(alias); + if (dataverse == null) { + return error(Response.Status.NOT_FOUND, "Could not find dataverse 
based on alias supplied: " + alias + "."); + } + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + dataverse.setStorageDriverId(""); + return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/storageDrivers") public Response listStorageDrivers(@Context ContainerRequestContext crc) throws WrappedResponse { - try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); - if (!user.isSuperuser()) { - return error(Response.Status.FORBIDDEN, "Superusers only."); - } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - JsonObjectBuilder bld = jsonObjectBuilder(); - DataAccess.getStorageDriverLabels().entrySet().forEach(s -> bld.add(s.getKey(), s.getValue())); - return ok(bld); + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + JsonObjectBuilder bld = jsonObjectBuilder(); + DataAccess.getStorageDriverLabels().entrySet().forEach(s -> bld.add(s.getKey(), s.getValue())); + return ok(bld); } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/curationLabelSet") public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { Dataverse dataverse = dataverseSvc.findByAlias(alias); @@ -2262,7 +2242,7 @@ public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathP } @PUT - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/curationLabelSet") public Response setCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias, @QueryParam("name") String name) throws WrappedResponse { Dataverse dataverse = dataverseSvc.findByAlias(alias); @@ -2293,7 +2273,7 @@ public Response setCurationLabelSet(@Context ContainerRequestContext crc, @PathP } @DELETE - @AuthRequired + @AuthRequired @Path("/dataverse/{alias}/curationLabelSet") public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { Dataverse dataverse = dataverseSvc.findByAlias(alias); @@ -2313,7 +2293,7 @@ public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @Pat } @GET - @AuthRequired + @AuthRequired @Path("/dataverse/curationLabelSets") public Response listCurationLabelSets(@Context ContainerRequestContext crc) throws WrappedResponse { try { @@ -2423,7 +2403,7 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon } @POST - @AuthRequired + @AuthRequired @Consumes("application/json") @Path("/requestSignedUrl") public Response getSignedUrl(@Context ContainerRequestContext crc, JsonObject urlInfo) { @@ -2541,4 +2521,160 @@ public Response getFeatureFlag(@PathParam("flag") String flagIn) { } } + @GET + @AuthRequired + @Path("/datafiles/auditFiles") + public Response getAuditFiles(@Context ContainerRequestContext crc, + @QueryParam("firstId") Long firstId, @QueryParam("lastId") Long lastId, + @QueryParam("datasetIdentifierList") String datasetIdentifierList) throws WrappedResponse { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers 
only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + int datasetsChecked = 0; + long startId = (firstId == null ? 0 : firstId); + long endId = (lastId == null ? Long.MAX_VALUE : lastId); + + List datasetIdentifiers; + if (datasetIdentifierList == null || datasetIdentifierList.isEmpty()) { + datasetIdentifiers = Collections.emptyList(); + } else { + startId = 0; + endId = Long.MAX_VALUE; + datasetIdentifiers = List.of(datasetIdentifierList.split(",")); + } + if (endId < startId) { + return badRequest("Invalid Parameters: lastId must be equal to or greater than firstId"); + } + + NullSafeJsonBuilder jsonObjectBuilder = NullSafeJsonBuilder.jsonObjectBuilder(); + JsonArrayBuilder jsonDatasetsArrayBuilder = Json.createArrayBuilder(); + JsonArrayBuilder jsonFailuresArrayBuilder = Json.createArrayBuilder(); + + if (startId > 0) { + jsonObjectBuilder.add("firstId", startId); + } + if (endId < Long.MAX_VALUE) { + jsonObjectBuilder.add("lastId", endId); + } + + // compile the list of ids to process + List datasetIds; + if (datasetIdentifiers.isEmpty()) { + datasetIds = datasetService.findAllLocalDatasetIds(); + } else { + datasetIds = new ArrayList<>(datasetIdentifiers.size()); + JsonArrayBuilder jab = Json.createArrayBuilder(); + datasetIdentifiers.forEach(id -> { + String dId = id.trim(); + jab.add(dId); + Dataset d = datasetService.findByGlobalId(dId); + if (d != null) { + datasetIds.add(d.getId()); + } else { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("datasetIdentifier",dId); + job.add("reason","Not Found"); + jsonFailuresArrayBuilder.add(job); + } + }); + jsonObjectBuilder.add("datasetIdentifierList", jab); + } + + for (Long datasetId : datasetIds) { + if (datasetId < startId) { + continue; + } else if (datasetId > endId) { + break; + } + Dataset dataset; + try { + dataset = findDatasetOrDie(String.valueOf(datasetId)); + datasetsChecked++; + } catch (WrappedResponse e) { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("datasetId", datasetId); + job.add("reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); + continue; + } + + List missingFiles = new ArrayList<>(); + List missingFileMetadata = new ArrayList<>(); + try { + Predicate filter = s -> true; + StorageIO datasetIO = DataAccess.getStorageIO(dataset); + final List result = datasetIO.cleanUp(filter, true); + // add files that are in dataset files but not in cleanup result or DataFiles with missing FileMetadata + dataset.getFiles().forEach(df -> { + try { + StorageIO datafileIO = df.getStorageIO(); + String storageId = df.getStorageIdentifier(); + FileMetadata fm = df.getFileMetadata(); + if (!datafileIO.exists()) { + missingFiles.add(storageId + "," + (fm != null ? + (fm.getDirectoryLabel() != null || !fm.getDirectoryLabel().isEmpty() ? 
"directoryLabel,"+fm.getDirectoryLabel()+"," : "") + +"label,"+fm.getLabel() : "type,"+df.getContentType())); + } + if (fm == null) { + missingFileMetadata.add(storageId + ",dataFileId," + df.getId()); + } + } catch (IOException e) { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("dataFileId", df.getId()); + job.add("reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); + } + }); + } catch (IOException e) { + NullSafeJsonBuilder job = NullSafeJsonBuilder.jsonObjectBuilder(); + job.add("datasetId", datasetId); + job.add("reason", e.getMessage()); + jsonFailuresArrayBuilder.add(job); + } + + JsonObjectBuilder job = Json.createObjectBuilder(); + if (!missingFiles.isEmpty() || !missingFileMetadata.isEmpty()) { + job.add("id", dataset.getId()); + job.add("pid", dataset.getProtocol() + ":" + dataset.getAuthority() + "/" + dataset.getIdentifier()); + job.add("persistentURL", dataset.getPersistentURL()); + if (!missingFileMetadata.isEmpty()) { + JsonArrayBuilder jabMissingFileMetadata = Json.createArrayBuilder(); + missingFileMetadata.forEach(mm -> { + String[] missingMetadata = mm.split(","); + NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder() + .add("storageIdentifier", missingMetadata[0]) + .add(missingMetadata[1], missingMetadata[2]); + jabMissingFileMetadata.add(jobj); + }); + job.add("missingFileMetadata", jabMissingFileMetadata); + } + if (!missingFiles.isEmpty()) { + JsonArrayBuilder jabMissingFiles = Json.createArrayBuilder(); + missingFiles.forEach(mf -> { + String[] missingFile = mf.split(","); + NullSafeJsonBuilder jobj = NullSafeJsonBuilder.jsonObjectBuilder() + .add("storageIdentifier", missingFile[0]); + for (int i = 2; i < missingFile.length; i+=2) { + jobj.add(missingFile[i-1], missingFile[i]); + } + jabMissingFiles.add(jobj); + }); + job.add("missingFiles", jabMissingFiles); + } + jsonDatasetsArrayBuilder.add(job); + } + } + + jsonObjectBuilder.add("datasetsChecked", datasetsChecked); + jsonObjectBuilder.add("datasets", jsonDatasetsArrayBuilder); + jsonObjectBuilder.add("failures", jsonFailuresArrayBuilder); + + return ok(jsonObjectBuilder); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java index 01c51dc2b4c..907295ad848 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java @@ -126,7 +126,7 @@ public Response getByName(@PathParam("name") String name) { String solrFieldSearchable = dsf.getSolrField().getNameSearchable(); String solrFieldFacetable = dsf.getSolrField().getNameFacetable(); String metadataBlock = dsf.getMetadataBlock().getName(); - String uri=dsf.getUri(); + String uri = dsf.getUri(); boolean hasParent = dsf.isHasParent(); boolean allowsMultiples = dsf.isAllowMultiples(); boolean isRequired = dsf.isRequired(); @@ -243,7 +243,9 @@ public Response loadDatasetFields(File file) { br = new BufferedReader(new FileReader("/" + file)); while ((line = br.readLine()) != null) { lineNumber++; - values = line.split(splitBy); + values = Arrays.stream(line.split(splitBy)) + .map(String::trim) + .toArray(String[]::new); if (values[0].startsWith("#")) { // Header row switch (values[0]) { case "#metadataBlock": @@ -326,7 +328,7 @@ public Response loadDatasetFields(File file) { */ public String getGeneralErrorMessage(HeaderType header, int lineNumber, String message) { List arguments = new ArrayList<>(); - 
arguments.add(header.name()); + arguments.add(header != null ? header.name() : "unknown"); arguments.add(String.valueOf(lineNumber)); arguments.add(message); return BundleUtil.getStringFromBundle("api.admin.datasetfield.load.GeneralErrorMessage", arguments); @@ -334,9 +336,9 @@ public String getGeneralErrorMessage(HeaderType header, int lineNumber, String m /** * Turn ArrayIndexOutOfBoundsException into an informative error message - * @param lineNumber * @param header - * @param e + * @param lineNumber + * @param wrongIndex * @return */ public String getArrayIndexOutOfBoundMessage(HeaderType header, diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index c7657768d16..2ec10816acc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2077,10 +2077,16 @@ public Response getLinks(@Context ContainerRequestContext crc, @PathParam("id") List dvsThatLinkToThisDatasetId = dataverseSvc.findDataversesThatLinkToThisDatasetId(datasetId); JsonArrayBuilder dataversesThatLinkToThisDatasetIdBuilder = Json.createArrayBuilder(); for (Dataverse dataverse : dvsThatLinkToThisDatasetId) { - dataversesThatLinkToThisDatasetIdBuilder.add(dataverse.getAlias() + " (id " + dataverse.getId() + ")"); + JsonObjectBuilder datasetBuilder = Json.createObjectBuilder(); + datasetBuilder.add("id", dataverse.getId()); + datasetBuilder.add("alias", dataverse.getAlias()); + datasetBuilder.add("displayName", dataverse.getDisplayName()); + dataversesThatLinkToThisDatasetIdBuilder.add(datasetBuilder.build()); } JsonObjectBuilder response = Json.createObjectBuilder(); - response.add("dataverses that link to dataset id " + datasetId, dataversesThatLinkToThisDatasetIdBuilder); + response.add("id", datasetId); + response.add("identifier", dataset.getIdentifier()); + response.add("linked-dataverses", dataversesThatLinkToThisDatasetIdBuilder); return ok(response); } catch (WrappedResponse wr) { return wr.getResponse(); @@ -2165,8 +2171,32 @@ public Response getAssignments(@Context ContainerRequestContext crc, @PathParam( @GET @AuthRequired + @Deprecated(forRemoval = true, since = "2024-10-17") @Path("{id}/privateUrl") public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { + return getPreviewUrlData(crc, idSupplied); + } + + @POST + @AuthRequired + @Deprecated(forRemoval = true, since = "2024-10-17") + @Path("{id}/privateUrl") + public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @DefaultValue("false") @QueryParam("anonymizedAccess") boolean anonymizedAccess) { + return createPreviewUrl(crc, idSupplied, anonymizedAccess); + } + + @DELETE + @AuthRequired + @Deprecated(forRemoval = true, since = "2024-10-17") + @Path("{id}/privateUrl") + public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { + return deletePreviewUrl(crc, idSupplied); + } + + @GET + @AuthRequired + @Path("{id}/previewUrl") + public Response getPreviewUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { return response( req -> { PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied))); return (privateUrl != null) ? 
ok(json(privateUrl)) @@ -2176,8 +2206,8 @@ public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathPar @POST @AuthRequired - @Path("{id}/privateUrl") - public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) { + @Path("{id}/previewUrl") + public Response createPreviewUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) { if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) { throw new NotAcceptableException("Anonymized Access not enabled"); } @@ -2188,8 +2218,8 @@ public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathPara @DELETE @AuthRequired - @Path("{id}/privateUrl") - public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { + @Path("{id}/previewUrl") + public Response deletePreviewUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) { return response( req -> { Dataset dataset = findDatasetOrDie(idSupplied); PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset)); @@ -2202,6 +2232,7 @@ public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathPara }, getRequestUser(crc)); } + @GET @AuthRequired @Path("{id}/thumbnail/candidates") @@ -2986,6 +3017,26 @@ public Response cleanStorage(@Context ContainerRequestContext crc, @PathParam("i } + @GET + @AuthRequired + @Path("{id}/versions/{versionId1}/compare/{versionId2}") + public Response getCompareVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, + @PathParam("versionId1") String versionId1, + @PathParam("versionId2") String versionId2, + @Context UriInfo uriInfo, @Context HttpHeaders headers) { + try { + DataverseRequest req = createDataverseRequest(getRequestUser(crc)); + DatasetVersion dsv1 = getDatasetVersionOrDie(req, versionId1, findDatasetOrDie(id), uriInfo, headers); + DatasetVersion dsv2 = getDatasetVersionOrDie(req, versionId2, findDatasetOrDie(id), uriInfo, headers); + if (dsv1.getCreateTime().getTime() > dsv2.getCreateTime().getTime()) { + return error(BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.version.compare.incorrect.order")); + } + return ok(DatasetVersion.compareVersions(dsv1, dsv2)); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + private static Set getDatasetFilenames(Dataset dataset) { Set files = new HashSet<>(); for (DataFile dataFile: dataset.getFiles()) { @@ -4827,6 +4878,33 @@ public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String } return ok(responseJson); } + + @GET + @Path("previewUrlDatasetVersion/{previewUrlToken}") + public Response getPreviewUrlDatasetVersion(@PathParam("previewUrlToken") String previewUrlToken, @QueryParam("returnOwners") boolean returnOwners) { + PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(previewUrlToken); + if (privateUrlUser == null) { + return notFound("Private URL user not found"); + } + boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess(); + String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames); + if(isAnonymizedAccess && anonymizedFieldTypeNames == null) { + throw new NotAcceptableException("Anonymized Access not enabled"); + } + DatasetVersion dsv = 
privateUrlService.getDraftDatasetVersionFromToken(previewUrlToken); + if (dsv == null || dsv.getId() == null) { + return notFound("Dataset version not found"); + } + JsonObjectBuilder responseJson; + if (isAnonymizedAccess) { + List anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s"))); + responseJson = json(dsv, anonymizedFieldTypeNamesList, true, returnOwners); + } else { + responseJson = json(dsv, null, true, returnOwners); + } + return ok(responseJson); + } + @GET @Path("privateUrlDatasetVersion/{privateUrlToken}/citation") @@ -4839,6 +4917,18 @@ public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken" return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found") : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess())); } + + @GET + @Path("previewUrlDatasetVersion/{previewUrlToken}/citation") + public Response getPreviewUrlDatasetVersionCitation(@PathParam("previewUrlToken") String previewUrlToken) { + PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(previewUrlToken); + if (privateUrlUser == null) { + return notFound("Private URL user not found"); + } + DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(previewUrlToken); + return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found") + : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess())); + } @GET @AuthRequired diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 17e3086f184..f864a5a9d1c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -3,12 +3,9 @@ import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.api.datadeposit.SwordServiceBean; -import edu.harvard.iq.dataverse.api.dto.DataverseMetadataBlockFacetDTO; +import edu.harvard.iq.dataverse.api.dto.*; import edu.harvard.iq.dataverse.authorization.DataverseRole; -import edu.harvard.iq.dataverse.api.dto.ExplicitGroupDTO; -import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; -import edu.harvard.iq.dataverse.api.dto.RoleDTO; import edu.harvard.iq.dataverse.api.imports.ImportException; import edu.harvard.iq.dataverse.api.imports.ImportServiceBean; import edu.harvard.iq.dataverse.authorization.Permission; @@ -127,75 +124,158 @@ public Response addRoot(@Context ContainerRequestContext crc, String body) { @Path("{identifier}") public Response addDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String parentIdtf) { Dataverse newDataverse; - JsonObject newDataverseJson; try { - newDataverseJson = JsonUtil.getJsonObject(body); - newDataverse = jsonParser().parseDataverse(newDataverseJson); + newDataverse = parseAndValidateAddDataverseRequestBody(body); } catch (JsonParsingException jpe) { - logger.log(Level.SEVERE, "Json: {0}", body); return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { - logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparsetodataverse"), ex.getMessage())); } try { - JsonObject metadataBlocksJson = 
newDataverseJson.getJsonObject("metadataBlocks"); - List inputLevels = null; - List metadataBlocks = null; - List facetList = null; - if (metadataBlocksJson != null) { - JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels"); - inputLevels = inputLevelsArray != null ? parseInputLevels(inputLevelsArray, newDataverse) : null; - - JsonArray metadataBlockNamesArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); - metadataBlocks = metadataBlockNamesArray != null ? parseNewDataverseMetadataBlocks(metadataBlockNamesArray) : null; - - JsonArray facetIdsArray = metadataBlocksJson.getJsonArray("facetIds"); - facetList = facetIdsArray != null ? parseFacets(facetIdsArray) : null; - } + List inputLevels = parseInputLevels(body, newDataverse); + List metadataBlocks = parseMetadataBlocks(body); + List facets = parseFacets(body); if (!parentIdtf.isEmpty()) { Dataverse owner = findDataverseOrDie(parentIdtf); newDataverse.setOwner(owner); } - // set the dataverse - contact relationship in the contacts - for (DataverseContact dc : newDataverse.getDataverseContacts()) { - dc.setDataverse(newDataverse); - } - AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facetList, inputLevels, metadataBlocks)); + newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facets, inputLevels, metadataBlocks)); return created("/dataverses/" + newDataverse.getAlias(), json(newDataverse)); - } catch (WrappedResponse ww) { - - String error = ConstraintViolationUtil.getErrorStringForConstraintViolations(ww.getCause()); - if (!error.isEmpty()) { - logger.log(Level.INFO, error); - return ww.refineResponse(error); - } - return ww.getResponse(); + } catch (WrappedResponse ww) { + return handleWrappedResponse(ww); } catch (EJBException ex) { - Throwable cause = ex; - StringBuilder sb = new StringBuilder(); - sb.append("Error creating dataverse."); - while (cause.getCause() != null) { - cause = cause.getCause(); - if (cause instanceof ConstraintViolationException) { - sb.append(ConstraintViolationUtil.getErrorStringForConstraintViolations(cause)); - } - } - logger.log(Level.SEVERE, sb.toString()); - return error(Response.Status.INTERNAL_SERVER_ERROR, "Error creating dataverse: " + sb.toString()); + return handleEJBException(ex, "Error creating dataverse."); } catch (Exception ex) { logger.log(Level.SEVERE, "Error creating dataverse", ex); return error(Response.Status.INTERNAL_SERVER_ERROR, "Error creating dataverse: " + ex.getMessage()); + } + } + private Dataverse parseAndValidateAddDataverseRequestBody(String body) throws JsonParsingException, JsonParseException { + try { + JsonObject addDataverseJson = JsonUtil.getJsonObject(body); + return jsonParser().parseDataverse(addDataverseJson); + } catch (JsonParsingException jpe) { + logger.log(Level.SEVERE, "Json: {0}", body); + throw jpe; + } catch (JsonParseException ex) { + logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); + throw ex; } } + @PUT + @AuthRequired + @Path("{identifier}") + public Response updateDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String identifier) { + Dataverse dataverse; + try { + dataverse = findDataverseOrDie(identifier); + } catch (WrappedResponse e) { + return e.getResponse(); + } + + DataverseDTO updatedDataverseDTO; + try { + updatedDataverseDTO = parseAndValidateUpdateDataverseRequestBody(body); + } 
catch (JsonParsingException jpe) { + return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); + } catch (JsonParseException ex) { + return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparsetodataverse"), ex.getMessage())); + } + + try { + List inputLevels = parseInputLevels(body, dataverse); + List metadataBlocks = parseMetadataBlocks(body); + List facets = parseFacets(body); + + AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); + dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO, true)); + return ok(json(dataverse)); + + } catch (WrappedResponse ww) { + return handleWrappedResponse(ww); + } catch (EJBException ex) { + return handleEJBException(ex, "Error updating dataverse."); + } catch (Exception ex) { + logger.log(Level.SEVERE, "Error updating dataverse", ex); + return error(Response.Status.INTERNAL_SERVER_ERROR, "Error updating dataverse: " + ex.getMessage()); + } + } + + private DataverseDTO parseAndValidateUpdateDataverseRequestBody(String body) throws JsonParsingException, JsonParseException { + try { + JsonObject updateDataverseJson = JsonUtil.getJsonObject(body); + return jsonParser().parseDataverseDTO(updateDataverseJson); + } catch (JsonParsingException jpe) { + logger.log(Level.SEVERE, "Json: {0}", body); + throw jpe; + } catch (JsonParseException ex) { + logger.log(Level.SEVERE, "Error parsing DataverseDTO from json: " + ex.getMessage(), ex); + throw ex; + } + } + + private List parseInputLevels(String body, Dataverse dataverse) throws WrappedResponse { + JsonObject metadataBlocksJson = getMetadataBlocksJson(body); + if (metadataBlocksJson == null) { + return null; + } + JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels"); + return inputLevelsArray != null ? parseInputLevels(inputLevelsArray, dataverse) : null; + } + + private List parseMetadataBlocks(String body) throws WrappedResponse { + JsonObject metadataBlocksJson = getMetadataBlocksJson(body); + if (metadataBlocksJson == null) { + return null; + } + JsonArray metadataBlocksArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); + return metadataBlocksArray != null ? parseNewDataverseMetadataBlocks(metadataBlocksArray) : null; + } + + private List parseFacets(String body) throws WrappedResponse { + JsonObject metadataBlocksJson = getMetadataBlocksJson(body); + if (metadataBlocksJson == null) { + return null; + } + JsonArray facetsArray = metadataBlocksJson.getJsonArray("facetIds"); + return facetsArray != null ? 
parseFacets(facetsArray) : null; + } + + private JsonObject getMetadataBlocksJson(String body) { + JsonObject dataverseJson = JsonUtil.getJsonObject(body); + return dataverseJson.getJsonObject("metadataBlocks"); + } + + private Response handleWrappedResponse(WrappedResponse ww) { + String error = ConstraintViolationUtil.getErrorStringForConstraintViolations(ww.getCause()); + if (!error.isEmpty()) { + logger.log(Level.INFO, error); + return ww.refineResponse(error); + } + return ww.getResponse(); + } + + private Response handleEJBException(EJBException ex, String action) { + Throwable cause = ex; + StringBuilder sb = new StringBuilder(); + sb.append(action); + while (cause.getCause() != null) { + cause = cause.getCause(); + if (cause instanceof ConstraintViolationException) { + sb.append(ConstraintViolationUtil.getErrorStringForConstraintViolations(cause)); + } + } + logger.log(Level.SEVERE, sb.toString()); + return error(Response.Status.INTERNAL_SERVER_ERROR, sb.toString()); + } + private List parseNewDataverseMetadataBlocks(JsonArray metadataBlockNamesArray) throws WrappedResponse { List selectedMetadataBlocks = new ArrayList<>(); for (JsonString metadataBlockName : metadataBlockNamesArray.getValuesAs(JsonString.class)) { @@ -407,6 +487,12 @@ public Response importDataset(@Context ContainerRequestContext crc, String jsonB if (ds.getIdentifier() == null) { return badRequest("Please provide a persistent identifier, either by including it in the JSON, or by using the pid query parameter."); } + + PidProvider pidProvider = PidUtil.getPidProvider(ds.getGlobalId().getProviderId()); + if (pidProvider == null || !pidProvider.canManagePID()) { + return badRequest("Cannot import a dataset that has a PID that doesn't match the server's settings"); + } + boolean shouldRelease = StringUtil.isTrue(releaseParam); DataverseRequest request = createDataverseRequest(u); @@ -615,62 +701,22 @@ public Response deleteDataverse(@Context ContainerRequestContext crc, @PathParam public Response updateAttribute(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier, @PathParam("attribute") String attribute, @QueryParam("value") String value) { try { - Dataverse collection = findDataverseOrDie(identifier); - User user = getRequestUser(crc); - DataverseRequest dvRequest = createDataverseRequest(user); - - // TODO: The cases below use hard coded strings, because we have no place for definitions of those! - // They are taken from util.json.JsonParser / util.json.JsonPrinter. This shall be changed. - // This also should be extended to more attributes, like the type, theme, contacts, some booleans, etc. 
- switch (attribute) { - case "alias": - collection.setAlias(value); - break; - case "name": - collection.setName(value); - break; - case "description": - collection.setDescription(value); - break; - case "affiliation": - collection.setAffiliation(value); - break; - /* commenting out the code from the draft pr #9462: - case "versionPidsConduct": - CollectionConduct conduct = CollectionConduct.findBy(value); - if (conduct == null) { - return badRequest("'" + value + "' is not one of [" + - String.join(",", CollectionConduct.asList()) + "]"); - } - collection.setDatasetVersionPidConduct(conduct); - break; - */ - case "filePIDsEnabled": - if(!user.isSuperuser()) { - return forbidden("You must be a superuser to change this setting"); - } - if(!settingsService.isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) { - return forbidden("Changing File PID policy per collection is not enabled on this server"); - } - collection.setFilePIDsEnabled(parseBooleanOrDie(value)); - break; - default: - return badRequest("'" + attribute + "' is not a supported attribute"); - } - - // Off to persistence layer - execCommand(new UpdateDataverseCommand(collection, null, null, dvRequest, null)); - - // Also return modified collection to user - return ok("Update successful", JsonPrinter.json(collection)); - - // TODO: This is an anti-pattern, necessary due to this bean being an EJB, causing very noisy and unnecessary - // logging by the EJB container for bubbling exceptions. (It would be handled by the error handlers.) + Dataverse dataverse = findDataverseOrDie(identifier); + Object formattedValue = formatAttributeValue(attribute, value); + dataverse = execCommand(new UpdateDataverseAttributeCommand(createDataverseRequest(getRequestUser(crc)), dataverse, attribute, formattedValue)); + return ok("Update successful", JsonPrinter.json(dataverse)); } catch (WrappedResponse e) { return e.getResponse(); } } + private Object formatAttributeValue(String attribute, String value) throws WrappedResponse { + if (attribute.equals("filePIDsEnabled")) { + return parseBooleanOrDie(value); + } + return value; + } + @GET @AuthRequired @Path("{identifier}/inputLevels") diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index d786aab35a8..633d420c527 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -343,10 +343,10 @@ public Response deleteFileInDataset(@Context ContainerRequestContext crc, @PathP DataFile dataFile = findDataFileOrDie(fileIdOrPersistentId); FileMetadata fileToDelete = dataFile.getLatestFileMetadata(); Dataset dataset = dataFile.getOwner(); - DatasetVersion v = dataset.getOrCreateEditVersion(); + dataset.getOrCreateEditVersion(); deletePhysicalFile = !dataFile.isReleased(); - UpdateDatasetVersionCommand update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, Arrays.asList(fileToDelete), v); + UpdateDatasetVersionCommand update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, Arrays.asList(fileToDelete)); update_cmd.setValidateLenient(true); try { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java index 257519677d3..2439c996816 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java @@ -12,12 +12,17 @@ import jakarta.ws.rs.Produces; import org.apache.commons.io.IOUtils; +import 
edu.harvard.iq.dataverse.export.ExportService; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; +import io.gdcc.spi.export.Exporter; +import io.gdcc.spi.export.ExportException; +import io.gdcc.spi.export.XMLExporter; import jakarta.ejb.EJB; import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; import jakarta.json.JsonValue; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; @@ -92,6 +97,32 @@ public Response getZipDownloadLimit() { return ok(zipDownloadLimit); } + @GET + @Path("exportFormats") + public Response getExportFormats() { + JsonObjectBuilder responseModel = Json.createObjectBuilder(); + ExportService instance = ExportService.getInstance(); + for (String[] labels : instance.getExportersLabels()) { + try { + Exporter exporter = instance.getExporter(labels[1]); + JsonObjectBuilder exporterObject = Json.createObjectBuilder().add("displayName", labels[0]) + .add("mediaType", exporter.getMediaType()).add("isHarvestable", exporter.isHarvestable()) + .add("isVisibleInUserInterface", exporter.isAvailableToUsers()); + if (exporter instanceof XMLExporter xmlExporter) { + exporterObject.add("XMLNameSpace", xmlExporter.getXMLNameSpace()) + .add("XMLSchemaLocation", xmlExporter.getXMLSchemaLocation()) + .add("XMLSchemaVersion", xmlExporter.getXMLSchemaVersion()); + } + responseModel.add(labels[1], exporterObject); + } + catch (ExportException ex){ + logger.warning("Failed to get: " + labels[1]); + logger.warning(ex.getLocalizedMessage()); + } + } + return ok(responseModel); + } + private Response getSettingResponseByKey(SettingsServiceBean.Key key) { String setting = settingsService.getValueForKey(key); if (setting != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java index 1f2f1039327..306b863c9e4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java @@ -19,6 +19,9 @@ import java.io.IOException; import java.io.InputStream; import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; import java.net.URL; import java.util.Iterator; import java.util.List; @@ -152,10 +155,17 @@ public Response updateCitationsForDataset(@PathParam("id") String id) throws IOE // DataCite wants "doi=", not "doi:". 
String authorityPlusIdentifier = persistentId.replaceFirst("doi:", ""); // Request max page size and then loop to handle multiple pages - URL url = new URL(JvmSettings.DATACITE_REST_API_URL.lookup() + + URL url = null; + try { + url = new URI(JvmSettings.DATACITE_REST_API_URL.lookup(pidProvider.getId()) + "/events?doi=" + authorityPlusIdentifier + - "&source=crossref&page[size]=1000"); + "&source=crossref&page[size]=1000").toURL(); + } catch (URISyntaxException e) { + //Nominally this means a config error/ bad DATACITE_REST_API_URL for this provider + logger.warning("Unable to create URL for " + persistentId + ", pidProvider " + pidProvider.getId()); + return error(Status.INTERNAL_SERVER_ERROR, "Unable to create DataCite URL to retrieve citations."); + } logger.fine("Retrieving Citations from " + url.toString()); boolean nextPage = true; JsonArrayBuilder dataBuilder = Json.createArrayBuilder(); @@ -178,7 +188,12 @@ public Response updateCitationsForDataset(@PathParam("id") String id) throws IOE dataBuilder.add(iter.next()); } if (links.containsKey("next")) { - url = new URL(links.getString("next")); + try { + url = new URI(links.getString("next")).toURL(); + } catch (URISyntaxException e) { + logger.warning("Unable to create URL from DataCite response: " + links.getString("next")); + return error(Status.INTERNAL_SERVER_ERROR, "Unable to retrieve all results from DataCite"); + } } else { nextPage = false; } @@ -187,7 +202,7 @@ public Response updateCitationsForDataset(@PathParam("id") String id) throws IOE JsonArray allData = dataBuilder.build(); List datasetExternalCitations = datasetExternalCitationsService.parseCitations(allData); /* - * ToDo: If this is the only source of citations, we should remove all the existing ones for the dataset and repopuate them. + * ToDo: If this is the only source of citations, we should remove all the existing ones for the dataset and repopulate them. * As is, this call doesn't remove old citations if there are now none (legacy issue if we decide to stop counting certain types of citation * as we've done for 'hasPart'). * If there are some, this call individually checks each one and if a matching item exists, it removes it and adds it back. 
Faster and better to delete all and diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java b/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java index 452e5df9f9a..f36c514859e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java @@ -206,12 +206,13 @@ public Response getDatasetsTimeSeriest(@Context Request req, @Context UriInfo ur return error(BAD_REQUEST, ia.getLocalizedMessage()); } String metricName = "datasets"; - JsonArray jsonArray = MetricsUtil.stringToJsonArray(metricsSvc.returnUnexpiredCacheAllTime(metricName, null, d)); + String validDataLocation = MetricsUtil.validateDataLocationStringType(dataLocation); + JsonArray jsonArray = MetricsUtil.stringToJsonArray(metricsSvc.returnUnexpiredCacheAllTime(metricName, validDataLocation, d)); if (null == jsonArray) { // run query and save - jsonArray = metricsSvc.getDatasetsTimeSeries(uriInfo, dataLocation, d); - metricsSvc.save(new Metric(metricName, null, null, d, jsonArray.toString())); + jsonArray = metricsSvc.getDatasetsTimeSeries(uriInfo, validDataLocation, d); + metricsSvc.save(new Metric(metricName, null, validDataLocation, d, jsonArray.toString())); } MediaType requestedType = getVariant(req, MediaType.valueOf(FileUtil.MIME_TYPE_CSV), MediaType.APPLICATION_JSON_TYPE); if ((requestedType != null) && (requestedType.equals(MediaType.APPLICATION_JSON_TYPE))) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java index 33a11a2df23..e6519c9ff36 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java @@ -181,7 +181,7 @@ public Response delete(@PathParam("id") long doomedId, @QueryParam("unlink") boo try { wasDeleted = savedSearchSvc.delete(doomedId, unlink); } catch (Exception e) { - return error(INTERNAL_SERVER_ERROR, "Problem while trying to unlink links of saved search id " + doomedId); + return error(INTERNAL_SERVER_ERROR, "Problem while trying to unlink links of saved search id " + doomedId + ". 
Exception: " + e.getLocalizedMessage()); } if (wasDeleted) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Search.java b/src/main/java/edu/harvard/iq/dataverse/api/Search.java index 6b9fcb38305..f86f9f446fa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Search.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Search.java @@ -175,7 +175,7 @@ public Response search( JsonArrayBuilder itemsArrayBuilder = Json.createArrayBuilder(); List solrSearchResults = solrQueryResponse.getSolrSearchResults(); for (SolrSearchResult solrSearchResult : solrSearchResults) { - itemsArrayBuilder.add(solrSearchResult.json(showRelevance, showEntityIds, showApiUrls, metadataFields, getDatasetFileCount(solrSearchResult))); + itemsArrayBuilder.add(solrSearchResult.json(showRelevance, showEntityIds, showApiUrls, metadataFields)); } JsonObjectBuilder spelling_alternatives = Json.createObjectBuilder(); @@ -229,15 +229,6 @@ public Response search( } } - private Long getDatasetFileCount(SolrSearchResult solrSearchResult) { - DvObject dvObject = solrSearchResult.getEntity(); - if (dvObject.isInstanceofDataset()) { - DatasetVersion datasetVersion = ((Dataset) dvObject).getVersionFromId(solrSearchResult.getDatasetVersionId()); - return datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion); - } - return null; - } - private User getUser(ContainerRequestContext crc) throws WrappedResponse { User userToExecuteSearchAs = GuestUser.get(); try { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Users.java b/src/main/java/edu/harvard/iq/dataverse/api/Users.java index 1f5430340c2..ecf7839e616 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Users.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Users.java @@ -24,13 +24,7 @@ import jakarta.ejb.Stateless; import jakarta.json.JsonArray; import jakarta.json.JsonObjectBuilder; -import jakarta.ws.rs.BadRequestException; -import jakarta.ws.rs.DELETE; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; +import jakarta.ws.rs.*; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.MediaType; @@ -143,21 +137,26 @@ public Response deleteToken(@Context ContainerRequestContext crc) { @Path("token") @AuthRequired @GET - public Response getTokenExpirationDate() { - ApiToken token = authSvc.findApiToken(getRequestApiKey()); - - if (token == null) { - return notFound("Token " + getRequestApiKey() + " not found."); + public Response getTokenExpirationDate(@Context ContainerRequestContext crc) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + ApiToken token = authSvc.findApiTokenByUser(user); + + if (token == null) { + return notFound("Token not found."); + } + + return ok(String.format("Token %s expires on %s", token.getTokenString(), token.getExpireTime())); + + } catch (WrappedResponse wr) { + return wr.getResponse(); } - - return ok("Token " + getRequestApiKey() + " expires on " + token.getExpireTime()); - } @Path("token/recreate") @AuthRequired @POST - public Response recreateToken(@Context ContainerRequestContext crc) { + public Response recreateToken(@Context ContainerRequestContext crc, @QueryParam("returnExpiration") boolean returnExpiration) { User u = getRequestUser(crc); AuthenticatedUser au; @@ -174,8 +173,12 @@ public Response recreateToken(@Context ContainerRequestContext crc) { ApiToken newToken = authSvc.generateApiTokenForUser(au); 
authSvc.save(newToken); - return ok("New token for " + au.getUserIdentifier() + " is " + newToken.getTokenString()); + String message = "New token for " + au.getUserIdentifier() + " is " + newToken.getTokenString(); + if (returnExpiration) { + message += " and expires on " + newToken.getExpireTime(); + } + return ok(message); } @GET diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/DataverseDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/DataverseDTO.java new file mode 100644 index 00000000000..4f2f1032c07 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/DataverseDTO.java @@ -0,0 +1,63 @@ +package edu.harvard.iq.dataverse.api.dto; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseContact; + +import java.util.List; + +public class DataverseDTO { + private String alias; + private String name; + private String description; + private String affiliation; + private List dataverseContacts; + private Dataverse.DataverseType dataverseType; + + public String getAlias() { + return alias; + } + + public void setAlias(String alias) { + this.alias = alias; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getAffiliation() { + return affiliation; + } + + public void setAffiliation(String affiliation) { + this.affiliation = affiliation; + } + + public List getDataverseContacts() { + return dataverseContacts; + } + + public void setDataverseContacts(List dataverseContacts) { + this.dataverseContacts = dataverseContacts; + } + + public Dataverse.DataverseType getDataverseType() { + return dataverseType; + } + + public void setDataverseType(Dataverse.DataverseType dataverseType) { + this.dataverseType = dataverseType; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index 85d4868605d..35d35316f73 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -210,7 +210,7 @@ private void processDDI(ImportType importType, XMLStreamReader xmlr, DatasetDTO // study description section. we'll use the one we found in // the codeBook entry: FieldDTO otherIdValue = FieldDTO.createPrimitiveFieldDTO("otherIdValue", codeBookLevelId); - FieldDTO otherId = FieldDTO.createCompoundFieldDTO("otherId", otherIdValue); + FieldDTO otherId = FieldDTO.createMultipleCompoundFieldDTO("otherId", otherIdValue); citationBlock.getFields().add(otherId); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java index d32a548c8bf..aa5b25e3967 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java @@ -150,12 +150,16 @@ public DatasetDTO processXML( XMLStreamReader xmlr, ForeignMetadataFormatMapping } - // Helper method for importing harvested Dublin Core xml. + // Helper methods for importing harvested Dublin Core xml. // Dublin Core is considered a mandatory, built in metadata format mapping. 
// It is distributed as required content, in reference_data.sql. // Note that arbitrary formatting tags are supported for the outer xml // wrapper. -- L.A. 4.5 public DatasetDTO processOAIDCxml(String DcXmlToParse) throws XMLStreamException { + return processOAIDCxml(DcXmlToParse, null, false); + } + + public DatasetDTO processOAIDCxml(String DcXmlToParse, String oaiIdentifier, boolean preferSuppliedIdentifier) throws XMLStreamException { // look up DC metadata mapping: ForeignMetadataFormatMapping dublinCoreMapping = findFormatMappingByName(DCTERMS); @@ -185,18 +189,37 @@ public DatasetDTO processOAIDCxml(String DcXmlToParse) throws XMLStreamException datasetDTO.getDatasetVersion().setVersionState(DatasetVersion.VersionState.RELEASED); - // Our DC import handles the contents of the dc:identifier field - // as an "other id". In the context of OAI harvesting, we expect - // the identifier to be a global id, so we need to rearrange that: + // In some cases, the identifier that we want to use for the dataset is + // already supplied to the method explicitly. For example, in some + // harvesting cases we'll want to use the OAI identifier (the identifier + // from the
<header>
section of the OAI record) for that purpose, without + // expecting to find a valid persistent id in the body of the DC record: - String identifier = getOtherIdFromDTO(datasetDTO.getDatasetVersion()); - logger.fine("Imported identifier: "+identifier); + String globalIdentifier; - String globalIdentifier = reassignIdentifierAsGlobalId(identifier, datasetDTO); - logger.fine("Detected global identifier: "+globalIdentifier); + if (oaiIdentifier != null) { + logger.fine("Attempting to use " + oaiIdentifier + " as the persistentId of the imported dataset"); + + globalIdentifier = reassignIdentifierAsGlobalId(oaiIdentifier, datasetDTO); + } else { + // Our DC import handles the contents of the dc:identifier field + // as an "other id". Unless we are using an externally supplied + // global id, we will be using the first such "other id" that we + // can parse and recognize as the global id for the imported dataset + // (note that this is the default behavior during harvesting), + // so we need to reaassign it accordingly: + String identifier = selectIdentifier(datasetDTO.getDatasetVersion(), oaiIdentifier, preferSuppliedIdentifier); + logger.fine("Imported identifier: " + identifier); + + globalIdentifier = reassignIdentifierAsGlobalId(identifier, datasetDTO); + logger.fine("Detected global identifier: " + globalIdentifier); + } if (globalIdentifier == null) { - throw new EJBException("Failed to find a global identifier in the OAI_DC XML record."); + String exceptionMsg = oaiIdentifier == null ? + "Failed to find a global identifier in the OAI_DC XML record." : + "Failed to parse the supplied identifier as a valid Persistent Id"; + throw new EJBException(exceptionMsg); } return datasetDTO; @@ -205,8 +228,17 @@ public DatasetDTO processOAIDCxml(String DcXmlToParse) throws XMLStreamException private void processXMLElement(XMLStreamReader xmlr, String currentPath, String openingTag, ForeignMetadataFormatMapping foreignFormatMapping, DatasetDTO datasetDTO) throws XMLStreamException { logger.fine("entering processXMLElement; ("+currentPath+")"); - - for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { + + while (xmlr.hasNext()) { + + int event; + try { + event = xmlr.next(); + } catch (XMLStreamException ex) { + logger.warning("Error occurred in the XML parsing : " + ex.getMessage()); + continue; // Skip Undeclared namespace prefix and Unexpected close tag related to com.ctc.wstx.exc.WstxParsingException + } + if (event == XMLStreamConstants.START_ELEMENT) { String currentElement = xmlr.getLocalName(); @@ -335,8 +367,20 @@ private FieldDTO makeDTO(DatasetFieldType dataverseFieldType, FieldDTO value, St return value; } - private String getOtherIdFromDTO(DatasetVersionDTO datasetVersionDTO) { + public String selectIdentifier(DatasetVersionDTO datasetVersionDTO, String suppliedIdentifier) { + return selectIdentifier(datasetVersionDTO, suppliedIdentifier, false); + } + + private String selectIdentifier(DatasetVersionDTO datasetVersionDTO, String suppliedIdentifier, boolean preferSuppliedIdentifier) { List otherIds = new ArrayList<>(); + + if (suppliedIdentifier != null && preferSuppliedIdentifier) { + // This supplied identifier (in practice, his is likely the OAI-PMH + // identifier from the
<header>
section) will be our first + // choice candidate for the pid of the imported dataset: + otherIds.add(suppliedIdentifier); + } + for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) { String key = entry.getKey(); MetadataBlockDTO value = entry.getValue(); @@ -354,6 +398,16 @@ private String getOtherIdFromDTO(DatasetVersionDTO datasetVersionDTO) { } } } + + if (suppliedIdentifier != null && !preferSuppliedIdentifier) { + // Unless specifically instructed to prefer this extra identifier + // (in practice, this is likely the OAI-PMH identifier from the + //
<header>
section), we will try to use it as the *last* + // possible candidate for the pid, so, adding it to the end of the + // list: + otherIds.add(suppliedIdentifier); + } + if (!otherIds.isEmpty()) { // We prefer doi or hdl identifiers like "doi:10.7910/DVN/1HE30F" for (String otherId : otherIds) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index d2bba56f884..7dc2aed799e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -7,7 +7,6 @@ import com.google.gson.Gson; import com.google.gson.GsonBuilder; -import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetFieldConstant; @@ -20,6 +19,7 @@ import edu.harvard.iq.dataverse.DataverseContact; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.EjbDataverseEngine; +import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.api.dto.DatasetDTO; import edu.harvard.iq.dataverse.api.imports.ImportUtil.ImportType; @@ -31,6 +31,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.CreateHarvestedDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand; +import edu.harvard.iq.dataverse.engine.command.impl.UpdateHarvestedDatasetCommand; import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -40,6 +41,7 @@ import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.license.LicenseServiceBean; import edu.harvard.iq.dataverse.pidproviders.PidUtil; +import edu.harvard.iq.dataverse.util.DatasetFieldUtil; import java.io.File; import java.io.FileOutputStream; @@ -206,9 +208,15 @@ public JsonObjectBuilder handleFile(DataverseRequest dataverseRequest, Dataverse } @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) - public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, HarvestingClient harvestingClient, String harvestIdentifier, String metadataFormat, File metadataFile, Date oaiDateStamp, PrintWriter cleanupLog) throws ImportException, IOException { + public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, + HarvestingClient harvestingClient, + String harvestIdentifier, + String metadataFormat, + File metadataFile, + Date oaiDateStamp, + PrintWriter cleanupLog) throws ImportException, IOException { if (harvestingClient == null || harvestingClient.getDataverse() == null) { - throw new ImportException("importHarvestedDataset called wiht a null harvestingClient, or an invalid harvestingClient."); + throw new ImportException("importHarvestedDataset called with a null harvestingClient, or an invalid harvestingClient."); } Dataverse owner = harvestingClient.getDataverse(); Dataset importedDataset = null; @@ -242,8 +250,8 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve } else if ("dc".equalsIgnoreCase(metadataFormat) || "oai_dc".equals(metadataFormat)) { logger.fine("importing DC "+metadataFile.getAbsolutePath()); try { - String xmlToParse = new 
String(Files.readAllBytes(metadataFile.toPath())); - dsDTO = importGenericService.processOAIDCxml(xmlToParse); + String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath())); + dsDTO = importGenericService.processOAIDCxml(xmlToParse, harvestIdentifier, harvestingClient.isUseOaiIdentifiersAsPids()); } catch (IOException | XMLStreamException e) { throw new ImportException("Failed to process Dublin Core XML record: "+ e.getClass() + " (" + e.getMessage() + ")"); } @@ -268,116 +276,121 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve } JsonObject obj = JsonUtil.getJsonObject(json); - //and call parse Json to read it into a dataset + + String protocol = obj.getString("protocol", null); + String authority = obj.getString("authority", null); + String identifier = obj.getString("identifier",null); + + GlobalId globalId; + + // A Global ID is required: + // (meaning, we will fail with an exception if the imports above have + // not managed to find an acceptable global identifier in the harvested + // metadata) + + try { + globalId = PidUtil.parseAsGlobalID(protocol, authority, identifier); + } catch (IllegalArgumentException iax) { + throw new ImportException("The harvested metadata record with the OAI server identifier " + harvestIdentifier + " does not contain a global identifier this Dataverse can parse, skipping."); + } + + if (globalId == null) { + throw new ImportException("The harvested metadata record with the OAI server identifier " + harvestIdentifier + " does not contain a global identifier this Dataverse recognizes, skipping."); + } + + String globalIdString = globalId.asString(); + + if (StringUtils.isEmpty(globalIdString)) { + // @todo this check may not be necessary, now that there's a null check above + throw new ImportException("The harvested metadata record with the OAI server identifier " + harvestIdentifier + " does not contain a global identifier this Dataverse recognizes, skipping."); + } + + DatasetVersion harvestedVersion; + + Dataset existingDataset = datasetService.findByGlobalId(globalIdString); + try { + Dataset harvestedDataset; + JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService, datasetTypeService, harvestingClient); parser.setLenient(true); - Dataset ds = parser.parseDataset(obj); - // For ImportType.NEW, if the metadata contains a global identifier, and it's not a protocol - // we support, it should be rejected. - // (TODO: ! - add some way of keeping track of supported protocols!) 
- //if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) { - // throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported"); - //} - ds.setOwner(owner); - ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields()); + if (existingDataset == null) { + // Creating a new dataset from scratch: + + harvestedDataset = parser.parseDataset(obj); - if (ds.getVersions().get(0).getReleaseTime() == null) { - ds.getVersions().get(0).setReleaseTime(oaiDateStamp); - } - - // Check data against required contraints - List> violations = ds.getVersions().get(0).validateRequired(); - if (!violations.isEmpty()) { - // For migration and harvest, add NA for missing required values - for (ConstraintViolation v : violations) { - DatasetField f = v.getRootBean(); - f.setSingleValue(DatasetField.NA_VALUE); + harvestedDataset.setHarvestedFrom(harvestingClient); + harvestedDataset.setHarvestIdentifier(harvestIdentifier); + + harvestedVersion = harvestedDataset.getVersions().get(0); + } else { + // We already have a dataset with this id in the database. + // Let's check a few things before we go any further with it: + + // If this dataset already exists IN ANOTHER COLLECTION + // we are just going to skip it! + if (existingDataset.getOwner() != null && !owner.getId().equals(existingDataset.getOwner().getId())) { + throw new ImportException("The dataset with the global id " + globalIdString + " already exists, in the dataverse " + existingDataset.getOwner().getAlias() + ", skipping."); } - } - - // Check data against validation constraints - // If we are migrating and "scrub migration data" is true we attempt to fix invalid data - // if the fix fails stop processing of this file by throwing exception - Set invalidViolations = ds.getVersions().get(0).validate(); - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - if (!invalidViolations.isEmpty()) { - for (ConstraintViolation v : invalidViolations) { - DatasetFieldValue f = v.getRootBean(); - boolean fixed = false; - boolean converted = false; - // TODO: Is this scrubbing something we want to continue doing? - if (settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) { - fixed = processMigrationValidationError(f, cleanupLog, metadataFile.getName()); - converted = true; - if (fixed) { - Set> scrubbedViolations = validator.validate(f); - if (!scrubbedViolations.isEmpty()) { - fixed = false; - } - } - } - if (!fixed) { - String msg = "Data modified - File: " + metadataFile.getName() + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " - + "Invalid value: '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'"; - cleanupLog.println(msg); - f.setValue(DatasetField.NA_VALUE); - - } + // And if we already have a dataset with this same global id at + // this Dataverse instance, but it is a LOCAL dataset (can happen!), + // we're going to skip it also: + if (!existingDataset.isHarvested()) { + throw new ImportException("A LOCAL dataset with the global id " + globalIdString + " already exists in this dataverse; skipping."); } + // For harvested datasets, there should always only be one version. 
+ if (existingDataset.getVersions().size() != 1) { + throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDataset.getVersions().size() + " versions"); + } + + // We will attempt to import the new version, and replace the + // current, already existing version with it. + harvestedVersion = parser.parseDatasetVersion(obj.getJsonObject("datasetVersion")); + + // For the purposes of validation, the version needs to be attached + // to a non-null dataset. We will create a throwaway temporary + // dataset for this: + harvestedDataset = createTemporaryHarvestedDataset(harvestedVersion); } + + harvestedDataset.setOwner(owner); - // A Global ID is required, in order for us to be able to harvest and import - // this dataset: - if (StringUtils.isEmpty(ds.getGlobalId().asString())) { - throw new ImportException("The harvested metadata record with the OAI server identifier "+harvestIdentifier+" does not contain a global unique identifier that we could recognize, skipping."); - } - - ds.setHarvestedFrom(harvestingClient); - ds.setHarvestIdentifier(harvestIdentifier); + // Either a full new import, or an update of an existing harvested + // Dataset, perform some cleanup on the new version imported from the + // parsed metadata: - Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId().asString()); + harvestedVersion.setDatasetFields(harvestedVersion.initDatasetFields()); - if (existingDs != null) { - // If this dataset already exists IN ANOTHER DATAVERSE - // we are just going to skip it! - if (existingDs.getOwner() != null && !owner.getId().equals(existingDs.getOwner().getId())) { - throw new ImportException("The dataset with the global id "+ds.getGlobalId().asString()+" already exists, in the dataverse "+existingDs.getOwner().getAlias()+", skipping."); - } - // And if we already have a dataset with this same id, in this same - // dataverse, but it is LOCAL dataset (can happen!), we're going to - // skip it also: - if (!existingDs.isHarvested()) { - throw new ImportException("A LOCAL dataset with the global id "+ds.getGlobalId().asString()+" already exists in this dataverse; skipping."); - } - // For harvested datasets, there should always only be one version. - // We will replace the current version with the imported version. - if (existingDs.getVersions().size() != 1) { - throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions"); - } - // Purge all the SOLR documents associated with this client from the - // index server: - indexService.deleteHarvestedDocuments(existingDs); - // files from harvested datasets are removed unceremoniously, - // directly in the database. no need to bother calling the - // DeleteFileCommand on them. - for (DataFile harvestedFile : existingDs.getFiles()) { - DataFile merged = em.merge(harvestedFile); - em.remove(merged); - harvestedFile = null; - } - // TODO: - // Verify what happens with the indexed files in SOLR? - // are they going to be overwritten by the reindexing of the dataset? - existingDs.setFiles(null); - Dataset merged = em.merge(existingDs); - // harvested datasets don't have physical files - so no need to worry about that. - engineSvc.submit(new DestroyDatasetCommand(merged, dataverseRequest)); + if (harvestedVersion.getReleaseTime() == null) { + harvestedVersion.setReleaseTime(oaiDateStamp); } + + // Check data against validation constraints. 
+ // Make an attempt to sanitize any invalid fields encountered - + // missing required fields or invalid values, by filling the values + // with NAs. + + boolean sanitized = validateAndSanitizeVersionMetadata(harvestedVersion, cleanupLog); + + // Note: this sanitizing approach, of replacing invalid values with + // "NA" does not work with certain fields. For example, using it to + // populate a GeoBox coordinate value will result in an invalid + // field. So we will attempt to re-validate the santized version. + // This time around, it will throw an exception if still invalid, so + // that we'll stop before proceeding any further: - importedDataset = engineSvc.submit(new CreateHarvestedDatasetCommand(ds, dataverseRequest)); + if (sanitized) { + validateVersionMetadata(harvestedVersion, cleanupLog); + } + + DatasetFieldUtil.tidyUpFields(harvestedVersion.getDatasetFields(), true); + + if (existingDataset != null) { + importedDataset = engineSvc.submit(new UpdateHarvestedDatasetCommand(existingDataset, harvestedVersion, dataverseRequest)); + } else { + importedDataset = engineSvc.submit(new CreateHarvestedDatasetCommand(harvestedDataset, dataverseRequest)); + } } catch (JsonParseException | ImportException | CommandException ex) { logger.fine("Failed to import harvested dataset: " + ex.getClass() + ": " + ex.getMessage()); @@ -439,7 +452,7 @@ public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse o ds.setOwner(owner); ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields()); - // Check data against required contraints + // Check data against required constraints List> violations = ds.getVersions().get(0).validateRequired(); if (!violations.isEmpty()) { if ( importType.equals(ImportType.HARVEST) ) { @@ -696,6 +709,104 @@ private String convertInvalidDateString(String inString){ return null; } + /** + * A shortcut method for validating AND attempting to sanitize a DatasetVersion + * @param version + * @param cleanupLog - any invalid values and their replacements are logged there + * @return true if any invalid values were encountered and sanitized + * @throws ImportException (although it should never happen in this mode) + */ + private boolean validateAndSanitizeVersionMetadata(DatasetVersion version, PrintWriter cleanupLog) throws ImportException { + return validateVersionMetadata(version, true, cleanupLog); + } + + /** + * A shortcut method for validating a DatasetVersion; will throw an exception + * if invalid, without attempting to sanitize the invalid values. + * @param version + * @param log - will log the invalid fields encountered there + * @throws ImportException + */ + private void validateVersionMetadata(DatasetVersion version, PrintWriter log) throws ImportException { + validateVersionMetadata(version, false, log); + } + + /** + * Validate the metadata fields of a newly-created version, and depending on + * the "sanitize" flag supplied, may or may not attempt to sanitize the supplied + * values by replacing them with "NA"s. 
+ * @param version + * @param sanitize - boolean indicating whether to attempt to fix invalid values + * @param cleanupLog - to log any invalid values encountered will be logged + * @return - true if any invalid values have been replaced + * @throws ImportException + */ + private boolean validateVersionMetadata(DatasetVersion version, boolean sanitize, PrintWriter cleanupLog) throws ImportException { + boolean fixed = false; + Set invalidViolations = version.validate(); + if (!invalidViolations.isEmpty()) { + for (ConstraintViolation v : invalidViolations) { + Object invalid = v.getRootBean(); + String msg = ""; + if (invalid instanceof DatasetField) { + DatasetField f = (DatasetField) invalid; + + msg += "Missing required field: " + f.getDatasetFieldType().getDisplayName() + ";"; + if (sanitize) { + msg += " populated with '" + DatasetField.NA_VALUE + "'"; + f.setSingleValue(DatasetField.NA_VALUE); + fixed = true; + } + } else if (invalid instanceof DatasetFieldValue) { + DatasetFieldValue fv = (DatasetFieldValue) invalid; + + msg += "Invalid metadata field: " + fv.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + + "Invalid value: '" + fv.getValue() + "'"; + if (sanitize) { + msg += ", replaced with '" + DatasetField.NA_VALUE + "'"; + fv.setValue(DatasetField.NA_VALUE); + fixed = true; + } + } else { + // DatasetVersion.validate() can also produce constraint violations + // in TermsOfUse and FileMetadata classes. + // We do not make any attempt to sanitize those. + if (invalid != null) { + msg += "Invalid " + invalid.getClass().getName() + ": " + v.getMessage(); + } + } + cleanupLog.println(msg); + + // Note: "NA" does not work with certain fields. For example, + // using it to populate a GeoBox coordinate value is going + // to result in an invalid field. So we'll need to validate the + // version again after the first, sanitizing pass and see if it + // helped or not. + } + if (!sanitize) { + throw new ImportException("Version was still failing validation after the first attempt to sanitize the invalid values."); + } + } + return fixed; + } + + /** + * Helper method that creates a throwaway Harvested Dataset to temporarily + * attach the newly-harvested version to. We need this when, instead of + * importing a brand-new harvested dataset from scratch, we are planning to + * attempt to update an already existing dataset harvested from the same + * archival location. 
+ * @param harvestedVersion - a newly created Version imported from harvested metadata + * @return - a temporary dataset to which the new version has been attached + */ + private Dataset createTemporaryHarvestedDataset(DatasetVersion harvestedVersion) { + Dataset tempDataset = new Dataset(); + harvestedVersion.setDataset(tempDataset); + tempDataset.setVersions(new ArrayList<>(1)); + tempDataset.getVersions().add(harvestedVersion); + + return tempDataset; + } private static class MyCustomFormatter extends Formatter { diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java index 7fd7bf3e885..a6b7c1b9d49 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java @@ -30,7 +30,7 @@ */ public abstract class AbstractOAuth2AuthenticationProvider implements AuthenticationProvider { - final static Logger logger = Logger.getLogger(AbstractOAuth2AuthenticationProvider.class.getName()); + static final Logger logger = Logger.getLogger(AbstractOAuth2AuthenticationProvider.class.getName()); protected static class ParsedUserResponse { public final AuthenticatedUserDisplayInfo displayInfo; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java index 089ca40e164..323c78ab47a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java @@ -49,7 +49,7 @@ */ public class OrcidOAuth2AP extends AbstractOAuth2AuthenticationProvider { - final static Logger logger = Logger.getLogger(OrcidOAuth2AP.class.getName()); + static final Logger logger = Logger.getLogger(OrcidOAuth2AP.class.getName()); public static final String PROVIDER_ID_PRODUCTION = "orcid"; public static final String PROVIDER_ID_SANDBOX = "orcid-sandbox"; diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index 3bf2107e52b..d0da66c38e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -215,7 +215,7 @@ public long retrieveSizeFromMedia() { JsonArray dataArray = responseJson.getJsonArray("DATA"); if (dataArray != null && dataArray.size() != 0) { //File found - return (long) responseJson.getJsonArray("DATA").getJsonObject(0).getJsonNumber("size").longValueExact(); + return (long) dataArray.getJsonObject(0).getJsonNumber("size").longValueExact(); } } else { logger.warning("Response from " + get.getURI().toString() + " was " diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index d2fdec7b323..5b9e496281f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -753,6 +753,12 @@ public Path getFileSystemPath() throws UnsupportedDataAccessOperationException { @Override public boolean 
exists() { + try { + key = getMainFileKey(); + } catch (IOException e) { + logger.warning("Caught an IOException in S3AccessIO.exists(): " + e.getMessage()); + return false; + } String destinationKey = null; if (dvObject instanceof DataFile) { destinationKey = key; diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index a470f08f736..6b98848021c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -136,8 +136,6 @@ public class AddReplaceFileHelper{ private String newFileName; // step 30 private String newFileContentType; // step 30 private String newStorageIdentifier; // step 30 - private String newCheckSum; // step 30 - private ChecksumType newCheckSumType; //step 30 // -- Optional private DataFile fileToReplace; // step 25 @@ -146,6 +144,7 @@ public class AddReplaceFileHelper{ private DatasetVersion clone; List initialFileList; List finalFileList; + private boolean trustSuppliedFileSizes; // ----------------------------------- // Ingested files @@ -610,15 +609,9 @@ private boolean runAddReplacePhase1(Dataset owner, return false; } - if(optionalFileParams != null) { - if(optionalFileParams.hasCheckSum()) { - newCheckSum = optionalFileParams.getCheckSum(); - newCheckSumType = optionalFileParams.getCheckSumType(); - } - } msgt("step_030_createNewFilesViaIngest"); - if (!this.step_030_createNewFilesViaIngest()){ + if (!this.step_030_createNewFilesViaIngest(optionalFileParams)){ return false; } @@ -1191,7 +1184,7 @@ private boolean step_007_auto_isReplacementInLatestVersion(DataFile existingFile } - private boolean step_030_createNewFilesViaIngest(){ + private boolean step_030_createNewFilesViaIngest(OptionalFileParams optionalFileParams){ if (this.hasError()){ return false; @@ -1203,21 +1196,28 @@ private boolean step_030_createNewFilesViaIngest(){ //Don't repeatedly update the clone (losing changes) in multifile case clone = workingVersion.cloneDatasetVersion(); } + + Long suppliedFileSize = null; + String newCheckSum = null; + ChecksumType newCheckSumType = null; + + + if (optionalFileParams != null) { + if (optionalFileParams.hasCheckSum()) { + newCheckSum = optionalFileParams.getCheckSum(); + newCheckSumType = optionalFileParams.getCheckSumType(); + } + if (trustSuppliedFileSizes && optionalFileParams.hasFileSize()) { + suppliedFileSize = optionalFileParams.getFileSize(); + } + } + try { - /*CreateDataFileResult result = FileUtil.createDataFiles(workingVersion, - this.newFileInputStream, - this.newFileName, - this.newFileContentType, - this.newStorageIdentifier, - this.newCheckSum, - this.newCheckSumType, - this.systemConfig);*/ - UploadSessionQuotaLimit quota = null; if (systemConfig.isStorageQuotasEnforced()) { quota = fileService.getUploadSessionQuotaLimit(dataset); } - Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType); + Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, suppliedFileSize); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); initialFileList = createDataFilesResult.getDataFiles(); @@ -2033,9 +2033,15 @@ public void 
setDuplicateFileWarning(String duplicateFileWarning) { * @param jsonData - an array of jsonData entries (one per file) using the single add file jsonData format * @param dataset * @param authUser + * @param trustSuppliedFileSizes - whether to accept the fileSize values passed + * in jsonData (we don't want to trust the users of the S3 direct + * upload API with that information - we will verify the status of + * the files in the S3 bucket and confirm the sizes in the process. + * We do want GlobusService to be able to pass the file sizes, since + * they are obtained and verified via a Globus API lookup). * @return */ - public Response addFiles(String jsonData, Dataset dataset, User authUser) { + public Response addFiles(String jsonData, Dataset dataset, User authUser, boolean trustSuppliedFileSizes) { msgt("(addFilesToDataset) jsonData: " + jsonData.toString()); JsonArrayBuilder jarr = Json.createArrayBuilder(); @@ -2044,6 +2050,7 @@ public Response addFiles(String jsonData, Dataset dataset, User authUser) { int totalNumberofFiles = 0; int successNumberofFiles = 0; + this.trustSuppliedFileSizes = trustSuppliedFileSizes; // ----------------------------------------------------------- // Read jsonData and Parse files information from jsondata : // ----------------------------------------------------------- @@ -2176,6 +2183,10 @@ public Response addFiles(String jsonData, Dataset dataset, User authUser) { .add("data", Json.createObjectBuilder().add("Files", jarr).add("Result", result)).build() ).build(); } + public Response addFiles(String jsonData, Dataset dataset, User authUser) { + return addFiles(jsonData, dataset, authUser, false); + } + /** * Replace multiple files with prepositioned replacements as listed in the * jsonData. Works with direct upload, Globus, and other out-of-band methods. diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java index 959dbc4e262..54844160163 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java @@ -39,6 +39,12 @@ * - Provenance related information * * @author rmp553 + * @todo (?) We may want to consider renaming this class to DataFileParams or + * DataFileInfo... it was originally created to encode some bits of info - + * the file "tags" specifically, that didn't fit in elsewhere in the normal + * workflow; but it's been expanded to cover pretty much everything else associated + * with DataFiles and it's not really "optional" anymore when, for example, used + * in the direct upload workflow. (?) 
*/ public class OptionalFileParams { @@ -76,6 +82,8 @@ public class OptionalFileParams { public static final String MIME_TYPE_ATTR_NAME = "mimeType"; private String checkSumValue; private ChecksumType checkSumType; + public static final String FILE_SIZE_ATTR_NAME = "fileSize"; + private Long fileSize; public static final String LEGACY_CHECKSUM_ATTR_NAME = "md5Hash"; public static final String CHECKSUM_OBJECT_NAME = "checksum"; public static final String CHECKSUM_OBJECT_TYPE = "@type"; @@ -268,6 +276,18 @@ public String getCheckSum() { public ChecksumType getCheckSumType() { return checkSumType; } + + public boolean hasFileSize() { + return fileSize != null; + } + + public Long getFileSize() { + return fileSize; + } + + public void setFileSize(long fileSize) { + this.fileSize = fileSize; + } /** * Set tags @@ -416,7 +436,13 @@ else if ((jsonObj.has(CHECKSUM_OBJECT_NAME)) && (!jsonObj.get(CHECKSUM_OBJECT_NA this.checkSumType = ChecksumType.fromString(((JsonObject) jsonObj.get(CHECKSUM_OBJECT_NAME)).get(CHECKSUM_OBJECT_TYPE).getAsString()); } - + // ------------------------------- + // get file size as a Long, if supplied + // ------------------------------- + if ((jsonObj.has(FILE_SIZE_ATTR_NAME)) && (!jsonObj.get(FILE_SIZE_ATTR_NAME).isJsonNull())){ + + this.fileSize = jsonObj.get(FILE_SIZE_ATTR_NAME).getAsLong(); + } // ------------------------------- // get tags // ------------------------------- diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java index d792b616a0c..4d3ec2842a1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java @@ -26,9 +26,9 @@ public class DataverseRequest { private final String invocationId; private final HttpServletRequest httpServletRequest; - private final static String undefined = "0.0.0.0"; + private static final String undefined = "0.0.0.0"; - private final static String MDKEY_PREFIX="mdkey."; + private static final String MDKEY_PREFIX="mdkey."; private static final Logger logger = Logger.getLogger(DataverseRequest.class.getName()); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java index db9dc142506..b36a638956f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java @@ -13,8 +13,10 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.pidproviders.PidProvider; import static edu.harvard.iq.dataverse.util.StringUtil.isEmpty; +import java.io.IOException; import java.util.Objects; import java.util.logging.Logger; +import org.apache.solr.client.solrj.SolrServerException; /**; * An abstract base class for commands that creates {@link Dataset}s. @@ -148,9 +150,19 @@ public Dataset execute(CommandContext ctxt) throws CommandException { //Use for code that requires database ids postDBFlush(theDataset, ctxt); - - ctxt.index().asyncIndexDataset(theDataset, true); - + + if (harvested) { + try { + ctxt.index().indexDataset(theDataset, true); + } catch (SolrServerException | IOException solrEx) { + logger.warning("Failed to index harvested dataset. 
" + solrEx.getMessage()); + } + } else { + // The asynchronous version does not throw any exceptions, + // logging them internally instead. + ctxt.index().asyncIndexDataset(theDataset, true); + } + return theDataset; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java new file mode 100644 index 00000000000..91f3a5b823c --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -0,0 +1,106 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; + +import java.util.ArrayList; +import java.util.List; + +/** + * An abstract base class for commands that perform write operations on {@link Dataverse}s. + */ +abstract class AbstractWriteDataverseCommand extends AbstractCommand { + + protected Dataverse dataverse; + private final List inputLevels; + private final List facets; + protected final List metadataBlocks; + private final boolean resetRelationsOnNullValues; + + public AbstractWriteDataverseCommand(Dataverse dataverse, + Dataverse affectedDataverse, + DataverseRequest request, + List facets, + List inputLevels, + List metadataBlocks, + boolean resetRelationsOnNullValues) { + super(request, affectedDataverse); + this.dataverse = dataverse; + if (facets != null) { + this.facets = new ArrayList<>(facets); + } else { + this.facets = null; + } + if (inputLevels != null) { + this.inputLevels = new ArrayList<>(inputLevels); + } else { + this.inputLevels = null; + } + if (metadataBlocks != null) { + this.metadataBlocks = new ArrayList<>(metadataBlocks); + } else { + this.metadataBlocks = null; + } + this.resetRelationsOnNullValues = resetRelationsOnNullValues; + } + + @Override + public Dataverse execute(CommandContext ctxt) throws CommandException { + dataverse = innerExecute(ctxt); + + processMetadataBlocks(); + processFacets(ctxt); + processInputLevels(ctxt); + + return ctxt.dataverses().save(dataverse); + } + + private void processMetadataBlocks() { + if (metadataBlocks != null && !metadataBlocks.isEmpty()) { + dataverse.setMetadataBlockRoot(true); + dataverse.setMetadataBlocks(metadataBlocks); + } else if (resetRelationsOnNullValues) { + dataverse.setMetadataBlockRoot(false); + dataverse.clearMetadataBlocks(); + } + } + + private void processFacets(CommandContext ctxt) { + if (facets != null) { + ctxt.facets().deleteFacetsFor(dataverse); + dataverse.setDataverseFacets(new ArrayList<>()); + + if (!facets.isEmpty()) { + dataverse.setFacetRoot(true); + } + + for (int i = 0; i < facets.size(); i++) { + ctxt.facets().create(i, facets.get(i), dataverse); + } + } else if (resetRelationsOnNullValues) { + ctxt.facets().deleteFacetsFor(dataverse); + dataverse.setFacetRoot(false); + } + } + + private void processInputLevels(CommandContext ctxt) { + if (inputLevels != null) { + if (!inputLevels.isEmpty()) { + dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels); + } + ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); + inputLevels.forEach(inputLevel -> { + inputLevel.setDataverse(dataverse); + 
ctxt.fieldTypeInputLevels().create(inputLevel); + }); + } else if (resetRelationsOnNullValues) { + ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); + } + } + + abstract protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandException; +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 489b36e7cef..3728f3ee6ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -6,11 +6,9 @@ import edu.harvard.iq.dataverse.authorization.groups.Group; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -27,48 +25,26 @@ * @author michael */ @RequiredPermissions(Permission.AddDataverse) -public class CreateDataverseCommand extends AbstractCommand { - - private final Dataverse created; - private final List inputLevelList; - private final List facetList; - private final List metadataBlocks; +public class CreateDataverseCommand extends AbstractWriteDataverseCommand { public CreateDataverseCommand(Dataverse created, - DataverseRequest aRequest, - List facetList, - List inputLevelList) { - this(created, aRequest, facetList, inputLevelList, null); + DataverseRequest request, + List facets, + List inputLevels) { + this(created, request, facets, inputLevels, null); } public CreateDataverseCommand(Dataverse created, - DataverseRequest aRequest, - List facetList, - List inputLevelList, + DataverseRequest request, + List facets, + List inputLevels, List metadataBlocks) { - super(aRequest, created.getOwner()); - this.created = created; - if (facetList != null) { - this.facetList = new ArrayList<>(facetList); - } else { - this.facetList = null; - } - if (inputLevelList != null) { - this.inputLevelList = new ArrayList<>(inputLevelList); - } else { - this.inputLevelList = null; - } - if (metadataBlocks != null) { - this.metadataBlocks = new ArrayList<>(metadataBlocks); - } else { - this.metadataBlocks = null; - } + super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks, false); } @Override - public Dataverse execute(CommandContext ctxt) throws CommandException { - - Dataverse owner = created.getOwner(); + protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandException { + Dataverse owner = dataverse.getOwner(); if (owner == null) { if (ctxt.dataverses().isRootDataverseExists()) { throw new IllegalCommandException("Root Dataverse already exists. 
Cannot create another one", this); @@ -76,44 +52,44 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } if (metadataBlocks != null && !metadataBlocks.isEmpty()) { - created.setMetadataBlockRoot(true); - created.setMetadataBlocks(metadataBlocks); + dataverse.setMetadataBlockRoot(true); + dataverse.setMetadataBlocks(metadataBlocks); } - if (created.getCreateDate() == null) { - created.setCreateDate(new Timestamp(new Date().getTime())); + if (dataverse.getCreateDate() == null) { + dataverse.setCreateDate(new Timestamp(new Date().getTime())); } - if (created.getCreator() == null) { + if (dataverse.getCreator() == null) { final User user = getRequest().getUser(); if (user.isAuthenticated()) { - created.setCreator((AuthenticatedUser) user); + dataverse.setCreator((AuthenticatedUser) user); } else { throw new IllegalCommandException("Guest users cannot create a Dataverse.", this); } } - if (created.getDataverseType() == null) { - created.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED); + if (dataverse.getDataverseType() == null) { + dataverse.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED); } - if (created.getDefaultContributorRole() == null) { - created.setDefaultContributorRole(ctxt.roles().findBuiltinRoleByAlias(DataverseRole.EDITOR)); + if (dataverse.getDefaultContributorRole() == null) { + dataverse.setDefaultContributorRole(ctxt.roles().findBuiltinRoleByAlias(DataverseRole.EDITOR)); } // @todo for now we are saying all dataverses are permission root - created.setPermissionRoot(true); + dataverse.setPermissionRoot(true); - if (ctxt.dataverses().findByAlias(created.getAlias()) != null) { - throw new IllegalCommandException("A dataverse with alias " + created.getAlias() + " already exists", this); + if (ctxt.dataverses().findByAlias(dataverse.getAlias()) != null) { + throw new IllegalCommandException("A dataverse with alias " + dataverse.getAlias() + " already exists", this); } - if (created.getFilePIDsEnabled() != null && !ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) { + if (dataverse.getFilePIDsEnabled() != null && !ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) { throw new IllegalCommandException("File PIDs cannot be enabled per collection", this); } // Save the dataverse - Dataverse managedDv = ctxt.dataverses().save(created); + Dataverse managedDv = ctxt.dataverses().save(dataverse); // Find the built in admin role (currently by alias) DataverseRole adminRole = ctxt.roles().findBuiltinRoleByAlias(DataverseRole.ADMIN); @@ -160,33 +136,6 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } managedDv.setPermissionModificationTime(new Timestamp(new Date().getTime())); - - if (facetList != null) { - ctxt.facets().deleteFacetsFor(managedDv); - - if (!facetList.isEmpty()) { - managedDv.setFacetRoot(true); - } - - int i = 0; - for (DatasetFieldType df : facetList) { - ctxt.facets().create(i++, df, managedDv); - } - } - - if (inputLevelList != null) { - if (!inputLevelList.isEmpty()) { - managedDv.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList); - } - ctxt.fieldTypeInputLevels().deleteFacetsFor(managedDv); - for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) { - inputLevel.setDataverse(managedDv); - ctxt.fieldTypeInputLevels().create(inputLevel); - } - } - - // TODO: save is called here and above; we likely don't need both - managedDv = ctxt.dataverses().save(managedDv); return managedDv; } @@ -194,5 +143,4 @@ 
public Dataverse execute(CommandContext ctxt) throws CommandException { public boolean onSuccess(CommandContext ctxt, Object r) { return ctxt.dataverses().index((Dataverse) r); } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 3a21345448b..e9a2025b112 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -2,34 +2,29 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker; -import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -//import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper; -import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit; -import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; -import static edu.harvard.iq.dataverse.util.FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT; -import static edu.harvard.iq.dataverse.util.FileUtil.createIngestFailureReport; -import static edu.harvard.iq.dataverse.util.FileUtil.determineFileType; -import static edu.harvard.iq.dataverse.util.FileUtil.determineFileTypeByNameAndExtension; -import static edu.harvard.iq.dataverse.util.FileUtil.getFilesTempDirectory; -import static edu.harvard.iq.dataverse.util.FileUtil.saveInputStreamInTempFile; -import static edu.harvard.iq.dataverse.util.FileUtil.useRecognizedType; import edu.harvard.iq.dataverse.util.ShapefileHandler; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.file.BagItFileHandler; import edu.harvard.iq.dataverse.util.file.BagItFileHandlerFactory; import edu.harvard.iq.dataverse.util.file.CreateDataFileResult; +import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; +import jakarta.enterprise.inject.spi.CDI; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.StringUtils; + import java.io.File; import java.io.FileInputStream; import java.io.IOException; @@ -42,7 +37,7 @@ import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; -import java.util.Enumeration; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -51,12 +46,17 @@ import java.util.Set; import java.util.logging.Logger; import java.util.zip.GZIPInputStream; -import java.util.zip.ZipFile; import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; -import jakarta.enterprise.inject.spi.CDI; -import org.apache.commons.io.FileUtils; -import 
org.apache.commons.lang3.StringUtils; +import java.util.zip.ZipFile; + +import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable; +import static edu.harvard.iq.dataverse.util.FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT; +import static edu.harvard.iq.dataverse.util.FileUtil.createIngestFailureReport; +import static edu.harvard.iq.dataverse.util.FileUtil.determineFileType; +import static edu.harvard.iq.dataverse.util.FileUtil.determineFileTypeByNameAndExtension; +import static edu.harvard.iq.dataverse.util.FileUtil.getFilesTempDirectory; +import static edu.harvard.iq.dataverse.util.FileUtil.saveInputStreamInTempFile; +import static edu.harvard.iq.dataverse.util.FileUtil.useRecognizedType; /** * @@ -93,6 +93,10 @@ public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion versi this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, null, null); } + public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, UploadSessionQuotaLimit quota, String newCheckSum, DataFile.ChecksumType newCheckSumType, Long newFileSize) { + this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, newFileSize, null); + } + // This version of the command must be used when files are created in the // context of creating a brand new dataset (from the Add Dataset page): @@ -140,9 +144,10 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException if (newStorageIdentifier == null) { - if (getFilesTempDirectory() != null) { + var filesTempDirectory = getFilesTempDirectory(); + if (filesTempDirectory != null) { try { - tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload"); + tempFile = Files.createTempFile(Paths.get(filesTempDirectory), "tmp", "upload"); // "temporary" location is the key here; this is why we are not using // the DataStore framework for this - the assumption is that // temp files will always be stored on the local filesystem. @@ -260,10 +265,6 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // DataFile objects from its contents: } else if (finalType.equals("application/zip")) { - ZipFile zipFile = null; - ZipInputStream unZippedIn = null; - ZipEntry zipEntry = null; - int fileNumberLimit = ctxt.systemConfig().getZipUploadFilesLimit(); Long combinedUnzippedFileSize = 0L; @@ -271,14 +272,14 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException Charset charset = null; /* TODO: (?) - We may want to investigate somehow letting the user specify + We may want to investigate somehow letting the user specify the charset for the filenames in the zip file... - - otherwise, ZipInputStream bails out if it encounteres a file - name that's not valid in the current charest (i.e., UTF-8, in - our case). It would be a bit trickier than what we're doing for - SPSS tabular ingests - with the lang. encoding pulldown menu - + - otherwise, ZipInputStream bails out if it encounteres a file + name that's not valid in the current charest (i.e., UTF-8, in + our case). It would be a bit trickier than what we're doing for + SPSS tabular ingests - with the lang. encoding pulldown menu - because this encoding needs to be specified *before* we upload and - attempt to unzip the file. + attempt to unzip the file. 
-- L.A. 4.0 beta12 logger.info("default charset is "+Charset.defaultCharset().name()); if (Charset.isSupported("US-ASCII")) { @@ -287,25 +288,21 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException if (charset != null) { logger.info("was able to obtain charset for US-ASCII"); } - + } */ - /** - * Perform a quick check for how many individual files are - * inside this zip archive. If it's above the limit, we can - * give up right away, without doing any unpacking. + /** + * Perform a quick check for how many individual files are + * inside this zip archive. If it's above the limit, we can + * give up right away, without doing any unpacking. * This should be a fairly inexpensive operation, we just need - * to read the directory at the end of the file. + * to read the directory at the end of the file. */ - - if (charset != null) { - zipFile = new ZipFile(tempFile.toFile(), charset); - } else { - zipFile = new ZipFile(tempFile.toFile()); - } + + /** - * The ZipFile constructors above will throw ZipException - + * The ZipFile constructors in openZipFile will throw ZipException - * a type of IOException - if there's something wrong * with this file as a zip. There's no need to intercept it * here, it will be caught further below, with other IOExceptions, @@ -313,8 +310,8 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException * then attempt to save it as is. */ - int numberOfUnpackableFiles = 0; - + int numberOfUnpackableFiles = 0; + /** * Note that we can't just use zipFile.size(), * unfortunately, since that's the total number of entries, @@ -323,83 +320,46 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException * that are files. */ - for (Enumeration entries = zipFile.entries(); entries.hasMoreElements();) { - ZipEntry entry = entries.nextElement(); - logger.fine("inside first zip pass; this entry: "+entry.getName()); - if (!entry.isDirectory()) { - String shortName = entry.getName().replaceFirst("^.*[\\/]", ""); - // ... 
and, finally, check if it's a "fake" file - a zip archive entry - // created for a MacOS X filesystem element: (these - // start with "._") - if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) { - numberOfUnpackableFiles++; - if (numberOfUnpackableFiles > fileNumberLimit) { - logger.warning("Zip upload - too many files in the zip to process individually."); - warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit - + "); please upload a zip archive with fewer files, if you want them to be ingested " - + "as individual DataFiles."; - throw new IOException(); - } - // In addition to counting the files, we can - // also check the file size while we're here, - // provided the size limit is defined; if a single - // file is above the individual size limit, unzipped, - // we give up on unpacking this zip archive as well: - if (fileSizeLimit != null && entry.getSize() > fileSizeLimit) { - throw new FileExceedsMaxSizeException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(entry.getSize()), bytesToHumanReadable(fileSizeLimit))); - } - // Similarly, we want to check if saving all these unpacked - // files is going to push the disk usage over the - // quota: - if (storageQuotaLimit != null) { - combinedUnzippedFileSize = combinedUnzippedFileSize + entry.getSize(); - if (combinedUnzippedFileSize > storageQuotaLimit) { - //throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(combinedUnzippedFileSize), bytesToHumanReadable(storageQuotaLimit))); - // change of plans: if the unzipped content inside exceeds the remaining quota, - // we reject the upload outright, rather than accepting the zip - // file as is. 
- throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.unzipped.quota_exceeded"), bytesToHumanReadable(storageQuotaLimit)), this); - } + try (var zipFile = openZipFile(tempFile, charset)) { + var zipEntries = filteredZipEntries(zipFile); + for (var entry : zipEntries) { + logger.fine("inside first zip pass; this entry: " + entry.getName()); + numberOfUnpackableFiles++; + if (numberOfUnpackableFiles > fileNumberLimit) { + logger.warning("Zip upload - too many files in the zip to process individually."); + warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit + + "); please upload a zip archive with fewer files, if you want them to be ingested " + + "as individual DataFiles."; + throw new IOException(); + } + // In addition to counting the files, we can + // also check the file size while we're here, + // provided the size limit is defined; if a single + // file is above the individual size limit, unzipped, + // we give up on unpacking this zip archive as well: + if (fileSizeLimit != null && entry.getSize() > fileSizeLimit) { + throw new FileExceedsMaxSizeException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(entry.getSize()), bytesToHumanReadable(fileSizeLimit))); + } + // Similarly, we want to check if saving all these unpacked + // files is going to push the disk usage over the + // quota: + if (storageQuotaLimit != null) { + combinedUnzippedFileSize = combinedUnzippedFileSize + entry.getSize(); + if (combinedUnzippedFileSize > storageQuotaLimit) { + //throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(combinedUnzippedFileSize), bytesToHumanReadable(storageQuotaLimit))); + // change of plans: if the unzipped content inside exceeds the remaining quota, + // we reject the upload outright, rather than accepting the zip + // file as is. + throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.unzipped.quota_exceeded"), bytesToHumanReadable(storageQuotaLimit)), this); } } } - } - - // OK we're still here - that means we can proceed unzipping. - - // Close the ZipFile, re-open as ZipInputStream: - zipFile.close(); - // reset: - combinedUnzippedFileSize = 0L; - - if (charset != null) { - unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset); - } else { - unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile())); - } - - while (true) { - try { - zipEntry = unZippedIn.getNextEntry(); - } catch (IllegalArgumentException iaex) { - // Note: - // ZipInputStream documentation doesn't even mention that - // getNextEntry() throws an IllegalArgumentException! - // but that's what happens if the file name of the next - // entry is not valid in the current CharSet. - // -- L.A. - warningMessage = "Failed to unpack Zip file. (Unknown Character Set used in a file name?) Saving the file as is."; - logger.warning(warningMessage); - throw new IOException(); - } + // OK we're still here - that means we can proceed unzipping. 
- if (zipEntry == null) { - break; - } - // Note that some zip entries may be directories - we - // simply skip them: + // reset: + combinedUnzippedFileSize = 0L; - if (!zipEntry.isDirectory()) { + for (var entry : zipEntries) { if (datafiles.size() > fileNumberLimit) { logger.warning("Zip upload - too many files."); warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit @@ -407,72 +367,55 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException + "as individual DataFiles."; throw new IOException(); } - - String fileEntryName = zipEntry.getName(); + var fileEntryName = entry.getName(); + var shortName = getShortName(fileEntryName); logger.fine("ZipEntry, file: " + fileEntryName); + String storageIdentifier = FileUtil.generateStorageIdentifier(); + File unzippedFile = new File(getFilesTempDirectory() + "/" + storageIdentifier); + Files.copy(zipFile.getInputStream(entry), unzippedFile.toPath(), StandardCopyOption.REPLACE_EXISTING); + // No need to check the size of this unpacked file against the size limit, + // since we've already checked for that in the first pass. + DataFile datafile = FileUtil.createSingleDataFile(version, null, storageIdentifier, shortName, + MIME_TYPE_UNDETERMINED_DEFAULT, + ctxt.systemConfig().getFileFixityChecksumAlgorithm(), null, false); + + if (!fileEntryName.equals(shortName)) { + // If the filename looks like a hierarchical folder name (i.e., contains slashes and backslashes), + // we'll extract the directory name; then subject it to some "aggressive sanitizing" - strip all + // the leading, trailing and duplicate slashes; then replace all the characters that + // don't pass our validation rules. + String directoryName = fileEntryName.replaceFirst("[\\\\/][\\\\/]*[^\\\\/]*$", ""); + directoryName = StringUtil.sanitizeFileDirectory(directoryName, true); + // if (!"".equals(directoryName)) { + if (!StringUtil.isEmpty(directoryName)) { + logger.fine("setting the directory label to " + directoryName); + datafile.getFileMetadata().setDirectoryLabel(directoryName); + } + } - if (fileEntryName != null && !fileEntryName.equals("")) { - - String shortName = fileEntryName.replaceFirst("^.*[\\/]", ""); - - // Check if it's a "fake" file - a zip archive entry - // created for a MacOS X filesystem element: (these - // start with "._") - if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) { - // OK, this seems like an OK file entry - we'll try - // to read it and create a DataFile with it: - - String storageIdentifier = FileUtil.generateStorageIdentifier(); - File unzippedFile = new File(getFilesTempDirectory() + "/" + storageIdentifier); - Files.copy(unZippedIn, unzippedFile.toPath(), StandardCopyOption.REPLACE_EXISTING); - // No need to check the size of this unpacked file against the size limit, - // since we've already checked for that in the first pass. - - DataFile datafile = FileUtil.createSingleDataFile(version, null, storageIdentifier, shortName, - MIME_TYPE_UNDETERMINED_DEFAULT, - ctxt.systemConfig().getFileFixityChecksumAlgorithm(), null, false); - - if (!fileEntryName.equals(shortName)) { - // If the filename looks like a hierarchical folder name (i.e., contains slashes and backslashes), - // we'll extract the directory name; then subject it to some "aggressive sanitizing" - strip all - // the leading, trailing and duplicate slashes; then replace all the characters that - // don't pass our validation rules. 
- String directoryName = fileEntryName.replaceFirst("[\\\\/][\\\\/]*[^\\\\/]*$", ""); - directoryName = StringUtil.sanitizeFileDirectory(directoryName, true); - // if (!"".equals(directoryName)) { - if (!StringUtil.isEmpty(directoryName)) { - logger.fine("setting the directory label to " + directoryName); - datafile.getFileMetadata().setDirectoryLabel(directoryName); - } - } + if (datafile != null) { + // We have created this datafile with the mime type "unknown"; + // Now that we have it saved in a temporary location, + // let's try and determine its real type: - if (datafile != null) { - // We have created this datafile with the mime type "unknown"; - // Now that we have it saved in a temporary location, - // let's try and determine its real type: - - String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier(); - - try { - recognizedType = determineFileType(unzippedFile, shortName); - // null the File explicitly, to release any open FDs: - unzippedFile = null; - logger.fine("File utility recognized unzipped file as " + recognizedType); - if (recognizedType != null && !recognizedType.equals("")) { - datafile.setContentType(recognizedType); - } - } catch (Exception ex) { - logger.warning("Failed to run the file utility mime type check on file " + fileName); - } - - datafiles.add(datafile); - combinedUnzippedFileSize += datafile.getFilesize(); + String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier(); + + try { + recognizedType = determineFileType(unzippedFile, shortName); + // null the File explicitly, to release any open FDs: + unzippedFile = null; + logger.fine("File utility recognized unzipped file as " + recognizedType); + if (recognizedType != null && !recognizedType.equals("")) { + datafile.setContentType(recognizedType); } + } catch (Exception ex) { + logger.warning("Failed to run the file utility mime type check on file " + fileName); } + + datafiles.add(datafile); + combinedUnzippedFileSize += datafile.getFilesize(); } } - unZippedIn.closeEntry(); - } } catch (IOException ioex) { @@ -494,18 +437,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException //warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.quota", Arrays.asList(FileSizeChecker.bytesToHumanReadable(storageQuotaLimit))); //datafiles.clear(); throw new CommandExecutionException(fesqx.getMessage(), fesqx, this); - }*/ finally { - if (zipFile != null) { - try { - zipFile.close(); - } catch (Exception zEx) {} - } - if (unZippedIn != null) { - try { - unZippedIn.close(); - } catch (Exception zEx) {} - } - } + }*/ if (!datafiles.isEmpty()) { // remove the uploaded zip file: try { @@ -591,7 +523,8 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // The try-catch is due to error encountered in using NFS for stocking file, // cf. https://github.com/IQSS/dataverse/issues/5909 try { - FileUtils.deleteDirectory(rezipFolder); + if (rezipFolder!=null) + FileUtils.deleteDirectory(rezipFolder); } catch (IOException ioex) { // do nothing - it's a temp folder. 
logger.warning("Could not remove temp folder, error message : " + ioex.getMessage()); @@ -730,7 +663,37 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException return CreateDataFileResult.error(fileName, finalType); } // end createDataFiles - + + private static List filteredZipEntries(ZipFile zipFile) { + var entries = Collections.list(zipFile.entries()).stream().filter(e -> { + var entryName = e.getName(); + logger.fine("ZipEntry, file: " + entryName); + return !e.isDirectory() && !entryName.isEmpty() && !isFileToSkip(entryName); + }).toList(); + return entries; + } + + private static ZipFile openZipFile(Path tempFile, Charset charset) throws IOException { + if (charset != null) { + return new ZipFile(tempFile.toFile(), charset); + } + else { + return new ZipFile(tempFile.toFile()); + } + } + + private static boolean isFileToSkip(String fileName) { + // check if it's a "fake" file - a zip archive entry + // created for a MacOS X filesystem element: (these + // start with "._") + var shortName = getShortName(fileName); + return shortName.startsWith("._") || shortName.startsWith(".DS_Store") || "".equals(shortName); + } + + private static String getShortName(String fileName) { + return fileName.replaceFirst("^.*[\\/]", ""); + } + @Override public Map> getRequiredPermissions() { Map> ret = new HashMap<>(); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index e6e8279a314..e378e2e2ef7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -131,7 +131,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { throw new IllegalCommandException(BundleUtil.getStringFromBundle("datasetversion.update.failure"), this); } else { - metadataUpdated = DatasetVersionDifference.compareFileMetadatas(publishedFmd, draftFmd); + metadataUpdated = !DatasetVersionDifference.compareFileMetadatas(publishedFmd, draftFmd).isEmpty(); publishedFmd.setLabel(draftFmd.getLabel()); publishedFmd.setDescription(draftFmd.getDescription()); publishedFmd.setCategories(draftFmd.getCategories()); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java index 1ac41105237..902bea7f833 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java @@ -11,6 +11,9 @@ import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType; + +import jakarta.persistence.OptimisticLockException; + import java.util.Optional; import java.util.logging.Logger; import static java.util.stream.Collectors.joining; @@ -105,10 +108,15 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException Optional prePubWf = ctxt.workflows().getDefaultWorkflow(TriggerType.PrePublishDataset); if ( prePubWf.isPresent() ) { // We start a workflow - theDataset = ctxt.em().merge(theDataset); - ctxt.em().flush(); - ctxt.workflows().start(prePubWf.get(), buildContext(theDataset, 
TriggerType.PrePublishDataset, datasetExternallyReleased), true); - return new PublishDatasetResult(theDataset, Status.Workflow); + try { + theDataset = ctxt.em().merge(theDataset); + ctxt.em().flush(); + ctxt.workflows().start(prePubWf.get(), + buildContext(theDataset, TriggerType.PrePublishDataset, datasetExternallyReleased), true); + return new PublishDatasetResult(theDataset, Status.Workflow); + } catch (OptimisticLockException e) { + throw new CommandException(e.getMessage(), e, this); + } } else{ // We will skip trying to register the global identifiers for datafiles @@ -157,7 +165,12 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException lock.setInfo(info); ctxt.datasets().addDatasetLock(theDataset, lock); } - theDataset = ctxt.em().merge(theDataset); + try { + theDataset = ctxt.em().merge(theDataset); + } catch (OptimisticLockException e) { + ctxt.datasets().removeDatasetLocks(theDataset, DatasetLock.Reason.finalizePublication); + throw new CommandException(e.getMessage(), e, this); + } // The call to FinalizePublicationCommand has been moved to the new @onSuccess() // method: //ctxt.datasets().callFinalizePublishCommandAsynchronously(theDataset.getId(), ctxt, request, datasetExternallyReleased); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java index bb5f5a71e24..dc8884405ef 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -115,7 +115,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { */ if(persistedVersion==null) { Long id = getDataset().getLatestVersion().getId(); - persistedVersion = ctxt.datasetVersion().find(id!=null ? id: getDataset().getLatestVersionForCopy().getId()); + persistedVersion = ctxt.datasetVersion().find(id!=null ? id : getDataset().getLatestVersionForCopy(true).getId()); } //Will throw an IllegalCommandException if a system metadatablock is changed and the appropriate key is not supplied. diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseAttributeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseAttributeCommand.java new file mode 100644 index 00000000000..57ac20fcee6 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseAttributeCommand.java @@ -0,0 +1,110 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + +import java.util.Collections; + +/** + * Command to update an existing Dataverse attribute. 
+ */ +@RequiredPermissions(Permission.EditDataverse) +public class UpdateDataverseAttributeCommand extends AbstractCommand { + + private static final String ATTRIBUTE_ALIAS = "alias"; + private static final String ATTRIBUTE_NAME = "name"; + private static final String ATTRIBUTE_DESCRIPTION = "description"; + private static final String ATTRIBUTE_AFFILIATION = "affiliation"; + private static final String ATTRIBUTE_FILE_PIDS_ENABLED = "filePIDsEnabled"; + + private final Dataverse dataverse; + private final String attributeName; + private final Object attributeValue; + + public UpdateDataverseAttributeCommand(DataverseRequest request, Dataverse dataverse, String attributeName, Object attributeValue) { + super(request, dataverse); + this.dataverse = dataverse; + this.attributeName = attributeName; + this.attributeValue = attributeValue; + } + + @Override + public Dataverse execute(CommandContext ctxt) throws CommandException { + switch (attributeName) { + case ATTRIBUTE_ALIAS: + case ATTRIBUTE_NAME: + case ATTRIBUTE_DESCRIPTION: + case ATTRIBUTE_AFFILIATION: + setStringAttribute(attributeName, attributeValue); + break; + case ATTRIBUTE_FILE_PIDS_ENABLED: + setBooleanAttributeForFilePIDs(ctxt); + break; + default: + throw new IllegalCommandException("'" + attributeName + "' is not a supported attribute", this); + } + + return ctxt.engine().submit(new UpdateDataverseCommand(dataverse, null, null, getRequest(), null)); + } + + /** + * Helper method to set a string attribute. + * + * @param attributeName The name of the attribute. + * @param attributeValue The value of the attribute (must be a String). + * @throws IllegalCommandException if the provided attribute value is not of String type. + */ + private void setStringAttribute(String attributeName, Object attributeValue) throws IllegalCommandException { + if (!(attributeValue instanceof String stringValue)) { + throw new IllegalCommandException("'" + attributeName + "' requires a string value", this); + } + + switch (attributeName) { + case ATTRIBUTE_ALIAS: + dataverse.setAlias(stringValue); + break; + case ATTRIBUTE_NAME: + dataverse.setName(stringValue); + break; + case ATTRIBUTE_DESCRIPTION: + dataverse.setDescription(stringValue); + break; + case ATTRIBUTE_AFFILIATION: + dataverse.setAffiliation(stringValue); + break; + default: + throw new IllegalCommandException("Unsupported string attribute: " + attributeName, this); + } + } + + /** + * Helper method to handle the "filePIDsEnabled" boolean attribute. + * + * @param ctxt The command context. + * @throws PermissionException if the user doesn't have permission to modify this attribute. 
+ */ + private void setBooleanAttributeForFilePIDs(CommandContext ctxt) throws CommandException { + if (!getRequest().getUser().isSuperuser()) { + throw new PermissionException("You must be a superuser to change this setting", + this, Collections.singleton(Permission.EditDataset), dataverse); + } + if (!ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) { + throw new PermissionException("Changing File PID policy per collection is not enabled on this server", + this, Collections.singleton(Permission.EditDataset), dataverse); + } + + if (!(attributeValue instanceof Boolean)) { + throw new IllegalCommandException("'" + ATTRIBUTE_FILE_PIDS_ENABLED + "' requires a boolean value", this); + } + + dataverse.setFilePIDsEnabled((Boolean) attributeValue); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index bdb69dc918f..6dc4ab4d00d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -1,141 +1,144 @@ package edu.harvard.iq.dataverse.engine.command.impl; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetFieldType; -import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.Dataverse.DataverseType; -import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel; +import edu.harvard.iq.dataverse.api.dto.DataverseDTO; import edu.harvard.iq.dataverse.authorization.Permission; import static edu.harvard.iq.dataverse.dataverse.DataverseUtil.validateDataverseMetadataExternally; -import edu.harvard.iq.dataverse.engine.command.AbstractCommand; + import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import java.util.ArrayList; import java.util.List; -import java.util.logging.Logger; /** * Update an existing dataverse. 
+ * * @author michael */ -@RequiredPermissions( Permission.EditDataverse ) -public class UpdateDataverseCommand extends AbstractCommand { - private static final Logger logger = Logger.getLogger(UpdateDataverseCommand.class.getName()); - - private final Dataverse editedDv; - private final List facetList; +@RequiredPermissions(Permission.EditDataverse) +public class UpdateDataverseCommand extends AbstractWriteDataverseCommand { private final List featuredDataverseList; - private final List inputLevelList; + private final DataverseDTO updatedDataverseDTO; private boolean datasetsReindexRequired = false; - public UpdateDataverseCommand(Dataverse editedDv, List facetList, List featuredDataverseList, - DataverseRequest aRequest, List inputLevelList ) { - super(aRequest, editedDv); - this.editedDv = editedDv; - // add update template uses this command but does not - // update facet list or featured dataverses - if (facetList != null){ - this.facetList = new ArrayList<>(facetList); - } else { - this.facetList = null; - } - if (featuredDataverseList != null){ - this.featuredDataverseList = new ArrayList<>(featuredDataverseList); - } else { - this.featuredDataverseList = null; - } - if (inputLevelList != null){ - this.inputLevelList = new ArrayList<>(inputLevelList); - } else { - this.inputLevelList = null; - } - } - - @Override - public Dataverse execute(CommandContext ctxt) throws CommandException { - logger.fine("Entering update dataverse command"); - - // Perform any optional validation steps, if defined: - if (ctxt.systemConfig().isExternalDataverseValidationEnabled()) { - // For admins, an override of the external validation step may be enabled: - if (!(getUser().isSuperuser() && ctxt.systemConfig().isExternalValidationAdminOverrideEnabled())) { - String executable = ctxt.systemConfig().getDataverseValidationExecutable(); - boolean result = validateDataverseMetadataExternally(editedDv, executable, getRequest()); - - if (!result) { - String rejectionMessage = ctxt.systemConfig().getDataverseUpdateValidationFailureMsg(); - throw new IllegalCommandException(rejectionMessage, this); - } - } - } - - Dataverse oldDv = ctxt.dataverses().find(editedDv.getId()); - - DataverseType oldDvType = oldDv.getDataverseType(); - String oldDvAlias = oldDv.getAlias(); - String oldDvName = oldDv.getName(); - oldDv = null; - - Dataverse result = ctxt.dataverses().save(editedDv); - - if ( facetList != null ) { - ctxt.facets().deleteFacetsFor(result); - int i=0; - for ( DatasetFieldType df : facetList ) { - ctxt.facets().create(i++, df.getId(), result.getId()); - } - } - if ( featuredDataverseList != null ) { - ctxt.featuredDataverses().deleteFeaturedDataversesFor(result); - int i=0; - for ( Object obj : featuredDataverseList ) { - Dataverse dv = (Dataverse) obj; - ctxt.featuredDataverses().create(i++, dv.getId(), result.getId()); + public UpdateDataverseCommand(Dataverse dataverse, + List facets, + List featuredDataverses, + DataverseRequest request, + List inputLevels) { + this(dataverse, facets, featuredDataverses, request, inputLevels, null, null, false); + } + + public UpdateDataverseCommand(Dataverse dataverse, + List facets, + List featuredDataverses, + DataverseRequest request, + List inputLevels, + List metadataBlocks, + DataverseDTO updatedDataverseDTO, + boolean resetRelationsOnNullValues) { + super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks, resetRelationsOnNullValues); + if (featuredDataverses != null) { + this.featuredDataverseList = new ArrayList<>(featuredDataverses); + } else { 
+ this.featuredDataverseList = null; + } + this.updatedDataverseDTO = updatedDataverseDTO; + } + + @Override + protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandException { + // Perform any optional validation steps, if defined: + if (ctxt.systemConfig().isExternalDataverseValidationEnabled()) { + // For admins, an override of the external validation step may be enabled: + if (!(getUser().isSuperuser() && ctxt.systemConfig().isExternalValidationAdminOverrideEnabled())) { + String executable = ctxt.systemConfig().getDataverseValidationExecutable(); + boolean result = validateDataverseMetadataExternally(dataverse, executable, getRequest()); + + if (!result) { + String rejectionMessage = ctxt.systemConfig().getDataverseUpdateValidationFailureMsg(); + throw new IllegalCommandException(rejectionMessage, this); } } - if ( inputLevelList != null ) { - ctxt.fieldTypeInputLevels().deleteFacetsFor(result); - for ( DataverseFieldTypeInputLevel obj : inputLevelList ) { - ctxt.fieldTypeInputLevels().create(obj); - } + } + + Dataverse oldDv = ctxt.dataverses().find(dataverse.getId()); + + DataverseType oldDvType = oldDv.getDataverseType(); + String oldDvAlias = oldDv.getAlias(); + String oldDvName = oldDv.getName(); + + // We don't want to reindex the children datasets unnecessarily: + // When these values are changed we need to reindex all children datasets + // This check is not recursive as all the values just report the immediate parent + if (!oldDvType.equals(dataverse.getDataverseType()) + || !oldDvName.equals(dataverse.getName()) + || !oldDvAlias.equals(dataverse.getAlias())) { + datasetsReindexRequired = true; + } + + if (featuredDataverseList != null) { + ctxt.featuredDataverses().deleteFeaturedDataversesFor(dataverse); + int i = 0; + for (Object obj : featuredDataverseList) { + Dataverse dv = (Dataverse) obj; + ctxt.featuredDataverses().create(i++, dv.getId(), dataverse.getId()); } - - // We don't want to reindex the children datasets unnecessarily: - // When these values are changed we need to reindex all children datasets - // This check is not recursive as all the values just report the immediate parent - if (!oldDvType.equals(editedDv.getDataverseType()) - || !oldDvName.equals(editedDv.getName()) - || !oldDvAlias.equals(editedDv.getAlias())) { - datasetsReindexRequired = true; + } + + if (updatedDataverseDTO != null) { + updateDataverseFromDTO(dataverse, updatedDataverseDTO); + } + + return dataverse; + } + + private void updateDataverseFromDTO(Dataverse dataverse, DataverseDTO dto) { + if (dto.getAlias() != null) { + dataverse.setAlias(dto.getAlias()); + } + if (dto.getName() != null) { + dataverse.setName(dto.getName()); + } + if (dto.getDescription() != null) { + dataverse.setDescription(dto.getDescription()); + } + if (dto.getAffiliation() != null) { + dataverse.setAffiliation(dto.getAffiliation()); + } + if (dto.getDataverseContacts() != null) { + dataverse.setDataverseContacts(dto.getDataverseContacts()); + for (DataverseContact dc : dataverse.getDataverseContacts()) { + dc.setDataverse(dataverse); } - - return result; - } - + } + if (dto.getDataverseType() != null) { + dataverse.setDataverseType(dto.getDataverseType()); + } + } + @Override public boolean onSuccess(CommandContext ctxt, Object r) { - + // first kick of async index of datasets // TODO: is this actually needed? 
Is there a better way to handle // It appears that we at some point lost some extra logic here, where // we only reindex the underlying datasets if one or more of the specific set - // of fields have been changed (since these values are included in the - // indexed solr documents for dataasets). So I'm putting that back. -L.A. + // of fields have been changed (since these values are included in the + // indexed solr documents for datasets). So I'm putting that back. -L.A. Dataverse result = (Dataverse) r; - + if (datasetsReindexRequired) { List datasets = ctxt.datasets().findByOwnerId(result.getId()); ctxt.index().asyncIndexDatasetList(datasets, true); } - - return ctxt.dataverses().index((Dataverse) r); - } + return ctxt.dataverses().index((Dataverse) r); + } } - diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateHarvestedDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateHarvestedDatasetCommand.java new file mode 100644 index 00000000000..09563686299 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateHarvestedDatasetCommand.java @@ -0,0 +1,202 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.FileMetadata; +import static edu.harvard.iq.dataverse.search.IndexServiceBean.solrDocIdentifierFile; +import edu.harvard.iq.dataverse.util.StringUtil; +import java.io.IOException; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.apache.solr.client.solrj.SolrServerException; + +/** + * + * @author landreev + * + * Much simplified version of UpdateDatasetVersionCommand, + * but with some extra twists. The goal is to avoid creating new Dataset and + * DataFile objects, and to instead preserve the database ids of the re-harvested + * datasets and files, whenever possible. This in turn allows us to avoid deleting + * and rebuilding from scratch the Solr documents for these objects. 
+ */ +@RequiredPermissions(Permission.EditDataset) +public class UpdateHarvestedDatasetCommand extends AbstractDatasetCommand { + + private static final Logger logger = Logger.getLogger(UpdateHarvestedDatasetCommand.class.getCanonicalName()); + private final DatasetVersion newHarvestedVersion; + final private boolean validateLenient = true; + + public UpdateHarvestedDatasetCommand(Dataset theDataset, DatasetVersion newHarvestedVersion, DataverseRequest aRequest) { + super(aRequest, theDataset); + this.newHarvestedVersion = newHarvestedVersion; + } + + public boolean isValidateLenient() { + return validateLenient; + } + + @Override + public Dataset execute(CommandContext ctxt) throws CommandException { + + Dataset existingDataset = getDataset(); + + if (existingDataset == null + || existingDataset.getId() == null + || !existingDataset.isHarvested() + || existingDataset.getVersions().size() != 1) { + throw new IllegalCommandException("The command can only be called on an existing harvested dataset with only 1 version", this); + } + DatasetVersion existingVersion = existingDataset.getVersions().get(0); + + if (newHarvestedVersion == null || newHarvestedVersion.getId() != null) { + throw new IllegalCommandException("The command can only be called with a newly-harvested, not yet saved DatasetVersion supplied", this); + } + + newHarvestedVersion.setCreateTime(getTimestamp()); + newHarvestedVersion.setLastUpdateTime(getTimestamp()); + + + Map existingFilesIndex = new HashMap<>(); + + /* + Create a map of the files that are currently part of this existing + harvested dataset. We assume that a harvested file can be uniquely + defined by its storageidentifier. Which, in the case of a datafile + harvested from another Dataverse should be its data access api url. + */ + for (int i = 0; i < existingDataset.getFiles().size(); i++) { + String storageIdentifier = existingDataset.getFiles().get(i).getStorageIdentifier(); + if (!StringUtil.isEmpty(storageIdentifier)) { + existingFilesIndex.put(storageIdentifier, i); + } + } + + /* + Go through the files in the newly-harvested version and check if any of + them are (potentially new/updated) versions of files that we already + have, harvested previously from the same archive location. + */ + for (FileMetadata newFileMetadata : newHarvestedVersion.getFileMetadatas()) { + // is it safe to assume that each new FileMetadata will be + // pointing to a non-null DataFile here? + String storageIdentifier = newFileMetadata.getDataFile().getStorageIdentifier(); + if (!StringUtil.isEmpty(storageIdentifier) && existingFilesIndex.containsKey(storageIdentifier)) { + newFileMetadata.getDataFile().setFileMetadatas(new ArrayList<>()); + + int fileIndex = existingFilesIndex.get(storageIdentifier); + + // Make sure to update the existing DataFiles that we are going + // to keep in case their newly-harvested versions have different + // checksums, mime types etc. 
These values are supposed to be + // immutable, normally - but who knows, errors happen, the source + // Dataverse may have had to fix these in their database to + // correct a data integrity issue (for ex.): + existingDataset.getFiles().get(fileIndex).setContentType(newFileMetadata.getDataFile().getContentType()); + existingDataset.getFiles().get(fileIndex).setFilesize(newFileMetadata.getDataFile().getFilesize()); + existingDataset.getFiles().get(fileIndex).setChecksumType(newFileMetadata.getDataFile().getChecksumType()); + existingDataset.getFiles().get(fileIndex).setChecksumValue(newFileMetadata.getDataFile().getChecksumValue()); + + // Point the newly-harvested filemetadata to the existing datafile: + newFileMetadata.setDataFile(existingDataset.getFiles().get(fileIndex)); + + // Make sure this new FileMetadata is the only one attached to this existing file: + existingDataset.getFiles().get(fileIndex).setFileMetadatas(new ArrayList<>(1)); + existingDataset.getFiles().get(fileIndex).getFileMetadatas().add(newFileMetadata); + // (we don't want any cascade relationships left between this existing + // dataset and this version, since we are going to attemp to delete it). + + // Drop the file from the index map: + existingFilesIndex.remove(storageIdentifier); + } + } + + // @todo? check if there's anything special that needs to be done for things + // like file categories + + List solrIdsOfDocumentsToDelete = new ArrayList<>(); + + // Go through the existing files and delete the ones that are + // no longer present in the version that we have just harvesed: + for (FileMetadata oldFileMetadata : existingDataset.getVersions().get(0).getFileMetadatas()) { + DataFile oldDataFile = oldFileMetadata.getDataFile(); + String storageIdentifier = oldDataFile.getStorageIdentifier(); + // Is it still in the existing files map? - that means it is no longer + // present in the newly-harvested version + if (StringUtil.isEmpty(storageIdentifier) || existingFilesIndex.containsKey(storageIdentifier)) { + solrIdsOfDocumentsToDelete.add(solrDocIdentifierFile + oldDataFile.getId()); + existingDataset.getFiles().remove(oldDataFile); + // Files from harvested datasets are removed unceremoniously, + // directly in the database. No need to bother calling the + // DeleteFileCommand on them. We'll just need to remember to purge + // them from Solr as well (right below) + ctxt.em().remove(ctxt.em().merge(oldDataFile)); + // (no need to explicitly remove the oldFileMetadata; it will be + // removed with the entire old version is deleted) + } + } + + // purge all the SOLR documents associated with the files + // we have just deleted: + if (!solrIdsOfDocumentsToDelete.isEmpty()) { + ctxt.index().deleteHarvestedDocuments(solrIdsOfDocumentsToDelete); + } + + // ... And now delete the existing version itself: + existingDataset.setVersions(new ArrayList<>()); + existingVersion.setDataset(null); + + existingVersion = ctxt.em().merge(existingVersion); + ctxt.em().remove(existingVersion); + + // Now attach the newly-harvested version to the dataset: + existingDataset.getVersions().add(newHarvestedVersion); + newHarvestedVersion.setDataset(existingDataset); + + // ... 
There's one more thing to do - go through the new files, + // that are not in the database yet, and make sure they are + // attached to this existing dataset, instead of the dummy temp + // dataset into which they were originally imported: + for (FileMetadata newFileMetadata : newHarvestedVersion.getFileMetadatas()) { + if (newFileMetadata.getDataFile().getId() == null) { + existingDataset.getFiles().add(newFileMetadata.getDataFile()); + newFileMetadata.getDataFile().setOwner(existingDataset); + } + } + + ctxt.em().persist(newHarvestedVersion); + + Dataset savedDataset = ctxt.em().merge(existingDataset); + ctxt.em().flush(); + + return savedDataset; + } + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + boolean retVal = true; + Dataset d = (Dataset) r; + + try { + // Note that we index harvested datasets synchronously: + ctxt.index().indexDataset(d, true); + } catch (SolrServerException|IOException solrServerEx) { + logger.log(Level.WARNING, "Exception while trying to index the updated Harvested dataset " + d.getGlobalId().asString(), solrServerEx.getMessage()); + retVal = false; + } + + return retVal; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java index edd01ae98a3..d76020cb8d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java @@ -98,8 +98,10 @@ public class DDIExportServiceBean { public static final String LEVEL_FILE = "file"; public static final String NOTE_TYPE_UNF = "VDC:UNF"; public static final String NOTE_TYPE_TAG = "DATAVERSE:TAG"; + public static final String NOTE_TYPE_FILEDESCRIPTION = "DATAVERSE:FILEDESC"; public static final String NOTE_SUBJECT_UNF = "Universal Numeric Fingerprint"; public static final String NOTE_SUBJECT_TAG = "Data File Tag"; + public static final String NOTE_SUBJECT_FILEDESCRIPTION = "DataFile Description"; /* * Internal service objects: @@ -742,11 +744,6 @@ private void createFileDscr(XMLStreamWriter xmlw, Set excludedFieldSet, xmlw.writeEndElement(); // fileName } - /* - xmlw.writeStartElement("fileCont"); - xmlw.writeCharacters( df.getContentType() ); - xmlw.writeEndElement(); // fileCont - */ // dimensions if (checkField("dimensns", excludedFieldSet, includedFieldSet)) { if (dt.getCaseQuantity() != null || dt.getVarQuantity() != null || dt.getRecordsPerCase() != null) { @@ -801,26 +798,6 @@ private void createFileDscr(XMLStreamWriter xmlw, Set excludedFieldSet, xmlw.writeEndElement(); // notes } - /* - xmlw.writeStartElement("notes"); - writeAttribute( xmlw, "type", "vdc:category" ); - xmlw.writeCharacters( fm.getCategory() ); - xmlw.writeEndElement(); // notes - */ - // A special note for LOCKSS crawlers indicating the restricted - // status of the file: - - /* - if (tdf != null && isRestrictedFile(tdf)) { - xmlw.writeStartElement("notes"); - writeAttribute( xmlw, "type", NOTE_TYPE_LOCKSS_CRAWL ); - writeAttribute( xmlw, "level", LEVEL_FILE ); - writeAttribute( xmlw, "subject", NOTE_SUBJECT_LOCKSS_PERM ); - xmlw.writeCharacters( "restricted" ); - xmlw.writeEndElement(); // notes - - } - */ if (checkField("tags", excludedFieldSet, includedFieldSet) && df.getTags() != null) { for (int i = 0; i < df.getTags().size(); i++) { xmlw.writeStartElement("notes"); @@ -831,6 +808,17 @@ private void createFileDscr(XMLStreamWriter xmlw, Set excludedFieldSet, xmlw.writeEndElement(); // notes } } + + // A dedicated 
node for the Description entry + if (!StringUtilisEmpty(fm.getDescription())) { + xmlw.writeStartElement("notes"); + xmlw.writeAttribute("level", LEVEL_FILE); + xmlw.writeAttribute("type", NOTE_TYPE_FILEDESCRIPTION); + xmlw.writeAttribute("subject", NOTE_SUBJECT_FILEDESCRIPTION); + xmlw.writeCharacters(fm.getDescription()); + xmlw.writeEndElement(); // notes + } + xmlw.writeEndElement(); // fileDscr } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index f5efc448090..05ddbe83e78 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -14,8 +14,10 @@ import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.LEVEL_FILE; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_TAG; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_UNF; +import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_FILEDESCRIPTION; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_TAG; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_UNF; +import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_FILEDESCRIPTION; import edu.harvard.iq.dataverse.export.DDIExporter; import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -1901,6 +1903,8 @@ private static void createFileDscr(XMLStreamWriter xmlw, JsonArray fileDetails) xmlw.writeEndElement(); // notes } + // If any tabular tags are present, each is formatted in a + // dedicated note: if (fileJson.containsKey("tabularTags")) { JsonArray tags = fileJson.getJsonArray("tabularTags"); for (int j = 0; j < tags.size(); j++) { @@ -1912,6 +1916,17 @@ private static void createFileDscr(XMLStreamWriter xmlw, JsonArray fileDetails) xmlw.writeEndElement(); // notes } } + + // Adding a dedicated node for the description entry (for + // non-tabular files we format it under the field) + if (fileJson.containsKey("description")) { + xmlw.writeStartElement("notes"); + xmlw.writeAttribute("level", LEVEL_FILE); + xmlw.writeAttribute("type", NOTE_TYPE_FILEDESCRIPTION); + xmlw.writeAttribute("subject", NOTE_SUBJECT_FILEDESCRIPTION); + xmlw.writeCharacters(fileJson.getString("description")); + xmlw.writeEndElement(); // notes + } // TODO: add the remaining fileDscr elements! 
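        // Illustrative sketch only (not part of this change; "Survey codebook" is a hypothetical
        // value): given the writer calls above, a file with that description would get an extra
        // note in its fileDscr section along the lines of
        //
        //   <notes level="file" type="DATAVERSE:FILEDESC" subject="DataFile Description">Survey codebook</notes>
        //
        // i.e. the same type/subject pair defined in DDIExportServiceBean, with the description
        // text as the element body.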
xmlw.writeEndElement(); // fileDscr diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index ac3c81622fc..58992805dc8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -74,6 +74,7 @@ import edu.harvard.iq.dataverse.util.URLTokenUtil; import edu.harvard.iq.dataverse.util.UrlSignerUtil; import edu.harvard.iq.dataverse.util.json.JsonUtil; +import jakarta.json.JsonNumber; import jakarta.json.JsonReader; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; @@ -284,6 +285,52 @@ private int makeDir(GlobusEndpoint endpoint, String dir) { return result.status; } + private Map lookupFileSizes(GlobusEndpoint endpoint, String dir) { + MakeRequestResponse result; + + try { + logger.fine("Attempting to look up the contents of the Globus folder "+dir); + URL url = new URL( + "https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint.getId() + + "/ls?path=" + dir); + result = makeRequest(url, "Bearer", endpoint.getClientToken(), "GET", null); + + switch (result.status) { + case 200: + logger.fine("Looked up directory " + dir + " successfully."); + break; + default: + logger.warning("Status " + result.status + " received when looking up dir " + dir); + logger.fine("Response: " + result.jsonResponse); + return null; + } + } catch (MalformedURLException ex) { + // Misconfiguration + logger.warning("Failed to list the contents of the directory "+ dir + " on endpoint " + endpoint.getId()); + return null; + } + + Map ret = new HashMap<>(); + + JsonObject listObject = JsonUtil.getJsonObject(result.jsonResponse); + JsonArray dataArray = listObject.getJsonArray("DATA"); + + if (dataArray != null && !dataArray.isEmpty()) { + for (int i = 0; i < dataArray.size(); i++) { + String dataType = dataArray.getJsonObject(i).getString("DATA_TYPE", null); + if (dataType != null && dataType.equals("file")) { + // is it safe to assume that any entry with a valid "DATA_TYPE": "file" + // will also have valid "name" and "size" entries? + String fileName = dataArray.getJsonObject(i).getString("name"); + long fileSize = dataArray.getJsonObject(i).getJsonNumber("size").longValueExact(); + ret.put(fileName, fileSize); + } + } + } + + return ret; + } + private int requestPermission(GlobusEndpoint endpoint, Dataset dataset, Permissions permissions) { Gson gson = new GsonBuilder().create(); MakeRequestResponse result = null; @@ -938,9 +985,20 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut inputList.add(fileId + "IDsplit" + fullPath + "IDsplit" + fileName); } + + Map fileSizeMap = null; + + if (filesJsonArray.size() >= systemConfig.getGlobusBatchLookupSize()) { + // Look up the sizes of all the files in the dataset folder, to avoid + // looking them up one by one later: + // @todo: we should only be doing this if this is a managed store, probably (?) 
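            // Illustrative sketch (hypothetical values, not part of this change): the
            // lookupFileSizes() helper added above parses the Globus "ls" response, which looks
            // roughly like
            //
            //   { "DATA": [ { "DATA_TYPE": "file", "name": "172cc637-...", "size": 1048576 }, ... ] }
            //
            // Only entries with DATA_TYPE "file" are kept, yielding a name-to-size map
            // (e.g., "172cc637-..." -> 1048576L) that is consulted below instead of asking the
            // store for each uploaded file's size one by one.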
+ GlobusEndpoint endpoint = getGlobusEndpoint(dataset); + fileSizeMap = lookupFileSizes(endpoint, endpoint.getBasePath()); + } // calculateMissingMetadataFields: checksum, mimetype JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList, myLogger); + JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files"); logger.fine("Size: " + newfilesJsonArray.size()); logger.fine("Val: " + JsonUtil.prettyPrint(newfilesJsonArray.getJsonObject(0))); @@ -964,20 +1022,26 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut if (newfileJsonObject != null) { logger.fine("List Size: " + newfileJsonObject.size()); // if (!newfileJsonObject.get(0).getString("hash").equalsIgnoreCase("null")) { - JsonPatch path = Json.createPatchBuilder() + JsonPatch patch = Json.createPatchBuilder() .add("/md5Hash", newfileJsonObject.get(0).getString("hash")).build(); - fileJsonObject = path.apply(fileJsonObject); - path = Json.createPatchBuilder() + fileJsonObject = patch.apply(fileJsonObject); + patch = Json.createPatchBuilder() .add("/mimeType", newfileJsonObject.get(0).getString("mime")).build(); - fileJsonObject = path.apply(fileJsonObject); + fileJsonObject = patch.apply(fileJsonObject); + // If we already know the size of this file on the Globus end, + // we'll pass it to /addFiles, to avoid looking up file sizes + // one by one: + if (fileSizeMap != null && fileSizeMap.get(fileId) != null) { + Long uploadedFileSize = fileSizeMap.get(fileId); + myLogger.info("Found size for file " + fileId + ": " + uploadedFileSize + " bytes"); + patch = Json.createPatchBuilder() + .add("/fileSize", Json.createValue(uploadedFileSize)).build(); + fileJsonObject = patch.apply(fileJsonObject); + } else { + logger.fine("No file size entry found for file "+fileId); + } addFilesJsonData.add(fileJsonObject); countSuccess++; - // } else { - // globusLogger.info(fileName - // + " will be skipped from adding to dataset by second API due to missing - // values "); - // countError++; - // } } else { myLogger.info(fileName + " will be skipped from adding to dataset in the final AddReplaceFileHelper.addFiles() call. "); @@ -1029,7 +1093,7 @@ private void processUploadedFiles(JsonArray filesJsonArray, Dataset dataset, Aut // The old code had 2 sec. of sleep, so ... 
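        // Illustrative sketch (hypothetical keys and values, not part of this change): after the
        // JsonPatch operations above, an entry handed to AddReplaceFileHelper.addFiles() might
        // look roughly like
        //
        //   { "storageIdentifier": "globus://18b39722-...", "md5Hash": "d41d8cd9...",
        //     "mimeType": "text/tab-separated-values", "fileSize": 1048576 }
        //
        // with "fileSize" present only when the bulk Globus folder listing supplied a size, so
        // the /addFiles step can skip its own per-file size lookup.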
Thread.sleep(2000); - Response addFilesResponse = addFileHelper.addFiles(newjsonData, dataset, authUser); + Response addFilesResponse = addFileHelper.addFiles(newjsonData, dataset, authUser, true); if (addFilesResponse == null) { logger.info("null response from addFiles call"); @@ -1211,7 +1275,7 @@ private GlobusTaskState globusStatusCheck(GlobusEndpoint endpoint, String taskId return task; } - public JsonObject calculateMissingMetadataFields(List inputList, Logger globusLogger) + private JsonObject calculateMissingMetadataFields(List inputList, Logger globusLogger) throws InterruptedException, ExecutionException, IOException { List> hashvalueCompletableFutures = inputList.stream() @@ -1230,7 +1294,7 @@ public JsonObject calculateMissingMetadataFields(List inputList, Logger }); JsonArrayBuilder filesObject = (JsonArrayBuilder) completableFuture.get(); - + JsonObject output = Json.createObjectBuilder().add("files", filesObject).build(); return output; diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java index 0667f5594ce..7280b6af129 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java @@ -252,8 +252,16 @@ public void setAllowHarvestingMissingCVV(boolean allowHarvestingMissingCVV) { this.allowHarvestingMissingCVV = allowHarvestingMissingCVV; } - // TODO: do we need "orphanRemoval=true"? -- L.A. 4.4 - // TODO: should it be @OrderBy("startTime")? -- L.A. 4.4 + private boolean useOaiIdAsPid; + + public boolean isUseOaiIdentifiersAsPids() { + return useOaiIdAsPid; + } + + public void setUseOaiIdentifiersAsPids(boolean useOaiIdAsPid) { + this.useOaiIdAsPid = useOaiIdAsPid; + } + @OneToMany(mappedBy="harvestingClient", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) @OrderBy("id") private List harvestHistory; diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index b42fd950528..71c498a4d0b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -344,10 +344,20 @@ public List saveAndAddFilesToDataset(DatasetVersion version, try { StorageIO dataAccess = DataAccess.getStorageIO(dataFile); //Populate metadata - dataAccess.open(DataAccessOption.READ_ACCESS); - // (the .open() above makes a remote call to check if - // the file exists and obtains its size) - confirmedFileSize = dataAccess.getSize(); + + // There are direct upload sub-cases where the file size + // is already known at this point. For example, direct uploads + // to S3 that go through the jsf dataset page. Or the Globus + // uploads, where the file sizes are looked up in bulk on + // the completion of the remote upload task. + if (dataFile.getFilesize() >= 0) { + confirmedFileSize = dataFile.getFilesize(); + } else { + dataAccess.open(DataAccessOption.READ_ACCESS); + // (the .open() above makes a remote call to check if + // the file exists and obtains its size) + confirmedFileSize = dataAccess.getSize(); + } // For directly-uploaded files, we will perform the file size // limit and quota checks here. 
Perform them *again*, in @@ -362,13 +372,16 @@ public List saveAndAddFilesToDataset(DatasetVersion version, if (fileSizeLimit == null || confirmedFileSize < fileSizeLimit) { //set file size - logger.fine("Setting file size: " + confirmedFileSize); - dataFile.setFilesize(confirmedFileSize); + if (dataFile.getFilesize() < 0) { + logger.fine("Setting file size: " + confirmedFileSize); + dataFile.setFilesize(confirmedFileSize); + } if (dataAccess instanceof S3AccessIO) { ((S3AccessIO) dataAccess).removeTempTag(); } savedSuccess = true; + logger.info("directly uploaded file successfully saved. file size: "+dataFile.getFilesize()); } } catch (IOException ioex) { logger.warning("Failed to get file size, storage id, or failed to remove the temp tag on the saved S3 object" + dataFile.getStorageIdentifier() + " (" diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java index 8c5dad237b1..27a2ab99376 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java @@ -100,71 +100,48 @@ public IngestServiceShapefileHelper(File zippedShapefile, File rezipFolder){ //this.processFile(zippedShapefile, rezipFolder); } - - private FileInputStream getFileInputStream(File fileObject){ - if (fileObject==null){ - return null; - } - try { + + private FileInputStream getFileInputStream(File fileObject){ + if (fileObject==null){ + return null; + } + try { return new FileInputStream(fileObject); } catch (FileNotFoundException ex) { logger.severe("Failed to create FileInputStream from File: " + fileObject.getAbsolutePath()); return null; } - } - - private void closeFileInputStream(FileInputStream fis){ - if (fis==null){ - return; - } + } + + private void closeFileInputStream(FileInputStream fis){ + if (fis==null){ + return; + } try { - fis.close(); + fis.close(); } catch (IOException ex) { logger.info("Failed to close FileInputStream"); } - } - + } + public boolean processFile() { if ((!isValidFile(this.zippedShapefile))||(!isValidFolder(this.rezipFolder))){ return false; } - - // (1) Use the ShapefileHandler to the .zip for a shapefile - // - FileInputStream shpfileInputStream = this.getFileInputStream(zippedShapefile); - if (shpfileInputStream==null){ - return false; - } - - this.shpHandler = new ShapefileHandler(shpfileInputStream); - if (!shpHandler.containsShapefile()){ - logger.severe("Shapefile was incorrectly detected upon Ingest (FileUtil) and passed here"); - return false; - } - - this.closeFileInputStream(shpfileInputStream); - - // (2) Rezip the shapefile pieces - logger.info("rezipFolder: " + rezipFolder.getAbsolutePath()); - shpfileInputStream = this.getFileInputStream(zippedShapefile); - if (shpfileInputStream==null){ - return false; - } - - boolean rezipSuccess; try { - rezipSuccess = shpHandler.rezipShapefileSets(shpfileInputStream, rezipFolder); + this.shpHandler = new ShapefileHandler(zippedShapefile); + if (!shpHandler.containsShapefile()){ + logger.severe("Shapefile was incorrectly detected upon Ingest (FileUtil) and passed here"); + return false; + } + logger.info("rezipFolder: " + rezipFolder.getAbsolutePath()); + return shpHandler.rezipShapefileSets(rezipFolder); } catch (IOException ex) { logger.severe("Shapefile was not correctly unpacked/repacked"); logger.severe("shpHandler message: " + shpHandler.errorMessage); return false; } - - 
this.closeFileInputStream(shpfileInputStream); - - return rezipSuccess; - // return createDataFiles(rezipFolder); } diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java index 50c24274bb2..fa56432cc3c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java @@ -7,6 +7,9 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetServiceBean; +import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.pidproviders.PidUtil; + import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -40,7 +43,8 @@ public class DatasetExternalCitationsServiceBean implements java.io.Serializable Arrays.asList( "cites", "references", - "supplements")); + "supplements", + "is-supplement-to")); static ArrayList outboundRelationships = new ArrayList( Arrays.asList( "is-cited-by", @@ -59,12 +63,11 @@ public List parseCitations(JsonArray citations) { if (inboundRelationships.contains(relationship)) { Dataset localDs = null; if (objectUri.contains("doi")) { - String globalId = objectUri.replace("https://", "").replace("doi.org/", "doi:").toUpperCase().replace("DOI:", "doi:"); - localDs = datasetService.findByGlobalId(globalId); + localDs = datasetService.findByGlobalId(objectUri); exCit.setDataset(localDs); } exCit.setCitedByUrl(subjectUri); - + if (localDs != null && !exCit.getCitedByUrl().isEmpty()) { datasetExternalCitations.add(exCit); } @@ -72,9 +75,9 @@ public List parseCitations(JsonArray citations) { if (outboundRelationships.contains(relationship)) { Dataset localDs = null; if (subjectUri.contains("doi")) { - String globalId = subjectUri.replace("https://", "").replace("doi.org/", "doi:").toUpperCase().replace("DOI:", "doi:"); - localDs = datasetService.findByGlobalId(globalId); + localDs = datasetService.findByGlobalId(subjectUri); exCit.setDataset(localDs); + } exCit.setCitedByUrl(objectUri); diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java index a74474efa15..5bdbeac031d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java @@ -168,25 +168,12 @@ public long datasetsToMonth(String yyyymm, String dataLocation, Dataverse d) { } } - // Note that this SQL line in the code below: - // datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) - // behaves somewhat counter-intuitively if the versionnumber and/or - // minorversionnumber is/are NULL - it results in an empty string - // (NOT the string "{dataset_id}:", in other words). Some harvested - // versions do not have version numbers (only the ones harvested from - // other Dataverses!) It works fine - // for our purposes below, because we are simply counting the selected - // lines - i.e. we don't care if some of these lines are empty. - // But do not use this notation if you need the values returned to - // meaningfully identify the datasets! 
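        // Illustrative sketch (simplified, not part of this change): the rewritten queries below
        // rely on PostgreSQL's DISTINCT ON to pick one row per dataset, the latest released
        // version, instead of the old "dataset_id || ':' || max(version)" string trick, e.g.:
        //
        //   select count(*) from (
        //     select DISTINCT ON (datasetversion.dataset_id) datasetversion.dataset_id
        //     from datasetversion
        //     join dataset on dataset.id = datasetversion.dataset_id
        //     where versionstate = 'RELEASED'
        //     order by datasetversion.dataset_id,
        //              datasetversion.versionnumber desc,
        //              datasetversion.minorversionnumber desc
        //   ) sub_temp;
        //
        // DISTINCT ON keeps the first row of each dataset_id group in ORDER BY order, so the
        // descending version sort makes that row the latest released version.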
- - Query query = em.createNativeQuery( "select count(*)\n" + "from (\n" - + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n" + + "select DISTINCT ON (datasetversion.dataset_id) datasetversion.dataset_id \n" + "from datasetversion\n" + "join dataset on dataset.id = datasetversion.dataset_id\n" + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n") @@ -194,7 +181,7 @@ public long datasetsToMonth(String yyyymm, String dataLocation, Dataverse d) { + ((d == null) ? "" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n ") + "and \n" + dataLocationLine // be careful about adding more and statements after this line. - + "group by dataset_id \n" + + " order by datasetversion.dataset_id, datasetversion.versionnumber desc, datasetversion.minorversionnumber desc\n" +") sub_temp" ); logger.log(Level.FINE, "Metric query: {0}", query); @@ -207,15 +194,15 @@ public List datasetsBySubjectToMonth(String yyyymm, String dataLocatio // A published local datasets may have more than one released version! // So that's why we have to jump through some extra hoops below // in order to select the latest one: - String originClause = "(datasetversion.dataset_id || ':' || datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber) in\n" + + String originClause = "(datasetversion.id in\n" + "(\n" + - "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n" + + "select DISTINCT ON (datasetversion.dataset_id) datasetversion.id\n" + " from datasetversion\n" + " join dataset on dataset.id = datasetversion.dataset_id\n" + " where versionstate='RELEASED'\n" + " and dataset.harvestingclient_id is null\n" + " and date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM')\n" + - " group by dataset_id\n" + + " order by datasetversion.dataset_id, datasetversion.versionnumber desc, datasetversion.minorversionnumber desc\n" + "))\n"; if (!DATA_LOCATION_LOCAL.equals(dataLocation)) { // Default api state is DATA_LOCATION_LOCAL @@ -273,7 +260,7 @@ public long datasetsPastDays(int days, String dataLocation, Dataverse d) { Query query = em.createNativeQuery( "select count(*)\n" + "from (\n" - + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max\n" + + "select DISTINCT ON (datasetversion.dataset_id) datasetversion.id\n" + "from datasetversion\n" + "join dataset on dataset.id = datasetversion.dataset_id\n" + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n") @@ -281,7 +268,7 @@ public long datasetsPastDays(int days, String dataLocation, Dataverse d) { + ((d == null) ? "" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n") + "and \n" + dataLocationLine // be careful about adding more and statements after this line. 
- + "group by dataset_id \n" + + " order by datasetversion.dataset_id, datasetversion.versionnumber desc, datasetversion.minorversionnumber desc \n" +") sub_temp" ); logger.log(Level.FINE, "Metric query: {0}", query); @@ -322,9 +309,9 @@ public long filesToMonth(String yyyymm, Dataverse d) { + "select count(*)\n" + "from filemetadata\n" + "join datasetversion on datasetversion.id = filemetadata.datasetversion_id\n" - + "where datasetversion.dataset_id || ':' || datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber) in \n" + + "where datasetversion.id in \n" + "(\n" - + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max \n" + + "select DISTINCT ON (datasetversion.dataset_id) datasetversion.id \n" + "from datasetversion\n" + "join dataset on dataset.id = datasetversion.dataset_id\n" + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n") @@ -332,7 +319,7 @@ public long filesToMonth(String yyyymm, Dataverse d) { + ((d == null) ? "" : "and dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n") + "and date_trunc('month', releasetime) <= to_date('" + yyyymm + "','YYYY-MM')\n" + "and dataset.harvestingclient_id is null\n" - + "group by dataset_id \n" + + "order by datasetversion.dataset_id, datasetversion.versionnumber desc, datasetversion.minorversionnumber \n" + ");" ); logger.log(Level.FINE, "Metric query: {0}", query); @@ -345,9 +332,9 @@ public long filesPastDays(int days, Dataverse d) { + "select count(*)\n" + "from filemetadata\n" + "join datasetversion on datasetversion.id = filemetadata.datasetversion_id\n" - + "where datasetversion.dataset_id || ':' || datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber) in \n" + + "where datasetversion.id in \n" + "(\n" - + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber)) as max \n" + + "select DISTINCT ON (datasetversion.dataset_id) datasetversion.id \n" + "from datasetversion\n" + "join dataset on dataset.id = datasetversion.dataset_id\n" + ((d == null) ? "" : "join dvobject on dvobject.id = dataset.id\n") @@ -355,7 +342,7 @@ public long filesPastDays(int days, Dataverse d) { + "and releasetime > current_date - interval '" + days + "' day\n" + ((d == null) ? 
"" : "AND dvobject.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n") + "and dataset.harvestingclient_id is null\n" - + "group by dataset_id \n" + + "order by datasetversion.dataset_id, datasetversion.versionnumber desc, datasetversion.minorversionnumber desc \n" + ");" ); logger.log(Level.FINE, "Metric query: {0}", query); diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/AbstractPidProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/AbstractPidProvider.java index f6d142aac96..250eae7e5fc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/AbstractPidProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/AbstractPidProvider.java @@ -36,9 +36,9 @@ public abstract class AbstractPidProvider implements PidProvider { private String datafilePidFormat = null; - private HashSet managedSet; + protected HashSet managedSet = new HashSet(); - private HashSet excludedSet; + protected HashSet excludedSet = new HashSet(); private String id; private String label; @@ -47,8 +47,6 @@ protected AbstractPidProvider(String id, String label, String protocol) { this.id = id; this.label = label; this.protocol = protocol; - this.managedSet = new HashSet(); - this.excludedSet = new HashSet(); } protected AbstractPidProvider(String id, String label, String protocol, String authority, String shoulder, @@ -60,8 +58,12 @@ protected AbstractPidProvider(String id, String label, String protocol, String a this.shoulder = shoulder; this.identifierGenerationStyle = identifierGenerationStyle; this.datafilePidFormat = datafilePidFormat; - this.managedSet = new HashSet(Arrays.asList(managedList.split(",\\s"))); - this.excludedSet = new HashSet(Arrays.asList(excludedList.split(",\\s"))); + if(!managedList.isEmpty()) { + this.managedSet.addAll(Arrays.asList(managedList.split(",\\s"))); + } + if(!excludedList.isEmpty()) { + this.excludedSet.addAll(Arrays.asList(excludedList.split(",\\s"))); + } if (logger.isLoggable(Level.FINE)) { Iterator iter = managedSet.iterator(); while (iter.hasNext()) { @@ -313,10 +315,17 @@ protected GlobalId parsePersistentId(String protocol, String identifierString) { } public GlobalId parsePersistentId(String protocol, String authority, String identifier) { + return parsePersistentId(protocol, authority, identifier, false); + } + + public GlobalId parsePersistentId(String protocol, String authority, String identifier, boolean isCaseInsensitive) { logger.fine("Parsing: " + protocol + ":" + authority + getSeparator() + identifier + " in " + getId()); if (!PidProvider.isValidGlobalId(protocol, authority, identifier)) { return null; } + if(isCaseInsensitive) { + identifier = identifier.toUpperCase(); + } // Check authority/identifier if this is a provider that manages specific // identifiers // /is not one of the unmanaged providers that has null authority @@ -333,7 +342,7 @@ public GlobalId parsePersistentId(String protocol, String authority, String iden logger.fine("managed in " + getId() + ": " + getManagedSet().contains(cleanIdentifier)); logger.fine("excluded from " + getId() + ": " + getExcludedSet().contains(cleanIdentifier)); - if (!(((authority.equals(getAuthority()) && identifier.startsWith(getShoulder())) + if (!(((authority.equals(getAuthority()) && identifier.startsWith(getShoulder().toUpperCase())) || getManagedSet().contains(cleanIdentifier)) && !getExcludedSet().contains(cleanIdentifier))) { return null; } diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/AbstractDOIProvider.java 
b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/AbstractDOIProvider.java index 02a7dedce47..70ce1ec4c14 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/AbstractDOIProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/AbstractDOIProvider.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.pidproviders.doi; import java.util.Arrays; +import java.util.HashSet; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; @@ -26,9 +27,24 @@ public abstract class AbstractDOIProvider extends AbstractPidProvider { public AbstractDOIProvider(String id, String label, String providerAuthority, String providerShoulder, String identifierGenerationStyle, String datafilePidFormat, String managedList, String excludedList) { super(id, label, DOI_PROTOCOL, providerAuthority, providerShoulder, identifierGenerationStyle, datafilePidFormat, managedList, excludedList); + //Create case insensitive (converted toUpperCase) managedSet and excludedSet + managedSet = clean(managedSet, "managed"); + excludedSet = clean(excludedSet, "excluded"); } - //For Unmanged provider + private HashSet clean(HashSet originalSet, String setName) { + HashSet cleanSet = new HashSet(); + for(String entry: originalSet) { + if(entry.startsWith(DOI_PROTOCOL)) { + cleanSet.add(DOI_PROTOCOL + entry.substring(DOI_PROTOCOL.length()).toUpperCase()); + } else { + logger.warning("Non-DOI found in " + setName + " set of pidProvider id: " + getId() + ": " + entry + ". Entry is being dropped."); + } + } + return cleanSet; + } + + //For Unmanaged provider public AbstractDOIProvider(String name, String label) { super(name, label, DOI_PROTOCOL); } @@ -67,7 +83,7 @@ public GlobalId parsePersistentId(String protocol, String authority, String iden if (!DOI_PROTOCOL.equals(protocol)) { return null; } - return super.parsePersistentId(protocol, authority, identifier); + return super.parsePersistentId(protocol, authority, identifier, true); } public String getUrlPrefix() { diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java index a74a9f34bc9..8199b7d9c9f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java @@ -1317,8 +1317,8 @@ private void writeDescriptions(XMLStreamWriter xmlw, DvObject dvObject, boolean } if (StringUtils.isNotBlank(softwareName)) { if (StringUtils.isNotBlank(softwareVersion)) { + softwareName = softwareName + ", " + softwareVersion; } - softwareName = softwareName + ", " + softwareVersion; descriptionsWritten = XmlWriterUtil.writeOpenTagIfNeeded(xmlw, "descriptions", descriptionsWritten); XmlWriterUtil.writeFullElementWithAttributes(xmlw, "description", attributes, softwareName); } diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DataCiteDOIProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DataCiteDOIProvider.java index 5630844fb32..b07cd027a01 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DataCiteDOIProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/DataCiteDOIProvider.java @@ -226,8 +226,7 @@ protected String getProviderKeyName() { @Override public String getProviderType() { - // TODO Auto-generated method stub - return null; + return TYPE; } public String getMdsUrl() { diff --git 
a/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java index 9d61663d034..1f03d8a6cfb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java @@ -59,6 +59,11 @@ * service. * As of now, it only does the registration updates, to accommodate * the modifyRegistration datasets API sub-command. + * + * Note that while Handles are nominally case sensitive, handle.net is + * configured to be case-insensitive and Dataverse makes case-insensitve + * database look-ups to find Handles (See #11003). That said, database + * entries are stored in the case matching the configuration of the provider. */ public class HandlePidProvider extends AbstractPidProvider { diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/perma/PermaLinkPidProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/perma/PermaLinkPidProvider.java index 7b55292350f..2cc0d41ede7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/perma/PermaLinkPidProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/perma/PermaLinkPidProvider.java @@ -24,6 +24,9 @@ * overridable by a configurable parameter to support use of an external * resolver. * + * Note that while PermaLinks are nominally case sensitive, Dataverse makes + * case-insensitve database look-ups to find them (See #11003). That said, database + * entries are stored in the case matching the configuration of the provider. */ public class PermaLinkPidProvider extends AbstractPidProvider { diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java index beb676f60d1..63b5bf03ea7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrl.java @@ -30,7 +30,7 @@ public class PrivateUrl { public PrivateUrl(RoleAssignment roleAssignment, Dataset dataset, String dataverseSiteUrl) { this.token = roleAssignment.getPrivateUrlToken(); - this.link = dataverseSiteUrl + "/privateurl.xhtml?token=" + token; + this.link = dataverseSiteUrl + "/previewurl.xhtml?token=" + token; this.dataset = dataset; this.roleAssignment = roleAssignment; } diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java index 9af4bb6af9e..17c622be9e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java @@ -1,6 +1,10 @@ package edu.harvard.iq.dataverse.privateurl; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetServiceBean; +import edu.harvard.iq.dataverse.DataverseRequestServiceBean; import edu.harvard.iq.dataverse.DataverseSession; +import edu.harvard.iq.dataverse.PermissionsWrapper; import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import java.io.Serializable; import java.util.logging.Logger; @@ -20,8 +24,14 @@ public class PrivateUrlPage implements Serializable { @EJB PrivateUrlServiceBean privateUrlService; + @EJB + DatasetServiceBean datasetServiceBean; @Inject DataverseSession session; + @Inject + PermissionsWrapper permissionsWrapper; + @Inject + DataverseRequestServiceBean dvRequestService; /** * The unique 
string used to look up a PrivateUrlUser and the associated @@ -34,7 +44,16 @@ public String init() { PrivateUrlRedirectData privateUrlRedirectData = privateUrlService.getPrivateUrlRedirectDataFromToken(token); String draftDatasetPageToBeRedirectedTo = privateUrlRedirectData.getDraftDatasetPageToBeRedirectedTo() + "&faces-redirect=true"; PrivateUrlUser privateUrlUser = privateUrlRedirectData.getPrivateUrlUser(); - session.setUser(privateUrlUser); + boolean sessionUserCanViewUnpublishedDataset = false; + if (session.getUser().isAuthenticated()){ + Long datasetId = privateUrlUser.getDatasetId(); + Dataset dataset = datasetServiceBean.find(datasetId); + sessionUserCanViewUnpublishedDataset = permissionsWrapper.canViewUnpublishedDataset(dvRequestService.getDataverseRequest(), dataset); + } + if(!sessionUserCanViewUnpublishedDataset){ + //Only Reset if user cannot view this Draft Version + session.setUser(privateUrlUser); + } logger.info("Redirecting PrivateUrlUser '" + privateUrlUser.getIdentifier() + "' to " + draftDatasetPageToBeRedirectedTo); return draftDatasetPageToBeRedirectedTo; } catch (Exception ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java index 6e939c1bb6d..1310e0eb199 100644 --- a/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java +++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/package-info.java @@ -1,19 +1,19 @@ /** - * Private URL for unpublished datasets. + * Preview URL for unpublished datasets. *

- * The Private URL feature has been implemented as a specialized role assignment + * The Preview (formerly Private) URL feature has been implemented as a specialized role assignment * with an associated token that permits read-only access to the metadata and * all files (regardless of if the files are restricted or not) of a draft * version of a dataset. *

- * As of this note, a second option - to create a Private URL that provides an + * As of this note, a second option - to create a Preview URL that provides an * anonymized view of the dataset has been added. This option works the same as * the original except that it hides author names in the citation block, hides * the values for an admin specified list of metadata fields, disables citation * downloads, and disables API access (except for file and file thumbnail * downloads which are used by the UI). *

- * The primary use case for a Private URL is for journal editors to send a link + * The primary use case for a Preview URL is for journal editors to send a link * to reviewers of a dataset before publication. In most cases, these journal * editors do not permit depositors to publish on their own, which is to say * they only allow depositors to have the "Contributor" role on the datasets @@ -24,42 +24,42 @@ * the depositor, who is in charge of both the security of the dataset and the * timing of when the dataset is published. *

- * A secondary use case for a Private URL is for depositors who have the ability + * A secondary use case for a Preview URL is for depositors who have the ability * to manage permissions on their dataset (depositors who have the "Curator" or * "Admin" role, which grants much more power than the "Contributor" role) to * send a link to coauthors or other trusted parties to preview the dataset * before the depositors publish the dataset on their own. For better security, * these depositors could ask their coauthors to create Dataverse accounts and - * assign roles to them directly, rather than using a Private URL which requires + * assign roles to them directly, rather than using a Preview URL which requires * no username or password. *

* As of this note, a second option aimed specifically at the review use case - - * to create a Private URL that provides an anonymized view of the dataset - has + * to create a Preview URL that provides an anonymized view of the dataset - has * been added. This option works the same as the original except that it hides * author names in the citation block, hides the values for an admin specified * list of metadata fields, disables citation downloads, and disables API access * (except for file and file thumbnail downloads which are used by the UI). *

- * The token associated with the Private URL role assignment that can be used + * The token associated with the Preview URL role assignment that can be used * either in the GUI or, for the non-anonymized-access option, via the API to * elevate privileges beyond what a "Guest" can see. The ability to use a - * Private URL token via API was added mostly to facilitate automated testing of - * the feature but the far more common case is expected to be use of the Private + * Preview URL token via API was added mostly to facilitate automated testing of + * the feature but the far more common case is expected to be use of the Preview * URL token in a link that is clicked to open a browser, similar to links * shared via Dropbox, Google, etc. *

- * When reviewers click a Private URL their browser sessions are set to the + * When reviewers click a Preview URL their browser sessions are set to the * "{@link edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser}" that * has the "Member" role only on the dataset in question and redirected to that * dataset, where they will see an indication in blue at the top of the page * that they are viewing an unpublished dataset. If the reviewer happens to be * logged into Dataverse already, clicking the link will log them out because * the review is meant to be blind. Because the dataset is always in draft when - * a Private URL is in effect, no downloads or any other activity by the - * reviewer are logged to the guestbook. All reviewers click the same Private + * a Preview URL is in effect, no downloads or any other activity by the + * reviewer are logged to the guestbook. All reviewers click the same Preview * URL containing the same token, and with the exception of an IP address being * logged, it should be impossible to trace which reviewers have clicked a - * Private URL. If the reviewer navigates to the home page, the session is set + * Preview URL. If the reviewer navigates to the home page, the session is set * to the Guest user and they will see what a Guest would see. *

* The "Member" role is used because it contains the necessary read-only @@ -76,51 +76,51 @@ * version. A Member can also download restricted files that have been deleted * from previously published versions. *

- * Likewise, when a Private URL token is used via API, commands are executed + * Likewise, when a Preview URL token is used via API, commands are executed * using the "PrivateUrlUser" that has the "Member" role only on the dataset in * question. This means that read-only operations such as downloads of the - * dataset's files are permitted. The Search API does not respect the Private + * dataset's files are permitted. The Search API does not respect the Preview * URL token but you can download files using the Access API, and, with the * non-anonymized-access option, download unpublished metadata using the Native * API. *

- * A Private URL cannot be created for a published version of a dataset. In the + * A Preview URL cannot be created for a published version of a dataset. In the * GUI, you will be reminded of this fact with a popup. The API will explain * this as well. *

- * An anonymized-access Private URL can't be created if any published dataset + * An anonymized-access Preview URL can't be created if any published dataset * version exists. The primary reason for this is that, since datasets have * DOIs, the full metadata about published versions is available directly from * the DOI provider. (While the metadata for that version could be somewhat * different, in practice it would probably provide a means of identifying * some/all of the authors). *

- * If a draft dataset containing a Private URL is - * published, the Private URL is deleted. This means that reviewers who click + * If a draft dataset containing a Preview URL is + * published, the Preview URL is deleted. This means that reviewers who click * the link after publication will see a 404. *

- * If a post-publication draft containing a Private URL is deleted, the Private + * If a post-publication draft containing a Preview URL is deleted, the Preview * URL is deleted. This is to ensure that if a new draft is created in the * future, a new token will be used. *

- * The creation and deletion of a Private URL are limited to the "Curator" and + * The creation and deletion of a Preview URL are limited to the "Curator" and * "Admin" roles because only those roles have the permission called * "ManageDatasetPermissions", which is the permission used by the * "AssignRoleCommand" and "RevokeRoleCommand" commands. If you have the - * permission to create or delete a Private URL, the fact that a Private URL is + * permission to create or delete a Preview URL, the fact that a Preview URL is * enabled for a dataset will be indicated in blue at the top of the page. * Success messages are shown at the top of the page when you create or delete a - * Private URL. In the GUI, deleting a Private URL is called "disabling" and you + * Preview URL. In the GUI, deleting a Preview URL is called "disabling" and you * will be prompted for a confirmation. No matter what you call it the role is - * revoked. You can also delete a Private URL by revoking the role. + * revoked. You can also delete a Preview URL by revoking the role. *

* A "Contributor" does not have the "ManageDatasetPermissions" permission and - * cannot see "Permissions" nor "Private URL" under the "Edit" menu of their - * dataset. When a Curator or Admin has enabled a Private URL on a Contributor's - * dataset, the Contributor does not see a visual indication that a Private URL + * cannot see "Permissions" nor "Preview URL" under the "Edit" menu of their + * dataset. When a Curator or Admin has enabled a Preview URL on a Contributor's + * dataset, the Contributor does not see a visual indication that a Preview URL * has been enabled for their dataset. *

- * There is no way for an "Admin" or "Curator" to see when a Private URL was + * There is no way for an "Admin" or "Curator" to see when a Preview URL was * created or deleted for a dataset but someone who has access to the database * can see that the following commands are logged to the "actionlogrecord" * database table: @@ -129,7 +129,7 @@ *

 * <li>{@link edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand}</li>
 * <li>{@link edu.harvard.iq.dataverse.engine.command.impl.DeletePrivateUrlCommand}</li>
  • * - * See also the Private URL To Unpublished Dataset BRD at * https://docs.google.com/document/d/1FT47QkZKcmjSgRnePaJO2g1nzcotLyN3Yb2ORvBr6cs/edit?usp=sharing */ diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index a8cf9ed519b..4efd339ee46 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -135,6 +135,9 @@ public class IndexServiceBean { @EJB DatasetFieldServiceBean datasetFieldService; + @Inject + DatasetVersionFilesServiceBean datasetVersionFilesServiceBean; + public static final String solrDocIdentifierDataverse = "dataverse_"; public static final String solrDocIdentifierFile = "datafile_"; public static final String solrDocIdentifierDataset = "dataset_"; @@ -420,7 +423,7 @@ synchronized private static Dataset getNextToIndex(Long id, Dataset d) { public void asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) { try { acquirePermitFromSemaphore(); - doAyncIndexDataset(dataset, doNormalSolrDocCleanUp); + doAsyncIndexDataset(dataset, doNormalSolrDocCleanUp); } catch (InterruptedException e) { String failureLogText = "Indexing failed: interrupted. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); failureLogText += "\r\n" + e.getLocalizedMessage(); @@ -430,7 +433,7 @@ public void asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) { } } - private void doAyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) { + private void doAsyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) { Long id = dataset.getId(); Dataset next = getNextToIndex(id, dataset); // if there is an ongoing index job for this dataset, next is null (ongoing index job will reindex the newest version after current indexing finishes) while (next != null) { @@ -451,7 +454,7 @@ public void asyncIndexDatasetList(List datasets, boolean doNormalSolrDo for(Dataset dataset : datasets) { try { acquirePermitFromSemaphore(); - doAyncIndexDataset(dataset, true); + doAsyncIndexDataset(dataset, true); } catch (InterruptedException e) { String failureLogText = "Indexing failed: interrupted. 
You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); failureLogText += "\r\n" + e.getLocalizedMessage(); @@ -1018,6 +1021,8 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, SetFeature Request/Idea: Harvest metadata values that aren't from a list of controlled values #9992 @@ -1296,7 +1299,6 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set findPermissionsInSolrOnly() throws SearchException { String dtype = dvObjectService.getDtype(id); if (dtype == null) { permissionInSolrOnly.add(docId); - } - if (dtype.equals(DType.Dataset.getDType())) { + }else if (dtype.equals(DType.Dataset.getDType())) { List states = datasetService.getVersionStates(id); if (states != null) { String latestState = states.get(states.size() - 1); @@ -2252,7 +2253,7 @@ public List findPermissionsInSolrOnly() throws SearchException { } else if (dtype.equals(DType.DataFile.getDType())) { List states = dataFileService.findVersionStates(id); Set strings = states.stream().map(VersionState::toString).collect(Collectors.toSet()); - logger.fine("States for " + docId + ": " + String.join(", ", strings)); + logger.finest("States for " + docId + ": " + String.join(", ", strings)); if (docId.endsWith("draft_permission")) { if (!states.contains(VersionState.DRAFT)) { permissionInSolrOnly.add(docId); @@ -2266,7 +2267,7 @@ public List findPermissionsInSolrOnly() throws SearchException { permissionInSolrOnly.add(docId); } else { if (!dataFileService.isInReleasedVersion(id)) { - logger.fine("Adding doc " + docId + " to list of permissions in Solr only"); + logger.finest("Adding doc " + docId + " to list of permissions in Solr only"); permissionInSolrOnly.add(docId); } } @@ -2407,6 +2408,11 @@ public void deleteHarvestedDocuments(Dataset harvestedDataset) { solrIdsOfDocumentsToDelete.add(solrDocIdentifierFile + datafile.getId()); } + deleteHarvestedDocuments(solrIdsOfDocumentsToDelete); + } + + public void deleteHarvestedDocuments(List solrIdsOfDocumentsToDelete) { + logger.fine("attempting to delete the following documents from the index: " + StringUtils.join(solrIdsOfDocumentsToDelete, ",")); IndexResponse resultOfAttemptToDeleteDocuments = solrIndexService.deleteMultipleSolrIds(solrIdsOfDocumentsToDelete); logger.fine("result of attempt to delete harvested documents: " + resultOfAttemptToDeleteDocuments + "\n"); diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java index ef27a5eefaf..1f1137016f2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java @@ -291,5 +291,6 @@ more targeted results for just datasets. The format is YYYY (i.e. 
public static final String DATASET_VALID = "datasetValid"; public static final String DATASET_LICENSE = "license"; + public static final String FILE_COUNT = "fileCount"; } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 4f3f6e46e48..9328dd03ca2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -34,6 +34,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.MissingResourceException; import java.util.Optional; import java.util.Set; import java.util.logging.Logger; @@ -1231,40 +1232,33 @@ public String getTypeFromFilterQuery(String filterQuery) { } public List getFriendlyNamesFromFilterQuery(String filterQuery) { - - - if ((filterQuery == null)|| - (datasetfieldFriendlyNamesBySolrField == null)|| - (staticSolrFieldFriendlyNamesBySolrField==null)){ + + if ((filterQuery == null) || + (datasetfieldFriendlyNamesBySolrField == null) || + (staticSolrFieldFriendlyNamesBySolrField == null)) { return null; } - - if(!filterQuery.contains(":")) { + + if (!filterQuery.contains(":")) { return null; } - + int index = filterQuery.indexOf(":"); String key = filterQuery.substring(0,index); String value = filterQuery.substring(index+1); - List friendlyNames = new ArrayList<>(); + // friendlyNames get 2 entries : key and value + List friendlyNames = new ArrayList<>(2); + // Get dataset field friendly name from default ressource bundle file String datasetfieldFriendyName = datasetfieldFriendlyNamesBySolrField.get(key); if (datasetfieldFriendyName != null) { friendlyNames.add(datasetfieldFriendyName); } else { + // Get non dataset field friendly name from "staticSearchFields" ressource bundle file String nonDatasetSolrField = staticSolrFieldFriendlyNamesBySolrField.get(key); if (nonDatasetSolrField != null) { friendlyNames.add(nonDatasetSolrField); - } else if (key.equals(SearchFields.PUBLICATION_STATUS)) { - /** - * @todo Refactor this quick fix for - * https://github.com/IQSS/dataverse/issues/618 . We really need - * to get rid of all the reflection that's happening with - * solrQueryResponse.getStaticSolrFieldFriendlyNamesBySolrField() - * and - */ - friendlyNames.add("Publication Status"); } else { // meh. 
better than nuthin' friendlyNames.add(key); @@ -1276,9 +1270,13 @@ public List getFriendlyNamesFromFilterQuery(String filterQuery) { String valueWithoutQuotes = noTrailingQuote; if (key.equals(SearchFields.METADATA_TYPES) && getDataverse() != null && getDataverse().getMetadataBlockFacets() != null) { - Optional friendlyName = getDataverse().getMetadataBlockFacets().stream().filter(block -> block.getMetadataBlock().getName().equals(valueWithoutQuotes)).findFirst().map(block -> block.getMetadataBlock().getLocaleDisplayFacet()); + Optional friendlyName = getDataverse().getMetadataBlockFacets() + .stream() + .filter(block -> block.getMetadataBlock().getName().equals(valueWithoutQuotes)) + .findFirst() + .map(block -> block.getMetadataBlock().getLocaleDisplayFacet()); logger.fine(String.format("action=getFriendlyNamesFromFilterQuery key=%s value=%s friendlyName=%s", key, value, friendlyName)); - if(friendlyName.isPresent()) { + if (friendlyName.isPresent()) { friendlyNames.add(friendlyName.get()); return friendlyNames; } @@ -1290,7 +1288,15 @@ public List getFriendlyNamesFromFilterQuery(String filterQuery) { } } - friendlyNames.add(valueWithoutQuotes); + // Get value friendly name from default ressource bundle file + String valueFriendlyName; + try { + valueFriendlyName = BundleUtil.getStringFromPropertyFile(noTrailingQuote, "Bundle"); + } catch (MissingResourceException e) { + valueFriendlyName = noTrailingQuote; + } + + friendlyNames.add(valueFriendlyName); return friendlyNames; } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index ee93c49ad34..3fd97d418c0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -35,6 +35,8 @@ import jakarta.inject.Inject; import jakarta.inject.Named; import jakarta.persistence.NoResultException; + +import org.apache.commons.lang3.StringUtils; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery.SortClause; import org.apache.solr.client.solrj.SolrServerException; @@ -52,6 +54,8 @@ public class SearchServiceBean { private static final Logger logger = Logger.getLogger(SearchServiceBean.class.getCanonicalName()); + private static final String ALL_GROUPS = "*"; + /** * We're trying to make the SearchServiceBean lean, mean, and fast, with as * few injections of EJBs as possible. @@ -182,6 +186,7 @@ public SolrQueryResponse search( SolrQuery solrQuery = new SolrQuery(); query = SearchUtil.sanitizeQuery(query); + solrQuery.setQuery(query); if (sortField != null) { // is it ok not to specify any sort? - there are cases where we @@ -323,24 +328,13 @@ public SolrQueryResponse search( } } - //I'm not sure if just adding null here is good for hte permissions system... 
i think it needs something - if(dataverses != null) { - for(Dataverse dataverse : dataverses) { - // ----------------------------------- - // PERMISSION FILTER QUERY - // ----------------------------------- - String permissionFilterQuery = this.getPermissionFilterQuery(dataverseRequest, solrQuery, dataverse, onlyDatatRelatedToMe, addFacets); - if (permissionFilterQuery != null) { - solrQuery.addFilterQuery(permissionFilterQuery); - } - } - } else { - String permissionFilterQuery = this.getPermissionFilterQuery(dataverseRequest, solrQuery, null, onlyDatatRelatedToMe, addFacets); - if (permissionFilterQuery != null) { - solrQuery.addFilterQuery(permissionFilterQuery); - } + // ----------------------------------- + // PERMISSION FILTER QUERY + // ----------------------------------- + String permissionFilterQuery = this.getPermissionFilterQuery(dataverseRequest, solrQuery, onlyDatatRelatedToMe, addFacets); + if (!StringUtils.isBlank(permissionFilterQuery)) { + solrQuery.addFilterQuery(permissionFilterQuery); } - /** * @todo: do sanity checking... throw error if negative @@ -503,7 +497,8 @@ public SolrQueryResponse search( Long retentionEndDate = (Long) solrDocument.getFieldValue(SearchFields.RETENTION_END_DATE); // Boolean datasetValid = (Boolean) solrDocument.getFieldValue(SearchFields.DATASET_VALID); - + Long fileCount = (Long) solrDocument.getFieldValue(SearchFields.FILE_COUNT); + List matchedFields = new ArrayList<>(); SolrSearchResult solrSearchResult = new SolrSearchResult(query, name); @@ -576,6 +571,7 @@ public SolrQueryResponse search( solrSearchResult.setDeaccessionReason(deaccessionReason); solrSearchResult.setDvTree(dvTree); solrSearchResult.setDatasetValid(datasetValid); + solrSearchResult.setFileCount(fileCount); if (Boolean.TRUE.equals((Boolean) solrDocument.getFieldValue(SearchFields.IS_HARVESTED))) { solrSearchResult.setHarvested(true); @@ -994,7 +990,7 @@ public String getCapitalizedName(String name) { * * @return */ - private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQuery solrQuery, Dataverse dataverse, boolean onlyDatatRelatedToMe, boolean addFacets) { + private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQuery solrQuery, boolean onlyDatatRelatedToMe, boolean addFacets) { User user = dataverseRequest.getUser(); if (user == null) { @@ -1003,38 +999,22 @@ private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQ if (solrQuery == null) { throw new NullPointerException("solrQuery cannot be null"); } - /** - * @todo For people who are not logged in, should we show stuff indexed - * with "AllUsers" group or not? If so, uncomment the allUsersString - * stuff below. 
- */ -// String allUsersString = IndexServiceBean.getGroupPrefix() + AllUsers.get().getAlias(); -// String publicOnly = "{!join from=" + SearchFields.DEFINITION_POINT + " to=id}" + SearchFields.DISCOVERABLE_BY + ":(" + IndexServiceBean.getPublicGroupString() + " OR " + allUsersString + ")"; - String publicOnly = "{!join from=" + SearchFields.DEFINITION_POINT + " to=id}" + SearchFields.DISCOVERABLE_BY + ":(" + IndexServiceBean.getPublicGroupString() + ")"; -// String publicOnly = "{!join from=" + SearchFields.GROUPS + " to=" + SearchFields.PERMS + "}id:" + IndexServiceBean.getPublicGroupString(); - // initialize to public only to be safe - String dangerZoneNoSolrJoin = null; - + if (user instanceof PrivateUrlUser) { user = GuestUser.get(); } - AuthenticatedUser au = null; + ArrayList groupList = new ArrayList(); + AuthenticatedUser au = null; Set groups; - - if (user instanceof GuestUser) { - // Yes, GuestUser may be part of one or more groups; such as IP Groups. - groups = groupService.collectAncestors(groupService.groupsFor(dataverseRequest)); - } else { - if (!(user instanceof AuthenticatedUser)) { - logger.severe("Should never reach here. A User must be an AuthenticatedUser or a Guest"); - throw new IllegalStateException("A User must be an AuthenticatedUser or a Guest"); - } + boolean avoidJoin = FeatureFlags.AVOID_EXPENSIVE_SOLR_JOIN.enabled(); + + if (user instanceof AuthenticatedUser) { au = (AuthenticatedUser) user; - + // ---------------------------------------------------- - // (3) Is this a Super User? + // Is this a Super User? // If so, they can see everything // ---------------------------------------------------- if (au.isSuperuser()) { @@ -1042,187 +1022,76 @@ private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQ // to see everything in Solr with no regard to permissions. But it's // been this way since Dataverse 4.0. So relax. :) - return dangerZoneNoSolrJoin; + return buildPermissionFilterQuery(avoidJoin, ALL_GROUPS); } - + // ---------------------------------------------------- - // (4) User is logged in AND onlyDatatRelatedToMe == true + // User is logged in AND onlyDatatRelatedToMe == true // Yes, give back everything -> the settings will be in - // the filterqueries given to search + // the filterqueries given to search // ---------------------------------------------------- if (onlyDatatRelatedToMe == true) { if (systemConfig.myDataDoesNotUsePermissionDocs()) { logger.fine("old 4.2 behavior: MyData is not using Solr permission docs"); - return dangerZoneNoSolrJoin; + return buildPermissionFilterQuery(avoidJoin, ALL_GROUPS); } else { // fall-through logger.fine("new post-4.2 behavior: MyData is using Solr permission docs"); } } - // ---------------------------------------------------- - // (5) Work with Authenticated User who is not a Superuser + // Work with Authenticated User who is not a Superuser // ---------------------------------------------------- - - groups = groupService.collectAncestors(groupService.groupsFor(dataverseRequest)); + groupList.add(IndexServiceBean.getGroupPerUserPrefix() + au.getId()); } - if (FeatureFlags.AVOID_EXPENSIVE_SOLR_JOIN.enabled()) { - /** - * Instead of doing a super expensive join, we will rely on the - * new boolean field PublicObject:true for public objects. This field - * is indexed on the content document itself, rather than a permission - * document. An additional join will be added only for any extra, - * more restricted groups that the user may be part of. 
- * **Note the experimental nature of this optimization**. - */ - StringBuilder sb = new StringBuilder(); - StringBuilder sbgroups = new StringBuilder(); - - // All users, guests and authenticated, should see all the - // documents marked as publicObject_b:true, at least: - sb.append(SearchFields.PUBLIC_OBJECT + ":" + true); + // In addition to the user referenced directly, we will also + // add joins on all the non-public groups that may exist for the + // user: - // One or more groups *may* also be available for this user. Once again, - // do note that Guest users may be part of some groups, such as - // IP groups. - - int groupCounter = 0; + // Authenticated users, *and the GuestUser*, may be part of one or more groups; such + // as IP Groups. + groups = groupService.collectAncestors(groupService.groupsFor(dataverseRequest)); - // An AuthenticatedUser should also be able to see all the content - // on which they have direct permissions: - if (au != null) { - groupCounter++; - sbgroups.append(IndexServiceBean.getGroupPerUserPrefix() + au.getId()); - } - - // In addition to the user referenced directly, we will also - // add joins on all the non-public groups that may exist for the - // user: - for (Group group : groups) { - String groupAlias = group.getAlias(); - if (groupAlias != null && !groupAlias.isEmpty() && !groupAlias.startsWith("builtIn")) { - groupCounter++; - if (groupCounter > 1) { - sbgroups.append(" OR "); - } - sbgroups.append(IndexServiceBean.getGroupPrefix() + groupAlias); - } - } - - if (groupCounter > 1) { - // If there is more than one group, the parentheses must be added: - sbgroups.insert(0, "("); - sbgroups.append(")"); - } - - if (groupCounter > 0) { - // If there are any groups for this user, an extra join must be - // added to the query, and the extra sub-query must be added to - // the combined Solr query: - sb.append(" OR {!join from=" + SearchFields.DEFINITION_POINT + " to=id v=$q1}"); - // Add the subquery to the combined Solr query: - solrQuery.setParam("q1", SearchFields.DISCOVERABLE_BY + ":" + sbgroups.toString()); - logger.info("The sub-query q1 set to " + SearchFields.DISCOVERABLE_BY + ":" + sbgroups.toString()); - } - - String ret = sb.toString(); - logger.fine("Returning experimental query: " + ret); - return ret; - } - - // END OF EXPERIMENTAL OPTIMIZATION - - // Old, un-optimized way of handling permissions. - // Largely left intact, minus the lookups that have already been performed - // above. - - // ---------------------------------------------------- - // (1) Is this a GuestUser? - // ---------------------------------------------------- - if (user instanceof GuestUser) { - - StringBuilder sb = new StringBuilder(); - - String groupsFromProviders = ""; - for (Group group : groups) { - logger.fine("found group " + group.getIdentifier() + " with alias " + group.getAlias()); - String groupAlias = group.getAlias(); - if (groupAlias != null && !groupAlias.isEmpty()) { - sb.append(" OR "); - // i.e. 
group_builtIn/all-users, ip/ipGroup3 - sb.append(IndexServiceBean.getGroupPrefix()).append(groupAlias); - } - } - groupsFromProviders = sb.toString(); - logger.fine("groupsFromProviders:" + groupsFromProviders); - String guestWithGroups = "{!join from=" + SearchFields.DEFINITION_POINT + " to=id}" + SearchFields.DISCOVERABLE_BY + ":(" + IndexServiceBean.getPublicGroupString() + groupsFromProviders + ")"; - logger.fine(guestWithGroups); - return guestWithGroups; - } - - // ---------------------------------------------------- - // (5) Work with Authenticated User who is not a Superuser - // ---------------------------------------------------- - // It was already confirmed, that if the user is not GuestUser, we - // have an AuthenticatedUser au which is not null. - /** - * @todo all this code needs cleanup and clarification. - */ - /** - * Every AuthenticatedUser is part of a "User Private Group" (UGP), a - * concept we borrow from RHEL: - * https://access.redhat.com/site/documentation/en-US/Red_Hat_Enterprise_Linux/6/html/Deployment_Guide/ch-Managing_Users_and_Groups.html#s2-users-groups-private-groups - */ - /** - * @todo rename this from publicPlusUserPrivateGroup. Confusing - */ - // safe default: public only - String publicPlusUserPrivateGroup = publicOnly; -// + (onlyDatatRelatedToMe ? "" : (publicOnly + " OR ")) -// + "{!join from=" + SearchFields.GROUPS + " to=" + SearchFields.PERMS + "}id:" + IndexServiceBean.getGroupPerUserPrefix() + au.getId() + ")"; - -// /** -// * @todo add onlyDatatRelatedToMe option into the experimental JOIN -// * before enabling it. -// */ - /** - * From a search perspective, we don't care about if the group was - * created within one dataverse or another. We just want a list of *all* - * the groups the user is part of. We are greedy. We want all BuiltIn - * Groups, Shibboleth Groups, IP Groups, "system" groups, everything. - * - * A JOIN on "permission documents" will determine if the user can find - * a given "content document" (dataset version, etc) in Solr. - */ - String groupsFromProviders = ""; - StringBuilder sb = new StringBuilder(); for (Group group : groups) { - logger.fine("found group " + group.getIdentifier() + " with alias " + group.getAlias()); String groupAlias = group.getAlias(); - if (groupAlias != null && !groupAlias.isEmpty()) { - sb.append(" OR "); - // i.e. 
group_builtIn/all-users, group_builtIn/authenticated-users, group_1-explictGroup1, group_shib/2 - sb.append(IndexServiceBean.getGroupPrefix() + groupAlias); + if (groupAlias != null && !groupAlias.isEmpty() && (!avoidJoin || !groupAlias.startsWith("builtIn"))) { + groupList.add(IndexServiceBean.getGroupPrefix() + groupAlias); } } - groupsFromProviders = sb.toString(); - logger.fine(groupsFromProviders); - if (true) { - /** - * @todo get rid of "experimental" in name - */ - String experimentalJoin = "{!join from=" + SearchFields.DEFINITION_POINT + " to=id}" + SearchFields.DISCOVERABLE_BY + ":(" + IndexServiceBean.getPublicGroupString() + " OR " + IndexServiceBean.getGroupPerUserPrefix() + au.getId() + groupsFromProviders + ")"; - publicPlusUserPrivateGroup = experimentalJoin; + if (!avoidJoin) { + // Add the public group + groupList.add(0, IndexServiceBean.getPublicGroupString()); + } + + String groupString = null; + //If we have additional groups, format them correctly into a search string, with parens if there is more than one + if (groupList.size() > 1) { + groupString = "(" + StringUtils.join(groupList, " OR ") + ")"; + } else if (groupList.size() == 1) { + groupString = groupList.get(0); } - - //permissionFilterQuery = publicPlusUserPrivateGroup; - logger.fine(publicPlusUserPrivateGroup); - - return publicPlusUserPrivateGroup; - + logger.fine("Groups: " + groupString); + String permissionQuery = buildPermissionFilterQuery(avoidJoin, groupString); + logger.fine("Permission Query: " + permissionQuery); + return permissionQuery; } + private String buildPermissionFilterQuery(boolean avoidJoin, String permissionFilterGroups) { + String query = (avoidJoin&& !isAllGroups(permissionFilterGroups)) ? SearchFields.PUBLIC_OBJECT + ":" + true : ""; + if (permissionFilterGroups != null && !isAllGroups(permissionFilterGroups)) { + if (!query.isEmpty()) { + query = "(" + query + " OR " + "{!join from=" + SearchFields.DEFINITION_POINT + " to=id}" + SearchFields.DISCOVERABLE_BY + ":" + permissionFilterGroups + ")"; + } else { + query = "{!join from=" + SearchFields.DEFINITION_POINT + " to=id}" + SearchFields.DISCOVERABLE_BY + ":" + permissionFilterGroups; + } + } + return query; + } + + private boolean isAllGroups(String groups) { + return (groups!=null &&groups.equals(ALL_GROUPS)); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java index cfe29ea08c7..e4d885276d0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java @@ -34,7 +34,7 @@ public class SolrIndexServiceBean { private static final Logger logger = Logger.getLogger(SolrIndexServiceBean.class.getCanonicalName()); - + @EJB DvObjectServiceBean dvObjectService; @EJB @@ -149,7 +149,7 @@ private List constructDatasetSolrDocs(Dataset dataset) { return solrDocs; } -// private List constructDatafileSolrDocs(DataFile dataFile) { + // private List constructDatafileSolrDocs(DataFile dataFile) { private List constructDatafileSolrDocs(DataFile dataFile, Map> permStringByDatasetVersion) { List datafileSolrDocs = new ArrayList<>(); Map desiredCards = searchPermissionsService.getDesiredCards(dataFile.getOwner()); @@ -166,14 +166,14 @@ private List constructDatafileSolrDocs(DataFile dataFile, Map constructDatafileSolrDocsFromDataset(Dataset datas } else { perms = searchPermissionsService.findDatasetVersionPerms(datasetVersionFileIsAttachedTo); } 
+ for (FileMetadata fileMetadata : datasetVersionFileIsAttachedTo.getFileMetadatas()) { Long fileId = fileMetadata.getDataFile().getId(); String solrIdStart = IndexServiceBean.solrDocIdentifierFile + fileId; String solrIdEnd = getDatasetOrDataFileSolrEnding(datasetVersionFileIsAttachedTo.getVersionState()); String solrId = solrIdStart + solrIdEnd; DvObjectSolrDoc dataFileSolrDoc = new DvObjectSolrDoc(fileId.toString(), solrId, datasetVersionFileIsAttachedTo.getId(), fileMetadata.getLabel(), perms); - logger.fine("adding fileid " + fileId); + logger.finest("adding fileid " + fileId); datafileSolrDocs.add(dataFileSolrDoc); } } @@ -361,20 +362,19 @@ private void persistToSolr(Collection docs) throws SolrServer public IndexResponse indexPermissionsOnSelfAndChildren(long definitionPointId) { DvObject definitionPoint = dvObjectService.findDvObject(definitionPointId); - if ( definitionPoint == null ) { + if (definitionPoint == null) { logger.log(Level.WARNING, "Cannot find a DvOpbject with id of {0}", definitionPointId); return null; } else { return indexPermissionsOnSelfAndChildren(definitionPoint); } } - + /** * We use the database to determine direct children since there is no * inheritance */ public IndexResponse indexPermissionsOnSelfAndChildren(DvObject definitionPoint) { - List dvObjectsToReindexPermissionsFor = new ArrayList<>(); List filesToReindexAsBatch = new ArrayList<>(); /** * @todo Re-indexing the definition point itself seems to be necessary @@ -383,27 +383,47 @@ public IndexResponse indexPermissionsOnSelfAndChildren(DvObject definitionPoint) // We don't create a Solr "primary/content" doc for the root dataverse // so don't create a Solr "permission" doc either. + int i = 0; + int numObjects = 0; if (definitionPoint.isInstanceofDataverse()) { Dataverse selfDataverse = (Dataverse) definitionPoint; if (!selfDataverse.equals(dataverseService.findRootDataverse())) { - dvObjectsToReindexPermissionsFor.add(definitionPoint); + indexPermissionsForOneDvObject(definitionPoint); + numObjects++; } List directChildDatasetsOfDvDefPoint = datasetService.findByOwnerId(selfDataverse.getId()); for (Dataset dataset : directChildDatasetsOfDvDefPoint) { - dvObjectsToReindexPermissionsFor.add(dataset); + indexPermissionsForOneDvObject(dataset); + numObjects++; for (DataFile datafile : filesToReIndexPermissionsFor(dataset)) { filesToReindexAsBatch.add(datafile); + i++; + if (i % 100 == 0) { + reindexFilesInBatches(filesToReindexAsBatch); + filesToReindexAsBatch.clear(); + } + if (i % 1000 == 0) { + logger.fine("Progress: " +i + " files permissions reindexed"); + } } + logger.fine("Progress : dataset " + dataset.getId() + " permissions reindexed"); } } else if (definitionPoint.isInstanceofDataset()) { - dvObjectsToReindexPermissionsFor.add(definitionPoint); + indexPermissionsForOneDvObject(definitionPoint); + numObjects++; // index files Dataset dataset = (Dataset) definitionPoint; for (DataFile datafile : filesToReIndexPermissionsFor(dataset)) { filesToReindexAsBatch.add(datafile); + i++; + if (i % 100 == 0) { + reindexFilesInBatches(filesToReindexAsBatch); + filesToReindexAsBatch.clear(); + } } } else { - dvObjectsToReindexPermissionsFor.add(definitionPoint); + indexPermissionsForOneDvObject(definitionPoint); + numObjects++; } /** @@ -412,64 +432,64 @@ public IndexResponse indexPermissionsOnSelfAndChildren(DvObject definitionPoint) * @todo Should update timestamps, probably, even thought these are * files, see https://github.com/IQSS/dataverse/issues/2421 */ - String response = 
reindexFilesInBatches(filesToReindexAsBatch); - - for (DvObject dvObject : dvObjectsToReindexPermissionsFor) { - /** - * @todo do something with this response - */ - IndexResponse indexResponse = indexPermissionsForOneDvObject(dvObject); - } - + reindexFilesInBatches(filesToReindexAsBatch); + logger.fine("Reindexed permissions for " + i + " files and " + numObjects + " datasets/collections"); return new IndexResponse("Number of dvObject permissions indexed for " + definitionPoint - + ": " + dvObjectsToReindexPermissionsFor.size() - ); + + ": " + numObjects); } private String reindexFilesInBatches(List filesToReindexPermissionsFor) { List docs = new ArrayList<>(); Map> byParentId = new HashMap<>(); Map> permStringByDatasetVersion = new HashMap<>(); - for (DataFile file : filesToReindexPermissionsFor) { - Dataset dataset = (Dataset) file.getOwner(); - Map desiredCards = searchPermissionsService.getDesiredCards(dataset); - for (DatasetVersion datasetVersionFileIsAttachedTo : datasetVersionsToBuildCardsFor(dataset)) { - boolean cardShouldExist = desiredCards.get(datasetVersionFileIsAttachedTo.getVersionState()); - if (cardShouldExist) { - List cachedPermission = permStringByDatasetVersion.get(datasetVersionFileIsAttachedTo.getId()); - if (cachedPermission == null) { - logger.fine("no cached permission! Looking it up..."); - List fileSolrDocs = constructDatafileSolrDocs((DataFile) file, permStringByDatasetVersion); - for (DvObjectSolrDoc fileSolrDoc : fileSolrDocs) { - Long datasetVersionId = fileSolrDoc.getDatasetVersionId(); - if (datasetVersionId != null) { - permStringByDatasetVersion.put(datasetVersionId, fileSolrDoc.getPermissions()); + int i = 0; + try { + for (DataFile file : filesToReindexPermissionsFor) { + Dataset dataset = (Dataset) file.getOwner(); + Map desiredCards = searchPermissionsService.getDesiredCards(dataset); + for (DatasetVersion datasetVersionFileIsAttachedTo : datasetVersionsToBuildCardsFor(dataset)) { + boolean cardShouldExist = desiredCards.get(datasetVersionFileIsAttachedTo.getVersionState()); + if (cardShouldExist) { + List cachedPermission = permStringByDatasetVersion.get(datasetVersionFileIsAttachedTo.getId()); + if (cachedPermission == null) { + logger.finest("no cached permission! 
Looking it up..."); + List fileSolrDocs = constructDatafileSolrDocs((DataFile) file, permStringByDatasetVersion); + for (DvObjectSolrDoc fileSolrDoc : fileSolrDocs) { + Long datasetVersionId = fileSolrDoc.getDatasetVersionId(); + if (datasetVersionId != null) { + permStringByDatasetVersion.put(datasetVersionId, fileSolrDoc.getPermissions()); + SolrInputDocument solrDoc = SearchUtil.createSolrDoc(fileSolrDoc); + docs.add(solrDoc); + i++; + } + } + } else { + logger.finest("cached permission is " + cachedPermission); + List fileSolrDocsBasedOnCachedPermissions = constructDatafileSolrDocs((DataFile) file, permStringByDatasetVersion); + for (DvObjectSolrDoc fileSolrDoc : fileSolrDocsBasedOnCachedPermissions) { SolrInputDocument solrDoc = SearchUtil.createSolrDoc(fileSolrDoc); docs.add(solrDoc); + i++; } } - } else { - logger.fine("cached permission is " + cachedPermission); - List fileSolrDocsBasedOnCachedPermissions = constructDatafileSolrDocs((DataFile) file, permStringByDatasetVersion); - for (DvObjectSolrDoc fileSolrDoc : fileSolrDocsBasedOnCachedPermissions) { - SolrInputDocument solrDoc = SearchUtil.createSolrDoc(fileSolrDoc); - docs.add(solrDoc); + if (i % 20 == 0) { + persistToSolr(docs); + docs = new ArrayList<>(); } } } + Long parent = file.getOwner().getId(); + List existingList = byParentId.get(parent); + if (existingList == null) { + List empty = new ArrayList<>(); + byParentId.put(parent, empty); + } else { + List updatedList = existingList; + updatedList.add(file.getId()); + byParentId.put(parent, updatedList); + } } - Long parent = file.getOwner().getId(); - List existingList = byParentId.get(parent); - if (existingList == null) { - List empty = new ArrayList<>(); - byParentId.put(parent, empty); - } else { - List updatedList = existingList; - updatedList.add(file.getId()); - byParentId.put(parent, updatedList); - } - } - try { + persistToSolr(docs); return " " + filesToReindexPermissionsFor.size() + " files indexed across " + docs.size() + " Solr documents "; } catch (SolrServerException | IOException ex) { @@ -517,29 +537,26 @@ public JsonObjectBuilder deleteAllFromSolrAndResetIndexTimes() throws SolrServer } /** - * - * * @return A list of dvobject ids that should have their permissions - * re-indexed because Solr was down when a permission was added. The permission - * should be added to Solr. The id of the permission contains the type of - * DvObject and the primary key of the dvObject. - * DvObjects of type DataFile are currently skipped because their index - * time isn't stored in the database, since they are indexed along - * with their parent dataset (this may change). + * re-indexed because Solr was down when a permission was added. The + * permission should be added to Solr. The id of the permission contains the + * type of DvObject and the primary key of the dvObject. DvObjects of type + * DataFile are currently skipped because their index time isn't stored in + * the database, since they are indexed along with their parent dataset + * (this may change). 
*/ public List findPermissionsInDatabaseButStaleInOrMissingFromSolr() { List indexingRequired = new ArrayList<>(); long rootDvId = dataverseService.findRootDataverse().getId(); List missingDataversePermissionIds = dataverseService.findIdStalePermission(); List missingDatasetPermissionIds = datasetService.findIdStalePermission(); - for (Long id : missingDataversePermissionIds) { + for (Long id : missingDataversePermissionIds) { if (!id.equals(rootDvId)) { - indexingRequired.add(id); + indexingRequired.add(id); } } indexingRequired.addAll(missingDatasetPermissionIds); return indexingRequired; } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java index 27900bac63f..8802555affd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java @@ -78,6 +78,10 @@ public class SolrSearchResult { private String citation; private String citationHtml; private String datasetType; + /** + * Only Dataset can have a file count + */ + private Long fileCount; /** * Files and datasets might have a UNF. Dataverses don't. */ @@ -456,10 +460,10 @@ public JsonObjectBuilder getJsonForMyData(boolean isValid) { } // getJsonForMydata public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls) { - return json(showRelevance, showEntityIds, showApiUrls, null, null); + return json(showRelevance, showEntityIds, showApiUrls, null); } - public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls, List metadataFields, Long datasetFileCount) { + public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, boolean showApiUrls, List metadataFields) { if (this.type == null) { return jsonObjectBuilder(); } @@ -597,7 +601,7 @@ public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, bool subjects.add(subject); } nullSafeJsonBuilder.add("subjects", subjects); - nullSafeJsonBuilder.add("fileCount", datasetFileCount); + nullSafeJsonBuilder.add("fileCount", this.fileCount); nullSafeJsonBuilder.add("versionId", dv.getId()); nullSafeJsonBuilder.add("versionState", dv.getVersionState().toString()); if (this.isPublishedState()) { @@ -1348,4 +1352,12 @@ public boolean isValid(Predicate canUpdateDataset) { } return !canUpdateDataset.test(this); } + + public Long getFileCount() { + return fileCount; + } + + public void setFileCount(Long fileCount) { + this.fileCount = fileCount; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java index 33e828e619d..20632c170e4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java @@ -97,12 +97,16 @@ public enum FeatureFlags { * for the dataset. * * @apiNote Raise flag by setting - * "dataverse.feature.enable-dataset-thumbnail-autoselect" + * "dataverse.feature.disable-dataset-thumbnail-autoselect" * @since Dataverse 6.4 */ DISABLE_DATASET_THUMBNAIL_AUTOSELECT("disable-dataset-thumbnail-autoselect"), /** * Feature flag for the new Globus upload framework. 
+ * + * @apiNote Raise flag by setting + * "dataverse.feature.globus-use-experimental-async-framework" + * @since Dataverse 6.4 */ GLOBUS_USE_EXPERIMENTAL_ASYNC_FRAMEWORK("globus-use-experimental-async-framework"), ; diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index 8ed96690e84..b5eb483c2c8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -539,6 +539,12 @@ Whether Harvesting (OAI) service is enabled * */ GlobusSingleFileTransfer, + /** Lower limit of the number of files in a Globus upload task where + * the batch mode should be utilized in looking up the file information + * on the remote end node (file sizes, primarily), instead of individual + * lookups. + */ + GlobusBatchLookupSize, /** * Optional external executables to run on the metadata for dataverses * and datasets being published; as an extra validation step, to diff --git a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java index 922e6ff5d28..771cf5fd0f0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java @@ -111,7 +111,7 @@ public static ResourceBundle getResourceBundle(String propertyFileName, Locale c ClassLoader loader = getClassLoader(filesRootDirectory); bundle = ResourceBundle.getBundle(propertyFileName, currentLocale, loader); } catch (MissingResourceException mre) { - logger.warning("No property file named " + propertyFileName + "_" + currentLocale.getLanguage() + logger.fine("No property file named " + propertyFileName + "_" + currentLocale.getLanguage() + " found in " + filesRootDirectory + ", using untranslated values"); bundle = ResourceBundle.getBundle("propertyFiles/" + propertyFileName, currentLocale); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index a0c32d5c8ce..991682ec8e8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -525,15 +525,18 @@ public static String determineFileType(File f, String fileName) throws IOExcepti // Check for shapefile extensions as described here: http://en.wikipedia.org/wiki/Shapefile //logger.info("Checking for shapefile"); - ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(f)); + ShapefileHandler shp_handler = new ShapefileHandler(f); if (shp_handler.containsShapefile()){ // logger.info("------- shapefile FOUND ----------"); fileType = ShapefileHandler.SHAPEFILE_FILE_TYPE; //"application/zipped-shapefile"; } - - Optional bagItFileHandler = CDI.current().select(BagItFileHandlerFactory.class).get().getBagItFileHandler(); - if(bagItFileHandler.isPresent() && bagItFileHandler.get().isBagItPackage(fileName, f)) { - fileType = BagItFileHandler.FILE_TYPE; + try { + Optional bagItFileHandler = CDI.current().select(BagItFileHandlerFactory.class).get().getBagItFileHandler(); + if (bagItFileHandler.isPresent() && bagItFileHandler.get().isBagItPackage(fileName, f)) { + fileType = BagItFileHandler.FILE_TYPE; + } + } catch (Exception e) { + logger.warning("Error checking for BagIt package: " + e.getMessage()); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java 
b/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java index f1440cc3c02..2b54f7a3bfe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java @@ -1,23 +1,21 @@ package edu.harvard.iq.dataverse.util; import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; import java.io.FileNotFoundException; import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; import java.util.Date; import java.util.ArrayList; import java.util.List; -import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; -import java.util.zip.ZipException; +import java.util.zip.ZipFile; import java.util.HashMap; import java.util.*; import java.nio.file.Files; import java.nio.file.Paths; import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; -import java.util.logging.Level; + import java.util.logging.Logger; import org.apache.commons.io.FileUtils; @@ -43,11 +41,10 @@ * "shape1.pdf", "README.md", "shape_notes.txt" * * Code Example: - * FileInputStream shp_file_input_stream = new FileInputStream(new File("zipped_shapefile.zip")) - * ShapefileHandler shp_handler = new ShapefileHandler(shp_file_input_stream); + * ShapefileHandler shp_handler = new ShapefileHandler(new File("zipped_shapefile.zip")); * if (shp_handler.containsShapefile()){ * File rezip_folder = new File("~/folder_for_rezipping"); - * boolean rezip_success = shp_handler.rezipShapefileSets(shp_file_input_stream, rezip_folder ); + * boolean rezip_success = shp_handler.rezipShapefileSets(rezip_folder ); * if (!rezip_success){ * // rezip failed, should be an error message (String) available System.out.println(shp_handler.error_message); @@ -68,13 +65,13 @@ public class ShapefileHandler{ private static final Logger logger = Logger.getLogger(ShapefileHandler.class.getCanonicalName()); // Reference for these extensions: http://en.wikipedia.org/wiki/Shapefile - public final static String SHAPEFILE_FILE_TYPE = "application/zipped-shapefile"; - public final static String SHAPEFILE_FILE_TYPE_FRIENDLY_NAME = "Shapefile as ZIP Archive"; - public final static List SHAPEFILE_MANDATORY_EXTENSIONS = Arrays.asList("shp", "shx", "dbf", "prj"); - public final static String SHP_XML_EXTENSION = "shp.xml"; - public final static String BLANK_EXTENSION = "__PLACEHOLDER-FOR-BLANK-EXTENSION__"; - public final static List SHAPEFILE_ALL_EXTENSIONS = Arrays.asList("shp", "shx", "dbf", "prj", "sbn", "sbx", "fbn", "fbx", "ain", "aih", "ixs", "mxs", "atx", "cpg", "qpj", "qmd", SHP_XML_EXTENSION); - + public static final String SHAPEFILE_FILE_TYPE = "application/zipped-shapefile"; + public static final String SHAPEFILE_FILE_TYPE_FRIENDLY_NAME = "Shapefile as ZIP Archive"; + public static final List SHAPEFILE_MANDATORY_EXTENSIONS = Arrays.asList("shp", "shx", "dbf", "prj"); + public static final String SHP_XML_EXTENSION = "shp.xml"; + public static final String BLANK_EXTENSION = "__PLACEHOLDER-FOR-BLANK-EXTENSION__"; + public static final List SHAPEFILE_ALL_EXTENSIONS = Arrays.asList("shp", "shx", "dbf", "prj", "sbn", "sbx", "fbn", "fbx", "ain", "aih", "ixs", "mxs", "atx", "cpg", "qpj", "qmd", SHP_XML_EXTENSION); + private final File zipFile; public boolean DEBUG = false; private boolean zipFileProcessed = false; @@ -97,9 +94,6 @@ public class ShapefileHandler{ private Map> fileGroups = new HashMap<>(); private List finalRezippedFiles = new ArrayList<>(); - - private String outputFolder = "unzipped"; - private 
String rezippedFolder = "rezipped"; // Debug helper private void msg(String s){ @@ -116,40 +110,28 @@ private void msgt(String s){ } /* - Constructor, start with filename - */ - public ShapefileHandler(String filename){ - - if (filename==null){ - this.addErrorMessage("The filename was null"); - return; - } - - FileInputStream zip_file_stream; - try { - zip_file_stream = new FileInputStream(new File(filename)); - } catch (FileNotFoundException ex) { - this.addErrorMessage("The file was not found"); + Constructor, start with File + */ + public ShapefileHandler(File zip_file) throws IOException { + zipFile = zip_file; + if (zip_file == null) { + this.addErrorMessage("The file was null"); return; } - - this.examineZipfile(zip_file_stream); - } - - - /* - Constructor, start with FileInputStream - */ - public ShapefileHandler(FileInputStream zip_file_stream){ - - if (zip_file_stream==null){ - this.addErrorMessage("The zip_file_stream was null"); - return; + try (var zip_file_object = new ZipFile(zip_file)) { + this.examineZipfile(zip_file_object); + } + catch (FileNotFoundException ex) { + // While this constructor had a FileInputStream as argument: + // FileUtil.determineFileType threw this exception before calling the constructor with a FileInputStream + // IngestServiceShapefileHelper.processFile won´t call this constructor if the file is not valid hence does not exist. + // When the file would have disappeared in the meantime, it would have produced a slightly different error message. + logger.severe("File not found: " + zip_file.getAbsolutePath()); + throw ex; } - this.examineZipfile(zip_file_stream); } - + public List getFinalRezippedFiles(){ return this.finalRezippedFiles; } @@ -291,26 +273,19 @@ inside the uploaded zip file (issue #6873). To achieve this, we recreate subfolders in the FileMetadata of the newly created DataFiles. (-- L.A. 09/2020) */ - private boolean unzipFilesToDirectory(FileInputStream zipfile_input_stream, File target_directory){ + private boolean unzipFilesToDirectory(ZipFile zipfileInput, File target_directory){ logger.fine("unzipFilesToDirectory: " + target_directory.getAbsolutePath() ); - if (zipfile_input_stream== null){ - this.addErrorMessage("unzipFilesToDirectory. 
The zipfile_input_stream is null."); - return false; - } if (!target_directory.isDirectory()){ this.addErrorMessage("This directory does not exist: " + target_directory.getAbsolutePath()); return false; } - List unzippedFileNames = new ArrayList<>(); - - ZipInputStream zipStream = new ZipInputStream(zipfile_input_stream); + List unzippedFileNames = new ArrayList<>(); + - ZipEntry origEntry; - byte[] buffer = new byte[2048]; try { - while((origEntry = zipStream.getNextEntry())!=null){ + for(var origEntry : Collections.list(zipfileInput.entries())){ String zentryFileName = origEntry.getName(); logger.fine("\nOriginal entry name: " + origEntry); @@ -360,15 +335,10 @@ private boolean unzipFilesToDirectory(FileInputStream zipfile_input_stream, File unzippedFileNames.add(outpath); } logger.fine("Write zip file: " + outpath); - FileOutputStream fileOutputStream; - long fsize = 0; - fileOutputStream = new FileOutputStream(outpath); - int len;// = 0; - while ((len = zipStream.read(buffer)) > 0){ - fileOutputStream.write(buffer, 0, len); - fsize+=len; - } // end while - fileOutputStream.close(); + try(var inputStream = zipfileInput.getInputStream(origEntry)) { + Files.createDirectories(new File(outpath).getParentFile().toPath()); + Files.copy(inputStream, Path.of(outpath), StandardCopyOption.REPLACE_EXISTING); + } } // end outer while } catch (IOException ex) { for (StackTraceElement el : ex.getStackTrace()){ @@ -377,19 +347,13 @@ private boolean unzipFilesToDirectory(FileInputStream zipfile_input_stream, File this.addErrorMessage("Failed to open ZipInputStream entry" + ex.getMessage()); return false; } - - try { - zipStream.close(); - } catch (IOException ex) { - Logger.getLogger(ShapefileHandler.class.getName()).log(Level.SEVERE, null, ex); - } - return true; + return true; } /* Rezip the shapefile(s) into a given directory Assumes that the zipfile_input_stream has already been checked! */ - public boolean rezipShapefileSets(FileInputStream zipfile_input_stream, File rezippedFolder) throws IOException{ + public boolean rezipShapefileSets(File rezippedFolder) throws IOException{ logger.fine("rezipShapefileSets"); //msgt("rezipShapefileSets"); if (!this.zipFileProcessed){ @@ -400,10 +364,6 @@ public boolean rezipShapefileSets(FileInputStream zipfile_input_stream, File rez this.addErrorMessage("There are no shapefiles here!"); return false; } - if (zipfile_input_stream== null){ - this.addErrorMessage("The zipfile_input_stream is null."); - return false; - } if (rezippedFolder == null){ this.addErrorMessage("The rezippedFolder is null."); return false; @@ -433,9 +393,11 @@ public boolean rezipShapefileSets(FileInputStream zipfile_input_stream, File rez // Unzip files! - if (!this.unzipFilesToDirectory(zipfile_input_stream, dir_for_unzipping)){ - this.addErrorMessage("Failed to unzip files."); - return false; + try(var zipfileObject = new ZipFile(zipFile)) { + if (!this.unzipFilesToDirectory(zipfileObject, dir_for_unzipping)) { + this.addErrorMessage("Failed to unzip files."); + return false; + } } // Redistribute files! String target_dirname = rezippedFolder.getAbsolutePath(); @@ -681,27 +643,19 @@ private boolean isFileToSkip(String fname){ /************************************** * Iterate through the zip file contents. * Does it contain any shapefiles? 
- * - * @param FileInputStream zip_file_stream */ - private boolean examineZipfile(FileInputStream zip_file_stream){ + private boolean examineZipfile(ZipFile zip_file){ // msgt("examineZipfile"); - - if (zip_file_stream==null){ - this.addErrorMessage("The zip file stream was null"); - return false; - } - + // Clear out file lists this.filesListInDir.clear(); this.filesizeHash.clear(); this.fileGroups.clear(); - try{ - ZipInputStream zipStream = new ZipInputStream(zip_file_stream); - ZipEntry entry; - List hiddenDirectories = new ArrayList<>(); - while((entry = zipStream.getNextEntry())!=null){ + try{ + List hiddenDirectories = new ArrayList<>(); + for(var entry : Collections.list(zip_file.entries())){ + String zentryFileName = entry.getName(); boolean isDirectory = entry.isDirectory(); @@ -748,8 +702,6 @@ private boolean examineZipfile(FileInputStream zip_file_stream){ this.filesizeHash.put(unzipFilePath, entry.getSize()); } } // end while - - zipStream.close(); if (this.filesListInDir.isEmpty()){ errorMessage = "No files in zipStream"; @@ -759,13 +711,8 @@ private boolean examineZipfile(FileInputStream zip_file_stream){ this.zipFileProcessed = true; return true; - }catch(ZipException ex){ - this.addErrorMessage("ZipException"); - msgt("ZipException"); - return false; - }catch(IOException ex){ - //ex.printStackTrace(); + //ex.printStackTrace(); this.addErrorMessage("IOException File name"); msgt("IOException"); return false; @@ -773,9 +720,6 @@ private boolean examineZipfile(FileInputStream zip_file_stream){ this.addErrorMessage("IllegalArgumentException when parsing zipfile"); msgt("IllegalArgumentException when parsing zipfile"); return false; - - }finally{ - } } // end examineFile diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 60967b13131..e769cacfdb1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -78,6 +78,7 @@ public class SystemConfig { public static final long defaultZipDownloadLimit = 104857600L; // 100MB private static final int defaultMultipleUploadFilesLimit = 1000; private static final int defaultLoginSessionTimeout = 480; // = 8 hours + private static final int defaultGlobusBatchLookupSize = 50; private String buildNumber = null; @@ -87,8 +88,8 @@ public class SystemConfig { private static final long DEFAULT_THUMBNAIL_SIZE_LIMIT_IMAGE = 3000000L; // 3 MB private static final long DEFAULT_THUMBNAIL_SIZE_LIMIT_PDF = 1000000L; // 1 MB - public final static String DEFAULTCURATIONLABELSET = "DEFAULT"; - public final static String CURATIONLABELSDISABLED = "DISABLED"; + public static final String DEFAULTCURATIONLABELSET = "DEFAULT"; + public static final String CURATIONLABELSDISABLED = "DISABLED"; public String getVersion() { return getVersion(false); @@ -473,7 +474,7 @@ public Integer getSearchHighlightFragmentSize() { String fragSize = settingsService.getValueForKey(SettingsServiceBean.Key.SearchHighlightFragmentSize); if (fragSize != null) { try { - return new Integer(fragSize); + return Integer.valueOf(fragSize); } catch (NumberFormatException nfe) { logger.info("Could not convert " + SettingsServiceBean.Key.SearchHighlightFragmentSize + " to int: " + nfe); } @@ -490,7 +491,7 @@ public long getTabularIngestSizeLimit() { if (limitEntry != null) { try { - Long sizeOption = new Long(limitEntry); + Long sizeOption = Long.valueOf(limitEntry); return sizeOption; } catch (NumberFormatException 
nfe) { logger.warning("Invalid value for TabularIngestSizeLimit option? - " + limitEntry); @@ -515,7 +516,7 @@ public long getTabularIngestSizeLimit(String formatName) { if (limitEntry != null) { try { - Long sizeOption = new Long(limitEntry); + Long sizeOption = Long.valueOf(limitEntry); return sizeOption; } catch (NumberFormatException nfe) { logger.warning("Invalid value for TabularIngestSizeLimit:" + formatName + "? - " + limitEntry ); @@ -954,6 +955,11 @@ public boolean isGlobusFileDownload() { return (isGlobusDownload() && settingsService.isTrueForKey(SettingsServiceBean.Key.GlobusSingleFileTransfer, false)); } + public int getGlobusBatchLookupSize() { + String batchSizeOption = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusBatchLookupSize); + return getIntLimitFromStringOrDefault(batchSizeOption, defaultGlobusBatchLookupSize); + } + private Boolean getMethodAvailable(String method, boolean upload) { String methods = settingsService.getValueForKey( upload ? SettingsServiceBean.Key.UploadMethods : SettingsServiceBean.Key.DownloadMethods); @@ -1061,7 +1067,7 @@ public long getDatasetValidationSizeLimit() { if (limitEntry != null) { try { - Long sizeOption = new Long(limitEntry); + Long sizeOption = Long.valueOf(limitEntry); return sizeOption; } catch (NumberFormatException nfe) { logger.warning("Invalid value for DatasetValidationSizeLimit option? - " + limitEntry); @@ -1076,7 +1082,7 @@ public long getFileValidationSizeLimit() { if (limitEntry != null) { try { - Long sizeOption = new Long(limitEntry); + Long sizeOption = Long.valueOf(limitEntry); return sizeOption; } catch (NumberFormatException nfe) { logger.warning("Invalid value for FileValidationSizeLimit option? - " + limitEntry); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 2f01c9bc2f2..232b7431a24 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -19,6 +19,7 @@ import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.api.Util; +import edu.harvard.iq.dataverse.api.dto.DataverseDTO; import edu.harvard.iq.dataverse.api.dto.FieldDTO; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; @@ -48,8 +49,10 @@ import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.function.Consumer; import java.util.logging.Logger; import java.util.stream.Collectors; + import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; @@ -128,7 +131,7 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { dv.setPermissionRoot(jobj.getBoolean("permissionRoot", false)); dv.setFacetRoot(jobj.getBoolean("facetRoot", false)); dv.setAffiliation(jobj.getString("affiliation", null)); - + if (jobj.containsKey("dataverseContacts")) { JsonArray dvContacts = jobj.getJsonArray("dataverseContacts"); int i = 0; @@ -141,7 +144,7 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { } dv.setDataverseContacts(dvContactList); } - + if (jobj.containsKey("theme")) { DataverseTheme theme = parseDataverseTheme(jobj.getJsonObject("theme")); dv.setDataverseTheme(theme); @@ -149,21 +152,21 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { 
} dv.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED); // default - if (jobj.containsKey("dataverseType")) { - for (Dataverse.DataverseType dvtype : Dataverse.DataverseType.values()) { - if (dvtype.name().equals(jobj.getString("dataverseType"))) { - dv.setDataverseType(dvtype); - } - } + String receivedDataverseType = jobj.getString("dataverseType", null); + if (receivedDataverseType != null) { + Arrays.stream(Dataverse.DataverseType.values()) + .filter(type -> type.name().equals(receivedDataverseType)) + .findFirst() + .ifPresent(dv::setDataverseType); } - + if (jobj.containsKey("filePIDsEnabled")) { dv.setFilePIDsEnabled(jobj.getBoolean("filePIDsEnabled")); } /* We decided that subject is not user set, but gotten from the subject of the dataverse's datasets - leavig this code in for now, in case we need to go back to it at some point - + if (jobj.containsKey("dataverseSubjects")) { List dvSubjectList = new LinkedList<>(); DatasetFieldType subjectType = datasetFieldSvc.findByName(DatasetFieldConstant.subject); @@ -186,10 +189,49 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { dv.setDataverseSubjects(dvSubjectList); } */ - + return dv; } - + + public DataverseDTO parseDataverseDTO(JsonObject jsonObject) throws JsonParseException { + DataverseDTO dataverseDTO = new DataverseDTO(); + + setDataverseDTOPropertyIfPresent(jsonObject, "alias", dataverseDTO::setAlias); + setDataverseDTOPropertyIfPresent(jsonObject, "name", dataverseDTO::setName); + setDataverseDTOPropertyIfPresent(jsonObject, "description", dataverseDTO::setDescription); + setDataverseDTOPropertyIfPresent(jsonObject, "affiliation", dataverseDTO::setAffiliation); + + String dataverseType = jsonObject.getString("dataverseType", null); + if (dataverseType != null) { + Arrays.stream(Dataverse.DataverseType.values()) + .filter(type -> type.name().equals(dataverseType)) + .findFirst() + .ifPresent(dataverseDTO::setDataverseType); + } + + if (jsonObject.containsKey("dataverseContacts")) { + JsonArray dvContacts = jsonObject.getJsonArray("dataverseContacts"); + List contacts = new ArrayList<>(); + for (int i = 0; i < dvContacts.size(); i++) { + JsonObject contactObj = dvContacts.getJsonObject(i); + DataverseContact contact = new DataverseContact(); + contact.setContactEmail(getMandatoryString(contactObj, "contactEmail")); + contact.setDisplayOrder(i); + contacts.add(contact); + } + dataverseDTO.setDataverseContacts(contacts); + } + + return dataverseDTO; + } + + private void setDataverseDTOPropertyIfPresent(JsonObject jsonObject, String key, Consumer setter) { + String value = jsonObject.getString(key, null); + if (value != null) { + setter.accept(value); + } + } + public DataverseTheme parseDataverseTheme(JsonObject obj) { DataverseTheme theme = new DataverseTheme(); @@ -965,7 +1007,6 @@ public void parseControlledVocabularyValue(DatasetField dsf, DatasetFieldType cv if (cvv == null) { if (allowHarvestingMissingCVV) { // we need to process this as a primitive value - logger.warning(">>>> Value '" + strValue + "' does not exist in type '" + cvvType.getName() + "'. 
Processing as primitive per setting override."); parsePrimitiveValue(dsf, cvvType , json); return; } else { @@ -1011,6 +1052,7 @@ public String parseHarvestingClient(JsonObject obj, HarvestingClient harvestingC harvestingClient.setHarvestingSet(obj.getString("set",null)); harvestingClient.setCustomHttpHeaders(obj.getString("customHeaders", null)); harvestingClient.setAllowHarvestingMissingCVV(obj.getBoolean("allowHarvestingMissingCVV", false)); + harvestingClient.setUseOaiIdentifiersAsPids(obj.getBoolean("useOaiIdentifiersAsPids", false)); return dataverseAlias; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 34c8fc5c6a6..91af13c79a3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -276,7 +276,9 @@ public static JsonObjectBuilder json(Dataverse dv, Boolean hideEmail, Boolean re } bld.add("permissionRoot", dv.isPermissionRoot()) .add("description", dv.getDescription()) - .add("dataverseType", dv.getDataverseType().name()); + .add("dataverseType", dv.getDataverseType().name()) + .add("isMetadataBlockRoot", dv.isMetadataBlockRoot()) + .add("isFacetRoot", dv.isFacetRoot()); if (dv.getOwner() != null) { bld.add("ownerId", dv.getOwner().getId()); } @@ -340,6 +342,7 @@ private static JsonObjectBuilder addEmbeddedOwnerObject(DvObject dvo, JsonObject ownerObject.add("type", "DATAVERSE"); Dataverse in = (Dataverse) dvo; ownerObject.add("identifier", in.getAlias()); + ownerObject.add("isReleased", in.isReleased()); } if (dvo.isInstanceofDataset()) { @@ -1010,6 +1013,7 @@ public static JsonObjectBuilder json(HarvestingClient harvestingClient) { add("status", harvestingClient.isHarvestingNow() ? "inProgress" : "inActive"). add("customHeaders", harvestingClient.getCustomHttpHeaders()). add("allowHarvestingMissingCVV", harvestingClient.getAllowHarvestingMissingCVV()). + add("useOaiIdentifiersAsPids", harvestingClient.isUseOaiIdentifiersAsPids()). add("lastHarvest", harvestingClient.getLastHarvestTime() == null ? null : harvestingClient.getLastHarvestTime().toString()). add("lastResult", harvestingClient.getLastResult()). add("lastSuccessful", harvestingClient.getLastSuccessfulHarvestTime() == null ? null : harvestingClient.getLastSuccessfulHarvestTime().toString()). 
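// A minimal, self-contained sketch of the Consumer-based "set only if present" pattern that the
// new JsonParser.parseDataverseDTO / setDataverseDTOPropertyIfPresent code above relies on:
// read an optional JSON string and invoke the setter only when the key exists.
// SimpleDto and its fields are hypothetical stand-ins, not Dataverse classes, and the example
// assumes a jakarta.json implementation (e.g. Parsson) is on the classpath.
import jakarta.json.Json;
import jakarta.json.JsonObject;
import java.util.function.Consumer;

class OptionalJsonFieldDemo {
    static class SimpleDto {
        String name;
        String description;
    }

    // Mirrors the helper's shape: getString with a null default, then call the setter only when a value is present.
    static void setIfPresent(JsonObject json, String key, Consumer<String> setter) {
        String value = json.getString(key, null);
        if (value != null) {
            setter.accept(value);
        }
    }

    public static void main(String[] args) {
        // "description" is deliberately omitted from the payload.
        JsonObject json = Json.createObjectBuilder()
                .add("name", "My Collection")
                .build();
        SimpleDto dto = new SimpleDto();
        setIfPresent(json, "name", v -> dto.name = v);
        setIfPresent(json, "description", v -> dto.description = v); // setter never runs, field stays null
        System.out.println(dto.name + " / " + dto.description);      // My Collection / null
    }
}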
diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java index 41e7f1b8b22..bbe7d135e0f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java @@ -13,6 +13,7 @@ import java.util.Date; import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.Properties; import java.util.logging.Level; import java.util.logging.Logger; @@ -83,8 +84,7 @@ private enum ValidatorTypes { GoodStrengthValidator, StandardValidator } - @SuppressWarnings("unchecked") - private final static LinkedHashMap validators = new LinkedHashMap(2); + private static final Map validators = new LinkedHashMap<>(2); private int goodStrength; private int maxLength; private int minLength; @@ -100,7 +100,7 @@ private enum ValidatorTypes { public PasswordValidatorServiceBean() { final Properties properties = PropertiesMessageResolver.getDefaultProperties(); properties.setProperty(GoodStrengthRule.ERROR_CODE_GOODSTRENGTH, GoodStrengthRule.ERROR_MESSAGE_GOODSTRENGTH); - messageResolver = new PropertiesMessageResolver(properties); + messageResolver = new PropertiesMessageResolver(properties); } public PasswordValidatorServiceBean(List characterRules) { diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index b00065cb275..2c201afb908 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -307,7 +307,13 @@ notification.typeDescription.WORKFLOW_FAILURE=External workflow run has failed notification.typeDescription.STATUSUPDATED=Status of dataset has been updated notification.typeDescription.DATASETCREATED=Dataset was created by user notification.typeDescription.DATASETMENTIONED=Dataset was referenced in remote system - +notification.typeDescription.GLOBUSUPLOADCOMPLETED=Globus upload is completed +notification.typeDescription.GLOBUSUPLOADCOMPLETEDWITHERRORS=Globus upload completed with errors +notification.typeDescription.GLOBUSDOWNLOADCOMPLETED=Globus download is completed +notification.typeDescription.GLOBUSDOWNLOADCOMPLETEDWITHERRORS=Globus download completed with errors +notification.typeDescription.GLOBUSUPLOADLOCALFAILURE=Globus upload failed, internal error +notification.typeDescription.GLOBUSUPLOADREMOTEFAILURE=Globus upload failed, remote transfer error +notification.typeDescription.REQUESTEDFILEACCESS=File access requested groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned. user.message.signup.label=Create Account user.message.signup.tip=Why have a Dataverse account? To create your own dataverse and customize it, add datasets, or request access to restricted files. @@ -837,7 +843,8 @@ notification.email.datasetWasMentioned=Hello {0},

    The {1} has just been notification.email.datasetWasMentioned.subject={0}: A Dataset Relationship has been reported! notification.email.globus.uploadCompleted.subject={0}: Files uploaded successfully via Globus and verified notification.email.globus.downloadCompleted.subject={0}: Files downloaded successfully via Globus -notification.email.globus.uploadCompletedWithErrors.subject={0}: Uploaded files via Globus with errors +notification.email.globus.downloadCompletedWithErrors.subject={0}: Globus download task completed, errors encountered +notification.email.globus.uploadCompletedWithErrors.subject={0}: Globus upload task completed with errors notification.email.globus.uploadFailedRemotely.subject={0}: Failed to upload files via Globus notification.email.globus.uploadFailedLocally.subject={0}: Failed to add files uploaded via Globus to dataset # dataverse.xhtml @@ -1457,14 +1464,14 @@ dataset.editBtn.itemLabel.metadata=Metadata dataset.editBtn.itemLabel.terms=Terms dataset.editBtn.itemLabel.permissions=Permissions dataset.editBtn.itemLabel.thumbnailsAndWidgets=Thumbnails + Widgets -dataset.editBtn.itemLabel.privateUrl=Private URL +dataset.editBtn.itemLabel.privateUrl=Preview URL dataset.editBtn.itemLabel.permissionsDataset=Dataset dataset.editBtn.itemLabel.permissionsFile=Restricted Files dataset.editBtn.itemLabel.deleteDataset=Delete Dataset dataset.editBtn.itemLabel.deleteDraft=Delete Draft Version dataset.editBtn.itemLabel.deaccession=Deaccession Dataset dataset.exportBtn=Export Metadata -dataset.exportBtn.itemLabel.ddi=DDI +dataset.exportBtn.itemLabel.ddi=DDI Codebook v2 dataset.exportBtn.itemLabel.dublinCore=Dublin Core dataset.exportBtn.itemLabel.schemaDotOrg=Schema.org JSON-LD dataset.exportBtn.itemLabel.datacite=DataCite @@ -1669,6 +1676,8 @@ dataset.message.createFailure=The dataset could not be created. dataset.message.termsFailure=The dataset terms could not be updated. dataset.message.label.fileAccess=Publicly-accessible storage dataset.message.publicInstall=Files in this dataset may be readable outside Dataverse, restricted and embargoed access are disabled +dataset.message.parallelUpdateError=Changes cannot be saved. This dataset has been edited since this page was opened. To continue, copy your changes, refresh the page to see the recent updates, and re-enter any changes you want to save. +dataset.message.parallelPublishError=Publishing is blocked. This dataset has been edited since this page was opened. To publish it, refresh the page to see the recent updates, and publish again. dataset.metadata.publicationDate=Publication Date dataset.metadata.publicationDate.tip=The publication date of a Dataset. dataset.metadata.citationDate=Citation Date @@ -1721,23 +1730,36 @@ dataset.transferUnrestricted=Click Continue to transfer the elligible files. dataset.requestAccessToRestrictedFiles=You may request access to any restricted file(s) by clicking the Request Access button. dataset.requestAccessToRestrictedFilesWithEmbargo=Embargoed files cannot be accessed during the embargo period. If your selection contains restricted files, you may request access to them by clicking the Request Access button. dataset.privateurl.infoMessageAuthor=Privately share this dataset before it is published: {0} -dataset.privateurl.infoMessageReviewer=This unpublished dataset is being privately shared. -dataset.privateurl.header=Unpublished Dataset Private URL -dataset.privateurl.tip=Use a Private URL to allow those without Dataverse accounts to access your unpublished dataset. 
For more information about the Private URL feature, please refer to the User Guide. -dataset.privateurl.absent=Private URL has not been created. -dataset.privateurl.createPrivateUrl=Create Private URL +dataset.privateurl.infoMessageReviewer=You are viewing a preview of this unpublished dataset version. +dataset.privateurl.header=Unpublished Dataset Preview URL +dataset.privateurl.tip=To cite this data in publications, use the dataset's persistent ID instead of this URL. For more information about the Preview URL feature, please refer to the User Guide. +dataset.privateurl.onlyone=Only one Preview URL can be active for a single dataset. +dataset.privateurl.absent=Preview URL has not been created. +dataset.privateurl.general.button.label=Create General Preview URL +dataset.privateurl.general.description=Create a URL that others can use to review this dataset version before it is published. They will be able to access all files in the dataset and see all metadata, including metadata that may identify the dataset's authors. +dataset.privateurl.general.title=General Preview +dataset.privateurl.anonymous.title=Anonymous Preview +dataset.privateurl.anonymous.button.label=Create Anonymous Preview URL +dataset.privateurl.anonymous.description=Create a URL that others can use to access an anonymized view of this unpublished dataset version. Metadata that could identify the dataset author will not be displayed. Non-identifying metadata will be visible. +dataset.privateurl.anonymous.description.paragraph.two=The dataset's files are not changed and will be accessible if they're not restricted. Users of the Anonymous Preview URL will not be able to see the name of the Dataverse that this dataset is in but will be able to see the name of the repository, which might expose the dataset authors' identities. +dataset.privateurl.createPrivateUrl=Create Preview URL +dataset.privateurl.introduction=You can create a Preview URL to copy and share with others who will not need a repository account to review this unpublished dataset version. Once the dataset is published or if the URL is disabled, the URL will no longer work and will point to a "Page not found" page. dataset.privateurl.createPrivateUrl.anonymized=Create URL for Anonymized Access dataset.privateurl.createPrivateUrl.anonymized.unavailable=Anonymized Access is not available once a version of the dataset has been published -dataset.privateurl.disablePrivateUrl=Disable Private URL -dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Private URL -dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Private URL? If you have shared the Private URL with others they will no longer be able to use it to access your unpublished dataset. -dataset.privateurl.cannotCreate=Private URL can only be used with unpublished versions of datasets. -dataset.privateurl.roleassigeeTitle=Private URL Enabled +dataset.privateurl.disablePrivateUrl=Disable Preview URL +dataset.privateurl.disableGeneralPreviewUrl=Disable General Preview URL +dataset.privateurl.disableAnonPreviewUrl=Disable Anonymous Preview URL +dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Preview URL +dataset.privateurl.disableGeneralPreviewUrlConfirm=Yes, Disable General Preview URL +dataset.privateurl.disableAnonPreviewUrlConfirm=Yes, Disable Anonymous Preview URL +dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Preview URL? 
If you have shared the Preview URL with others they will no longer be able to use it to access your unpublished dataset. +dataset.privateurl.cannotCreate=Preview URL can only be used with unpublished versions of datasets. +dataset.privateurl.roleassigeeTitle=Preview URL Enabled dataset.privateurl.createdSuccess=Success! -dataset.privateurl.full=This Private URL provides full read access to the dataset -dataset.privateurl.anonymized=This Private URL provides access to the anonymized dataset -dataset.privateurl.disabledSuccess=You have successfully disabled the Private URL for this unpublished dataset. -dataset.privateurl.noPermToCreate=To create a Private URL you must have the following permissions: {0}. +dataset.privateurl.full=This Preview URL provides full read access to the dataset +dataset.privateurl.anonymized=This Preview URL provides access to the anonymized dataset +dataset.privateurl.disabledSuccess=You have successfully disabled the Preview URL for this unpublished dataset. +dataset.privateurl.noPermToCreate=To create a Preview URL you must have the following permissions: {0}. dataset.externalstatus.header=Curation Status Changed dataset.externalstatus.removed=Curation Status Removed dataset.externalstatus.info=Curation Status is now "{0}" @@ -1934,7 +1956,7 @@ file.downloadBtn.format.all=All File Formats + Information file.downloadBtn.format.tab=Tab-Delimited file.downloadBtn.format.original={0} (Original File Format) file.downloadBtn.format.rdata=RData -file.downloadBtn.format.var=Variable Metadata +file.downloadBtn.format.var=DDI Codebook v2 file.downloadBtn.format.citation=Data File Citation file.download.filetype.unknown=Original File Format file.more.information.link=Link to more file information for @@ -2065,7 +2087,7 @@ file.deleteFileDialog.multiple.immediate=The file(s) will be deleted after you c file.deleteFileDialog.header=Delete Files file.deleteFileDialog.failed.tip=Files will not be removed from previously published versions of the dataset. file.deaccessionDialog.tip.permanent=Deaccession is permanent. -file.deaccessionDialog.tip=This dataset will no longer be public and a tumbstone will display the reason for deaccessioning.
    Please read the documentation if you have any questions. +file.deaccessionDialog.tip=This dataset will no longer be public and a tombstone will display the reason for deaccessioning.
    Please read the documentation if you have any questions. file.deaccessionDialog.version=Version file.deaccessionDialog.reason.question1=Which version(s) do you want to deaccession? file.deaccessionDialog.reason.question2=What is the reason for deaccession? @@ -2498,6 +2520,7 @@ dataset.version.file.changed=Files (Changed File Metadata: {0} dataset.version.file.changed2=; Changed File Metadata: {0} dataset.version.variablemetadata.changed=Variable Metadata (Changed Variable Metadata: {0} dataset.version.variablemetadata.changed2=; Changed Variable Metadata: {0} +dataset.version.compare.incorrect.order=Compare requires the older dataset version to be listed first. #DataversePage.java dataverse.item.required=Required @@ -2719,8 +2742,8 @@ datasets.api.grant.role.assignee.has.role.error=User already has this role for t datasets.api.revoke.role.not.found.error="Role assignment {0} not found" datasets.api.revoke.role.success=Role {0} revoked for assignee {1} in {2} datasets.api.privateurl.error.datasetnotfound=Could not find dataset. -datasets.api.privateurl.error.alreadyexists=Private URL already exists for this dataset. -datasets.api.privateurl.error.notdraft=Can't create Private URL because the latest version of this dataset is not a draft. +datasets.api.privateurl.error.alreadyexists=Preview URL already exists for this dataset. +datasets.api.privateurl.error.notdraft=Can't create Preview URL because the latest version of this dataset is not a draft. datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymized access because this dataset has been published. datasets.api.creationdate=Date Created datasets.api.modificationdate=Last Modified Date diff --git a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties index 630539d912e..4507c22fdf8 100644 --- a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties +++ b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties @@ -15,6 +15,7 @@ m=text/x-matlab mat=application/matlab-mat md=text/markdown mp3=audio/mp3 +m4a=audio/mp4 nii=image/nii nc=application/netcdf ods=application/vnd.oasis.opendocument.spreadsheet diff --git a/src/main/resources/db/migration/V6.4.0.1.sql b/src/main/resources/db/migration/V6.4.0.1.sql new file mode 100644 index 00000000000..0bcd87dd736 --- /dev/null +++ b/src/main/resources/db/migration/V6.4.0.1.sql @@ -0,0 +1,4 @@ +-- Adding a case-insensitive index related to #11003 +-- + +CREATE UNIQUE INDEX IF NOT EXISTS INDEX_DVOBJECT_authority_protocol_upper_identifier ON dvobject (authority, protocol, UPPER(identifier)); \ No newline at end of file diff --git a/src/main/resources/db/migration/V6.4.0.2.sql b/src/main/resources/db/migration/V6.4.0.2.sql new file mode 100644 index 00000000000..bc4a85b278f --- /dev/null +++ b/src/main/resources/db/migration/V6.4.0.2.sql @@ -0,0 +1,2 @@ +-- #10118 +ALTER TABLE customquestion ALTER COLUMN questionstring TYPE text; diff --git a/src/main/resources/db/migration/V6.4.0.3.sql b/src/main/resources/db/migration/V6.4.0.3.sql new file mode 100644 index 00000000000..307d8ed206c --- /dev/null +++ b/src/main/resources/db/migration/V6.4.0.3.sql @@ -0,0 +1,2 @@ +-- Add this boolean flag to accommodate a new harvesting client feature +ALTER TABLE harvestingclient ADD COLUMN IF NOT EXISTS useOaiIdAsPid BOOLEAN DEFAULT FALSE; diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index 
255e63fbfc2..03173faf989 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -12,7 +12,7 @@ or !empty termsOfUseAndAccess.originalArchive or !empty termsOfUseAndAccess.availabilityStatus or !empty termsOfUseAndAccess.contactForAccess or !empty termsOfUseAndAccess.sizeOfCollection or !empty termsOfUseAndAccess.studyCompletion - or termsOfUseAndAccess.fileAccessRequest}"/> + }"/>
    + - + @@ -834,7 +835,6 @@ /> @@ -995,7 +995,6 @@

    +

    #{bundle['dataset.privateurl.introduction']}

    +

    +

    +

    #{bundle['dataset.privateurl.onlyone']}

    +

    + +

    +

    +

    +

    #{bundle['dataset.privateurl.general.description']}

    + + +
    +
    + +

    + #{privateUrlLink} +

    +
    +
    +
    + + +
    +
    + +

    + +

    +

    +

    +

    #{bundle['dataset.privateurl.anonymous.description']}

    +

    #{bundle['dataset.privateurl.anonymous.description.paragraph.two']} #{bundle['dataset.privateurl.createPrivateUrl.anonymized.unavailable']}.

    + + + +

    + +
    +
    + +

    + #{privateUrlLink} +

    +
    +
    +
    + + +
    +
    +

    #{bundle['dataset.privateurl.absent']}

    @@ -1200,17 +1273,11 @@

    -
    - - - - - -
    +

    #{bundle['dataset.privateurl.cannotCreate']}

    @@ -1224,7 +1291,10 @@

    #{bundle['dataset.privateurl.disableConfirmationText']}

    - + + + + diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index 117710cfd53..154700f7cf4 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -301,24 +301,24 @@
- + +
- + -
diff --git a/src/main/webapp/metadataFragment.xhtml b/src/main/webapp/metadataFragment.xhtml index 723f95148cd..f8367ce01f8 100755 --- a/src/main/webapp/metadataFragment.xhtml +++ b/src/main/webapp/metadataFragment.xhtml @@ -130,7 +130,7 @@ - +
-
    +
    #{bundle['mydataFragment.resultsByUserName']} - +
    @@ -150,4 +150,4 @@
    - \ No newline at end of file + diff --git a/src/main/webapp/previewurl.xhtml b/src/main/webapp/previewurl.xhtml new file mode 100644 index 00000000000..980d775506b --- /dev/null +++ b/src/main/webapp/previewurl.xhtml @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/main/webapp/resources/iqbs/messages.xhtml b/src/main/webapp/resources/iqbs/messages.xhtml index bd17cf34d21..f8e1f5e8e9d 100644 --- a/src/main/webapp/resources/iqbs/messages.xhtml +++ b/src/main/webapp/resources/iqbs/messages.xhtml @@ -63,7 +63,7 @@ Server: - #{systemConfig.dataverseServer} + #{systemConfig.dataverseSiteUrl} #{msg.rendered()} diff --git a/src/main/webapp/resources/js/mydata.js b/src/main/webapp/resources/js/mydata.js index 899ba6637e2..c731d6772ac 100644 --- a/src/main/webapp/resources/js/mydata.js +++ b/src/main/webapp/resources/js/mydata.js @@ -391,7 +391,7 @@ function submit_my_data_search(){ // -------------------------------- // ah, but with the horribly coded xhtml page, we can't use form tags... //var formData = $('#mydata_filter_form').serialize(); - var formData = $("#my_data_filter_column :input").serialize() + '&' + $("#my_data_filter_column2 :input").serialize() ; + var formData = $("#my_data_filter_column :input").serialize() + '&' + $("#my_data_filter_column3 :input").serialize()+ '&' + $("#my_data_filter_column2 :input").serialize() ; // For debugging, show the search params if (MYDATA_DEBUG_ON){ diff --git a/src/main/webapp/search-include-fragment.xhtml b/src/main/webapp/search-include-fragment.xhtml index 505fe681363..fc224443a8e 100644 --- a/src/main/webapp/search-include-fragment.xhtml +++ b/src/main/webapp/search-include-fragment.xhtml @@ -582,7 +582,7 @@ - +
    diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java index a235c9b0061..588bf5294e5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java @@ -63,17 +63,17 @@ public void testIsSanitizeHtml() { //if textbox then sanitize - allow tags instance.setFieldType(DatasetFieldType.FieldType.TEXTBOX); result = instance.isSanitizeHtml(); - assertEquals(true, result); + assertTrue(result); //if textbox then don't sanitize - allow tags instance.setFieldType(DatasetFieldType.FieldType.EMAIL); result = instance.isSanitizeHtml(); - assertEquals(false, result); + assertFalse(result); //URL, too instance.setFieldType(DatasetFieldType.FieldType.URL); result = instance.isSanitizeHtml(); - assertEquals(true, result); + assertTrue(result); } @Test @@ -102,7 +102,7 @@ public void testIsEscapeOutputText(){ //URL, too instance.setFieldType(DatasetFieldType.FieldType.URL); result = instance.isEscapeOutputText(); - assertEquals(false, result); + assertFalse(result); } @@ -121,7 +121,7 @@ public void testGetSolrField(){ parent.setAllowMultiples(true); instance.setParentDatasetFieldType(parent); solrField = instance.getSolrField(); - assertEquals(true, solrField.isAllowedToBeMultivalued()); + assertTrue(solrField.isAllowedToBeMultivalued()); } diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java index 3f85acc1f87..b753f534c6b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java @@ -52,63 +52,63 @@ public void testIsValid() { //Make string too long - should fail. value.setValue("asdfgX"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); //Make string too long - should fail. 
value.setValue("asdf"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); //Now lets try Dates dft.setFieldType(DatasetFieldType.FieldType.DATE); dft.setValidationFormat(null); value.setValue("1999AD"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("44BCE"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("2004-10-27"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("2002-08"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("[1999?]"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("Blergh"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); //Float dft.setFieldType(DatasetFieldType.FieldType.FLOAT); value.setValue("44"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("44 1/2"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); //Integer dft.setFieldType(DatasetFieldType.FieldType.INT); value.setValue("44"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("-44"); result = instance.isValid(value, ctx); - assertEquals(true, result); + assertTrue(result); value.setValue("12.14"); result = instance.isValid(value, ctx); - assertEquals(false, result); + assertFalse(result); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java index 2153a336303..687e0af5b81 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java @@ -10,6 +10,7 @@ import static org.junit.jupiter.api.Assertions.*; import java.util.ArrayList; +import java.util.Date; import java.util.List; /** @@ -240,5 +241,41 @@ public void datasetShouldBeDeaccessionedWithDeaccessionedAndDeaccessionedVersion assertTrue(dataset.isDeaccessioned()); } - + + @Test + public void testGetMostRecentMajorVersionReleaseDateWithDeaccessionedVersions() { + List versionList = new ArrayList(); + + long ver = 5; + // 5.2 + DatasetVersion relVersion = new DatasetVersion(); + relVersion.setVersionState(VersionState.RELEASED); + relVersion.setMinorVersionNumber(2L); + relVersion.setVersionNumber(ver); + versionList.add(relVersion); + + // 5.1 + relVersion = new DatasetVersion(); + relVersion.setVersionState(VersionState.DEACCESSIONED); + relVersion.setMinorVersionNumber(1L); + relVersion.setVersionNumber(ver); + versionList.add(relVersion); + + // 5.0, 4.0, 3.0, 2.0, 1.0 + while (ver > 0) { + DatasetVersion deaccessionedVersion = new DatasetVersion(); + deaccessionedVersion.setVersionState(VersionState.DEACCESSIONED); + // only add an actual date to v5.0 so the assertNotNull will only pass if this version's date is returned + deaccessionedVersion.setReleaseTime((ver == 5) ? 
new Date() : null); + deaccessionedVersion.setMinorVersionNumber(0L); + deaccessionedVersion.setVersionNumber(ver--); + versionList.add(deaccessionedVersion); + } + + Dataset dataset = new Dataset(); + dataset.setVersions(versionList); + + Date releaseDate = dataset.getMostRecentMajorVersionReleaseDate(); + assertNotNull(releaseDate); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java new file mode 100644 index 00000000000..0ba8dde8aa0 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionDifferenceTest.java @@ -0,0 +1,460 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.branding.BrandingUtilTest; +import edu.harvard.iq.dataverse.datavariable.VariableMetadata; +import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil; +import edu.harvard.iq.dataverse.license.License; +import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; +import edu.harvard.iq.dataverse.util.BundleUtil; + +import java.net.URI; +import java.sql.Timestamp; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.logging.Logger; + +import static org.assertj.core.util.DateUtil.now; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import edu.harvard.iq.dataverse.util.json.JsonUtil; +import io.restassured.path.json.JsonPath; +import jakarta.json.JsonObject; +import jakarta.json.JsonObjectBuilder; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Test; + +public class DatasetVersionDifferenceTest { + + private static final Logger logger = Logger.getLogger(DatasetVersion.class.getCanonicalName()); + + private static List addedFiles; + private static List removedFiles; + private static List changedFileMetadata; + private static List changedVariableMetadata; + private static List replacedFiles; + private static Long fileId = Long.valueOf(0); + + @BeforeAll + public static void setUp() { + BrandingUtilTest.setupMocks(); + } + + @AfterAll + public static void tearDown() { + BrandingUtilTest.setupMocks(); + } + + @Test + public void testDifferencing() { + Dataset dataset = new Dataset(); + License license = new License("CC0 1.0", + "You can copy, modify, distribute and perform the work, even for commercial purposes, all without asking permission.", + URI.create("http://creativecommons.org/publicdomain/zero/1.0"), URI.create("/resources/images/cc0.png"), + true, 1l); + license.setDefault(true); + dataset.setProtocol("doi"); + dataset.setAuthority("10.5072/FK2"); + dataset.setIdentifier("LK0D1H"); + DatasetVersion datasetVersion = new DatasetVersion(); + datasetVersion.setDataset(dataset); + datasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED); + datasetVersion.setVersionNumber(1L); + datasetVersion.setTermsOfUseAndAccess(new TermsOfUseAndAccess()); + DatasetVersion datasetVersion2 = new DatasetVersion(); + datasetVersion2.setDataset(dataset); + datasetVersion2.setVersionState(DatasetVersion.VersionState.DRAFT); + + // Published version's two files + DataFile dataFile = new DataFile(); + dataFile.setId(1L); + DataFile dataFile2 = new DataFile(); + dataFile2.setId(2L); + + FileMetadata fileMetadata1 = createFileMetadata(10L, datasetVersion, dataFile, "file1.txt"); + 
fileMetadata1.setLabel("file1.txt"); + + FileMetadata fileMetadata2 = createFileMetadata(20L, datasetVersion, dataFile2, "file2.txt"); + + // Draft version - same two files with one label change + FileMetadata fileMetadata3 = fileMetadata1.createCopy(); + fileMetadata3.setId(30L); + + FileMetadata fileMetadata4 = fileMetadata2.createCopy(); + fileMetadata4.setLabel("file3.txt"); + fileMetadata4.setId(40L); + + List fileMetadatas = new ArrayList<>(Arrays.asList(fileMetadata1, fileMetadata2)); + datasetVersion.setFileMetadatas(fileMetadatas); + List fileMetadatas2 = new ArrayList<>(Arrays.asList(fileMetadata3, fileMetadata4)); + datasetVersion2.setFileMetadatas(fileMetadatas2); + + SimpleDateFormat dateFmt = new SimpleDateFormat("yyyyMMdd"); + Date publicationDate; + try { + publicationDate = dateFmt.parse("19551105"); + datasetVersion.setReleaseTime(publicationDate); + dataset.setPublicationDate(new Timestamp(publicationDate.getTime())); + } catch (ParseException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + List versionList = new ArrayList<>(Arrays.asList(datasetVersion, datasetVersion2)); + dataset.setVersions(versionList); + + // One file has a changed label + List expectedAddedFiles = new ArrayList<>(); + List expectedRemovedFiles = new ArrayList<>(); + ; + List expectedChangedFileMetadata = Arrays.asList(fileMetadata2, fileMetadata4); + List expectedChangedVariableMetadata = new ArrayList<>(); + List expectedReplacedFiles = new ArrayList<>(); + List changedTerms = new ArrayList<>(); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + // change label for first file as well + fileMetadata3.setLabel("file1_updated.txt"); + expectedChangedFileMetadata = Arrays.asList(fileMetadata1, fileMetadata3, fileMetadata2, fileMetadata4); + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + // Add one change to variable metadata + fileMetadata3.setVariableMetadatas(Arrays.asList(new VariableMetadata())); + expectedChangedVariableMetadata = Arrays.asList(fileMetadata1, fileMetadata3); + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + // Replaced File + DataFile replacingFile = new DataFile(); + replacingFile.setId(3L); + replacingFile.setPreviousDataFileId(1L); + fileMetadata3.setDataFile(replacingFile); + expectedChangedFileMetadata = Arrays.asList(fileMetadata2, fileMetadata4); + expectedChangedVariableMetadata = new ArrayList<>(); + + FileMetadata[] filePair = new FileMetadata[2]; + filePair[0] = fileMetadata1; + filePair[1] = fileMetadata3; + expectedReplacedFiles = new ArrayList<>(); + expectedReplacedFiles.add(filePair); + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Add a new file + DataFile newFile = new DataFile(); + newFile.setId(3L); + FileMetadata fileMetadata5 = createFileMetadata(50L, datasetVersion2, newFile, "newFile.txt"); + datasetVersion2.getFileMetadatas().add(fileMetadata5); + expectedAddedFiles = Arrays.asList(fileMetadata5); + compareResults(datasetVersion, datasetVersion2, 
expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Remove a file + datasetVersion2.getFileMetadatas().remove(fileMetadata4); + expectedRemovedFiles = Arrays.asList(fileMetadata2); + expectedChangedFileMetadata = new ArrayList<>(); + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Set the published version's TermsOfUseAndAccess to a non-null value + TermsOfUseAndAccess termsOfUseAndAccess = new TermsOfUseAndAccess(); + datasetVersion.setTermsOfUseAndAccess(termsOfUseAndAccess); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Set the draft version's TermsOfUseAndAccess to a non-null value + + datasetVersion2.setTermsOfUseAndAccess(new TermsOfUseAndAccess()); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Set a term field + + datasetVersion2.getTermsOfUseAndAccess().setTermsOfUse("Terms o' Use"); + String[] termField = new String[] { + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.header"), "", "Terms o' Use" }; + changedTerms.add(termField); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + // Set a term field in the original version + + datasetVersion.getTermsOfUseAndAccess().setDisclaimer("Not our fault"); + String[] termField2 = new String[] { + BundleUtil.getStringFromBundle("file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer"), + "Not our fault", "" }; + changedTerms.add(termField2); + + compareResults(datasetVersion, datasetVersion2, expectedAddedFiles, expectedRemovedFiles, + expectedChangedFileMetadata, expectedChangedVariableMetadata, expectedReplacedFiles, changedTerms); + + } + + private FileMetadata createFileMetadata(long id, DatasetVersion datasetVersion, DataFile dataFile, String label) { + FileMetadata fileMetadata = new FileMetadata(); + fileMetadata.setId(id); + fileMetadata.setDatasetVersion(datasetVersion); + fileMetadata.setDataFile(dataFile); + fileMetadata.setLabel(label); + fileMetadata.setCategories(new ArrayList<>()); + return fileMetadata; + } + + /** + * CompareResults is currently testing the output of the + * DatasetVersionDifference class with the manually created expected results + * included as parameters and with the results of the less efficient algorithm + * it is replacing. Once we're collectively convinced that the tests here are + * correct (i.e. the manually created expected* parameters are set correctly for + * each use case), we could drop running the originalCalculateDifference method + * and just compare with the expected* results. 
+ * + * @param changedTerms + */ + private void compareResults(DatasetVersion datasetVersion, DatasetVersion datasetVersion2, + List expectedAddedFiles, List expectedRemovedFiles, + List expectedChangedFileMetadata, List expectedChangedVariableMetadata, + List expectedReplacedFiles, List changedTerms) { + DatasetVersionDifference diff = new DatasetVersionDifference(datasetVersion2, datasetVersion); + // Run the original algorithm + originalCalculateDifference(datasetVersion2, datasetVersion); + // Compare the old and new algorithms + assertEquals(addedFiles, diff.getAddedFiles()); + assertEquals(removedFiles, diff.getRemovedFiles()); + assertEquals(changedFileMetadata, diff.getChangedFileMetadata()); + assertEquals(changedVariableMetadata, diff.getChangedVariableMetadata()); + assertEquals(replacedFiles.size(), diff.getReplacedFiles().size()); + for (int i = 0; i < replacedFiles.size(); i++) { + assertEquals(replacedFiles.get(i)[0], diff.getReplacedFiles().get(i)[0]); + assertEquals(replacedFiles.get(i)[1], diff.getReplacedFiles().get(i)[1]); + } + + // Also compare the new algorithm with the manually created expected* values for + // the test cases + assertEquals(expectedAddedFiles, diff.getAddedFiles()); + assertEquals(expectedRemovedFiles, diff.getRemovedFiles()); + assertEquals(expectedChangedFileMetadata, diff.getChangedFileMetadata()); + assertEquals(expectedChangedVariableMetadata, diff.getChangedVariableMetadata()); + assertEquals(expectedReplacedFiles.size(), diff.getReplacedFiles().size()); + for (int i = 0; i < expectedReplacedFiles.size(); i++) { + assertEquals(expectedReplacedFiles.get(i)[0], diff.getReplacedFiles().get(i)[0]); + assertEquals(expectedReplacedFiles.get(i)[1], diff.getReplacedFiles().get(i)[1]); + } + + assertEquals(changedTerms.size(), diff.getChangedTermsAccess().size()); + for (int i = 0; i < changedTerms.size(); i++) { + String[] diffArray = diff.getChangedTermsAccess().get(i); + assertEquals(changedTerms.get(i)[0], diffArray[0]); + assertEquals(changedTerms.get(i)[1], diffArray[1]); + assertEquals(changedTerms.get(i)[2], diffArray[2]); + } + } + + @Deprecated + // This is the "Original" difference calculation from DatasetVersionDifference + // It is included here to help verify that the new implementation is the same as + // the original + private static void originalCalculateDifference(DatasetVersion newVersion, DatasetVersion originalVersion) { + + addedFiles = new ArrayList<>(); + removedFiles = new ArrayList<>(); + changedFileMetadata = new ArrayList<>(); + changedVariableMetadata = new ArrayList<>(); + replacedFiles = new ArrayList<>(); + long startTime = System.currentTimeMillis(); + // TODO: ? + // It looks like we are going through the filemetadatas in both versions, + // *sequentially* (i.e. at the cost of O(N*M)), to select the lists of + // changed, deleted and added files between the 2 versions... But why + // are we doing it, if we are doing virtually the same thing inside + // the initDatasetFilesDifferenceList(), below - but in a more efficient + // way (sorting both lists, then goint through them in parallel, at the + // cost of (N+M) max.? + // -- 4.6 Nov. 
2016 + + for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { + boolean deleted = true; + for (FileMetadata fmdn : newVersion.getFileMetadatas()) { + if (fmdo.getDataFile().equals(fmdn.getDataFile())) { + deleted = false; + if (!DatasetVersionDifference.compareFileMetadatas(fmdo, fmdn).isEmpty()) { + changedFileMetadata.add(fmdo); + changedFileMetadata.add(fmdn); + } + if (!VariableMetadataUtil.compareVariableMetadata(fmdo, fmdn) + || !DatasetVersionDifference.compareVarGroup(fmdo, fmdn)) { + changedVariableMetadata.add(fmdo); + changedVariableMetadata.add(fmdn); + } + break; + } + } + if (deleted) { + removedFiles.add(fmdo); + } + } + for (FileMetadata fmdn : newVersion.getFileMetadatas()) { + boolean added = true; + for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { + if (fmdo.getDataFile().equals(fmdn.getDataFile())) { + added = false; + break; + } + } + if (added) { + addedFiles.add(fmdn); + } + } + + getReplacedFiles(); + logger.info("Difference Loop Execution time: " + (System.currentTimeMillis() - startTime) + " ms"); + + } + + @Deprecated + // This is used only in the original algorithm and was removed from + // DatasetVersionDifference + private static void getReplacedFiles() { + if (addedFiles.isEmpty() || removedFiles.isEmpty()) { + return; + } + List addedToReplaced = new ArrayList<>(); + List removedToReplaced = new ArrayList<>(); + for (FileMetadata added : addedFiles) { + DataFile addedDF = added.getDataFile(); + Long replacedId = addedDF.getPreviousDataFileId(); + if (added.getDataFile().getPreviousDataFileId() != null) { + } + for (FileMetadata removed : removedFiles) { + DataFile test = removed.getDataFile(); + if (test.getId().equals(replacedId)) { + addedToReplaced.add(added); + removedToReplaced.add(removed); + FileMetadata[] replacedArray = new FileMetadata[2]; + replacedArray[0] = removed; + replacedArray[1] = added; + replacedFiles.add(replacedArray); + } + } + } + if (addedToReplaced.isEmpty()) { + } else { + addedToReplaced.stream().forEach((delete) -> { + addedFiles.remove(delete); + }); + removedToReplaced.stream().forEach((delete) -> { + removedFiles.remove(delete); + }); + } + } + + @Test + public void testCompareVersionsAsJson() { + + Dataverse dv = new Dataverse(); + Dataset ds = new Dataset(); + ds.setOwner(dv); + ds.setGlobalId(new GlobalId(AbstractDOIProvider.DOI_PROTOCOL,"10.5072","FK2/BYM3IW", "/", AbstractDOIProvider.DOI_RESOLVER_URL, null)); + + DatasetVersion dv1 = initDatasetVersion(0L, ds, DatasetVersion.VersionState.RELEASED); + DatasetVersion dv2 = initDatasetVersion(1L, ds, DatasetVersion.VersionState.DRAFT); + ds.setVersions(List.of(dv1, dv2)); + + TermsOfUseAndAccess toa = new TermsOfUseAndAccess(); + toa.setDisclaimer("disclaimer"); + dv2.setTermsOfUseAndAccess(toa); + DatasetField dsf = new DatasetField(); + dsf.setDatasetFieldType(new DatasetFieldType("Author", DatasetFieldType.FieldType.TEXT, true)); + MetadataBlock mb = new MetadataBlock(); + mb.setDisplayName("testMetadataBlock"); + dsf.getDatasetFieldType().setMetadataBlock(mb); + dsf.setSingleValue("TEST"); + dv2.getDatasetFields().add(dsf); + // modify file at index 0 + dv2.getFileMetadatas().get(0).setRestricted(!dv2.getFileMetadatas().get(2).isRestricted()); + + FileMetadata addedFile = initFile(dv2); // add a new file + FileMetadata removedFile = dv2.getFileMetadatas().get(1); // remove the second file + dv2.getFileMetadatas().remove(1); + FileMetadata replacedFile = dv2.getFileMetadatas().get(1); // the third file is now at index 1 since the second file was 
removed + FileMetadata replacementFile = initFile(dv2, replacedFile.getDataFile().getId()); // replace the third file with a new file + dv2.getFileMetadatas().remove(1); + + DatasetVersionDifference dvd = new DatasetVersionDifference(dv2, dv1); + + JsonObjectBuilder json = dvd.compareVersionsAsJson(); + JsonObject obj = json.build(); + System.out.println(JsonUtil.prettyPrint(obj)); + + JsonPath dataFile = JsonPath.from(JsonUtil.prettyPrint(obj)); + assertTrue("TEST".equalsIgnoreCase(dataFile.getString("metadataChanges[0].changed[0].newValue"))); + assertTrue(addedFile.getLabel().equalsIgnoreCase(dataFile.getString("filesAdded[0].fileName"))); + assertTrue(removedFile.getLabel().equalsIgnoreCase(dataFile.getString("filesRemoved[0].fileName"))); + assertTrue(replacedFile.getLabel().equalsIgnoreCase(dataFile.getString("filesReplaced[0].oldFile.fileName"))); + assertTrue(replacementFile.getLabel().equalsIgnoreCase(dataFile.getString("filesReplaced[0].newFile.fileName"))); + assertTrue("true".equalsIgnoreCase(dataFile.getString("fileChanges[0].changed[0].newValue"))); + assertTrue("disclaimer".equalsIgnoreCase(dataFile.getString("TermsOfAccess.changed[0].newValue"))); + } + private DatasetVersion initDatasetVersion(Long id, Dataset ds, DatasetVersion.VersionState vs) { + DatasetVersion dv = new DatasetVersion(); + dv.setDataset(ds); + dv.setVersion(1L); + dv.setVersionState(vs); + dv.setMinorVersionNumber(0L); + if (vs == DatasetVersion.VersionState.RELEASED) { + dv.setVersionNumber(1L); + dv.setVersion(1L); + dv.setReleaseTime(now()); + } + dv.setId(id); + dv.setCreateTime(now()); + dv.setLastUpdateTime(now()); + dv.setTermsOfUseAndAccess(new TermsOfUseAndAccess()); + dv.setFileMetadatas(initFiles(dv)); + return dv; + } + private List initFiles(DatasetVersion dsv) { + List fileMetadatas = new ArrayList<>(); + fileId = 0L; + for (int i=0; i < 10; i++) { + FileMetadata fm = initFile(dsv); + fileMetadatas.add(fm); + } + return fileMetadatas; + } + private FileMetadata initFile(DatasetVersion dsv) { + return initFile(dsv, null); + } + private FileMetadata initFile(DatasetVersion dsv, Long prevId) { + Long id = fileId++; + FileMetadata fm = new FileMetadata(); + DataFile df = new DataFile(); + fm.setDatasetVersion(dsv); + DataTable dt = new DataTable(); + dt.setOriginalFileName("filename"+id+".txt"); + df.setId(id); + df.setDescription("Desc"+id); + df.setRestricted(false); + df.setFilesize(100 + id); + df.setChecksumType(DataFile.ChecksumType.MD5); + df.setChecksumValue("value"+id); + df.setDataTable(dt); + df.setOwner(dsv.getDataset()); + df.getFileMetadatas().add(fm); + df.setPreviousDataFileId(prevId); + fm.setId(id); + fm.setDataFile(df); + fm.setLabel("Label"+id); + fm.setDirectoryLabel("/myFilePath/"); + fm.setDescription("Desc"+id); + dsv.getFileMetadatas().add(fm); + return fm; + } +} diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java index 4cd6c4dfaa7..b36d8907472 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java @@ -80,7 +80,7 @@ public void testIsInReview() { DatasetVersion nonDraft = new DatasetVersion(); nonDraft.setVersionState(DatasetVersion.VersionState.RELEASED); - assertEquals(false, nonDraft.isInReview()); + assertFalse(nonDraft.isInReview()); ds.addLock(null); assertFalse(nonDraft.isInReview()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java 
b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 6d7dd2eae29..94aece95861 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -16,6 +16,8 @@ import java.util.HashMap; import java.util.List; +import jakarta.json.Json; +import jakarta.json.JsonArray; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.BeforeAll; @@ -26,13 +28,11 @@ import java.util.Map; import java.util.UUID; -import java.util.logging.Level; import java.util.logging.Logger; import static jakarta.ws.rs.core.Response.Status.*; +import static org.hamcrest.CoreMatchers.*; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.notNullValue; import static org.junit.jupiter.api.Assertions.assertTrue; public class AdminIT { @@ -901,6 +901,50 @@ public void testDownloadTmpFile() throws IOException { .body("message", equalTo("Path must begin with '/tmp' but after normalization was '/etc/passwd'.")); } + @Test + public void testFindMissingFiles() { + Response createUserResponse = UtilIT.createRandomUser(); + createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUserResponse); + String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse); + UtilIT.setSuperuserStatus(username, true); + + String dataverseAlias = ":root"; + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.prettyPrint(); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + + // Upload file + Response uploadResponse = UtilIT.uploadRandomFile(datasetPersistentId, apiToken); + uploadResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + // Audit files + Response resp = UtilIT.auditFiles(apiToken, null, 100L, null); + resp.prettyPrint(); + JsonArray emptyArray = Json.createArrayBuilder().build(); + resp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.lastId", equalTo(100)); + + // Audit files with invalid parameters + resp = UtilIT.auditFiles(apiToken, 100L, 0L, null); + resp.prettyPrint(); + resp.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("status", equalTo("ERROR")) + .body("message", equalTo("Invalid Parameters: lastId must be equal to or greater than firstId")); + + // Audit files with list of dataset identifiers parameter + resp = UtilIT.auditFiles(apiToken, 1L, null, "bad/id, " + datasetPersistentId); + resp.prettyPrint(); + resp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.failures[0].datasetIdentifier", equalTo("bad/id")) + .body("data.failures[0].reason", equalTo("Not Found")); + } + private String createTestNonSuperuserApiToken() { Response createUserResponse = UtilIT.createRandomUser(); createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java index ca99960f240..5f00d34b276 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java @@ -1,15 +1,61 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.ControlledVocabularyValueServiceBean; +import edu.harvard.iq.dataverse.DatasetFieldServiceBean; +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.MetadataBlockServiceBean; +import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; +import jakarta.json.Json; +import jakarta.json.JsonObject; +import jakarta.json.JsonReader; +import jakarta.ws.rs.core.Response; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import java.io.File; +import java.io.StringReader; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; +import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; +@ExtendWith(MockitoExtension.class) public class DatasetFieldServiceApiTest { + @Mock + private ActionLogServiceBean actionLogSvc; + + @Mock + private MetadataBlockServiceBean metadataBlockService; + + @Mock + private DataverseServiceBean dataverseService; + + @Mock + private DatasetFieldServiceBean datasetFieldService; + + @Mock + private ControlledVocabularyValueServiceBean controlledVocabularyValueService; + + private DatasetFieldServiceApi api; + + @BeforeEach + public void setup(){ + api = new DatasetFieldServiceApi(); + api.actionLogSvc = actionLogSvc; + api.metadataBlockService = metadataBlockService; + api.dataverseService = dataverseService; + api.datasetFieldService = datasetFieldService; + api.controlledVocabularyValueService = controlledVocabularyValueService; + } + @Test public void testArrayIndexOutOfBoundMessageBundle() { List arguments = new ArrayList<>(); @@ -59,4 +105,41 @@ public void testGetGeneralErrorMessage() { message ); } + + @Test + public void testGetGeneralErrorMessageEmptyHeader() { + DatasetFieldServiceApi api = new DatasetFieldServiceApi(); + String message = api.getGeneralErrorMessage(null, 5, "some error"); + assertEquals( + "Error parsing metadata block in unknown part, line #5: some error", + message + ); + } + + @Test + public void testLoadDatasetFieldsWhitespaceTrimming() { + + Path resourceDirectory = Paths.get("src/test/resources/tsv/whitespace-test.tsv"); + File testfile = new File(resourceDirectory.toFile().getAbsolutePath()); + JsonReader jsonReader; + try (Response response = api.loadDatasetFields(testfile)) { + assertEquals(200, response.getStatus()); + jsonReader = Json.createReader(new StringReader(response.getEntity().toString())); + } + JsonObject jsonObject = jsonReader.readObject(); + + final List metadataNames = jsonObject.getJsonObject("data").getJsonArray("added") + .getValuesAs(e -> e.asJsonObject().getString("name")); + assertThat(metadataNames).contains("whitespaceDemo") + .contains("whitespaceDemoOne") + .contains("whitespaceDemoTwo") + .contains("whitespaceDemoThree") + .contains("CV1") + .contains("CV2") + .contains("CV3"); + assertThat(metadataNames).doesNotContain(" whitespaceDemo") + .doesNotContain("whitespaceDemoOne ") + .doesNotContain("CV1 ") + .doesNotContain(" CV2"); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java index 
e8426b638d7..a0b9f5325d0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java @@ -94,7 +94,8 @@ public void testCreateSoftwareDatasetNative() { String dataset2Pid = JsonPath.from(createDataset.getBody().asString()).getString("data.persistentId"); UtilIT.publishDatasetViaNativeApi(dataset2Pid, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode()); - + //An explicit sleep is needed here because the searchAndShowFacets won't sleep for the query used here + UtilIT.sleepForReindex(dataset2Pid, apiToken, 5); Response searchCollection = UtilIT.searchAndShowFacets("parentName:" + dataverseAlias, null); searchCollection.prettyPrint(); searchCollection.then().assertThat() diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index f52aa4fe9bd..34afbb404f0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -667,6 +667,60 @@ public void testCreatePublishDestroyDataset() { deleteDatasetResponse.prettyPrint(); assertEquals(200, deleteDatasetResponse.getStatusCode()); + // Start of test of deleting a file from a deaccessioned version. + + // Create Dataset for deaccession test. + Response deaccessionTestDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + deaccessionTestDataset.prettyPrint(); + deaccessionTestDataset.then().assertThat().statusCode(CREATED.getStatusCode()); + Integer deaccessionTestDatasetId = UtilIT.getDatasetIdFromResponse(deaccessionTestDataset); + + // File upload for deaccession test. + String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(deaccessionTestDatasetId.toString(), pathToFile, apiToken); + uploadResponse.prettyPrint(); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + Integer deaccessionTestFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); + + // Publish Dataset for deaccession test. + Response deaccessionTestPublishResponse = UtilIT.publishDatasetViaNativeApi(deaccessionTestDatasetId, "major", apiToken); + deaccessionTestPublishResponse.prettyPrint(); + + // Deaccession Dataset for deaccession test. + Response deaccessionTestDatasetResponse = UtilIT.deaccessionDataset(deaccessionTestDatasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); + deaccessionTestDatasetResponse.prettyPrint(); + deaccessionTestDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Version check for deaccession test - Deaccessioned. + Response deaccessionTestVersions = UtilIT.getDatasetVersions(deaccessionTestDatasetId.toString(), apiToken); + deaccessionTestVersions.prettyPrint(); + deaccessionTestVersions.then().assertThat() + .body("data[0].latestVersionPublishingState", equalTo("DEACCESSIONED")) + .statusCode(OK.getStatusCode()); + + // File deletion / Draft creation due diligence check for deaccession test. + Response deaccessionTestDeleteFile = UtilIT.deleteFileInDataset(deaccessionTestFileId, apiToken); + deaccessionTestDeleteFile.prettyPrint(); + deaccessionTestDeleteFile + .then().assertThat() + .statusCode(OK.getStatusCode()); + + // Version check for deaccession test - Draft. 
+ deaccessionTestVersions = UtilIT.getDatasetVersions(deaccessionTestDatasetId.toString(), apiToken); + deaccessionTestVersions.prettyPrint(); + deaccessionTestVersions.then().assertThat() + .body("data[0].latestVersionPublishingState", equalTo("DRAFT")) + .statusCode(OK.getStatusCode()); + + // Deleting Dataset for deaccession test. + Response deaccessionTestDelete = UtilIT.destroyDataset(deaccessionTestDatasetId, apiToken); + deaccessionTestDelete.prettyPrint(); + deaccessionTestDelete.then() + .assertThat() + .statusCode(OK.getStatusCode()); + + // End of deaccession test. + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken); deleteDataverseResponse.prettyPrint(); assertEquals(200, deleteDataverseResponse.getStatusCode()); @@ -1622,7 +1676,7 @@ public void testPrivateUrl() { List assignments = with(roleAssignments.body().asString()).param("member", "member").getJsonObject("data.findAll { data -> data._roleAlias == member }"); assertEquals(1, assignments.size()); PrivateUrlUser privateUrlUser = new PrivateUrlUser(datasetId); - assertEquals("Private URL Enabled", privateUrlUser.getDisplayInfo().getTitle()); + assertEquals("Preview URL Enabled", privateUrlUser.getDisplayInfo().getTitle()); List assigneeShouldExistForPrivateUrlUser = with(roleAssignments.body().asString()).param("assigneeString", privateUrlUser.getIdentifier()).getJsonObject("data.findAll { data -> data.assignee == assigneeString }"); logger.info(assigneeShouldExistForPrivateUrlUser + " found for " + privateUrlUser.getIdentifier()); assertEquals(1, assigneeShouldExistForPrivateUrlUser.size()); @@ -1703,7 +1757,7 @@ public void testPrivateUrl() { Response privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted = UtilIT.getRoleAssignmentsOnDataset(datasetId.toString(), null, apiToken); privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted.prettyPrint(); - assertEquals(false, privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted.body().asString().contains(privateUrlUser.getIdentifier())); + assertFalse(privateUrlRoleAssignmentShouldBeGoneAfterDraftDeleted.body().asString().contains(privateUrlUser.getIdentifier())); String newTitleAgain = "I am changing the title again"; Response draftCreatedAgainPostPub = UtilIT.updateDatasetTitleViaSword(dataset1PersistentId, newTitleAgain, apiToken); @@ -2080,8 +2134,11 @@ public void testGetDatasetOwners() { Response getDatasetWithOwners = UtilIT.getDatasetWithOwners(persistentId, apiToken, true); getDatasetWithOwners.prettyPrint(); - getDatasetWithOwners.then().assertThat().body("data.isPartOf.identifier", equalTo(dataverseAlias)); - + getDatasetWithOwners.then().assertThat().body("data.isPartOf.identifier", equalTo(dataverseAlias)); + getDatasetWithOwners.then().assertThat().body("data.isPartOf.isReleased", equalTo(false)); + getDatasetWithOwners.then().assertThat().body("data.isPartOf.isPartOf.identifier", equalTo("root")); + getDatasetWithOwners.then().assertThat().body("data.isPartOf.isPartOf.isReleased", equalTo(true)); + Response destroyDatasetResponse = UtilIT.destroyDataset(datasetId, apiToken); assertEquals(200, destroyDatasetResponse.getStatusCode()); @@ -2963,6 +3020,34 @@ public void testLinkingDatasets() { linkDataset.then().assertThat() .statusCode(OK.getStatusCode()); + // Link another to test the list of linked datasets + Response createDataverse3 = UtilIT.createRandomDataverse(apiToken); + createDataverse3.prettyPrint(); + createDataverse3.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String dataverse3Alias = 
UtilIT.getAliasFromResponse(createDataverse3); + Integer dataverse3Id = UtilIT.getDatasetIdFromResponse(createDataverse3); + linkDataset = UtilIT.linkDataset(datasetPid, dataverse3Alias, superuserApiToken); + linkDataset.prettyPrint(); + linkDataset.then().assertThat() + .statusCode(OK.getStatusCode()); + // get the list in Json format + Response linkDatasetsResponse = UtilIT.getDatasetLinks(datasetPid, superuserApiToken); + linkDatasetsResponse.prettyPrint(); + linkDatasetsResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + JsonObject linkDatasets = Json.createReader(new StringReader(linkDatasetsResponse.asString())).readObject(); + JsonArray lst = linkDatasets.getJsonObject("data").getJsonArray("linked-dataverses"); + List ids = List.of(dataverse2Id, dataverse3Id); + List uniqueids = new ArrayList<>(); + assertEquals(ids.size(), lst.size()); + for (int i = 0; i < lst.size(); i++) { + int id = lst.getJsonObject(i).getInt("id"); + assertTrue(ids.contains(id)); + assertFalse(uniqueids.contains(id)); + uniqueids.add(id); + } + //Experimental code for trying to trick test into thinking the dataset has been harvested /* createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverse1Alias, apiToken); @@ -4157,7 +4242,7 @@ public void testCitationDate() throws IOException { .statusCode(OK.getStatusCode()) .body("data.message", is(expectedCitation)); - Response exportDatasetAsDublinCore = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken); + Response exportDatasetAsDublinCore = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken, true); exportDatasetAsDublinCore.prettyPrint(); exportDatasetAsDublinCore.then().assertThat() .body("oai_dc.type", equalTo("Dataset")) @@ -4174,7 +4259,7 @@ public void testCitationDate() throws IOException { rexport.then().assertThat().statusCode(OK.getStatusCode()); String todayDate = LocalDate.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd")); - Response exportPostClear = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken); + Response exportPostClear = UtilIT.exportDataset(datasetPid, "oai_dc", apiToken, true); exportPostClear.prettyPrint(); exportPostClear.then().assertThat() .body("oai_dc.type", equalTo("Dataset")) @@ -5083,4 +5168,134 @@ public void testGetCanDownloadAtLeastOneFile() { Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getCanDownloadAtLeastOneFile("testInvalidId", DS_VERSION_LATEST, secondUserApiToken); getUserPermissionsOnDatasetInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); } + + @Test + public void testCompareDatasetVersionsAPI() throws InterruptedException { + + Response createUser = UtilIT.createRandomUser(); + assertEquals(200, createUser.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + Response makeSuperUser = UtilIT.makeSuperUser(username); + assertEquals(200, makeSuperUser.getStatusCode()); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + Response getDatasetJsonBeforePublishing = UtilIT.nativeGet(datasetId, apiToken); + String protocol = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.protocol"); + String authority = 
JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.authority"); + String identifier = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.identifier"); + String datasetPersistentId = protocol + ":" + authority + "/" + identifier; + // used for all added files + JsonObjectBuilder json = Json.createObjectBuilder() + .add("description", "my description") + .add("directoryLabel", "/data/subdir1/") + .add("categories", Json.createArrayBuilder() + .add("Data") + ); + JsonObject jsonObj = json.build(); + String pathToFile = "src/main/webapp/resources/images/dataverse-icon-1200.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, jsonObj, apiToken); + uploadResponse.prettyPrint(); + uploadResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + Integer modifyFileId = UtilIT.getDataFileIdFromResponse(uploadResponse); + pathToFile = "src/main/webapp/resources/images/dataverseproject_logo.jpg"; + uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, jsonObj, apiToken); + uploadResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + Integer deleteFileId = UtilIT.getDataFileIdFromResponse(uploadResponse); + + pathToFile = "src/main/webapp/resources/images/fav/favicon-16x16.png"; + uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, jsonObj, apiToken); + uploadResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + Integer replaceFileId = UtilIT.getDataFileIdFromResponse(uploadResponse); + + Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); + assertEquals(200, publishDataverse.getStatusCode()); + + Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); + assertEquals(200, publishDataset.getStatusCode()); + + // post publish update to create DRAFT version + String pathToJsonFilePostPub = "doc/sphinx-guides/source/_static/api/dataset-add-metadata-after-pub.json"; + Response addDataToPublishedVersion = UtilIT.addDatasetMetadataViaNative(datasetPersistentId, pathToJsonFilePostPub, apiToken); + addDataToPublishedVersion.then().assertThat().statusCode(OK.getStatusCode()); + + // Test adding a file + pathToFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToFile, jsonObj, apiToken); + uploadTabularFileResponse.prettyPrint(); + uploadTabularFileResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + Integer addedFileId = UtilIT.getDataFileIdFromResponse(uploadTabularFileResponse); + + // Ensure tabular file is ingested + sleep(2000); + + String tabularTagName = "Survey"; + Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(String.valueOf(addedFileId), apiToken, List.of(tabularTagName)); + setFileTabularTagsResponse.prettyPrint(); + setFileTabularTagsResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Test removing a file + uploadResponse = UtilIT.deleteFile(deleteFileId, apiToken); + uploadResponse.prettyPrint(); + uploadResponse.then().assertThat() + .statusCode(NO_CONTENT.getStatusCode()); + + // Test Replacing a file + Response replaceResponse = UtilIT.replaceFile(String.valueOf(replaceFileId), "src/main/webapp/resources/images/fav/favicon-32x32.png", jsonObj, apiToken); + replaceResponse.prettyPrint(); + replaceResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Test modify by 
restricting the file + Response restrictResponse = UtilIT.restrictFile(modifyFileId.toString(), true, apiToken); + restrictResponse.prettyPrint(); + restrictResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Also test a terms of access change + String jsonLDTerms = "{\"https://dataverse.org/schema/core#fileTermsOfAccess\":{\"https://dataverse.org/schema/core#dataAccessPlace\":\"Somewhere\"}}"; + Response updateTerms = UtilIT.updateDatasetJsonLDMetadata(datasetId, apiToken, jsonLDTerms, true); + updateTerms.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response compareResponse = UtilIT.compareDatasetVersions(datasetPersistentId, ":latest-published", ":draft", apiToken); + compareResponse.prettyPrint(); + compareResponse.then().assertThat() + .body("data.oldVersion.versionNumber", CoreMatchers.equalTo("1.0")) + .body("data.newVersion.versionNumber", CoreMatchers.equalTo("DRAFT")) + .body("data.metadataChanges[0].blockName", CoreMatchers.equalTo("Citation Metadata")) + .body("data.metadataChanges[0].changed[0].fieldName", CoreMatchers.equalTo("Author")) + .body("data.metadataChanges[0].changed[0].oldValue", CoreMatchers.containsString("Finch, Fiona; (Birds Inc.)")) + .body("data.metadataChanges[1].blockName", CoreMatchers.equalTo("Life Sciences Metadata")) + .body("data.metadataChanges[1].changed[0].fieldName", CoreMatchers.equalTo("Design Type")) + .body("data.metadataChanges[1].changed[0].oldValue", CoreMatchers.containsString("")) + .body("data.metadataChanges[1].changed[0].newValue", CoreMatchers.containsString("Parallel Group Design; Nested Case Control Design")) + .body("data.filesAdded[0].fileName", CoreMatchers.equalTo("test.tab")) + .body("data.filesAdded[0].filePath", CoreMatchers.equalTo("data/subdir1")) + .body("data.filesAdded[0].description", CoreMatchers.equalTo("my description")) + .body("data.filesAdded[0].tags[0]", CoreMatchers.equalTo("Survey")) + .body("data.filesRemoved[0].fileName", CoreMatchers.equalTo("dataverseproject_logo.jpg")) + .body("data.fileChanges[0].fileName", CoreMatchers.equalTo("dataverse-icon-1200.png")) + .body("data.fileChanges[0].changed[0].newValue", CoreMatchers.equalTo("true")) + .body("data.filesReplaced[0].oldFile.fileName", CoreMatchers.equalTo("favicon-16x16.png")) + .body("data.filesReplaced[0].newFile.fileName", CoreMatchers.equalTo("favicon-32x32.png")) + .body("data.TermsOfAccess", CoreMatchers.notNullValue()) + .statusCode(OK.getStatusCode()); + + compareResponse = UtilIT.compareDatasetVersions(datasetPersistentId, ":draft", ":latest-published", apiToken); + compareResponse.prettyPrint(); + compareResponse.then().assertThat() + .body("message", CoreMatchers.equalTo(BundleUtil.getStringFromBundle("dataset.version.compare.incorrect.order"))) + .statusCode(BAD_REQUEST.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 6fbe91c8405..76bb515beb2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1,12 +1,15 @@ package edu.harvard.iq.dataverse.api; import io.restassured.RestAssured; + import static io.restassured.RestAssured.given; import static io.restassured.path.json.JsonPath.with; + import io.restassured.response.Response; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; + import 
java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; @@ -14,6 +17,7 @@ import java.util.Arrays; import java.util.List; import java.util.logging.Logger; + import jakarta.json.Json; import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; @@ -31,6 +35,7 @@ import static org.junit.jupiter.api.Assertions.*; import java.nio.file.Files; + import io.restassured.path.json.JsonPath; import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; @@ -43,7 +48,7 @@ public class DataversesIT { public static void setUpClass() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); } - + @AfterAll public static void afterClass() { Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport); @@ -130,14 +135,16 @@ public void testDataverseCategory() { public void testMinimalDataverse() throws FileNotFoundException { Response createUser = UtilIT.createRandomUser(); createUser.prettyPrint(); - String username = UtilIT.getUsernameFromResponse(createUser); String apiToken = UtilIT.getApiTokenFromResponse(createUser); JsonObject dvJson; FileReader reader = new FileReader("doc/sphinx-guides/source/_static/api/dataverse-minimal.json"); dvJson = Json.createReader(reader).readObject(); Response create = UtilIT.createDataverse(dvJson, apiToken); create.prettyPrint(); - create.then().assertThat().statusCode(CREATED.getStatusCode()); + create.then().assertThat() + .body("data.isMetadataBlockRoot", equalTo(false)) + .body("data.isFacetRoot", equalTo(false)) + .statusCode(CREATED.getStatusCode()); Response deleteDataverse = UtilIT.deleteDataverse("science", apiToken); deleteDataverse.prettyPrint(); deleteDataverse.then().assertThat().statusCode(OK.getStatusCode()); @@ -646,10 +653,182 @@ public void testImportDDI() throws IOException, InterruptedException { Response deleteUserResponse = UtilIT.deleteUser(username); assertEquals(200, deleteUserResponse.getStatusCode()); } - + @Test - public void testAttributesApi() throws Exception { + public void testImport() throws IOException, InterruptedException { + Response createUser = UtilIT.createRandomUser(); + String username = UtilIT.getUsernameFromResponse(createUser); + Response makeSuperUser = UtilIT.makeSuperUser(username); + assertEquals(200, makeSuperUser.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + assertEquals(200, publishDataverse.getStatusCode()); + + JsonObjectBuilder datasetJson = Json.createObjectBuilder() + .add("datasetVersion", Json.createObjectBuilder() + .add("license", Json.createObjectBuilder() + .add("name", "CC0 1.0") + ) + .add("metadataBlocks", Json.createObjectBuilder() + .add("citation", Json.createObjectBuilder() + .add("fields", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("typeName", "title") + .add("value", "Test Dataset") + .add("typeClass", "primitive") + .add("multiple", false) + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("authorName", + Json.createObjectBuilder() + .add("value", "Simpson, Homer") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "authorName")) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", 
"author") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("datasetContactEmail", + Json.createObjectBuilder() + .add("value", "hsimpson@mailinator.com") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "datasetContactEmail")) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "datasetContact") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("dsDescriptionValue", + Json.createObjectBuilder() + .add("value", "This a test dataset.") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "dsDescriptionValue")) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "dsDescription") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add("Other") + ) + .add("typeClass", "controlledVocabulary") + .add("multiple", true) + .add("typeName", "subject") + ) + ) + ) + )); + + String json = datasetJson.build().toString(); + + Response importJSONNoPid = UtilIT.importDatasetViaNativeApi(apiToken, dataverseAlias, json, null, "no"); + logger.info(importJSONNoPid.prettyPrint()); + assertEquals(400, importJSONNoPid.getStatusCode()); + + String body = importJSONNoPid.getBody().asString(); + String status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + String message = JsonPath.from(body).getString("message"); + assertEquals( + "Please provide a persistent identifier, either by including it in the JSON, or by using the pid query parameter.", + message + ); + + Response importJSONNoPidRelease = UtilIT.importDatasetViaNativeApi(apiToken, dataverseAlias, json, null, "yes"); + logger.info( importJSONNoPidRelease.prettyPrint()); + assertEquals(400, importJSONNoPidRelease.getStatusCode()); + + body = importJSONNoPidRelease.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + message = JsonPath.from(body).getString("message"); + assertEquals( + "Please provide a persistent identifier, either by including it in the JSON, or by using the pid query parameter.", + message + ); + + Response importJSONUnmanagedPid = UtilIT.importDatasetViaNativeApi(apiToken, dataverseAlias, json, "doi:10.5073/FK2/ABCD11", "no"); + logger.info(importJSONUnmanagedPid.prettyPrint()); + assertEquals(400, importJSONUnmanagedPid.getStatusCode()); + + body = importJSONUnmanagedPid.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("ERROR", status); + + message = JsonPath.from(body).getString("message"); + assertEquals( + "Cannot import a dataset that has a PID that doesn't match the server's settings", + message + ); + + // Under normal conditions, you shouldn't need to destroy these datasets. + // Uncomment if they're still around from a previous failed run. 
+// Response destroy1 = UtilIT.destroyDataset("doi:10.5072/FK2/ABCD11", apiToken); +// destroy1.prettyPrint(); +// Response destroy2 = UtilIT.destroyDataset("doi:10.5072/FK2/ABCD22", apiToken); +// destroy2.prettyPrint(); + + Response importJSONPid = UtilIT.importDatasetViaNativeApi(apiToken, dataverseAlias, json, "doi:10.5072/FK2/ABCD11", "no"); + logger.info(importJSONPid.prettyPrint()); + assertEquals(201, importJSONPid.getStatusCode()); + + Response importJSONPidRel = UtilIT.importDatasetViaNativeApi(apiToken, dataverseAlias, json, "doi:10.5072/FK2/ABCD22", "yes"); + logger.info(importJSONPidRel.prettyPrint()); + assertEquals(201, importJSONPidRel.getStatusCode()); + + Integer datasetIdInt = JsonPath.from(importJSONPid.body().asString()).getInt("data.id"); + + Response search1 = UtilIT.search("id:dataset_" + datasetIdInt + "_draft", apiToken); // sanity check, can find it + search1.prettyPrint(); + search1.then().assertThat() + .body("data.total_count", CoreMatchers.is(1)) + .body("data.count_in_response", CoreMatchers.is(1)) + .body("data.items[0].name", CoreMatchers.is("Test Dataset")) + .statusCode(OK.getStatusCode()); + + //cleanup + + Response destroyDatasetResponse = UtilIT.destroyDataset(datasetIdInt, apiToken); + assertEquals(200, destroyDatasetResponse.getStatusCode()); + + Integer datasetIdIntPidRel = JsonPath.from(importJSONPidRel.body().asString()).getInt("data.id"); + Response destroyDatasetResponsePidRel = UtilIT.destroyDataset(datasetIdIntPidRel, apiToken); + assertEquals(200, destroyDatasetResponsePidRel.getStatusCode()); + + UtilIT.sleepForDeadlock(UtilIT.MAXIMUM_IMPORT_DURATION); + + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken); + assertEquals(200, deleteDataverseResponse.getStatusCode()); + + Response deleteUserResponse = UtilIT.deleteUser(username); + assertEquals(200, deleteUserResponse.getStatusCode()); + } + + @Test + public void testAttributesApi() { Response createUser = UtilIT.createRandomUser(); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -664,30 +843,70 @@ public void testAttributesApi() throws Exception { String collectionAlias = UtilIT.getAliasFromResponse(createDataverseResponse); String newCollectionAlias = collectionAlias + "RENAMED"; - - // Change the alias of the collection: - - Response changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "alias", newCollectionAlias, apiToken); - changeAttributeResp.prettyPrint(); - + + // Change the name of the collection: + + String newCollectionName = "Renamed Name"; + Response changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "name", newCollectionName, apiToken); changeAttributeResp.then().assertThat() .statusCode(OK.getStatusCode()) .body("message.message", equalTo("Update successful")); - - // Check on the collection, under the new alias: - + + // Change the description of the collection: + + String newDescription = "Renamed Description"; + changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "description", newDescription, apiToken); + changeAttributeResp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("message.message", equalTo("Update successful")); + + // Change the affiliation of the collection: + + String newAffiliation = "Renamed Affiliation"; + changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "affiliation", newAffiliation, apiToken); + changeAttributeResp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("message.message", equalTo("Update 
successful")); + + // Cannot update filePIDsEnabled from a regular user: + + changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "filePIDsEnabled", "true", apiToken); + changeAttributeResp.then().assertThat() + .statusCode(UNAUTHORIZED.getStatusCode()); + + // Change the alias of the collection: + + changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "alias", newCollectionAlias, apiToken); + changeAttributeResp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("message.message", equalTo("Update successful")); + + // Check on the collection, under the new alias: + Response collectionInfoResponse = UtilIT.exportDataverse(newCollectionAlias, apiToken); - collectionInfoResponse.prettyPrint(); - collectionInfoResponse.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.alias", equalTo(newCollectionAlias)); - + .body("data.alias", equalTo(newCollectionAlias)) + .body("data.name", equalTo(newCollectionName)) + .body("data.description", equalTo(newDescription)) + .body("data.affiliation", equalTo(newAffiliation)); + // Delete the collection (again, using its new alias): - + Response deleteCollectionResponse = UtilIT.deleteDataverse(newCollectionAlias, apiToken); - deleteCollectionResponse.prettyPrint(); assertEquals(OK.getStatusCode(), deleteCollectionResponse.getStatusCode()); + + // Cannot update root collection from a regular user: + + changeAttributeResp = UtilIT.setCollectionAttribute("root", "name", newCollectionName, apiToken); + changeAttributeResp.then().assertThat() + .statusCode(UNAUTHORIZED.getStatusCode()); + + collectionInfoResponse = UtilIT.exportDataverse("root", apiToken); + + collectionInfoResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.name", equalTo("Root")); } @Test @@ -699,6 +918,17 @@ public void testListMetadataBlocks() { createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + // New Dataverse should return just the citation block and its displayOnCreate fields when onlyDisplayedOnCreate=true and returnDatasetFieldTypes=true + Response listMetadataBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); + listMetadataBlocks.prettyPrint(); + listMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); + listMetadataBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)) + .body("data[0].name", is("citation")) + .body("data[0].fields.title.displayOnCreate", equalTo(true)) + .body("data[0].fields.size()", is(28)); + Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); @@ -1080,6 +1310,153 @@ public void testAddDataverse() { .body("message", equalTo("Invalid metadata block name: \"" + invalidMetadataBlockName + "\"")); } + @Test + public void testUpdateDataverse() { + Response createUser = UtilIT.createRandomUser(); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + String testAliasSuffix = "-update-dataverse"; + + String testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + Response createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root"); + createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + String newAlias = UtilIT.getRandomDvAlias() + 
testAliasSuffix; + String newName = "New Test Dataverse Name"; + String newAffiliation = "New Test Dataverse Affiliation"; + String newDataverseType = Dataverse.DataverseType.TEACHING_COURSES.toString(); + String[] newContactEmails = new String[]{"new_email@dataverse.com"}; + String[] newInputLevelNames = new String[]{"geographicCoverage"}; + String[] newFacetIds = new String[]{"contributorName"}; + String[] newMetadataBlockNames = new String[]{"citation", "geospatial", "biomedical"}; + + Response updateDataverseResponse = UtilIT.updateDataverse( + testDataverseAlias, + newAlias, + newName, + newAffiliation, + newDataverseType, + newContactEmails, + newInputLevelNames, + newFacetIds, + newMetadataBlockNames, + apiToken + ); + + // Assert dataverse properties are updated + updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + String actualDataverseAlias = updateDataverseResponse.then().extract().path("data.alias"); + assertEquals(newAlias, actualDataverseAlias); + String actualDataverseName = updateDataverseResponse.then().extract().path("data.name"); + assertEquals(newName, actualDataverseName); + String actualDataverseAffiliation = updateDataverseResponse.then().extract().path("data.affiliation"); + assertEquals(newAffiliation, actualDataverseAffiliation); + String actualDataverseType = updateDataverseResponse.then().extract().path("data.dataverseType"); + assertEquals(newDataverseType, actualDataverseType); + String actualContactEmail = updateDataverseResponse.then().extract().path("data.dataverseContacts[0].contactEmail"); + assertEquals("new_email@dataverse.com", actualContactEmail); + + // Assert metadata blocks are updated + Response listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + String actualDataverseMetadataBlock1 = listMetadataBlocksResponse.then().extract().path("data[0].name"); + String actualDataverseMetadataBlock2 = listMetadataBlocksResponse.then().extract().path("data[1].name"); + String actualDataverseMetadataBlock3 = listMetadataBlocksResponse.then().extract().path("data[2].name"); + assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock1)); + assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock2)); + assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock3)); + + // Assert custom facets are updated + Response listDataverseFacetsResponse = UtilIT.listDataverseFacets(newAlias, apiToken); + String actualFacetName = listDataverseFacetsResponse.then().extract().path("data[0]"); + assertThat(newFacetIds, hasItemInArray(actualFacetName)); + + // Assert input levels are updated + Response listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(newAlias, apiToken); + String actualInputLevelName = listDataverseInputLevelsResponse.then().extract().path("data[0].datasetFieldTypeName"); + assertThat(newInputLevelNames, hasItemInArray(actualInputLevelName)); + + // The alias has been changed, so we should not be able to do any operation using the old one + String oldDataverseAlias = testDataverseAlias; + Response getDataverseResponse = UtilIT.listDataverseFacets(oldDataverseAlias, apiToken); + getDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // Update the dataverse without setting metadata blocks, facets, or input levels + updateDataverseResponse = UtilIT.updateDataverse( + newAlias, + newAlias, + newName, + newAffiliation, + newDataverseType, + newContactEmails, + null, + null, + null, + apiToken + ); + 
updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that the metadata blocks are inherited from the parent + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + listMetadataBlocksResponse + .then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)) + .body("data[0].name", equalTo("citation")); + + // Assert that the facets are inherited from the parent + String[] rootFacetIds = new String[]{"authorName", "subject", "keywordValue", "dateOfDeposit"}; + listDataverseFacetsResponse = UtilIT.listDataverseFacets(newAlias, apiToken); + String actualFacetName1 = listDataverseFacetsResponse.then().extract().path("data[0]"); + String actualFacetName2 = listDataverseFacetsResponse.then().extract().path("data[1]"); + String actualFacetName3 = listDataverseFacetsResponse.then().extract().path("data[2]"); + String actualFacetName4 = listDataverseFacetsResponse.then().extract().path("data[3]"); + assertThat(rootFacetIds, hasItemInArray(actualFacetName1)); + assertThat(rootFacetIds, hasItemInArray(actualFacetName2)); + assertThat(rootFacetIds, hasItemInArray(actualFacetName3)); + assertThat(rootFacetIds, hasItemInArray(actualFacetName4)); + + // Assert that the dataverse should not have any input level + listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(newAlias, apiToken); + listDataverseInputLevelsResponse + .then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(0)); + + // Should return error when the dataverse to edit does not exist + updateDataverseResponse = UtilIT.updateDataverse( + "unexistingDataverseAlias", + newAlias, + newName, + newAffiliation, + newDataverseType, + newContactEmails, + newInputLevelNames, + newFacetIds, + newMetadataBlockNames, + apiToken + ); + updateDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // User with unprivileged API token cannot update Root dataverse + updateDataverseResponse = UtilIT.updateDataverse( + "root", + newAlias, + newName, + newAffiliation, + newDataverseType, + newContactEmails, + newInputLevelNames, + newFacetIds, + newMetadataBlockNames, + apiToken + ); + updateDataverseResponse.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); + + Response rootCollectionInfoResponse = UtilIT.exportDataverse("root", apiToken); + rootCollectionInfoResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.name", equalTo("Root")); + } + @Test public void testListFacets() { Response createUserResponse = UtilIT.createRandomUser(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index e3c26284d55..98107eca33a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2275,7 +2275,6 @@ public void testDeleteFile() { Response v1 = UtilIT.getDatasetVersion(datasetPid, "1.0", apiToken); v1.prettyPrint(); v1.then().assertThat() - .body("data.files[0].dataFile.filename", equalTo("cc0.png")) .statusCode(OK.getStatusCode()); Map v1files1 = with(v1.body().asString()).param("fileToFind", "cc0.png") @@ -2290,7 +2289,6 @@ public void testDeleteFile() { Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); postv1draft2.prettyPrint(); postv1draft2.then().assertThat() - .body("data.files[0].dataFile.filename", equalTo("orcid_16x16.png")) .statusCode(OK.getStatusCode()); Map v1files2 
= with(postv1draft2.body().asString()).param("fileToFind", "orcid_16x16.png") diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java index 5e436dd0e98..b198d2769a0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java @@ -6,11 +6,14 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; import static jakarta.ws.rs.core.Response.Status.NOT_FOUND; import static jakarta.ws.rs.core.Response.Status.OK; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Paths; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; +import org.skyscreamer.jsonassert.JSONAssert; public class InfoIT { @@ -81,6 +84,22 @@ public void testGetZipDownloadLimit() { .body("data", notNullValue()); } + @Test + public void testGetExportFormats() throws IOException { + Response response = given().urlEncodingEnabled(false) + .get("/api/info/exportFormats"); + response.prettyPrint(); + response.then().assertThat().statusCode(OK.getStatusCode()); + + String actual = response.getBody().asString(); + String expected = + java.nio.file.Files.readString( + Paths.get("src/test/resources/json/export-formats.json"), + StandardCharsets.UTF_8); + JSONAssert.assertEquals(expected, actual, true); + + } + private void testSettingEndpoint(SettingsServiceBean.Key settingKey, String testSettingValue) { String endpoint = "/api/info/settings/" + settingKey; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java index f7135ce7f3b..8951b0bd42e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java @@ -278,8 +278,8 @@ public void testMoveLinkedDataset() { .body("message", equalTo("Use the query parameter forceMove=true to complete the move. This dataset is linked to the new host dataverse or one of its parents. This move would remove the link to this dataset. 
")); JsonObject linksBeforeData = Json.createReader(new StringReader(getLinksBefore.asString())).readObject(); - assertEquals("OK", linksBeforeData.getString("status")); - assertEquals(dataverse2Alias + " (id " + dataverse2Id + ")", linksBeforeData.getJsonObject("data").getJsonArray("dataverses that link to dataset id " + datasetId).getString(0)); + assertEquals(datasetId, linksBeforeData.getJsonObject("data").getInt("id")); + assertEquals(dataverse2Id, linksBeforeData.getJsonObject("data").getJsonArray("linked-dataverses").get(0).asJsonObject().getInt("id")); boolean forceMove = true; Response forceMoveLinkedDataset = UtilIT.moveDataset(datasetId.toString(), dataverse2Alias, forceMove, superuserApiToken); @@ -308,8 +308,7 @@ public void testMoveLinkedDataset() { JsonObject linksAfterData = Json.createReader(new StringReader(getLinksAfter.asString())).readObject(); assertEquals("OK", linksAfterData.getString("status")); - assertEquals(0, linksAfterData.getJsonObject("data").getJsonArray("dataverses that link to dataset id " + datasetId).size()); - + assertEquals(0, linksAfterData.getJsonObject("data").getJsonArray("linked-dataverses").size()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java index 90357596c25..08ebec31cd6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java @@ -22,7 +22,7 @@ public class SavedSearchIT { @BeforeAll public static void setUpClass() { - + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); } @AfterAll @@ -53,81 +53,55 @@ public void testSavedSearches() { Integer datasetId2 = UtilIT.getDatasetIdFromResponse(createDatasetResponse2); // missing body - Response resp = RestAssured.given() - .contentType("application/json") - .post("/api/admin/savedsearches"); + Response resp = UtilIT.setSavedSearch(); resp.prettyPrint(); resp.then().assertThat() .statusCode(INTERNAL_SERVER_ERROR.getStatusCode()); // creatorId null - resp = RestAssured.given() - .body(createSavedSearchJson("*", null, dataverseId, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", null, dataverseId, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // creatorId string - resp = RestAssured.given() - .body(createSavedSearchJson("*", "1", dataverseId.toString(), "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", "1", dataverseId.toString(), "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // creatorId not found - resp = RestAssured.given() - .body(createSavedSearchJson("*", 9999, dataverseId, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 9999, dataverseId, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(NOT_FOUND.getStatusCode()); // definitionPointId null - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, null, "subject_ss:Medicine, Health and Life Sciences")) - 
.contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, null, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // definitionPointId string - resp = RestAssured.given() - .body(createSavedSearchJson("*", "1", "9999", "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", "1", "9999", "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()); // definitionPointId not found - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, 9999, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, 9999, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(NOT_FOUND.getStatusCode()); // missing filter - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, dataverseId)) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, dataverseId)); resp.prettyPrint(); resp.then().assertThat() .statusCode(OK.getStatusCode()); // create a saved search as superuser : OK - resp = RestAssured.given() - .body(createSavedSearchJson("*", 1, dataverseId, "subject_ss:Medicine, Health and Life Sciences")) - .contentType("application/json") - .post("/api/admin/savedsearches"); + resp = UtilIT.setSavedSearch(createSavedSearchJson("*", 1, dataverseId, "subject_ss:Medicine, Health and Life Sciences")); resp.prettyPrint(); resp.then().assertThat() .statusCode(OK.getStatusCode()); @@ -136,8 +110,7 @@ public void testSavedSearches() { Integer createdSavedSearchId = path.getInt("data.id"); // get list as non superuser : OK - Response getListReponse = RestAssured.given() - .get("/api/admin/savedsearches/list"); + Response getListReponse = UtilIT.getSavedSearchList(); getListReponse.prettyPrint(); getListReponse.then().assertThat() .statusCode(OK.getStatusCode()); @@ -146,22 +119,19 @@ public void testSavedSearches() { List listBeforeDelete = path2.getList("data.savedSearches"); // makelinks/all as non superuser : OK - Response makelinksAll = RestAssured.given() - .put("/api/admin/savedsearches/makelinks/all"); + Response makelinksAll = UtilIT.setSavedSearchMakelinksAll(); makelinksAll.prettyPrint(); makelinksAll.then().assertThat() .statusCode(OK.getStatusCode()); //delete a saved search as non superuser : OK - Response deleteReponse = RestAssured.given() - .delete("/api/admin/savedsearches/" + createdSavedSearchId); + Response deleteReponse = UtilIT.deleteSavedSearchById(createdSavedSearchId); deleteReponse.prettyPrint(); deleteReponse.then().assertThat() .statusCode(OK.getStatusCode()); // check list count minus 1 - getListReponse = RestAssured.given() - .get("/api/admin/savedsearches/list"); + getListReponse = UtilIT.getSavedSearchList(); getListReponse.prettyPrint(); JsonPath path3 = JsonPath.from(getListReponse.body().asString()); List listAfterDelete = path3.getList("data.savedSearches"); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java index 3a2b684c421..b03c23cd1e2 100644 --- 
a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java @@ -1308,8 +1308,8 @@ public void testSearchFilesAndUrlImages() { .statusCode(200); pathToFile = "src/main/webapp/resources/js/mydata.js"; Response uploadFile = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); - uploadImage.prettyPrint(); - uploadImage.then().assertThat() + uploadFile.prettyPrint(); + uploadFile.then().assertThat() .statusCode(200); Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); @@ -1337,7 +1337,7 @@ public void testSearchFilesAndUrlImages() { .statusCode(OK.getStatusCode()) .body("data.items[0].type", CoreMatchers.is("dataverse")) .body("data.items[0].url", CoreMatchers.containsString("/dataverse/")) - .body("data.items[0]", CoreMatchers.not(CoreMatchers.hasItem("url_image"))); + .body("data.items[0]", CoreMatchers.not(CoreMatchers.hasItem("image_url"))); searchResp = UtilIT.search("mydata", apiToken); searchResp.prettyPrint(); @@ -1345,6 +1345,6 @@ public void testSearchFilesAndUrlImages() { .statusCode(OK.getStatusCode()) .body("data.items[0].type", CoreMatchers.is("file")) .body("data.items[0].url", CoreMatchers.containsString("/datafile/")) - .body("data.items[0]", CoreMatchers.not(CoreMatchers.hasItem("url_image"))); + .body("data.items[0]", CoreMatchers.not(CoreMatchers.hasItem("image_url"))); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java index 518431bfa2d..709908ac6eb 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java @@ -850,7 +850,7 @@ public void testDeleteFiles() { String citation = atomEntryDraftV2.body().xmlPath().getString("entry.bibliographicCitation"); logger.info("citation (should contain 'DRAFT'): " + citation); boolean draftStringFoundInCitation = citation.matches(".*DRAFT.*"); - assertEquals(true, draftStringFoundInCitation); + assertTrue(draftStringFoundInCitation); List oneFileLeftInV2Draft = statement3.getBody().xmlPath().getList("feed.entry.id"); logger.info("Number of files remaining in this post version 1 draft:" + oneFileLeftInV2Draft.size()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java index 0189ffd6e58..ce3b8bf75ff 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java @@ -23,6 +23,8 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.contains; import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.hamcrest.CoreMatchers; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -371,29 +373,38 @@ public void testAPITokenEndpoints() { .body("data.message", containsString(userApiToken)) .body("data.message", containsString("expires on")); + // Recreate given a bad API token Response recreateToken = UtilIT.recreateToken("BAD-Token-blah-89234"); recreateToken.prettyPrint(); recreateToken.then().assertThat() .statusCode(UNAUTHORIZED.getStatusCode()); + // Recreate given a valid API token recreateToken = UtilIT.recreateToken(userApiToken); recreateToken.prettyPrint(); recreateToken.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.message", containsString("New token for")); + .body("data.message", containsString("New token for")) + .body("data.message", 
CoreMatchers.not(containsString("and expires on"))); + // Recreate given a valid API token and returning expiration createUser = UtilIT.createRandomUser(); - createUser.prettyPrint(); - assertEquals(200, createUser.getStatusCode()); + assertEquals(OK.getStatusCode(), createUser.getStatusCode()); + + userApiToken = UtilIT.getApiTokenFromResponse(createUser); + + recreateToken = UtilIT.recreateToken(userApiToken, true); + recreateToken.prettyPrint(); + recreateToken.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", containsString("New token for")) + .body("data.message", containsString("and expires on")); - String userApiTokenForDelete = UtilIT.getApiTokenFromResponse(createUser); - /* Add tests for Private URL */ createUser = UtilIT.createRandomUser(); - String username = UtilIT.getUsernameFromResponse(createUser); String apiToken = UtilIT.getApiTokenFromResponse(createUser); Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); createDataverseResponse.prettyPrint(); @@ -416,8 +427,12 @@ public void testAPITokenEndpoints() { getExpiration = UtilIT.getTokenExpiration(tokenForPrivateUrlUser); getExpiration.prettyPrint(); getExpiration.then().assertThat() - .statusCode(NOT_FOUND.getStatusCode()); + .statusCode(UNAUTHORIZED.getStatusCode()); + createUser = UtilIT.createRandomUser(); + assertEquals(OK.getStatusCode(), createUser.getStatusCode()); + + String userApiTokenForDelete = UtilIT.getApiTokenFromResponse(createUser); Response deleteToken = UtilIT.deleteToken(userApiTokenForDelete); deleteToken.prettyPrint(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 4e20e8e4c33..1930610532a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import io.restassured.http.ContentType; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; @@ -12,6 +13,7 @@ import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObject; +import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import static jakarta.ws.rs.core.Response.Status.CREATED; import java.nio.charset.StandardCharsets; @@ -239,6 +241,22 @@ public static Response clearThumbnailFailureFlag(long fileId) { return response; } + public static Response auditFiles(String apiToken, Long firstId, Long lastId, String csvList) { + String params = ""; + if (firstId != null) { + params = "?firstId="+ firstId; + } + if (lastId != null) { + params = params + (params.isEmpty() ? "?" : "&") + "lastId="+ lastId; + } + if (csvList != null) { + params = params + (params.isEmpty() ? "?" 
: "&") + "datasetIdentifierList="+ csvList; + } + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/admin/datafiles/auditFiles" + params); + } + private static String getAuthenticatedUserAsJsonString(String persistentUserId, String firstName, String lastName, String authenticationProviderId, String identifier) { JsonObjectBuilder builder = Json.createObjectBuilder(); builder.add("authenticationProviderId", authenticationProviderId); @@ -323,7 +341,14 @@ static Integer getDatasetIdFromResponse(Response createDatasetResponse) { logger.info("Id found in create dataset response: " + datasetId); return datasetId; } - + + static Integer getDataFileIdFromResponse(Response uploadDataFileResponse) { + JsonPath dataFile = JsonPath.from(uploadDataFileResponse.body().asString()); + int dataFileId = dataFile.getInt("data.files[0].dataFile.id"); + logger.info("Id found in upload DataFile response: " + dataFileId); + return dataFileId; + } + static Integer getSearchCountFromResponse(Response searchResponse) { JsonPath createdDataset = JsonPath.from(searchResponse.body().asString()); int searchCount = createdDataset.getInt("data.total_count"); @@ -389,6 +414,48 @@ static Response createSubDataverse(String alias, String category, String apiToke objectBuilder.add("affiliation", affiliation); } + updateDataverseRequestJsonWithMetadataBlocksConfiguration(inputLevelNames, facetIds, metadataBlockNames, objectBuilder); + + JsonObject dvData = objectBuilder.build(); + return given() + .body(dvData.toString()).contentType(ContentType.JSON) + .when().post("/api/dataverses/" + parentDV + "?key=" + apiToken); + } + + static Response updateDataverse(String alias, + String newAlias, + String newName, + String newAffiliation, + String newDataverseType, + String[] newContactEmails, + String[] newInputLevelNames, + String[] newFacetIds, + String[] newMetadataBlockNames, + String apiToken) { + JsonArrayBuilder contactArrayBuilder = Json.createArrayBuilder(); + for(String contactEmail : newContactEmails) { + contactArrayBuilder.add(Json.createObjectBuilder().add("contactEmail", contactEmail)); + } + NullSafeJsonBuilder jsonBuilder = jsonObjectBuilder() + .add("alias", newAlias) + .add("name", newName) + .add("affiliation", newAffiliation) + .add("dataverseContacts", contactArrayBuilder) + .add("dataverseType", newDataverseType) + .add("affiliation", newAffiliation); + + updateDataverseRequestJsonWithMetadataBlocksConfiguration(newInputLevelNames, newFacetIds, newMetadataBlockNames, jsonBuilder); + + JsonObject dvData = jsonBuilder.build(); + return given() + .body(dvData.toString()).contentType(ContentType.JSON) + .when().put("/api/dataverses/" + alias + "?key=" + apiToken); + } + + private static void updateDataverseRequestJsonWithMetadataBlocksConfiguration(String[] inputLevelNames, + String[] facetIds, + String[] metadataBlockNames, + JsonObjectBuilder objectBuilder) { JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder(); if (inputLevelNames != null) { @@ -420,12 +487,6 @@ static Response createSubDataverse(String alias, String category, String apiToke } objectBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); - - JsonObject dvData = objectBuilder.build(); - Response createDataverseResponse = given() - .body(dvData.toString()).contentType(ContentType.JSON) - .when().post("/api/dataverses/" + parentDV + "?key=" + apiToken); - return createDataverseResponse; } static Response createDataverse(JsonObject dvData, String apiToken) { @@ -1570,7 +1631,16 @@ static Response 
getDatasetVersion(String persistentId, String versionNumber, Str + persistentId + (excludeFiles ? "&excludeFiles=true" : "")); } - + static Response compareDatasetVersions(String persistentId, String versionNumber1, String versionNumber2, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/datasets/:persistentId/versions/" + + versionNumber1 + + "/compare/" + + versionNumber2 + + "?persistentId=" + + persistentId); + } static Response getDatasetWithOwners(String persistentId, String apiToken, boolean returnOwners) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) @@ -2123,19 +2193,22 @@ static Response uploadProvFreeForm(String idOrPersistentId, JsonObject jsonObjec // return requestSpecification.delete("/api/files/" + idInPath + "/prov-freeform" + optionalQueryParam); // } static Response exportDataset(String datasetPersistentId, String exporter) { - return exportDataset(datasetPersistentId, exporter, null); + return exportDataset(datasetPersistentId, exporter, null, false); } - static Response exportDataset(String datasetPersistentId, String exporter, String apiToken) { -// http://localhost:8080/api/datasets/export?exporter=dataverse_json&persistentId=doi%3A10.5072/FK2/W6WIMQ + return exportDataset(datasetPersistentId, exporter, apiToken, false); + } + static Response exportDataset(String datasetPersistentId, String exporter, String apiToken, boolean wait) { + // Wait for the Async call to finish to get the updated data + if (wait) { + sleepForReexport(datasetPersistentId, apiToken, 10); + } RequestSpecification requestSpecification = given(); if (apiToken != null) { requestSpecification = given() .header(UtilIT.API_TOKEN_HTTP_HEADER, apiToken); } return requestSpecification - // .header(API_TOKEN_HTTP_HEADER, apiToken) - // .get("/api/datasets/:persistentId/export" + "?persistentId=" + datasetPersistentId + "&exporter=" + exporter); .get("/api/datasets/export" + "?persistentId=" + datasetPersistentId + "&exporter=" + exporter); } @@ -2813,10 +2886,15 @@ static Response getTokenExpiration( String apiToken) { return response; } - static Response recreateToken( String apiToken) { + static Response recreateToken(String apiToken) { + return recreateToken(apiToken, false); + } + + static Response recreateToken(String apiToken, boolean returnExpiration) { Response response = given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .post("api/users/token/recreate"); + .header(API_TOKEN_HTTP_HEADER, apiToken) + .queryParam("returnExpiration", returnExpiration) + .post("api/users/token/recreate"); return response; } @@ -3672,6 +3750,35 @@ static Response importDatasetDDIViaNativeApi(String apiToken, String dataverseAl return importDDI.post(postString); } + + static Response importDatasetViaNativeApi(String apiToken, String dataverseAlias, String json, String pid, String release) { + String postString = "/api/dataverses/" + dataverseAlias + "/datasets/:import"; + if (pid != null || release != null ) { + //postString = postString + "?"; + if (pid != null) { + postString = postString + "?pid=" + pid; + if (release != null && release.compareTo("yes") == 0) { + postString = postString + "&release=" + release; + } + } else { + if (release != null && release.compareTo("yes") == 0) { + postString = postString + "?release=" + release; + } + } + } + logger.info("Here importDatasetViaNativeApi"); + logger.info(postString); + + RequestSpecification importJSON = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .urlEncodingEnabled(false) + .body(json) + 
.contentType("application/json"); + + return importJSON.post(postString); + } + + static Response retrieveMyDataAsJsonString(String apiToken, String userIdentifier, ArrayList roleIds) { Response response = given() .header(API_TOKEN_HTTP_HEADER, apiToken) @@ -4059,8 +4166,37 @@ static Response setDatasetStorageDriver(Integer datasetId, String driverLabel, S .body(driverLabel) .put("/api/datasets/" + datasetId + "/storageDriver"); } - - + + /** GET on /api/admin/savedsearches/list */ + static Response getSavedSearchList() { + return given().get("/api/admin/savedsearches/list"); + } + + /** POST on /api/admin/savedsearches without body */ + static Response setSavedSearch() { + return given() + .contentType("application/json") + .post("/api/admin/savedsearches"); + } + + /** POST on /api/admin/savedsearches with body */ + static Response setSavedSearch(String body) { + return given() + .body(body) + .contentType("application/json") + .post("/api/admin/savedsearches"); + } + + /** PUT on /api/admin/savedsearches/makelinks/all */ + static Response setSavedSearchMakelinksAll() { + return given().put("/api/admin/savedsearches/makelinks/all"); + } + + /** DELETE on /api/admin/savedsearches/{id} with identifier */ + static Response deleteSavedSearchById(Integer id) { + return given().delete("/api/admin/savedsearches/" + id); + } + //Globus Store related - not currently used static Response getDatasetGlobusUploadParameters(Integer datasetId, String locale, String apiToken) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java index 44739f3f62a..acf5d970358 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java @@ -1,6 +1,13 @@ package edu.harvard.iq.dataverse.api.imports; import edu.harvard.iq.dataverse.api.dto.DatasetDTO; +import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO; + +import org.apache.commons.io.FileUtils; +import com.google.gson.Gson; +import java.io.File; +import java.io.IOException; + import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InjectMocks; @@ -8,6 +15,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; +import java.nio.charset.StandardCharsets; + @ExtendWith(MockitoExtension.class) public class ImportGenericServiceBeanTest { @@ -15,7 +24,47 @@ public class ImportGenericServiceBeanTest { private ImportGenericServiceBean importGenericService; @Test - public void testReassignIdentifierAsGlobalId() { + void testIdentifierHarvestableWithOtherID() throws IOException { + // "otherIdValue" containing the value : doi:10.7910/DVN/TJCLKP + File file = new File("src/test/resources/json/importGenericWithOtherId.json"); + String text = FileUtils.readFileToString(file, StandardCharsets.UTF_8); + DatasetVersionDTO dto = new Gson().fromJson(text, DatasetVersionDTO.class); + + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://doi.org/10.7910/DVN/TJCLKP")); + // junk or null + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "junk")); + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, null)); + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, 
"http://www.example.com")); + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://dataverse.org")); + } + + @Test + void testIdentifierHarvestableWithoutOtherID() throws IOException { + // Does not contain data of type "otherIdValue" + File file = new File("src/test/resources/json/importGenericWithoutOtherId.json"); + String text = FileUtils.readFileToString(file, StandardCharsets.UTF_8); + DatasetVersionDTO dto = new Gson().fromJson(text, DatasetVersionDTO.class); + + // non-URL + assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "doi:10.7910/DVN/TJCLKP")); + assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "hdl:10.7910/DVN/TJCLKP")); + // HTTPS + assertEquals("https://doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://doi.org/10.7910/DVN/TJCLKP")); + assertEquals("https://dx.doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://dx.doi.org/10.7910/DVN/TJCLKP")); + assertEquals("https://hdl.handle.net/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "https://hdl.handle.net/10.7910/DVN/TJCLKP")); + // HTTP (no S) + assertEquals("http://doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://doi.org/10.7910/DVN/TJCLKP")); + assertEquals("http://dx.doi.org/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://dx.doi.org/10.7910/DVN/TJCLKP")); + assertEquals("http://hdl.handle.net/10.7910/DVN/TJCLKP", importGenericService.selectIdentifier(dto, "http://hdl.handle.net/10.7910/DVN/TJCLKP")); + // junk or null + assertNull(importGenericService.selectIdentifier(dto, "junk")); + assertNull(importGenericService.selectIdentifier(dto, null)); + assertNull(importGenericService.selectIdentifier(dto, "http://www.example.com")); + assertNull(importGenericService.selectIdentifier(dto, "https://dataverse.org")); + } + + @Test + void testReassignIdentifierAsGlobalId() { // non-URL assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("doi:10.7910/DVN/TJCLKP", new DatasetDTO())); assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("hdl:10.7910/DVN/TJCLKP", new DatasetDTO())); @@ -29,6 +78,8 @@ public void testReassignIdentifierAsGlobalId() { assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://hdl.handle.net/10.7910/DVN/TJCLKP", new DatasetDTO())); // junk assertNull(importGenericService.reassignIdentifierAsGlobalId("junk", new DatasetDTO())); + assertNull(importGenericService.reassignIdentifierAsGlobalId("http://www.example.com", new DatasetDTO())); + assertNull(importGenericService.reassignIdentifierAsGlobalId("https://dataverse.org", new DatasetDTO())); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticationProviderTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticationProviderTest.java index eac9a605c9e..d4d7b6fa69d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticationProviderTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticationProviderTest.java @@ -15,7 +15,7 @@ public class AuthenticationProviderTest { - private final static String[] authProviders = {"null", "builtin", "github", "google", "orcid", "orcid-sandbox", "shib"}; + private static final String[] authProviders = {"null", "builtin", "github", "google", "orcid", "orcid-sandbox", "shib"}; private static Map 
bundleTestMap; @BeforeAll diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java index 7bd802b3b02..bd3bfcc1a60 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java @@ -378,20 +378,20 @@ public void testMutingInJson() { public void testHasEmailMuted() { testUser.setMutedEmails(mutedTypes); System.out.println("hasEmailMuted"); - assertEquals(true, testUser.hasEmailMuted(Type.ASSIGNROLE)); - assertEquals(true, testUser.hasEmailMuted(Type.REVOKEROLE)); - assertEquals(false, testUser.hasEmailMuted(Type.CREATEDV)); - assertEquals(false, testUser.hasEmailMuted(null)); + assertTrue(testUser.hasEmailMuted(Type.ASSIGNROLE)); + assertTrue(testUser.hasEmailMuted(Type.REVOKEROLE)); + assertFalse(testUser.hasEmailMuted(Type.CREATEDV)); + assertFalse(testUser.hasEmailMuted(null)); } @Test public void testHasNotificationsMutedMuted() { testUser.setMutedNotifications(mutedTypes); System.out.println("hasNotificationMuted"); - assertEquals(true, testUser.hasNotificationMuted(Type.ASSIGNROLE)); - assertEquals(true, testUser.hasNotificationMuted(Type.REVOKEROLE)); - assertEquals(false, testUser.hasNotificationMuted(Type.CREATEDV)); - assertEquals(false, testUser.hasNotificationMuted(null)); + assertTrue(testUser.hasNotificationMuted(Type.ASSIGNROLE)); + assertTrue(testUser.hasNotificationMuted(Type.REVOKEROLE)); + assertFalse(testUser.hasNotificationMuted(Type.CREATEDV)); + assertFalse(testUser.hasNotificationMuted(null)); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java index a8dda2f6a7e..d3c5cdca470 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUserTest.java @@ -38,7 +38,7 @@ void getIdentifier() { @Test void testGetDisplayInfo() { RoleAssigneeDisplayInfo displayInfo = privateUrlUser.getDisplayInfo(); - assertEquals("Private URL Enabled", displayInfo.getTitle()); + assertEquals("Preview URL Enabled", displayInfo.getTitle()); assertNull(displayInfo.getEmailAddress()); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java index 552d76b74e8..ea5cc4b66a8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java @@ -89,20 +89,20 @@ public void tearDownClass() throws IOException { */ @Test public void testOpen() throws IOException { - assertEquals(false, datasetAccess.canRead()); - assertEquals(false, datasetAccess.canWrite()); + assertFalse(datasetAccess.canRead()); + assertFalse(datasetAccess.canWrite()); datasetAccess.open(DataAccessOption.READ_ACCESS); - assertEquals(true, datasetAccess.canRead()); - assertEquals(false, datasetAccess.canWrite()); + assertTrue(datasetAccess.canRead()); + assertFalse(datasetAccess.canWrite()); datasetAccess.open(DataAccessOption.WRITE_ACCESS); - assertEquals(false, datasetAccess.canRead()); - assertEquals(true, datasetAccess.canWrite()); + assertFalse(datasetAccess.canRead()); + assertTrue(datasetAccess.canWrite()); 
dataFileAccess.open(DataAccessOption.READ_ACCESS); - assertEquals(true, dataFileAccess.canRead()); - assertEquals(false, dataFileAccess.canWrite()); + assertTrue(dataFileAccess.canRead()); + assertFalse(dataFileAccess.canWrite()); } /** @@ -133,7 +133,7 @@ public void testOpenAuxChannel() throws IOException { */ @Test public void testIsAuxObjectCached() throws IOException { - assertEquals(true, datasetAccess.isAuxObjectCached("Dataset")); + assertTrue(datasetAccess.isAuxObjectCached("Dataset")); } /** diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java index 84a241b90f6..3aab66dc63b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java @@ -189,48 +189,48 @@ public void testResponseHeaders() { @Test public void testFileLocation() { - assertEquals(true, instance.isLocalFile()); + assertTrue(instance.isLocalFile()); instance.setIsLocalFile(false); - assertEquals(false, instance.isLocalFile()); + assertFalse(instance.isLocalFile()); - assertEquals(false, instance.isRemoteAccess()); + assertFalse(instance.isRemoteAccess()); instance.setIsRemoteAccess(true); - assertEquals(true, instance.isRemoteAccess()); + assertTrue(instance.isRemoteAccess()); } @Test public void testHttpAccess() { - assertEquals(false, instance.isHttpAccess()); + assertFalse(instance.isHttpAccess()); instance.setIsHttpAccess(true); - assertEquals(true, instance.isHttpAccess()); + assertTrue(instance.isHttpAccess()); }*/ @Test public void testDownloadSupported() { - assertEquals(true, instance.isDownloadSupported()); + assertTrue(instance.isDownloadSupported()); instance.setIsDownloadSupported(false); - assertEquals(false, instance.isDownloadSupported()); + assertFalse(instance.isDownloadSupported()); } @Test public void testSubsetSupported() { - assertEquals(false, instance.isSubsetSupported()); + assertFalse(instance.isSubsetSupported()); instance.setIsSubsetSupported(true); - assertEquals(true, instance.isSubsetSupported()); + assertTrue(instance.isSubsetSupported()); } @Test public void testZippedStream() { - assertEquals(false, instance.isZippedStream()); + assertFalse(instance.isZippedStream()); instance.setIsZippedStream(true); - assertEquals(true, instance.isZippedStream()); + assertTrue(instance.isZippedStream()); } @Test public void testNoVarHeader() { - assertEquals(false, instance.noVarHeader()); + assertFalse(instance.noVarHeader()); instance.setNoVarHeader(true); - assertEquals(true, instance.noVarHeader()); + assertTrue(instance.noVarHeader()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java index 942e4329384..27e0ac758e0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java @@ -51,14 +51,14 @@ public void setUpClass() throws IOException { */ @Test public void testPerms() throws IOException { - assertEquals(false, datasetAccess.canRead()); - assertEquals(false, datasetAccess.canWrite()); + assertFalse(datasetAccess.canRead()); + assertFalse(datasetAccess.canWrite()); } @Test public void testIsExpiryExpired() { long currentTime = 1502221467; - assertEquals(false, swiftAccess.isExpiryExpired(60, 1502281, currentTime)); + assertFalse(swiftAccess.isExpiryExpired(60, 1502281, 
currentTime)); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java index eb19f22df63..148d34dc5f7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java @@ -18,6 +18,7 @@ import org.apache.http.message.BasicStatusLine; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; import org.junit.jupiter.api.Test; public class DataCaptureModuleUtilTest { @@ -25,13 +26,13 @@ public class DataCaptureModuleUtilTest { @Test public void testRsyncSupportEnabled() { System.out.println("rsyncSupportEnabled"); - assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled(null)); - assertEquals(true, DataCaptureModuleUtil.rsyncSupportEnabled("dcm/rsync+ssh")); + assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled(null)); + assertTrue(DataCaptureModuleUtil.rsyncSupportEnabled("dcm/rsync+ssh")); // Comma sepratated lists of upload methods are supported. - assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("native/http:dcm/rsync+ssh")); - assertEquals(true, DataCaptureModuleUtil.rsyncSupportEnabled("native/http,dcm/rsync+ssh")); - assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("native/http")); - assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("junk")); + assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled("native/http:dcm/rsync+ssh")); + assertTrue(DataCaptureModuleUtil.rsyncSupportEnabled("native/http,dcm/rsync+ssh")); + assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled("native/http")); + assertFalse(DataCaptureModuleUtil.rsyncSupportEnabled("junk")); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java index 8eed2a33c5a..2db8851c48a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java @@ -70,8 +70,8 @@ public void testGetThumbnailRestricted() { */ @Test public void testDeleteDatasetLogo() { - assertEquals(false, DatasetUtil.deleteDatasetLogo(null)); - assertEquals(false, DatasetUtil.deleteDatasetLogo(new Dataset())); + assertFalse(DatasetUtil.deleteDatasetLogo(null)); + assertFalse(DatasetUtil.deleteDatasetLogo(new Dataset())); } /** @@ -106,7 +106,7 @@ public void testGetThumbnailAsInputStream() { @Test public void testIsDatasetLogoPresent() { Dataset dataset = MocksFactory.makeDataset(); - assertEquals(false, DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)); + assertFalse(DatasetUtil.isDatasetLogoPresent(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java b/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java index bfb9134cfca..475b4c1cff5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java @@ -66,16 +66,16 @@ void variableTest(Map vmMap) { assertEquals(vm.getLiteralquestion(), "This is a literal question."); 
assertEquals(vm.getNotes(), "These are notes.\nA lot of them."); assertEquals(vm.getUniverse(),"Our universe"); - assertEquals(false, vm.isIsweightvar()); - assertEquals(false, vm.isWeighted()); + assertFalse(vm.isIsweightvar()); + assertFalse(vm.isWeighted()); testCategoriesVar1(vm); vm = vmMap.get(1169L); assertNotNull(vm); - assertEquals(false, vm.isIsweightvar()); - assertEquals(true, vm.isWeighted()); + assertFalse(vm.isIsweightvar()); + assertTrue(vm.isWeighted()); assertEquals(vm.getLabel(), "age_rollup" ); assertEquals(vm.getInterviewinstruction(), null); @@ -90,8 +90,8 @@ void variableTest(Map vmMap) { vm = vmMap.get(1168L); assertNotNull(vm); - assertEquals(true, vm.isIsweightvar()); - assertEquals(false, vm.isWeighted()); + assertTrue(vm.isIsweightvar()); + assertFalse(vm.isWeighted()); assertEquals(vm.getLabel(), "weight" ); assertEquals(vm.getInterviewinstruction(), null); assertEquals(vm.getLiteralquestion(), "Literal question for weight"); diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java new file mode 100644 index 00000000000..f49ebcea39c --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java @@ -0,0 +1,264 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit; +import edu.harvard.iq.dataverse.util.JhoveFileType; +import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; +import org.jetbrains.annotations.NotNull; +import org.joda.time.DateTime; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.mockito.MockedStatic; +import org.mockito.Mockito; + +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.PrintStream; +import java.nio.file.Files; +import java.nio.file.NoSuchFileException; +import java.nio.file.Path; +import java.security.SecureRandom; +import java.text.MessageFormat; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +import static edu.harvard.iq.dataverse.DataFile.ChecksumType.MD5; +import static org.apache.commons.io.file.FilesUncheck.createDirectories; +import static org.apache.commons.io.file.PathUtils.deleteDirectory; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; + + +@LocalJvmSettings +public class CreateNewDataFilesTest { + // TODO keep constants for annotations in sync with class name + Path testDir = Path.of("target/test/").resolve(getClass().getSimpleName()); + PrintStream original_stderr; + + @BeforeEach + public void cleanTmpDir() throws IOException { + original_stderr = System.err; + if(testDir.toFile().exists()) + 
deleteDirectory(testDir); + } + + @AfterEach void restoreStderr() { + System.setErr(original_stderr); + } + + @Test + @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "target/test/CreateNewDataFilesTest/tmp") + public void execute_fails_to_upload_when_tmp_does_not_exist() throws FileNotFoundException { + + mockTmpLookup(); + var cmd = createCmd("scripts/search/data/shape/shapefile.zip", mockDatasetVersion(), 1000L, 500L); + var ctxt = mockCommandContext(mockSysConfig(true, 0L, MD5, 10)); + + assertThatThrownBy(() -> cmd.execute(ctxt)) + .isInstanceOf(CommandException.class) + .hasMessageContaining("Failed to save the upload as a temp file (temp disk space?)") + .hasRootCauseInstanceOf(NoSuchFileException.class) + .getRootCause() + .hasMessageStartingWith("target/test/CreateNewDataFilesTest/tmp/temp/tmp"); + } + + @Test + @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "target/test/CreateNewDataFilesTest/tmp") + public void execute_fails_on_size_limit() throws Exception { + createDirectories(Path.of("target/test/CreateNewDataFilesTest/tmp/temp")); + + mockTmpLookup(); + var cmd = createCmd("scripts/search/data/binary/3files.zip", mockDatasetVersion(), 1000L, 500L); + var ctxt = mockCommandContext(mockSysConfig(true, 50L, MD5, 0)); + try (var mockedStatic = Mockito.mockStatic(JhoveFileType.class)) { + mockedStatic.when(JhoveFileType::getJhoveConfigFile).thenReturn("conf/jhove/jhove.conf"); + + assertThatThrownBy(() -> cmd.execute(ctxt)) + .isInstanceOf(CommandException.class) + .hasMessage("This file size (462 B) exceeds the size limit of 50 B."); + } + } + + @Test + @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "target/test/CreateNewDataFilesTest/tmp") + public void execute_loads_individual_files_from_uploaded_zip() throws Exception { + var tempDir = testDir.resolve("tmp/temp"); + createDirectories(tempDir); + + mockTmpLookup(); + var cmd = createCmd("src/test/resources/own-cloud-downloads/greetings.zip", mockDatasetVersion(), 1000L, 500L); + var ctxt = mockCommandContext(mockSysConfig(false, 1000000L, MD5, 10)); + try (MockedStatic mockedStatic = Mockito.mockStatic(JhoveFileType.class)) { + mockedStatic.when(JhoveFileType::getJhoveConfigFile).thenReturn("conf/jhove/jhove.conf"); + + // the test + var result = cmd.execute(ctxt); + + assertThat(result.getErrors()).hasSize(0); + assertThat(result.getDataFiles().stream().map(dataFile -> + dataFile.getFileMetadata().getDirectoryLabel() + "/" + dataFile.getDisplayName() + )).containsExactlyInAnyOrder( + "DD-1576/goodbye.txt", "DD-1576/hello.txt" + ); + var storageIds = result.getDataFiles().stream().map(DataFile::getStorageIdentifier).toList(); + assertThat(tempDir.toFile().list()) + .containsExactlyInAnyOrderElementsOf(storageIds); + } + } + + @Test + @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "target/test/CreateNewDataFilesTest/tmp") + public void execute_rezips_sets_of_shape_files_from_uploaded_zip() throws Exception { + var tempDir = testDir.resolve("tmp/temp"); + createDirectories(tempDir); + + mockTmpLookup(); + var cmd = createCmd("src/test/resources/own-cloud-downloads/shapes.zip", mockDatasetVersion(), 1000L, 500L); + var ctxt = mockCommandContext(mockSysConfig(false, 100000000L, MD5, 10)); + try (var mockedJHoveFileType = Mockito.mockStatic(JhoveFileType.class)) { + mockedJHoveFileType.when(JhoveFileType::getJhoveConfigFile).thenReturn("conf/jhove/jhove.conf"); + + // the test + var result = cmd.execute(ctxt); + + assertThat(result.getErrors()).hasSize(0); + 
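// Each re-zipped entry is compared by directoryLabel/displayName; the replaceAll below strips the temporary path up to dataDir/ so only the resulting shapefile set layout is checked. +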
assertThat(result.getDataFiles().stream().map(dataFile -> + (dataFile.getFileMetadata().getDirectoryLabel() + "/" + dataFile.getDisplayName()) + .replaceAll(".*/dataDir/", "") + )).containsExactlyInAnyOrder( + "shape1.zip", + "shape2/shape2", + "shape2/shape2.pdf", + "shape2/shape2.txt", + "shape2/shape2.zip", + "extra/shp_dictionary.xls", + "extra/notes", + "extra/README.MD" + ); + var storageIds = result.getDataFiles().stream().map(DataFile::getStorageIdentifier).toList(); + assertThat(tempDir.toFile().list()) + .containsExactlyInAnyOrderElementsOf(storageIds); + } + } + + @Disabled("Too slow. Intended for manual execution.") + @Test + @JvmSetting(key = JvmSettings.FILES_DIRECTORY, value = "/tmp/test/CreateNewDataFilesTest/tmp") + public void extract_zip_performance() throws Exception { + /* + Developed to test performance difference between the old implementation with ZipInputStream and the new ZipFile implementation. + Play with numbers depending on: + - the time you want to spend on this test + - how much system stress you want to examine + */ + var nrOfZipFiles = 20; + var avgNrOfFilesPerZip = 300; + var avgFileLength = 5000; + + var tmpUploadStorage = Path.of("/tmp/test/CreateNewDataFilesTest/tmp/temp"); + if(tmpUploadStorage.toFile().exists()) { + deleteDirectory(tmpUploadStorage); + } + createDirectories(tmpUploadStorage); // temp in target would choke intellij + + var chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + var random = new SecureRandom(); + var totalNrOfFiles = 0; + var totalFileSize = 0; + var totalTime = 0L; + var tmp = Path.of(Files.createTempDirectory(null).toString()); + var ctxt = mockCommandContext(mockSysConfig(false, 100000000L, MD5, 10000)); + try (var mockedJHoveFileType = Mockito.mockStatic(JhoveFileType.class)) { + mockedJHoveFileType.when(JhoveFileType::getJhoveConfigFile).thenReturn("conf/jhove/jhove.conf"); + for (var zipNr = 1; zipNr <= nrOfZipFiles; zipNr++) { + // build the zip + var zip = tmp.resolve(zipNr + "-data.zip"); + var nrOfFilesInZip = random.nextInt(avgNrOfFilesPerZip * 2); + try (var zipStream = new ZipOutputStream(new FileOutputStream(zip.toFile()))) { + for (var fileInZipNr = 1; fileInZipNr <= nrOfFilesInZip; fileInZipNr++) { + // build content for a file + var stringLength = random.nextInt(avgFileLength * 2 -5); + StringBuilder sb = new StringBuilder(stringLength); + for (int i = 1; i <= stringLength; i++) {// zero length causes buffer underflow + sb.append(chars.charAt(random.nextInt(chars.length()))); + } + // add the file to the zip + zipStream.putNextEntry(new ZipEntry(fileInZipNr + ".txt")); + zipStream.write((sb.toString()).getBytes()); + zipStream.closeEntry(); + totalFileSize += stringLength; + } + } + + // upload the zip + var before = DateTime.now(); + var result = createCmd(zip.toString(), mockDatasetVersion(), 1000L, 500L) + .execute(ctxt); + totalTime += DateTime.now().getMillis() - before.getMillis(); + + assertThat(result.getErrors()).hasSize(0); + assertThat(result.getDataFiles()).hasSize(nrOfFilesInZip); + totalNrOfFiles += nrOfFilesInZip; + + // report after each zip to have some data even when aborting a test that takes too long + System.out.println(MessageFormat.format( + "Total time: {0}ms; nr of zips {1} total nr of files {2}; total file size {3}", + totalTime, zipNr, totalNrOfFiles, totalFileSize + )); + } + assertThat(tmpUploadStorage.toFile().list()).hasSize(totalNrOfFiles); + } + } + + private static @NotNull CreateNewDataFilesCommand createCmd(String name, DatasetVersion dsVersion, 
long allocatedQuotaLimit, long usedQuotaLimit) throws FileNotFoundException { + return new CreateNewDataFilesCommand( + Mockito.mock(DataverseRequest.class), + dsVersion, + new FileInputStream(name), + "example.zip", + "application/zip", + null, + new UploadSessionQuotaLimit(allocatedQuotaLimit, usedQuotaLimit), + "sha"); + } + + private static @NotNull CommandContext mockCommandContext(SystemConfig sysCfg) { + var ctxt = Mockito.mock(CommandContext.class); + Mockito.when(ctxt.systemConfig()).thenReturn(sysCfg); + return ctxt; + } + + private static @NotNull SystemConfig mockSysConfig(boolean isStorageQuotaEnforced, long maxFileUploadSizeForStore, DataFile.ChecksumType checksumType, int zipUploadFilesLimit) { + var sysCfg = Mockito.mock(SystemConfig.class); + Mockito.when(sysCfg.isStorageQuotasEnforced()).thenReturn(isStorageQuotaEnforced); + Mockito.when(sysCfg.getMaxFileUploadSizeForStore(any())).thenReturn(maxFileUploadSizeForStore); + Mockito.when(sysCfg.getFileFixityChecksumAlgorithm()).thenReturn(checksumType); + Mockito.when(sysCfg.getZipUploadFilesLimit()).thenReturn(zipUploadFilesLimit); + return sysCfg; + } + + private static void mockTmpLookup() { + JvmSettings mockFilesDirectory = Mockito.mock(JvmSettings.class); + Mockito.when(mockFilesDirectory.lookup()).thenReturn("/mocked/path"); + } + + private static @NotNull DatasetVersion mockDatasetVersion() { + var dsVersion = Mockito.mock(DatasetVersion.class); + Mockito.when(dsVersion.getDataset()).thenReturn(Mockito.mock(Dataset.class)); + return dsVersion; + } + +} diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java index 508eac46cb4..0ba29f74774 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java @@ -18,7 +18,9 @@ import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.search.SolrIndexServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; +import java.sql.Timestamp; import java.util.ArrayList; +import java.util.Date; import java.util.List; import java.util.concurrent.Future; @@ -171,9 +173,9 @@ public void testCreatePrivateUrlSuccessfully() throws CommandException { assertEquals(expectedUser.getIdentifier(), privateUrl.getRoleAssignment().getAssigneeIdentifier()); assertEquals(expectedUser.isSuperuser(), false); assertEquals(expectedUser.isAuthenticated(), false); - assertEquals(expectedUser.getDisplayInfo().getTitle(), "Private URL Enabled"); + assertEquals(expectedUser.getDisplayInfo().getTitle(), "Preview URL Enabled"); assertNotNull(privateUrl.getToken()); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); } @Test @@ -188,22 +190,24 @@ public void testCreateAnonymizedAccessPrivateUrlSuccessfully() throws CommandExc assertEquals(expectedUser.getIdentifier(), privateUrl.getRoleAssignment().getAssigneeIdentifier()); assertEquals(expectedUser.isSuperuser(), false); assertEquals(expectedUser.isAuthenticated(), false); - assertEquals(expectedUser.getDisplayInfo().getTitle(), "Private URL Enabled"); + assertEquals(expectedUser.getDisplayInfo().getTitle(), "Preview URL Enabled"); assertNotNull(privateUrl.getToken());
assertTrue(privateUrl.isAnonymizedAccess()); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=" + privateUrl.getToken(), privateUrl.getLink()); } @Test - public void testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() { + public void testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() throws CommandException { dataset = new Dataset(); List versions = new ArrayList<>(); + dataset.setPublicationDate(new Timestamp(new Date().getTime())); DatasetVersion datasetVersion = new DatasetVersion(); datasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED); DatasetVersion datasetVersion2 = new DatasetVersion(); - - versions.add(datasetVersion); + datasetVersion2.setVersionState(DatasetVersion.VersionState.DRAFT); + versions.add(datasetVersion2); + versions.add(datasetVersion); dataset.setVersions(versions); dataset.setId(versionIsReleased); PrivateUrl privateUrl = null; @@ -211,6 +215,7 @@ public void testAttemptCreateAnonymizedAccessPrivateUrlOnReleased() { privateUrl = testEngine.submit(new CreatePrivateUrlCommand(null, dataset, true)); assertTrue(false); } catch (CommandException ex) { + } assertNull(privateUrl); } diff --git a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java index 9850e9d80e9..2121aa4d9f9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java @@ -253,7 +253,7 @@ public void testGetDisplayName() { */ @Test public void testIsXMLFormat() { - assertEquals(false, schemaDotOrgExporter instanceof XMLExporter); + assertFalse(schemaDotOrgExporter instanceof XMLExporter); } /** @@ -261,7 +261,7 @@ public void testIsXMLFormat() { */ @Test public void testIsHarvestable() { - assertEquals(false, schemaDotOrgExporter.isHarvestable()); + assertFalse(schemaDotOrgExporter.isHarvestable()); } /** @@ -269,7 +269,7 @@ public void testIsHarvestable() { */ @Test public void testIsAvailableToUsers() { - assertEquals(true, schemaDotOrgExporter.isAvailableToUsers()); + assertTrue(schemaDotOrgExporter.isAvailableToUsers()); } /** diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java index 4dfedf5aa17..955070a662a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java @@ -112,8 +112,8 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception { } // check filenames are unique and altered - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); // try to add data files with "-1" duplicates and see if it gets incremented to "-2" IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null); @@ -128,8 +128,8 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception { } // check filenames are unique and altered - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); } @Test @@ -218,8 +218,8 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce } // check filenames are unique and altered - 
assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); // try to add data files with "-1" duplicates and see if it gets incremented to "-2" IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null); @@ -234,8 +234,8 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce } // check filenames are unique and altered - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); } @Test @@ -347,9 +347,9 @@ public void testCheckForDuplicateFileNamesWithDirectories() throws Exception { } // check filenames are unique - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); - assertEquals(false, file3NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); + assertFalse(file3NameAltered); // add duplicate file in root datasetVersion.getFileMetadatas().add(fmd3); @@ -371,9 +371,9 @@ public void testCheckForDuplicateFileNamesWithDirectories() throws Exception { } // check filenames are unique - assertEquals(true, file1NameAltered); - assertEquals(true, file2NameAltered); - assertEquals(true, file3NameAltered); + assertTrue(file1NameAltered); + assertTrue(file2NameAltered); + assertTrue(file3NameAltered); } @Test @@ -457,7 +457,7 @@ public void testCheckForDuplicateFileNamesTabular() throws Exception { } // check filename is altered since tabular and will change to .tab after ingest - assertEquals(true, file2NameAltered); + assertTrue(file2NameAltered); } @@ -553,8 +553,8 @@ public void testCheckForDuplicateFileNamesWhenReplacing() throws Exception { } // check filenames are unique and unaltered - assertEquals(true, file1NameAltered); - assertEquals(false, file2NameAltered); + assertTrue(file1NameAltered); + assertFalse(file2NameAltered); } @Test @@ -657,7 +657,7 @@ public void testRecalculateDatasetVersionUNF() { DataTable dataTable = new DataTable(); dataTable.setUnf("unfOnDataTable"); datafile1.setDataTable(dataTable); - assertEquals(true, datafile1.isTabularData()); + assertTrue(datafile1.isTabularData()); FileMetadata fmd1 = new FileMetadata(); fmd1.setId(1L); @@ -692,7 +692,7 @@ public void testGetUnfValuesOfFiles() { @Test public void testshouldHaveUnf() { - assertEquals(false, IngestUtil.shouldHaveUnf(null)); + assertFalse(IngestUtil.shouldHaveUnf(null)); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java b/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java index a0ac22f99f3..a2729ce7514 100644 --- a/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/mydata/PagerTest.java @@ -2,6 +2,8 @@ import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; import java.util.Arrays; @@ -171,13 +173,13 @@ public void testBasics() { pager1 = new Pager(102, 10, 1); msgt("Test: 102 results, 10 per page, page 1"); - assertEquals(true, pager1.isPagerNecessary()); + assertTrue(pager1.isPagerNecessary()); assertEquals(102, pager1.getNumResults()); assertEquals(1, pager1.getPreviousPageNumber()); assertEquals(2, pager1.getNextPageNumber()); - assertEquals(false, pager1.hasPreviousPageNumber()); - assertEquals(true, 
pager1.hasNextPageNumber()); + assertFalse(pager1.hasPreviousPageNumber()); + assertTrue(pager1.hasNextPageNumber()); msg("page list: " + Arrays.toString(pager1.getPageNumberList())); //assertEquals(new int[]{1, 2, 3, 4, 5}, pager1.getPageNumberList()); @@ -232,13 +234,13 @@ public void testNoResults() { System.out.println("getNumResults"); Pager pager1 = new Pager(0, 10, 1); - assertEquals(false, pager1.isPagerNecessary()); + assertFalse(pager1.isPagerNecessary()); assertEquals(0, pager1.getNumResults()); assertEquals(0, pager1.getPreviousPageNumber()); assertEquals(0, pager1.getNextPageNumber()); - assertEquals(false, pager1.hasPreviousPageNumber()); - assertEquals(false, pager1.hasNextPageNumber()); + assertFalse(pager1.hasPreviousPageNumber()); + assertFalse(pager1.hasNextPageNumber()); msgt("page list: " + Arrays.toString(pager1.getPageNumberList())); //assertEquals(null, pager1.getPageNumberList()); diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java index 58d69da743b..bacb231b4d5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java @@ -99,7 +99,7 @@ @JvmSetting(key = JvmSettings.PID_PROVIDER_LABEL, value = "FAKE 1", varArgs = "fake1") @JvmSetting(key = JvmSettings.PID_PROVIDER_TYPE, value = FakeDOIProvider.TYPE, varArgs = "fake1") @JvmSetting(key = JvmSettings.PID_PROVIDER_AUTHORITY, value = "10.5074", varArgs = "fake1") -@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "FK", varArgs = "fake1") +@JvmSetting(key = JvmSettings.PID_PROVIDER_SHOULDER, value = "fk", varArgs = "fake1") @JvmSetting(key = JvmSettings.PID_PROVIDER_MANAGED_LIST, value = "doi:10.5073/FK3ABCDEF", varArgs ="fake1") //HANDLE 1 @@ -250,9 +250,12 @@ public void testDOIParsing() throws IOException { assertEquals(pid1String, pid3.asString()); assertEquals("dc1", pid3.getProviderId()); - String pid4String = "doi:10.5072/FK3ABCDEF"; + //Also test case insensitive + String pid4String = "doi:10.5072/fk3ABCDEF"; GlobalId pid4 = PidUtil.parseAsGlobalID(pid4String); - assertEquals(pid4String, pid4.asString()); + // Lower case is recognized by converting to upper case internally, so we need to test vs. the upper case identifier + // I.e. 
we are verifying that the lower case string is parsed the same as the upper case string, both give an internal upper case PID representation + assertEquals("doi:10.5072/FK3ABCDEF", pid4.asString()); + assertEquals("dc2", pid4.getProviderId()); String pid5String = "doi:10.5072/FK2ABCDEF"; @@ -312,6 +315,13 @@ public void testUnmanagedParsing() throws IOException { GlobalId pid6 = PidUtil.parseAsGlobalID(pid6String); assertEquals(pid6String, pid6.asString()); assertEquals(UnmanagedPermaLinkPidProvider.ID, pid6.getProviderId()); + + //Lowercase test for unmanaged DOIs + String pid7String = "doi:10.5281/zenodo.6381129"; + GlobalId pid7 = PidUtil.parseAsGlobalID(pid7String); + assertEquals(UnmanagedDOIProvider.ID, pid7.getProviderId()); + assertEquals(pid7String.toUpperCase().replace("DOI", "doi"), pid7.asString()); + } @@ -350,15 +360,15 @@ public void testExcludedSetParsing() throws IOException { @Test public void testManagedSetParsing() throws IOException { - String pid1String = "doi:10.5073/FK3ABCDEF"; + String pid1String = "doi:10.5073/fk3ABCDEF"; GlobalId pid2 = PidUtil.parseAsGlobalID(pid1String); - assertEquals(pid1String, pid2.asString()); + assertEquals(pid1String.toUpperCase().replace("DOI", "doi"), pid2.asString()); assertEquals("fake1", pid2.getProviderId()); assertEquals("https://doi.org/" + pid2.getAuthority() + PidUtil.getPidProvider(pid2.getProviderId()).getSeparator() + pid2.getIdentifier(),pid2.asURL()); assertEquals("10.5073", pid2.getAuthority()); assertEquals(AbstractDOIProvider.DOI_PROTOCOL, pid2.getProtocol()); GlobalId pid3 = PidUtil.parseAsGlobalID(pid2.asURL()); - assertEquals(pid1String, pid3.asString()); + assertEquals(pid1String.toUpperCase().replace("DOI", "doi"), pid3.asString()); assertEquals("fake1", pid3.getProviderId()); assertFalse(PidUtil.getPidProvider(pid3.getProviderId()).canCreatePidsLike(pid3)); diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java index c03146904de..2bd6818821d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java @@ -1,15 +1,21 @@ package edu.harvard.iq.dataverse.pidproviders.doi.datacite; +import edu.harvard.iq.dataverse.ControlledVocabularyValue; +import edu.harvard.iq.dataverse.DataCitation; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetAuthor; import edu.harvard.iq.dataverse.DatasetField; +import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; import edu.harvard.iq.dataverse.DatasetFieldConstant; import edu.harvard.iq.dataverse.DatasetFieldType; +import edu.harvard.iq.dataverse.DatasetFieldValue; import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.DatasetVersion.VersionState; +import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.MetadataBlock; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.branding.BrandingUtil; import edu.harvard.iq.dataverse.dataset.DatasetType; @@ -20,16 +26,30 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; +import
edu.harvard.iq.dataverse.util.json.CompoundVocabularyException; +import edu.harvard.iq.dataverse.util.json.ControlledVocabularyException; +import edu.harvard.iq.dataverse.util.json.JsonParseException; +import edu.harvard.iq.dataverse.util.json.JsonParser; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.testing.JvmSetting; import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import edu.harvard.iq.dataverse.util.xml.XmlValidator; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonString; +import java.io.File; import java.io.IOException; import java.io.StringReader; import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Set; import javax.xml.transform.stream.StreamSource; @@ -73,6 +93,8 @@ public static void setupMocks() { } /** + * A minimal example to assure that the XMLMetadataTemplate generates output + * consistent with the DataCite XML v4.5 schema. */ @Test public void testDataCiteXMLCreation() throws IOException { @@ -106,7 +128,7 @@ public void testDataCiteXMLCreation() throws IOException { doiMetadata.setAuthors(authors); doiMetadata.setPublisher("Dataverse"); XmlMetadataTemplate template = new XmlMetadataTemplate(doiMetadata); - + Dataset d = new Dataset(); GlobalId doi = new GlobalId("doi", "10.5072", "FK2/ABCDEF", null, null, null); d.setGlobalId(doi); @@ -135,15 +157,291 @@ public void testDataCiteXMLCreation() throws IOException { d.setDatasetType(dType); String xml = template.generateXML(d); - System.out.println("Output is " + xml); + System.out.println("Output from minimal example is " + xml); try { StreamSource source = new StreamSource(new StringReader(xml)); source.setSystemId("DataCite XML for test dataset"); - assertTrue(XmlValidator.validateXmlSchema(source, new URL("https://schema.datacite.org/meta/kernel-4/metadata.xsd"))); + assertTrue(XmlValidator.validateXmlSchema(source, + new URL("https://schema.datacite.org/meta/kernel-4/metadata.xsd"))); } catch (SAXException e) { System.out.println("Invalid schema: " + e.getMessage()); } - + + } + + /** + * This tests a more complete example based off of the dataset-all-defaults + * file, again checking for conformance of the result with the DataCite XML v4.5 + * schema. 
+ */ + @Test + public void testDataCiteXMLCreationAllFields() throws IOException { + Dataverse collection = new Dataverse(); + collection.setCitationDatasetFieldTypes(new ArrayList<>()); + Dataset d = new Dataset(); + d.setOwner(collection); + DatasetVersion dv = new DatasetVersion(); + TermsOfUseAndAccess toa = new TermsOfUseAndAccess(); + toa.setTermsOfUse("Some terms"); + dv.setTermsOfUseAndAccess(toa); + dv.setDataset(d); + DatasetFieldType primitiveDSFType = new DatasetFieldType(DatasetFieldConstant.title, + DatasetFieldType.FieldType.TEXT, false); + DatasetField testDatasetField = new DatasetField(); + + dv.setVersionState(VersionState.DRAFT); + + testDatasetField.setDatasetVersion(dv); + + File datasetVersionJson = new File("src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt"); + String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath()))); + JsonObject datasetJson = JsonUtil.getJsonObject(datasetVersionAsJson); + + GlobalId doi = new GlobalId("doi", datasetJson.getString("authority"), datasetJson.getString("identifier"), + null, null, null); + d.setGlobalId(doi); + + List fields = assertDoesNotThrow(() -> XmlMetadataTemplateTest + .parseMetadataBlocks(datasetJson.getJsonObject("datasetVersion").getJsonObject("metadataBlocks"))); + dv.setDatasetFields(fields); + + ArrayList dsvs = new ArrayList<>(); + dsvs.add(0, dv); + d.setVersions(dsvs); + DatasetType dType = new DatasetType(); + dType.setName(DatasetType.DATASET_TYPE_DATASET); + d.setDatasetType(dType); + String xml = DOIDataCiteRegisterService.getMetadataFromDvObject(dv.getDataset().getGlobalId().asString(), + new DataCitation(dv).getDataCiteMetadata(), dv.getDataset()); + System.out.println("Output from dataset-all-defaults is " + xml); + try { + StreamSource source = new StreamSource(new StringReader(xml)); + source.setSystemId("DataCite XML for test dataset"); + assertTrue(XmlValidator.validateXmlSchema(source, + new URL("https://schema.datacite.org/meta/kernel-4/metadata.xsd"))); + } catch (SAXException e) { + System.out.println("Invalid schema: " + e.getMessage()); + } + + } + + /** + * Mock Utility Methods - These methods support importing DatasetFields from the + * Dataverse JSON export format. They assume that any DatasetFieldType + * referenced exists, that any Controlled Vocabulary value exists, etc. which + * avoids having to do database lookups or read metadatablock tsv files. They + * are derived from the JsonParser methods of the same names with any db + * references and DatasetFieldType-related error checking removed. 
+ */ + public static List parseMetadataBlocks(JsonObject json) throws JsonParseException { + + Map existingTypes = new HashMap<>(); + + Set keys = json.keySet(); + List fields = new LinkedList<>(); + + for (String blockName : keys) { + MetadataBlock block = new MetadataBlock(); + block.setName(blockName); + JsonObject blockJson = json.getJsonObject(blockName); + JsonArray fieldsJson = blockJson.getJsonArray("fields"); + fields.addAll(parseFieldsFromArray(fieldsJson, true, block, existingTypes)); + } + return fields; + } + + private static List parseFieldsFromArray(JsonArray fieldsArray, Boolean testType, MetadataBlock block, + Map existingTypes) throws JsonParseException { + List fields = new LinkedList<>(); + for (JsonObject fieldJson : fieldsArray.getValuesAs(JsonObject.class)) { + + DatasetField field = parseField(fieldJson, testType, block, existingTypes); + if (field != null) { + fields.add(field); + } + + } + return fields; + + } + + public static DatasetField parseField(JsonObject json, Boolean testType, MetadataBlock block, + Map existingTypes) throws JsonParseException { + if (json == null) { + return null; + } + + DatasetField ret = new DatasetField(); + String fieldName = json.getString("typeName", ""); + String typeClass = json.getString("typeClass", ""); + if (!existingTypes.containsKey(fieldName)) { + boolean multiple = json.getBoolean("multiple"); + DatasetFieldType fieldType = new DatasetFieldType(); + fieldType.setName(fieldName); + fieldType.setAllowMultiples(multiple); + fieldType.setAllowControlledVocabulary(typeClass.equals("controlledVocabulary")); + fieldType.setFieldType(FieldType.TEXT); + fieldType.setMetadataBlock(block); + fieldType.setChildDatasetFieldTypes(new ArrayList<>()); + existingTypes.put(fieldName, fieldType); + } + DatasetFieldType type = existingTypes.get(fieldName); + ret.setDatasetFieldType(type); + + if (typeClass.equals("compound")) { + parseCompoundValue(ret, type, json, testType, block, existingTypes); + } else if (type.isControlledVocabulary()) { + parseControlledVocabularyValue(ret, type, json); + } else { + parsePrimitiveValue(ret, type, json); + } + + return ret; + } + + public static void parseCompoundValue(DatasetField dsf, DatasetFieldType compoundType, JsonObject json, + Boolean testType, MetadataBlock block, Map existingTypes) + throws JsonParseException { + List vocabExceptions = new ArrayList<>(); + List vals = new LinkedList<>(); + if (compoundType.isAllowMultiples()) { + int order = 0; + try { + json.getJsonArray("value").getValuesAs(JsonObject.class); + } catch (ClassCastException cce) { + throw new JsonParseException("Invalid values submitted for " + compoundType.getName() + + ". 
It should be an array of values."); + } + for (JsonObject obj : json.getJsonArray("value").getValuesAs(JsonObject.class)) { + DatasetFieldCompoundValue cv = new DatasetFieldCompoundValue(); + List fields = new LinkedList<>(); + for (String fieldName : obj.keySet()) { + JsonObject childFieldJson = obj.getJsonObject(fieldName); + DatasetField f = null; + try { + f = parseField(childFieldJson, testType, block, existingTypes); + } catch (ControlledVocabularyException ex) { + vocabExceptions.add(ex); + } + + if (f != null) { + f.setParentDatasetFieldCompoundValue(cv); + fields.add(f); + } + } + if (!fields.isEmpty()) { + cv.setChildDatasetFields(fields); + cv.setDisplayOrder(order); + vals.add(cv); + } + order++; + } + + } else { + + DatasetFieldCompoundValue cv = new DatasetFieldCompoundValue(); + List fields = new LinkedList<>(); + JsonObject value = json.getJsonObject("value"); + for (String key : value.keySet()) { + JsonObject childFieldJson = value.getJsonObject(key); + DatasetField f = null; + try { + f = parseField(childFieldJson, testType, block, existingTypes); + } catch (ControlledVocabularyException ex) { + vocabExceptions.add(ex); + } + if (f != null) { + f.setParentDatasetFieldCompoundValue(cv); + fields.add(f); + } + } + if (!fields.isEmpty()) { + cv.setChildDatasetFields(fields); + vals.add(cv); + } + + } + if (!vocabExceptions.isEmpty()) { + throw new CompoundVocabularyException("Invalid controlled vocabulary in compound field ", vocabExceptions, + vals); + } + + for (DatasetFieldCompoundValue dsfcv : vals) { + dsfcv.setParentDatasetField(dsf); + } + dsf.setDatasetFieldCompoundValues(vals); + } + + public static void parsePrimitiveValue(DatasetField dsf, DatasetFieldType dft, JsonObject json) + throws JsonParseException { + List vals = new LinkedList<>(); + if (dft.isAllowMultiples()) { + try { + json.getJsonArray("value").getValuesAs(JsonObject.class); + } catch (ClassCastException cce) { + throw new JsonParseException( + "Invalid values submitted for " + dft.getName() + ". It should be an array of values."); + } + for (JsonString val : json.getJsonArray("value").getValuesAs(JsonString.class)) { + DatasetFieldValue datasetFieldValue = new DatasetFieldValue(dsf); + datasetFieldValue.setDisplayOrder(vals.size() - 1); + datasetFieldValue.setValue(val.getString().trim()); + vals.add(datasetFieldValue); + } + + } else { + try { + json.getString("value"); + } catch (ClassCastException cce) { + throw new JsonParseException( + "Invalid value submitted for " + dft.getName() + ". It should be a single value."); + } + DatasetFieldValue datasetFieldValue = new DatasetFieldValue(); + datasetFieldValue.setValue(json.getString("value", "").trim()); + datasetFieldValue.setDatasetField(dsf); + vals.add(datasetFieldValue); + } + + dsf.setDatasetFieldValues(vals); + } + + public static void parseControlledVocabularyValue(DatasetField dsf, DatasetFieldType cvvType, JsonObject json) + throws JsonParseException { + List vals = new LinkedList<>(); + try { + if (cvvType.isAllowMultiples()) { + try { + json.getJsonArray("value").getValuesAs(JsonObject.class); + } catch (ClassCastException cce) { + throw new JsonParseException( + "Invalid values submitted for " + cvvType.getName() + ". 
It should be an array of values."); + } + for (JsonString strVal : json.getJsonArray("value").getValuesAs(JsonString.class)) { + String strValue = strVal.getString(); + ControlledVocabularyValue cvv = new ControlledVocabularyValue(); + cvv.setDatasetFieldType(cvvType); + cvv.setStrValue(strVal.getString()); + vals.add(cvv); + } + + } else { + try { + json.getString("value"); + } catch (ClassCastException cce) { + throw new JsonParseException( + "Invalid value submitted for " + cvvType.getName() + ". It should be a single value."); + } + String strValue = json.getString("value", ""); + ControlledVocabularyValue cvv = new ControlledVocabularyValue(); + cvv.setDatasetFieldType(cvvType); + cvv.setStrValue(strValue); + vals.add(cvv); + } + } catch (ClassCastException cce) { + throw new JsonParseException("Invalid values submitted for " + cvvType.getName()); + } + + dsf.setControlledVocabularyValues(vals); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java index da94b288bee..f06be37578d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java @@ -277,7 +277,7 @@ public void testGetPrivateUrlFromRoleAssignmentSuccess() { PrivateUrl privateUrl = PrivateUrlUtil.getPrivateUrlFromRoleAssignment(ra, dataverseSiteUrl); assertNotNull(privateUrl); assertEquals(new Long(42), privateUrl.getDataset().getId()); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=cd71e9d7-73a7-4ec8-b890-3d00499e8693", privateUrl.getLink()); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=cd71e9d7-73a7-4ec8-b890-3d00499e8693", privateUrl.getLink()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java index 124ce19369c..8e24c546556 100644 --- a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java @@ -53,6 +53,7 @@ public void setUp() { indexService.dataverseService = Mockito.mock(DataverseServiceBean.class); indexService.datasetFieldService = Mockito.mock(DatasetFieldServiceBean.class); indexService.datasetVersionService = Mockito.mock(DatasetVersionServiceBean.class); + indexService.datasetVersionFilesServiceBean = Mockito.mock(DatasetVersionFilesServiceBean.class); BrandingUtil.injectServices(indexService.dataverseService, indexService.settingsService); Mockito.when(indexService.dataverseService.findRootDataverse()).thenReturn(dataverse); diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index 59e175f30c1..d1cb30e2bc3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -4,18 +4,11 @@ package edu.harvard.iq.dataverse.util.json; -import edu.harvard.iq.dataverse.ControlledVocabularyValue; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetField; -import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; -import edu.harvard.iq.dataverse.DatasetFieldType; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; -import edu.harvard.iq.dataverse.DatasetFieldValue; -import 
edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseTheme.Alignment; -import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.UserNotification.Type; +import edu.harvard.iq.dataverse.api.dto.DataverseDTO; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroupProvider; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; @@ -50,16 +43,7 @@ import java.io.StringReader; import java.math.BigDecimal; import java.text.ParseException; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Collections; -import java.util.Date; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; -import java.util.TimeZone; +import java.util.*; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.*; @@ -281,6 +265,33 @@ public void testParseCompleteDataverse() throws JsonParseException { throw new JsonParseException("Couldn't read test file", ioe); } } + + /** + * Test that a JSON object passed for a DataverseDTO is correctly parsed. + * This checks that all properties are parsed into the correct DataverseDTO properties. + * @throws JsonParseException when this test is broken. + */ + @Test + public void parseDataverseDTO() throws JsonParseException { + JsonObject dvJson; + try (FileReader reader = new FileReader("doc/sphinx-guides/source/_static/api/dataverse-complete.json")) { + dvJson = Json.createReader(reader).readObject(); + DataverseDTO actual = sut.parseDataverseDTO(dvJson); + List actualDataverseContacts = actual.getDataverseContacts(); + assertEquals("Scientific Research", actual.getName()); + assertEquals("science", actual.getAlias()); + assertEquals("Scientific Research University", actual.getAffiliation()); + assertEquals("We do all the science.", actual.getDescription()); + assertEquals("LABORATORY", actual.getDataverseType().toString()); + assertEquals(2, actualDataverseContacts.size()); + assertEquals("pi@example.edu", actualDataverseContacts.get(0).getContactEmail()); + assertEquals("student@example.edu", actualDataverseContacts.get(1).getContactEmail()); + assertEquals(0, actualDataverseContacts.get(0).getDisplayOrder()); + assertEquals(1, actualDataverseContacts.get(1).getDisplayOrder()); + } catch (IOException ioe) { + throw new JsonParseException("Couldn't read test file", ioe); + } + } @Test public void testParseThemeDataverse() throws JsonParseException { diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java index 11da71e1980..7ec8e0b25f3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java @@ -133,7 +133,7 @@ public void testJson_PrivateUrl() { assertNotNull(job); JsonObject jsonObject = job.build(); assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("token")); - assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("link")); + assertEquals("https://dataverse.example.edu/previewurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("link")); assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", 
jsonObject.getJsonObject("roleAssignment").getString("privateUrlToken")); assertEquals(PrivateUrlUser.PREFIX + "42", jsonObject.getJsonObject("roleAssignment").getString("assignee")); } @@ -290,7 +290,7 @@ public void testDataversePrinter() { assertEquals("42 Inc.", jsonObject.getString("affiliation")); assertEquals(0, jsonObject.getJsonArray("dataverseContacts").getJsonObject(0).getInt("displayOrder")); assertEquals("dv42@mailinator.com", jsonObject.getJsonArray("dataverseContacts").getJsonObject(0).getString("contactEmail")); - assertEquals(false, jsonObject.getBoolean("permissionRoot")); + assertFalse(jsonObject.getBoolean("permissionRoot")); assertEquals("Description for Dataverse 42.", jsonObject.getString("description")); assertEquals("UNCATEGORIZED", jsonObject.getString("dataverseType")); } diff --git a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java index 3c5b4797b0a..c4ee4547ed7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java @@ -63,22 +63,22 @@ private File createBlankFile(String filename) throws IOException { } return Files.createFile(tempFolder.resolve(filename)).toFile(); } - + private FileInputStream createZipReturnFilestream(List file_names, String zipfile_name) throws IOException{ - + File zip_file_obj = this.createAndZipFiles(file_names, zipfile_name); if (zip_file_obj == null){ return null; } - + FileInputStream file_input_stream = new FileInputStream(zip_file_obj); return file_input_stream; - + } - + /* - Convenience class to create .zip file and return a FileInputStream + Convenience method to create .zip file and return a File @param List file_names - List of filenames to add to .zip. These names will be used to create 0 length files @param String zipfile_name - Name of .zip file to create @@ -98,13 +98,13 @@ private File createAndZipFiles(List file_names, String zipfile_name) thr } Path zip_file_obj = this.tempFolder.resolve(zipfile_name); - ZipOutputStream zip_stream = new ZipOutputStream(new FileOutputStream(zip_file_obj.toFile())); + try (ZipOutputStream zip_stream = new ZipOutputStream(new FileOutputStream(zip_file_obj.toFile()))) { - // Iterate through File objects and add them to the ZipOutputStream - for (File file_obj : fileCollection) { - this.addToZipFile(file_obj.getName(), file_obj, zip_stream); + // Iterate through File objects and add them to the ZipOutputStream + for (File file_obj : fileCollection) { + this.addToZipFile(file_obj.getName(), file_obj, zip_stream); + } } - /* ----------------------------------- Cleanup: Delete single files that were added to .zip ----------------------------------- */ @@ -126,7 +126,7 @@ public void testCreateZippedNonShapefile() throws IOException{ File zipfile_obj = createAndZipFiles(file_names, "not-quite-a-shape.zip"); // Pass the .zip to the ShapefileHandler - ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(zipfile_obj)); + ShapefileHandler shp_handler = new ShapefileHandler(zipfile_obj); shp_handler.DEBUG= true; // Contains shapefile? 
@@ -157,7 +157,7 @@ public void testShapefileWithQpjAndQmd() throws IOException { File zipFile = createAndZipFiles(fileNames, "testShapeWithNewExtensions.zip"); // Pass the zip to the ShapefileHandler - ShapefileHandler shpHandler = new ShapefileHandler(new FileInputStream(zipFile)); + ShapefileHandler shpHandler = new ShapefileHandler(zipFile); shpHandler.DEBUG = true; // Check if it is recognized as a shapefile @@ -191,7 +191,7 @@ public void testZippedTwoShapefiles() throws IOException{ File zipfile_obj = createAndZipFiles(file_names, "two-shapes.zip"); // Pass the .zip to the ShapefileHandler - ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(zipfile_obj)); + ShapefileHandler shp_handler = new ShapefileHandler(zipfile_obj); shp_handler.DEBUG= true; assertTrue(shp_handler.containsShapefile(), "verify shapefile existance"); @@ -217,7 +217,7 @@ public void testZippedTwoShapefiles() throws IOException{ // Rezip/Reorder the files File test_unzip_folder = Files.createDirectory(this.tempFolder.resolve("test_unzip")).toFile(); //File test_unzip_folder = new File("/Users/rmp553/Desktop/blah"); - shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), test_unzip_folder ); + shp_handler.rezipShapefileSets(test_unzip_folder ); // Does the re-ordering do what we wanted? @@ -244,7 +244,7 @@ public void testZippedShapefileWithExtraFiles() throws IOException{ File zipfile_obj = createAndZipFiles(file_names, "shape-plus.zip"); // Pass the .zip to the ShapefileHandler - ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(zipfile_obj)); + ShapefileHandler shp_handler = new ShapefileHandler(zipfile_obj); shp_handler.DEBUG= true; assertTrue(shp_handler.containsShapefile(), "verify shapefile existance"); @@ -264,7 +264,7 @@ public void testZippedShapefileWithExtraFiles() throws IOException{ File unzip2Folder = Files.createDirectory(this.tempFolder.resolve("test_unzip2")).toFile(); // Rezip/Reorder the files - shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), unzip2Folder); + shp_handler.rezipShapefileSets(unzip2Folder); //shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), new File("/Users/rmp553/Desktop/blah")); @@ -284,9 +284,9 @@ public void testZippedShapefileWithExtraFiles() throws IOException{ } @Test - public void testHiddenFiles() { + public void testHiddenFiles() throws IOException { // test with shapefiles in hidden directory - ShapefileHandler shp_handler = new ShapefileHandler("src/test/resources/hiddenShapefiles.zip"); + ShapefileHandler shp_handler = new ShapefileHandler(new File("src/test/resources/hiddenShapefiles.zip")); shp_handler.DEBUG= true; assertFalse(shp_handler.containsShapefile()); } diff --git a/src/test/resources/json/export-formats.json b/src/test/resources/json/export-formats.json new file mode 100644 index 00000000000..b4dc0168629 --- /dev/null +++ b/src/test/resources/json/export-formats.json @@ -0,0 +1,83 @@ +{ + "status": "OK", + "data": { + "OAI_ORE": { + "displayName": "OAI_ORE", + "mediaType": "application/json", + "isHarvestable": false, + "isVisibleInUserInterface": true + }, + "Datacite": { + "displayName": "DataCite", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": true, + "XMLNameSpace": "http://datacite.org/schema/kernel-4", + "XMLSchemaLocation": "http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4.5/metadata.xsd", + "XMLSchemaVersion": "4.5" + }, + "oai_dc": { + "displayName": "Dublin Core", + "mediaType": 
"application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": false, + "XMLNameSpace": "http://www.openarchives.org/OAI/2.0/oai_dc/", + "XMLSchemaLocation": "http://www.openarchives.org/OAI/2.0/oai_dc.xsd", + "XMLSchemaVersion": "2.0" + }, + "oai_datacite": { + "displayName": "OpenAIRE", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": true, + "XMLNameSpace": "http://datacite.org/schema/kernel-4", + "XMLSchemaLocation": "http://schema.datacite.org/meta/kernel-4.1/metadata.xsd", + "XMLSchemaVersion": "4.1" + }, + "schema.org": { + "displayName": "Schema.org JSON-LD", + "mediaType": "application/json", + "isHarvestable": false, + "isVisibleInUserInterface": true + }, + "ddi": { + "displayName": "DDI Codebook v2", + "mediaType": "application/xml", + "isHarvestable": false, + "isVisibleInUserInterface": true, + "XMLNameSpace": "ddi:codebook:2_5", + "XMLSchemaLocation": "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd", + "XMLSchemaVersion": "2.5" + }, + "dcterms": { + "displayName": "Dublin Core", + "mediaType": "application/xml", + "isHarvestable": false, + "isVisibleInUserInterface": true, + "XMLNameSpace": "http://purl.org/dc/terms/", + "XMLSchemaLocation": "http://dublincore.org/schemas/xmls/qdc/dcterms.xsd", + "XMLSchemaVersion": "2.0" + }, + "html": { + "displayName": "DDI HTML Codebook", + "mediaType": "text/html", + "isHarvestable": false, + "isVisibleInUserInterface": true + }, + "dataverse_json": { + "displayName": "JSON", + "mediaType": "application/json", + "isHarvestable": true, + "isVisibleInUserInterface": true + }, + "oai_ddi": { + "displayName": "DDI Codebook v2", + "mediaType": "application/xml", + "isHarvestable": true, + "isVisibleInUserInterface": false, + "XMLNameSpace": "ddi:codebook:2_5", + "XMLSchemaLocation": "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd", + "XMLSchemaVersion": "2.5" + } + } +} diff --git a/src/test/resources/json/importGenericWithOtherId.json b/src/test/resources/json/importGenericWithOtherId.json new file mode 100644 index 00000000000..af9241393e9 --- /dev/null +++ b/src/test/resources/json/importGenericWithOtherId.json @@ -0,0 +1,307 @@ +{ + "UNF": "UNF", + "createTime": "2014-11-12 12:17:55 -05", + "distributionDate": "Distribution Date", + "id": 2, + "lastUpdateTime": "2014-11-12 12:20:32 -05", + "metadataBlocks": { + "astrophysics": { + "displayName": "Astronomy and Astrophysics Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "astroType", + "value": [ + "Image", + "Mosaic", + "EventList" + ] + } + ] + }, + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "multiple": false, + "typeClass": "primitive", + "typeName": "title", + "value": "My Dataset" + }, + { + "multiple": true, + "typeClass": "compound", + "typeName": "author", + "value": [ + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + "value": "Top" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "ellenid" + }, + "authorIdentifierScheme": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "ORCID" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Privileged, Pete" + } + }, + { + "authorAffiliation": { + "multiple": false, + "typeClass": 
"primitive", + "typeName": "authorAffiliation", + "value": "Bottom" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "audreyId" + }, + "authorIdentifierScheme": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "DAISY" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Awesome, Audrey" + } + } + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "datasetContact", + "value": [ + "pete@malinator.com" + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dsDescription", + "value": "Here is my description" + }, + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "subject", + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics", + "Business and Management" + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "keyword", + "value": [ + "keyword1", + "keyword2" + ] + }, + { + "multiple": true, + "typeClass": "compound", + "typeName": "otherId", + "value": [ + { + "otherIdAgency": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdAgency", + "value": "my agency" + }, + "otherIdValue": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdValue", + "value": "otherId" + } + }, + { + "otherIdAgency": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdAgency", + "value": "another agency" + }, + "otherIdValue": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdValue", + "value": "otherId2" + } + }, + { + "otherIdAgency": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdAgency", + "value": "another agency" + }, + "otherIdValue": { + "multiple": false, + "typeClass": "primitive", + "typeName": "otherIdValue", + "value": "doi:10.7910/DVN/TJCLKP" + } + } + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "depositor", + "value": "Ellen K" + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dateOfDeposit", + "value": "2014-11-12" + } + ] + }, + "geospatial": { + "displayName": "Geospatial Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "geographicCoverage", + "value": [ + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "Arlington" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "United States" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "MA" + } + }, + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "beachcity" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "Aruba" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "beach" + } + } + ] + }, + { + "multiple": false, + "typeClass": "compound", + "typeName": "geographicBoundingBox", + "value": + { + "eastLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "eastLongitude", + "value": "23" + }, + "northLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "northLatitude", + "value": "786" + }, + "southLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "southLatitude", 
+ "value": "34" + }, + "westLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "westLongitude", + "value": "45" + } + } + + } + ] + }, + "socialscience": { + "displayName": "Social Science and Humanities Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "software", + "value": [ + { + "softwareName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareName", + "value": "softwareName" + }, + "softwareVersion": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareVersion", + "value": "software version" + } + } + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "unitOfAnalysis", + "value": "unit of analysis" + } + ] + } + }, + "productionDate": "Production Date", + "versionState": "DRAFT" + } diff --git a/src/test/resources/json/importGenericWithoutOtherId.json b/src/test/resources/json/importGenericWithoutOtherId.json new file mode 100644 index 00000000000..ceb2263c2cf --- /dev/null +++ b/src/test/resources/json/importGenericWithoutOtherId.json @@ -0,0 +1,258 @@ +{ + "UNF": "UNF", + "createTime": "2014-11-12 12:17:55 -05", + "distributionDate": "Distribution Date", + "id": 2, + "lastUpdateTime": "2014-11-12 12:20:32 -05", + "metadataBlocks": { + "astrophysics": { + "displayName": "Astronomy and Astrophysics Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "astroType", + "value": [ + "Image", + "Mosaic", + "EventList" + ] + } + ] + }, + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "multiple": false, + "typeClass": "primitive", + "typeName": "title", + "value": "My Dataset" + }, + { + "multiple": true, + "typeClass": "compound", + "typeName": "author", + "value": [ + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + "value": "Top" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "ellenid" + }, + "authorIdentifierScheme": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "ORCID" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Privileged, Pete" + } + }, + { + "authorAffiliation": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorAffiliation", + "value": "Bottom" + }, + "authorIdentifier": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorIdentifier", + "value": "audreyId" + }, + "authorIdentifierScheme": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "authorIdentifierScheme", + "value": "DAISY" + }, + "authorName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "authorName", + "value": "Awesome, Audrey" + } + } + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "datasetContact", + "value": [ + "pete@malinator.com" + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dsDescription", + "value": "Here is my description" + }, + { + "multiple": true, + "typeClass": "controlledVocabulary", + "typeName": "subject", + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics", + "Business and Management" + ] + }, + { + "multiple": true, + "typeClass": "primitive", + "typeName": "keyword", + "value": [ + "keyword1", + "keyword2" + ] + }, + { + "multiple": false, + "typeClass": "primitive", + 
"typeName": "depositor", + "value": "Ellen K" + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "dateOfDeposit", + "value": "2014-11-12" + } + ] + }, + "geospatial": { + "displayName": "Geospatial Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "geographicCoverage", + "value": [ + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "Arlington" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "United States" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "MA" + } + }, + { + "city": { + "multiple": false, + "typeClass": "primitive", + "typeName": "city", + "value": "beachcity" + }, + "country": { + "multiple": false, + "typeClass": "controlledVocabulary", + "typeName": "country", + "value": "Aruba" + }, + "state": { + "multiple": false, + "typeClass": "primitive", + "typeName": "state", + "value": "beach" + } + } + ] + }, + { + "multiple": false, + "typeClass": "compound", + "typeName": "geographicBoundingBox", + "value": + { + "eastLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "eastLongitude", + "value": "23" + }, + "northLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "northLatitude", + "value": "786" + }, + "southLatitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "southLatitude", + "value": "34" + }, + "westLongitude": { + "multiple": false, + "typeClass": "primitive", + "typeName": "westLongitude", + "value": "45" + } + } + + } + ] + }, + "socialscience": { + "displayName": "Social Science and Humanities Metadata", + "fields": [ + { + "multiple": true, + "typeClass": "compound", + "typeName": "software", + "value": [ + { + "softwareName": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareName", + "value": "softwareName" + }, + "softwareVersion": { + "multiple": false, + "typeClass": "primitive", + "typeName": "softwareVersion", + "value": "software version" + } + } + ] + }, + { + "multiple": false, + "typeClass": "primitive", + "typeName": "unitOfAnalysis", + "value": "unit of analysis" + } + ] + } + }, + "productionDate": "Production Date", + "versionState": "DRAFT" + } diff --git a/src/test/resources/own-cloud-downloads/greetings.zip b/src/test/resources/own-cloud-downloads/greetings.zip new file mode 100644 index 00000000000..6e166d385d1 Binary files /dev/null and b/src/test/resources/own-cloud-downloads/greetings.zip differ diff --git a/src/test/resources/own-cloud-downloads/shapes.zip b/src/test/resources/own-cloud-downloads/shapes.zip new file mode 100644 index 00000000000..99d5f36c895 Binary files /dev/null and b/src/test/resources/own-cloud-downloads/shapes.zip differ diff --git a/src/test/resources/tsv/whitespace-test.tsv b/src/test/resources/tsv/whitespace-test.tsv new file mode 100644 index 00000000000..5485c948825 --- /dev/null +++ b/src/test/resources/tsv/whitespace-test.tsv @@ -0,0 +1,10 @@ +#metadataBlock name dataverseAlias displayName + whitespaceDemo Whitespace Demo +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + whitespaceDemoOne One Trailing Space text 0 TRUE TRUE TRUE FALSE TRUE FALSE whitespaceDemo + whitespaceDemoTwo Two Leading Space text 1 TRUE TRUE TRUE FALSE TRUE FALSE 
whitespaceDemo + whitespaceDemoThree Three CV with errors text 2 TRUE TRUE TRUE FALSE TRUE FALSE whitespaceDemo +#controlledVocabulary DatasetField Value identifier displayOrder + whitespaceDemoThree CV1 0 + whitespaceDemoThree CV2 1 + whitespaceDemoThree CV3 2