From b7123a5bdbf4c2e5a4284f658828ef15e6f073da Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bj=C3=B6rn=20Selent?=
Date: Mon, 15 Jul 2024 16:19:55 +0200
Subject: [PATCH 1/5] Initial commit for feature branch properties
---
.../java/propertyFiles/EngMeta.properties | 231 ++++++++++++
.../java/propertyFiles/archive.properties | 21 ++
.../java/propertyFiles/citation.properties | 343 ++++++++++--------
.../java/propertyFiles/codeMeta20.properties | 8 +-
.../java/propertyFiles/enzymeML.properties | 157 ++++++++
.../java/propertyFiles/privacy.properties | 25 ++
.../java/propertyFiles/process.properties | 150 ++++++++
7 files changed, 776 insertions(+), 159 deletions(-)
create mode 100644 src/main/java/propertyFiles/EngMeta.properties
create mode 100644 src/main/java/propertyFiles/archive.properties
create mode 100644 src/main/java/propertyFiles/enzymeML.properties
create mode 100644 src/main/java/propertyFiles/privacy.properties
create mode 100644 src/main/java/propertyFiles/process.properties
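Note: each metadata block in this patch follows the same properties key
pattern. A minimal sketch, using a hypothetical "example" block (all names
below are illustrative and not part of the patch):

    metadatablock.name=example
    metadatablock.displayName=Example Metadata
    datasetfieldtype.exampleField.title=Field Title
    datasetfieldtype.exampleField.description=Help text describing the field
    datasetfieldtype.exampleField.watermark=Placeholder shown in the empty input
    controlledvocabulary.exampleField.some_value=Some Value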
diff --git a/src/main/java/propertyFiles/EngMeta.properties b/src/main/java/propertyFiles/EngMeta.properties
new file mode 100644
index 00000000000..1de07466506
--- /dev/null
+++ b/src/main/java/propertyFiles/EngMeta.properties
@@ -0,0 +1,231 @@
+metadatablock.name=EngMeta
+metadatablock.displayName=Engineering Metadata
+datasetfieldtype.engMetaMode.title=Data Generation
+datasetfieldtype.engMetaMode.description=Approach to data generation
+datasetfieldtype.engMetaMode.watermark=
+datasetfieldtype.engMetaMeasuredVar.title=Measured Variables
+datasetfieldtype.engMetaMeasuredVar.description=Specification of captured (measured / simulated / surveyed / dependent) variables.
+datasetfieldtype.engMetaMeasuredVar.watermark=
+datasetfieldtype.engMetaMeasuredVarName.title=Name
+datasetfieldtype.engMetaMeasuredVarName.description=Name of this variable.
+datasetfieldtype.engMetaMeasuredVarName.watermark=
+datasetfieldtype.engMetaMeasuredVarSymbol.title=Symbol
+datasetfieldtype.engMetaMeasuredVarSymbol.description=The symbol used to describe this variable.
+datasetfieldtype.engMetaMeasuredVarSymbol.watermark=
+datasetfieldtype.engMetaMeasuredVarUnit.title=Unit
+datasetfieldtype.engMetaMeasuredVarUnit.description=The unit or scale of this variable.
+datasetfieldtype.engMetaMeasuredVarUnit.watermark=
+datasetfieldtype.engMetaMeasuredVarError.title=Error
+datasetfieldtype.engMetaMeasuredVarError.description=A value for the uncertainty of this variable.
+datasetfieldtype.engMetaMeasuredVarError.watermark=
+datasetfieldtype.engMetaMeasuredVarErrorDesc.title=Error Description
+datasetfieldtype.engMetaMeasuredVarErrorDesc.description=The type in which the error is measured (e.g. standard deviation, percentage, …)
+datasetfieldtype.engMetaMeasuredVarErrorDesc.watermark=
+datasetfieldtype.engMetaMeasuredVarValueFrom.title=Minimum Value
+datasetfieldtype.engMetaMeasuredVarValueFrom.description=The minimum value of this variable (use for ranges)
+datasetfieldtype.engMetaMeasuredVarValueFrom.watermark=
+datasetfieldtype.engMetaMeasuredVarValueTo.title=Maximum Value
+datasetfieldtype.engMetaMeasuredVarValueTo.description=The maximum value of this variable (use for ranges)
+datasetfieldtype.engMetaMeasuredVarValueTo.watermark=
+datasetfieldtype.engMetaMeasuredVarTextValue.title=Textual Value
+datasetfieldtype.engMetaMeasuredVarTextValue.description=The value of this measured variable (for non-numerical values).
+datasetfieldtype.engMetaMeasuredVarTextValue.watermark=
+datasetfieldtype.engMetaControlledVar.title=Controlled Variables
+datasetfieldtype.engMetaControlledVar.description=Specification of controlled (observed / independent) variables.
+datasetfieldtype.engMetaControlledVar.watermark=
+datasetfieldtype.engMetaControlledVarName.title=Name
+datasetfieldtype.engMetaControlledVarName.description=Name of this variable.
+datasetfieldtype.engMetaControlledVarName.watermark=
+datasetfieldtype.engMetaControlledVarSymbol.title=Symbol
+datasetfieldtype.engMetaControlledVarSymbol.description=The symbol used to describe this variable.
+datasetfieldtype.engMetaControlledVarSymbol.watermark=
+datasetfieldtype.engMetaControlledVarUnit.title=Unit
+datasetfieldtype.engMetaControlledVarUnit.description=The unit or scale of this variable.
+datasetfieldtype.engMetaControlledVarUnit.watermark=
+datasetfieldtype.engMetaControlledVarValue.title=Value
+datasetfieldtype.engMetaControlledVarValue.description=The (single) value of this variable.
+datasetfieldtype.engMetaControlledVarValue.watermark=
+datasetfieldtype.engMetaControlledVarValueFrom.title=Minimum Value
+datasetfieldtype.engMetaControlledVarValueFrom.description=The minimum value of this variable (use for ranges)
+datasetfieldtype.engMetaControlledVarValueFrom.watermark=
+datasetfieldtype.engMetaControlledVarValueTo.title=Maximum Value
+datasetfieldtype.engMetaControlledVarValueTo.description=The maximum value of this variable (use for ranges)
+datasetfieldtype.engMetaControlledVarValueTo.watermark=
+datasetfieldtype.engMetaControlledVarTextValue.title=Textual Value
+datasetfieldtype.engMetaControlledVarTextValue.description=The value of this controlled variable (for non-numerical values).
+datasetfieldtype.engMetaControlledVarTextValue.watermark=
+datasetfieldtype.engMetaPhase.title=System Phases
+datasetfieldtype.engMetaPhase.description=Phases of the observed system.
+datasetfieldtype.engMetaPhase.watermark=
+datasetfieldtype.engMetaPhaseName.title=Name
+datasetfieldtype.engMetaPhaseName.description=Name of a phase.
+datasetfieldtype.engMetaPhaseName.watermark=
+datasetfieldtype.engMetaPhaseComps.title=Components
+datasetfieldtype.engMetaPhaseComps.description=List of all component names for this phase (detailed information about components should be given under System Components).
+datasetfieldtype.engMetaPhaseComps.watermark=
+datasetfieldtype.engMetaComp.title=System or Phase Components
+datasetfieldtype.engMetaComp.description=Specification of a component of the object of research / observed system
+datasetfieldtype.engMetaComp.watermark=
+datasetfieldtype.engMetaCompId.title=Id
+datasetfieldtype.engMetaCompId.description=Unique number that can be referred to in the metadata. Use if Name is not unique.
+datasetfieldtype.engMetaCompId.watermark=Number
+datasetfieldtype.engMetaCompName.title=Name
+datasetfieldtype.engMetaCompName.description=Name of this component.
+datasetfieldtype.engMetaCompName.watermark=
+datasetfieldtype.engMetaCompDescription.title=Description
+datasetfieldtype.engMetaCompDescription.description=Description of the component.
+datasetfieldtype.engMetaCompDescription.watermark=
+datasetfieldtype.engMetaCompInChI.title=InChICode
+datasetfieldtype.engMetaCompInChI.description=The IUPAC International Chemical Identifier
+datasetfieldtype.engMetaCompInChI.watermark=
+datasetfieldtype.engMetaCompSmilesCode.title=SmilesCode
+datasetfieldtype.engMetaCompSmilesCode.description=Simplified Molecular Input Line Entry Specification
+datasetfieldtype.engMetaCompSmilesCode.watermark=
+datasetfieldtype.engMetaCompIUPAC.title=IUPAC Name
+datasetfieldtype.engMetaCompIUPAC.description=Chemical nomenclature created and developed by the International Union of Pure and Applied Chemistry (IUPAC)
+datasetfieldtype.engMetaCompIUPAC.watermark=
+datasetfieldtype.engMetaCompQuantity.title=Quantity
+datasetfieldtype.engMetaCompQuantity.description=The amount of this component.
+datasetfieldtype.engMetaCompQuantity.watermark=
+datasetfieldtype.engMetaCompUnit.title=Unit
+datasetfieldtype.engMetaCompUnit.description=The unit in which the amount is measured.
+datasetfieldtype.engMetaCompUnit.watermark=
+datasetfieldtype.engMetaCompForcefield.title=Force Field
+datasetfieldtype.engMetaCompForcefield.description=Name of the force field belonging to this component (detailed information about the force field should be given under Force Field Parameters).
+datasetfieldtype.engMetaCompForcefield.watermark=
+datasetfieldtype.engMetaForcefield.title=Force Field
+datasetfieldtype.engMetaForcefield.description=Specification of a force field
+datasetfieldtype.engMetaForcefield.watermark=
+datasetfieldtype.engMetaForcefieldName.title=Name
+datasetfieldtype.engMetaForcefieldName.description=Name of the force field.
+datasetfieldtype.engMetaForcefieldName.watermark=
+datasetfieldtype.engMetaForcefieldPars.title=Parameters
+datasetfieldtype.engMetaForcefieldPars.description=List of all parameter names relevant for this force field (detailed information about parameters should be given under Force Field Parameters).
+datasetfieldtype.engMetaForcefieldPars.watermark=
+datasetfieldtype.engMetaForcefieldPar.title=Force Field Parameters
+datasetfieldtype.engMetaForcefieldPar.description=Specification of a force field parameter.
+datasetfieldtype.engMetaForcefieldPar.watermark=
+datasetfieldtype.engMetaForcefieldParName.title=Name
+datasetfieldtype.engMetaForcefieldParName.description=Name of the parameter.
+datasetfieldtype.engMetaForcefieldParName.watermark=
+datasetfieldtype.engMetaForcefieldParSymbol.title=Symbol
+datasetfieldtype.engMetaForcefieldParSymbol.description=The symbol used to describe this parameter.
+datasetfieldtype.engMetaForcefieldParSymbol.watermark=
+datasetfieldtype.engMetaForcefieldParUnit.title=Unit
+datasetfieldtype.engMetaForcefieldParUnit.description=The unit or scale of this parameter.
+datasetfieldtype.engMetaForcefieldParUnit.watermark=
+datasetfieldtype.engMetaForcefieldParValue.title=Value
+datasetfieldtype.engMetaForcefieldParValue.description=The value of this parameter.
+datasetfieldtype.engMetaForcefieldParValue.watermark=
+datasetfieldtype.engMetaSystemPar.title=System Parameters
+datasetfieldtype.engMetaSystemPar.description=Parameters of the observed system.
+datasetfieldtype.engMetaSystemPar.watermark=
+datasetfieldtype.engMetaSystemParName.title=Name
+datasetfieldtype.engMetaSystemParName.description=Name of the parameter.
+datasetfieldtype.engMetaSystemParName.watermark=
+datasetfieldtype.engMetaSystemParSymbol.title=Symbol
+datasetfieldtype.engMetaSystemParSymbol.description=The symbol used to describe this parameter.
+datasetfieldtype.engMetaSystemParSymbol.watermark=
+datasetfieldtype.engMetaSystemParUnit.title=Unit
+datasetfieldtype.engMetaSystemParUnit.description=The unit or scale of this parameter.
+datasetfieldtype.engMetaSystemParUnit.watermark=
+datasetfieldtype.engMetaSystemParValue.title=Value
+datasetfieldtype.engMetaSystemParValue.description=The (numerical) value of this parameter.
+datasetfieldtype.engMetaSystemParValue.watermark=
+datasetfieldtype.engMetaSystemParTextValue.title=Textual Value
+datasetfieldtype.engMetaSystemParTextValue.description=The value of this system parameter (for non-numerical values).
+datasetfieldtype.engMetaSystemParTextValue.watermark=
+datasetfieldtype.engMetaGitter.title=Spatial Resolution
+datasetfieldtype.engMetaGitter.description=Specification of the spatial grid of the observation.
+datasetfieldtype.engMetaGitter.watermark=
+datasetfieldtype.engMetaGitterCountCells.title=Number of Cells
+datasetfieldtype.engMetaGitterCountCells.description=The number of 2D spatial cells.
+datasetfieldtype.engMetaGitterCountCells.watermark=
+datasetfieldtype.engMetaGitterCountBlocks.title=Number of Blocks
+datasetfieldtype.engMetaGitterCountBlocks.description=The number of 3D spatial blocks.
+datasetfieldtype.engMetaGitterCountBlocks.watermark=
+datasetfieldtype.engMetaGitterCountX.title=Number of Points X
+datasetfieldtype.engMetaGitterCountX.description=The number of points in x-direction.
+datasetfieldtype.engMetaGitterCountX.watermark=
+datasetfieldtype.engMetaGitterCountY.title=Number of Points Y
+datasetfieldtype.engMetaGitterCountY.description=The number of points in y-direction.
+datasetfieldtype.engMetaGitterCountY.watermark=
+datasetfieldtype.engMetaGitterCountZ.title=Number of Points Z
+datasetfieldtype.engMetaGitterCountZ.description=The number of points in z-direction.
+datasetfieldtype.engMetaGitterCountZ.watermark=
+datasetfieldtype.engMetaGitterIntervalX.title=Interval X
+datasetfieldtype.engMetaGitterIntervalX.description=The distance between the points in x-direction.
+datasetfieldtype.engMetaGitterIntervalX.watermark=
+datasetfieldtype.engMetaGitterIntervalY.title=Interval Y
+datasetfieldtype.engMetaGitterIntervalY.description=The distance between the points in y-direction.
+datasetfieldtype.engMetaGitterIntervalY.watermark=
+datasetfieldtype.engMetaGitterIntervalZ.title=Interval Z
+datasetfieldtype.engMetaGitterIntervalZ.description=The distance between the points in z-direction.
+datasetfieldtype.engMetaGitterIntervalZ.watermark=
+datasetfieldtype.engMetaGitterUnit.title=Unit
+datasetfieldtype.engMetaGitterUnit.description=The unit of the grid.
+datasetfieldtype.engMetaGitterUnit.watermark=
+datasetfieldtype.engMetaGitterScalingFormular.title=Scaling Formula
+datasetfieldtype.engMetaGitterScalingFormular.description=If the grid is not equidistant, the distance between points can be specified via a formula.
+datasetfieldtype.engMetaGitterScalingFormular.watermark=
+datasetfieldtype.engMetaTemp.title=Temporal Resolution
+datasetfieldtype.engMetaTemp.description=Temporal resolution of the observation. Can be defined either by the number of time steps together with the interval (with unit) between them (equidistant time steps), or by a series of time points together with the unit.
+datasetfieldtype.engMetaTemp.watermark=
+datasetfieldtype.engMetaTempPoints.title=Points
+datasetfieldtype.engMetaTempPoints.description=List of time points that describe the temporal resolution (if it cannot be specified otherwise).
+datasetfieldtype.engMetaTempPoints.watermark=
+datasetfieldtype.engMetaTempCountPoints.title=Number of time steps
+datasetfieldtype.engMetaTempCountPoints.description=The number of time points (equidistantly spaced).
+datasetfieldtype.engMetaTempCountPoints.watermark=
+datasetfieldtype.engMetaTempInterval.title=Interval
+datasetfieldtype.engMetaTempInterval.description=Distance between two time points.
+datasetfieldtype.engMetaTempInterval.watermark=
+datasetfieldtype.engMetaTempUnit.title=Unit
+datasetfieldtype.engMetaTempUnit.description=The unit of the temporal resolution.
+datasetfieldtype.engMetaTempUnit.watermark=
+datasetfieldtype.engMetaFlows.title=Flows
+datasetfieldtype.engMetaFlows.description=Information about in- and outflows relevant for boundary conditions.
+datasetfieldtype.engMetaFlows.watermark=
+datasetfieldtype.engMetaFlowsName.title=Name
+datasetfieldtype.engMetaFlowsName.description=Name of the flow.
+datasetfieldtype.engMetaFlowsName.watermark=
+datasetfieldtype.engMetaFlowsComp.title=Components
+datasetfieldtype.engMetaFlowsComp.description=List of system component names this flow belongs to.
+datasetfieldtype.engMetaFlowsComp.watermark=
+datasetfieldtype.engMetaFlowsShape.title=Shape
+datasetfieldtype.engMetaFlowsShape.description=Shape of the flow.
+datasetfieldtype.engMetaFlowsShape.watermark=
+datasetfieldtype.engMetaFlowsSize.title=Size
+datasetfieldtype.engMetaFlowsSize.description=Size of the flow.
+datasetfieldtype.engMetaFlowsSize.watermark=
+datasetfieldtype.engMetaFlowsPosition.title=Position
+datasetfieldtype.engMetaFlowsPosition.description=The position of the flow.
+datasetfieldtype.engMetaFlowsPosition.watermark=(x, y, z)
+datasetfieldtype.engMetaBoundCond.title=Boundary Conditions
+datasetfieldtype.engMetaBoundCond.description=Definition of boundaries of the observed system.
+datasetfieldtype.engMetaBoundCond.watermark=
+datasetfieldtype.engMetaBoundCondFlows.title=Flows
+datasetfieldtype.engMetaBoundCondFlows.description=List of in- and outflows describing this boundary condition (detailed information about flows should be given under Flows).
+datasetfieldtype.engMetaBoundCondFlows.watermark=
+datasetfieldtype.engMetaBoundCondPars.title=Parameters
+datasetfieldtype.engMetaBoundCondPars.description=List of all parameter names relevant for this boundary condition (detailed information about parameters should be given under Boundary Parameters).
+datasetfieldtype.engMetaBoundCondPars.watermark=
+datasetfieldtype.engMetaBoundCondPar.title=Boundary Parameters
+datasetfieldtype.engMetaBoundCondPar.description=Parameters relevant for boundary conditions.
+datasetfieldtype.engMetaBoundCondPar.watermark=
+datasetfieldtype.engMetaBoundCondParName.title=Name
+datasetfieldtype.engMetaBoundCondParName.description=Name of the parameter.
+datasetfieldtype.engMetaBoundCondParName.watermark=
+datasetfieldtype.engMetaBoundCondParSymbol.title=Symbol
+datasetfieldtype.engMetaBoundCondParSymbol.description=The symbol used to describe this parameter.
+datasetfieldtype.engMetaBoundCondParSymbol.watermark=
+datasetfieldtype.engMetaBoundCondParUnit.title=Unit
+datasetfieldtype.engMetaBoundCondParUnit.description=The unit or scale of this parameter.
+datasetfieldtype.engMetaBoundCondParUnit.watermark=
+datasetfieldtype.engMetaBoundCondParValue.title=Value
+datasetfieldtype.engMetaBoundCondParValue.description=The value of this parameter.
+datasetfieldtype.engMetaBoundCondParValue.watermark=
+controlledvocabulary.engMetaMode.simulation=Simulation
+controlledvocabulary.engMetaMode.analysis=Analysis
+controlledvocabulary.engMetaMode.experiment=Experiment
+controlledvocabulary.engMetaMode.prediction=Prediction
diff --git a/src/main/java/propertyFiles/archive.properties b/src/main/java/propertyFiles/archive.properties
new file mode 100644
index 00000000000..6e285e22ca8
--- /dev/null
+++ b/src/main/java/propertyFiles/archive.properties
@@ -0,0 +1,21 @@
+metadatablock.name=archive
+metadatablock.displayName=Archival Metadata
+datasetfieldtype.archiveActiveUntil.title=Active Until
+datasetfieldtype.archiveActiveUntil.description=A date (YYYY-MM-DD) up to which the dataset is in the active state
+datasetfieldtype.archiveActiveUntil.watermark=YYYY-MM-DD
+datasetfieldtype.archiveArchivedFrom.title=Archived From
+datasetfieldtype.archiveArchivedFrom.description=A date (YYYY-MM-DD) from which the dataset is archived
+datasetfieldtype.archiveArchivedFrom.watermark=YYYY-MM-DD
+datasetfieldtype.archiveArchivedFor.title=Archived For
+datasetfieldtype.archiveArchivedFor.description=The period for which the dataset is to be archived.
+datasetfieldtype.archiveArchivedFor.watermark=
+datasetfieldtype.archiveArchivedAt.title=Archived At
+datasetfieldtype.archiveArchivedAt.description=The location where the files are archived
+datasetfieldtype.archiveArchivedAt.watermark=URL
+datasetfieldtype.archiveReason.title=Archive Reason
+datasetfieldtype.archiveReason.description=The reason why this dataset is archived rather than published
+datasetfieldtype.archiveReason.watermark=
+controlledvocabulary.archiveArchivedFor.1_year=1 year
+controlledvocabulary.archiveArchivedFor.3_years=3 years
+controlledvocabulary.archiveArchivedFor.5_years=5 years
+controlledvocabulary.archiveArchivedFor.10_years=10 years
diff --git a/src/main/java/propertyFiles/citation.properties b/src/main/java/propertyFiles/citation.properties
index cb864eb78e9..42fc0c4b539 100644
--- a/src/main/java/propertyFiles/citation.properties
+++ b/src/main/java/propertyFiles/citation.properties
@@ -2,242 +2,269 @@ metadatablock.name=citation
metadatablock.displayName=Citation Metadata
metadatablock.displayFacet=Citation
datasetfieldtype.title.title=Title
-datasetfieldtype.subtitle.title=Subtitle
-datasetfieldtype.alternativeTitle.title=Alternative Title
-datasetfieldtype.alternativeURL.title=Alternative URL
-datasetfieldtype.otherId.title=Other Identifier
-datasetfieldtype.otherIdAgency.title=Agency
-datasetfieldtype.otherIdValue.title=Identifier
-datasetfieldtype.author.title=Author
-datasetfieldtype.authorName.title=Name
-datasetfieldtype.authorAffiliation.title=Affiliation
-datasetfieldtype.authorIdentifierScheme.title=Identifier Type
-datasetfieldtype.authorIdentifier.title=Identifier
-datasetfieldtype.datasetContact.title=Point of Contact
-datasetfieldtype.datasetContactName.title=Name
-datasetfieldtype.datasetContactAffiliation.title=Affiliation
-datasetfieldtype.datasetContactEmail.title=E-mail
-datasetfieldtype.dsDescription.title=Description
-datasetfieldtype.dsDescriptionValue.title=Text
-datasetfieldtype.dsDescriptionDate.title=Date
-datasetfieldtype.subject.title=Subject
-datasetfieldtype.keyword.title=Keyword
-datasetfieldtype.keywordTermURI.title=Term URI
-datasetfieldtype.keywordValue.title=Term
-datasetfieldtype.keywordVocabulary.title=Controlled Vocabulary Name
-datasetfieldtype.keywordVocabularyURI.title=Controlled Vocabulary URL
-datasetfieldtype.topicClassification.title=Topic Classification
-datasetfieldtype.topicClassValue.title=Term
-datasetfieldtype.topicClassVocab.title=Controlled Vocabulary Name
-datasetfieldtype.topicClassVocabURI.title=Controlled Vocabulary URL
-datasetfieldtype.publication.title=Related Publication
-datasetfieldtype.publicationCitation.title=Citation
-datasetfieldtype.publicationIDType.title=Identifier Type
-datasetfieldtype.publicationIDNumber.title=Identifier
-datasetfieldtype.publicationURL.title=URL
-datasetfieldtype.notesText.title=Notes
-datasetfieldtype.language.title=Language
-datasetfieldtype.producer.title=Producer
-datasetfieldtype.producerName.title=Name
-datasetfieldtype.producerAffiliation.title=Affiliation
-datasetfieldtype.producerAbbreviation.title=Abbreviated Name
-datasetfieldtype.producerURL.title=URL
-datasetfieldtype.producerLogoURL.title=Logo URL
-datasetfieldtype.productionDate.title=Production Date
-datasetfieldtype.productionPlace.title=Production Location
-datasetfieldtype.contributor.title=Contributor
-datasetfieldtype.contributorType.title=Type
-datasetfieldtype.contributorName.title=Name
-datasetfieldtype.grantNumber.title=Funding Information
-datasetfieldtype.grantNumberAgency.title=Agency
-datasetfieldtype.grantNumberValue.title=Identifier
-datasetfieldtype.distributor.title=Distributor
-datasetfieldtype.distributorName.title=Name
-datasetfieldtype.distributorAffiliation.title=Affiliation
-datasetfieldtype.distributorAbbreviation.title=Abbreviated Name
-datasetfieldtype.distributorURL.title=URL
-datasetfieldtype.distributorLogoURL.title=Logo URL
-datasetfieldtype.distributionDate.title=Distribution Date
-datasetfieldtype.depositor.title=Depositor
-datasetfieldtype.dateOfDeposit.title=Deposit Date
-datasetfieldtype.timePeriodCovered.title=Time Period
-datasetfieldtype.timePeriodCoveredStart.title=Start Date
-datasetfieldtype.timePeriodCoveredEnd.title=End Date
-datasetfieldtype.dateOfCollection.title=Date of Collection
-datasetfieldtype.dateOfCollectionStart.title=Start Date
-datasetfieldtype.dateOfCollectionEnd.title=End Date
-datasetfieldtype.kindOfData.title=Data Type
-datasetfieldtype.series.title=Series
-datasetfieldtype.seriesName.title=Name
-datasetfieldtype.seriesInformation.title=Information
-datasetfieldtype.software.title=Software
-datasetfieldtype.softwareName.title=Name
-datasetfieldtype.softwareVersion.title=Version
-datasetfieldtype.relatedMaterial.title=Related Material
-datasetfieldtype.relatedDatasets.title=Related Dataset
-datasetfieldtype.otherReferences.title=Other Reference
-datasetfieldtype.dataSources.title=Data Source
-datasetfieldtype.originOfSources.title=Origin of Historical Sources
-datasetfieldtype.characteristicOfSources.title=Characteristic of Sources
-datasetfieldtype.accessToSources.title=Documentation and Access to Sources
datasetfieldtype.title.description=The main title of the Dataset
-datasetfieldtype.subtitle.description=A secondary title that amplifies or states certain limitations on the main title
-datasetfieldtype.alternativeTitle.description=Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title
-datasetfieldtype.alternativeURL.description=Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage
-datasetfieldtype.otherId.description=Another unique identifier for the Dataset (e.g. producer's or another repository's identifier)
-datasetfieldtype.otherIdAgency.description=The name of the agency that generated the other identifier
-datasetfieldtype.otherIdValue.description=Another identifier uniquely identifies the Dataset
-datasetfieldtype.author.description=The entity, e.g. a person or organization, that created the Dataset
-datasetfieldtype.authorName.description=The name of the author, such as the person's name or the name of an organization
-datasetfieldtype.authorAffiliation.description=The name of the entity affiliated with the author, e.g. an organization's name
-datasetfieldtype.authorIdentifierScheme.description=The type of identifier that uniquely identifies the author (e.g. ORCID, ISNI)
-datasetfieldtype.authorIdentifier.description=Uniquely identifies the author when paired with an identifier type
-datasetfieldtype.datasetContact.description=The entity, e.g. a person or organization, that users of the Dataset can contact with questions
-datasetfieldtype.datasetContactName.description=The name of the point of contact, e.g. the person's name or the name of an organization
-datasetfieldtype.datasetContactAffiliation.description=The name of the entity affiliated with the point of contact, e.g. an organization's name
-datasetfieldtype.datasetContactEmail.description=The point of contact's email address
-datasetfieldtype.dsDescription.description=A summary describing the purpose, nature, and scope of the Dataset
-datasetfieldtype.dsDescriptionValue.description=A summary describing the purpose, nature, and scope of the Dataset
-datasetfieldtype.dsDescriptionDate.description=The date when the description was added to the Dataset. If the Dataset contains more than one description, e.g. the data producer supplied one description and the data repository supplied another, this date is used to distinguish between the descriptions
-datasetfieldtype.subject.description=The area of study relevant to the Dataset
-datasetfieldtype.keyword.description=A key term that describes an important aspect of the Dataset and information about any controlled vocabulary used
-datasetfieldtype.keywordTermURI.description=A URI that points to the web presence of the Keyword Term
-datasetfieldtype.keywordValue.description=A key term that describes important aspects of the Dataset
-datasetfieldtype.keywordVocabulary.description=The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH)
-datasetfieldtype.keywordVocabularyURI.description=The URL where one can access information about the term's controlled vocabulary
-datasetfieldtype.topicClassification.description=Indicates a broad, important topic or subject that the Dataset covers and information about any controlled vocabulary used
-datasetfieldtype.topicClassValue.description=A topic or subject term
-datasetfieldtype.topicClassVocab.description=The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH)
-datasetfieldtype.topicClassVocabURI.description=The URL where one can access information about the term's controlled vocabulary
-datasetfieldtype.publication.description=The article or report that uses the data in the Dataset. The full list of related publications will be displayed on the metadata tab
-datasetfieldtype.publicationCitation.description=The full bibliographic citation for the related publication
-datasetfieldtype.publicationIDType.description=The type of identifier that uniquely identifies a related publication
-datasetfieldtype.publicationIDNumber.description=The identifier for a related publication
-datasetfieldtype.publicationURL.description=The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. a journal article webpage
-datasetfieldtype.notesText.description=Additional information about the Dataset
-datasetfieldtype.language.description=A language that the Dataset's files is written in
-datasetfieldtype.producer.description=The entity, such a person or organization, managing the finances or other administrative processes involved in the creation of the Dataset
-datasetfieldtype.producerName.description=The name of the entity, e.g. the person's name or the name of an organization
-datasetfieldtype.producerAffiliation.description=The name of the entity affiliated with the producer, e.g. an organization's name
-datasetfieldtype.producerAbbreviation.description=The producer's abbreviated name (e.g. IQSS, ICPSR)
-datasetfieldtype.producerURL.description=The URL of the producer's website
-datasetfieldtype.producerLogoURL.description=The URL of the producer's logo
-datasetfieldtype.productionDate.description=The date when the data were produced (not distributed, published, or archived)
-datasetfieldtype.productionPlace.description=The location where the data and any related materials were produced or collected
-datasetfieldtype.contributor.description=The entity, such as a person or organization, responsible for collecting, managing, or otherwise contributing to the development of the Dataset
-datasetfieldtype.contributorType.description=Indicates the type of contribution made to the dataset
-datasetfieldtype.contributorName.description=The name of the contributor, e.g. the person's name or the name of an organization
-datasetfieldtype.grantNumber.description=Information about the Dataset's financial support
-datasetfieldtype.grantNumberAgency.description=The agency that provided financial support for the Dataset
-datasetfieldtype.grantNumberValue.description=The grant identifier or contract identifier of the agency that provided financial support for the Dataset
-datasetfieldtype.distributor.description=The entity, such as a person or organization, designated to generate copies of the Dataset, including any editions or revisions
-datasetfieldtype.distributorName.description=The name of the entity, e.g. the person's name or the name of an organization
-datasetfieldtype.distributorAffiliation.description=The name of the entity affiliated with the distributor, e.g. an organization's name
-datasetfieldtype.distributorAbbreviation.description=The distributor's abbreviated name (e.g. IQSS, ICPSR)
-datasetfieldtype.distributorURL.description=The URL of the distributor's webpage
-datasetfieldtype.distributorLogoURL.description=The URL of the distributor's logo image, used to show the image on the Dataset's page
-datasetfieldtype.distributionDate.description=The date when the Dataset was made available for distribution/presentation
-datasetfieldtype.depositor.description=The entity, such as a person or organization, that deposited the Dataset in the repository
-datasetfieldtype.dateOfDeposit.description=The date when the Dataset was deposited into the repository
-datasetfieldtype.timePeriodCovered.description=The time period that the data refer to. Also known as span. This is the time period covered by the data, not the dates of coding, collecting data, or making documents machine-readable
-datasetfieldtype.timePeriodCoveredStart.description=The start date of the time period that the data refer to
-datasetfieldtype.timePeriodCoveredEnd.description=The end date of the time period that the data refer to
-datasetfieldtype.dateOfCollection.description=The dates when the data were collected or generated
-datasetfieldtype.dateOfCollectionStart.description=The date when the data collection started
-datasetfieldtype.dateOfCollectionEnd.description=The date when the data collection ended
-datasetfieldtype.kindOfData.description=The type of data included in the files (e.g. survey data, clinical data, or machine-readable text)
-datasetfieldtype.series.description=Information about the dataset series to which the Dataset belong
-datasetfieldtype.seriesName.description=The name of the dataset series
-datasetfieldtype.seriesInformation.description=Can include 1) a history of the series and 2) a summary of features that apply to the series
-datasetfieldtype.software.description=Information about the software used to generate the Dataset
-datasetfieldtype.softwareName.description=The name of software used to generate the Dataset
-datasetfieldtype.softwareVersion.description=The version of the software used to generate the Dataset, e.g. 4.11
-datasetfieldtype.relatedMaterial.description=Information, such as a persistent ID or citation, about the material related to the Dataset, such as appendices or sampling information available outside of the Dataset
-datasetfieldtype.relatedDatasets.description=Information, such as a persistent ID or citation, about a related dataset, such as previous research on the Dataset's subject
-datasetfieldtype.otherReferences.description=Information, such as a persistent ID or citation, about another type of resource that provides background or supporting material to the Dataset
-datasetfieldtype.dataSources.description=Information, such as a persistent ID or citation, about sources of the Dataset (e.g. a book, article, serial, or machine-readable data file)
-datasetfieldtype.originOfSources.description=For historical sources, the origin and any rules followed in establishing them as sources
-datasetfieldtype.characteristicOfSources.description=Characteristics not already noted elsewhere
-datasetfieldtype.accessToSources.description=1) Methods or procedures for accessing data sources and 2) any special permissions needed for access
datasetfieldtype.title.watermark=
+datasetfieldtype.subtitle.title=Subtitle
+datasetfieldtype.subtitle.description=A secondary title that amplifies or states certain limitations on the main title
datasetfieldtype.subtitle.watermark=
+datasetfieldtype.alternativeTitle.title=Alternative Title
+datasetfieldtype.alternativeTitle.description=Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title
datasetfieldtype.alternativeTitle.watermark=
+datasetfieldtype.alternativeURL.title=Alternative URL
+datasetfieldtype.alternativeURL.description=Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage
datasetfieldtype.alternativeURL.watermark=https://
+datasetfieldtype.otherId.title=Other Identifier
+datasetfieldtype.otherId.description=Another unique identifier for the Dataset (e.g. producer's or another repository's identifier)
datasetfieldtype.otherId.watermark=
+datasetfieldtype.otherIdAgency.title=Agency
+datasetfieldtype.otherIdAgency.description=The name of the agency that generated the other identifier
datasetfieldtype.otherIdAgency.watermark=
+datasetfieldtype.otherIdValue.title=Identifier
+datasetfieldtype.otherIdValue.description=Another identifier that uniquely identifies the Dataset
datasetfieldtype.otherIdValue.watermark=
+datasetfieldtype.author.title=Author
+datasetfieldtype.author.description=The entity, e.g. a person or organization, that created the Dataset
datasetfieldtype.author.watermark=
+datasetfieldtype.authorName.title=Name
+datasetfieldtype.authorName.description=The name of the author, such as the person's name or the name of an organization
datasetfieldtype.authorName.watermark=1) Family Name, Given Name or 2) Organization XYZ
+datasetfieldtype.authorAffiliation.title=Affiliation
+datasetfieldtype.authorAffiliation.description=The name of the entity affiliated with the author, e.g. an organization's name
datasetfieldtype.authorAffiliation.watermark=Organization XYZ
+datasetfieldtype.authorIdentifierScheme.title=Identifier Type
+datasetfieldtype.authorIdentifierScheme.description=The type of identifier that uniquely identifies the author (e.g. ORCID, ISNI)
datasetfieldtype.authorIdentifierScheme.watermark=
+datasetfieldtype.authorIdentifier.title=Identifier
+datasetfieldtype.authorIdentifier.description=Uniquely identifies the author when paired with an identifier type
datasetfieldtype.authorIdentifier.watermark=
+datasetfieldtype.datasetContact.title=Point of Contact
+datasetfieldtype.datasetContact.description=The entity, e.g. a person or organization, that users of the Dataset can contact with questions
datasetfieldtype.datasetContact.watermark=
+datasetfieldtype.datasetContactName.title=Name
+datasetfieldtype.datasetContactName.description=The name of the point of contact, e.g. the person's name or the name of an organization
datasetfieldtype.datasetContactName.watermark=1) FamilyName, GivenName or 2) Organization
+datasetfieldtype.datasetContactAffiliation.title=Affiliation
+datasetfieldtype.datasetContactAffiliation.description=The name of the entity affiliated with the point of contact, e.g. an organization's name
datasetfieldtype.datasetContactAffiliation.watermark=Organization XYZ
+datasetfieldtype.datasetContactEmail.title=E-mail
+datasetfieldtype.datasetContactEmail.description=The point of contact's email address
datasetfieldtype.datasetContactEmail.watermark=name@email.xyz
+datasetfieldtype.dsDescription.title=Description
+datasetfieldtype.dsDescription.description=A summary describing the purpose, nature, and scope of the Dataset
datasetfieldtype.dsDescription.watermark=
+datasetfieldtype.dsDescriptionValue.title=Text
+datasetfieldtype.dsDescriptionValue.description=A summary describing the purpose, nature, and scope of the Dataset
datasetfieldtype.dsDescriptionValue.watermark=
+datasetfieldtype.dsDescriptionDate.title=Date
+datasetfieldtype.dsDescriptionDate.description=The date when the description was added to the Dataset. If the Dataset contains more than one description, e.g. the data producer supplied one description and the data repository supplied another, this date is used to distinguish between the descriptions
datasetfieldtype.dsDescriptionDate.watermark=YYYY-MM-DD
+datasetfieldtype.subject.title=Subject
+datasetfieldtype.subject.description=The area of study relevant to the Dataset
datasetfieldtype.subject.watermark=
+datasetfieldtype.keyword.title=Keyword
+datasetfieldtype.keyword.description=A key term that describes an important aspect of the Dataset and information about any controlled vocabulary used
datasetfieldtype.keyword.watermark=
-datasetfieldtype.keywordTermURI.watermark=https://
+datasetfieldtype.keywordValue.title=Term
+datasetfieldtype.keywordValue.description=A key term that describes important aspects of the Dataset
datasetfieldtype.keywordValue.watermark=
+datasetfieldtype.keywordTermURI.title=Term URI
+datasetfieldtype.keywordTermURI.description=A URI that points to the web presence of the Keyword Term
+datasetfieldtype.keywordTermURI.watermark=https://
+datasetfieldtype.keywordVocabulary.title=Controlled Vocabulary Name
+datasetfieldtype.keywordVocabulary.description=The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH)
datasetfieldtype.keywordVocabulary.watermark=
+datasetfieldtype.keywordVocabularyURI.title=Controlled Vocabulary URL
+datasetfieldtype.keywordVocabularyURI.description=The URL where one can access information about the term's controlled vocabulary
datasetfieldtype.keywordVocabularyURI.watermark=https://
+datasetfieldtype.topicClassification.title=Topic Classification
+datasetfieldtype.topicClassification.description=Indicates a broad, important topic or subject that the Dataset covers and information about any controlled vocabulary used
datasetfieldtype.topicClassification.watermark=
+datasetfieldtype.topicClassValue.title=Term
+datasetfieldtype.topicClassValue.description=A topic or subject term
datasetfieldtype.topicClassValue.watermark=
+datasetfieldtype.topicClassVocab.title=Controlled Vocabulary Name
+datasetfieldtype.topicClassVocab.description=The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH)
datasetfieldtype.topicClassVocab.watermark=
+datasetfieldtype.topicClassVocabURI.title=Controlled Vocabulary URL
+datasetfieldtype.topicClassVocabURI.description=The URL where one can access information about the term's controlled vocabulary
datasetfieldtype.topicClassVocabURI.watermark=https://
+datasetfieldtype.publication.title=Related Publication
+datasetfieldtype.publication.description=The article or report that uses the data in the Dataset. The full list of related publications will be displayed on the metadata tab
datasetfieldtype.publication.watermark=
+datasetfieldtype.publicationCitation.title=Citation
+datasetfieldtype.publicationCitation.description=The full bibliographic citation for the related publication
datasetfieldtype.publicationCitation.watermark=
+datasetfieldtype.publicationIDType.title=Identifier Type
+datasetfieldtype.publicationIDType.description=The type of identifier that uniquely identifies a related publication
datasetfieldtype.publicationIDType.watermark=
+datasetfieldtype.publicationIDNumber.title=Identifier
+datasetfieldtype.publicationIDNumber.description=The identifier for a related publication
datasetfieldtype.publicationIDNumber.watermark=
+datasetfieldtype.publicationURL.title=URL
+datasetfieldtype.publicationURL.description=The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. a journal article webpage
datasetfieldtype.publicationURL.watermark=https://
+datasetfieldtype.notesText.title=Notes
+datasetfieldtype.notesText.description=Additional information about the Dataset
datasetfieldtype.notesText.watermark=
+datasetfieldtype.language.title=Language
+datasetfieldtype.language.description=A language that the Dataset's files are written in
datasetfieldtype.language.watermark=
+datasetfieldtype.producer.title=Producer
+datasetfieldtype.producer.description=The entity, such as a person or organization, managing the finances or other administrative processes involved in the creation of the Dataset
datasetfieldtype.producer.watermark=
+datasetfieldtype.producerName.title=Name
+datasetfieldtype.producerName.description=The name of the entity, e.g. the person's name or the name of an organization
datasetfieldtype.producerName.watermark=1) FamilyName, GivenName or 2) Organization
+datasetfieldtype.producerAffiliation.title=Affiliation
+datasetfieldtype.producerAffiliation.description=The name of the entity affiliated with the producer, e.g. an organization's name
datasetfieldtype.producerAffiliation.watermark=Organization XYZ
+datasetfieldtype.producerAbbreviation.title=Abbreviated Name
+datasetfieldtype.producerAbbreviation.description=The producer's abbreviated name (e.g. IQSS, ICPSR)
datasetfieldtype.producerAbbreviation.watermark=
+datasetfieldtype.producerURL.title=URL
+datasetfieldtype.producerURL.description=The URL of the producer's website
datasetfieldtype.producerURL.watermark=https://
+datasetfieldtype.producerLogoURL.title=Logo URL
+datasetfieldtype.producerLogoURL.description=The URL of the producer's logo
datasetfieldtype.producerLogoURL.watermark=https://
+datasetfieldtype.productionDate.title=Production Date
+datasetfieldtype.productionDate.description=The date when the data were produced (not distributed, published, or archived)
datasetfieldtype.productionDate.watermark=YYYY-MM-DD
+datasetfieldtype.productionPlace.title=Production Location
+datasetfieldtype.productionPlace.description=The location where the data and any related materials were produced or collected
datasetfieldtype.productionPlace.watermark=
+datasetfieldtype.contributor.title=Contributor
+datasetfieldtype.contributor.description=The entity, such as a person or organization, responsible for collecting, managing, or otherwise contributing to the development of the Dataset
datasetfieldtype.contributor.watermark=
+datasetfieldtype.contributorType.title=Type
+datasetfieldtype.contributorType.description=Indicates the type of contribution made to the dataset
datasetfieldtype.contributorType.watermark=
+datasetfieldtype.contributorName.title=Name
+datasetfieldtype.contributorName.description=The name of the contributor, e.g. the person's name or the name of an organization
datasetfieldtype.contributorName.watermark=1) FamilyName, GivenName or 2) Organization
+datasetfieldtype.grantNumber.title=Funding Information
+datasetfieldtype.grantNumber.description=Information about the Dataset's financial support
datasetfieldtype.grantNumber.watermark=
+datasetfieldtype.grantNumberAgency.title=Agency
+datasetfieldtype.grantNumberAgency.description=The agency that provided financial support for the Dataset
datasetfieldtype.grantNumberAgency.watermark=Organization XYZ
+datasetfieldtype.grantNumberValue.title=Identifier
+datasetfieldtype.grantNumberValue.description=The grant identifier or contract identifier of the agency that provided financial support for the Dataset
datasetfieldtype.grantNumberValue.watermark=
+datasetfieldtype.project.title=Project
+datasetfieldtype.project.description=Information about the project that forms the context of the data
+datasetfieldtype.project.watermark=
+datasetfieldtype.projectName.title=Name
+datasetfieldtype.projectName.description=Name of the project
+datasetfieldtype.projectName.watermark=
+datasetfieldtype.projectLevel.title=Level
+datasetfieldtype.projectLevel.description=The main project should have level zero; subprojects can have higher levels
+datasetfieldtype.projectLevel.watermark=0
+datasetfieldtype.distributor.title=Distributor
+datasetfieldtype.distributor.description=The entity, such as a person or organization, designated to generate copies of the Dataset, including any editions or revisions
datasetfieldtype.distributor.watermark=
+datasetfieldtype.distributorName.title=Name
+datasetfieldtype.distributorName.description=The name of the entity, e.g. the person's name or the name of an organization
datasetfieldtype.distributorName.watermark=1) FamilyName, GivenName or 2) Organization
+datasetfieldtype.distributorAffiliation.title=Affiliation
+datasetfieldtype.distributorAffiliation.description=The name of the entity affiliated with the distributor, e.g. an organization's name
datasetfieldtype.distributorAffiliation.watermark=Organization XYZ
+datasetfieldtype.distributorAbbreviation.title=Abbreviated Name
+datasetfieldtype.distributorAbbreviation.description=The distributor's abbreviated name (e.g. IQSS, ICPSR)
datasetfieldtype.distributorAbbreviation.watermark=
+datasetfieldtype.distributorURL.title=URL
+datasetfieldtype.distributorURL.description=The URL of the distributor's webpage
datasetfieldtype.distributorURL.watermark=https://
+datasetfieldtype.distributorLogoURL.title=Logo URL
+datasetfieldtype.distributorLogoURL.description=The URL of the distributor's logo image, used to show the image on the Dataset's page
datasetfieldtype.distributorLogoURL.watermark=https://
+datasetfieldtype.distributionDate.title=Distribution Date
+datasetfieldtype.distributionDate.description=The date when the Dataset was made available for distribution/presentation
datasetfieldtype.distributionDate.watermark=YYYY-MM-DD
+datasetfieldtype.depositor.title=Depositor
+datasetfieldtype.depositor.description=The entity, such as a person or organization, that deposited the Dataset in the repository
datasetfieldtype.depositor.watermark=1) FamilyName, GivenName or 2) Organization
+datasetfieldtype.dateOfDeposit.title=Deposit Date
+datasetfieldtype.dateOfDeposit.description=The date when the Dataset was deposited into the repository
datasetfieldtype.dateOfDeposit.watermark=YYYY-MM-DD
+datasetfieldtype.timePeriodCovered.title=Time Period
+datasetfieldtype.timePeriodCovered.description=The time period that the data refer to. Also known as span. This is the time period covered by the data, not the dates of coding, collecting data, or making documents machine-readable
datasetfieldtype.timePeriodCovered.watermark=
+datasetfieldtype.timePeriodCoveredStart.title=Start Date
+datasetfieldtype.timePeriodCoveredStart.description=The start date of the time period that the data refer to
datasetfieldtype.timePeriodCoveredStart.watermark=YYYY-MM-DD
+datasetfieldtype.timePeriodCoveredEnd.title=End Date
+datasetfieldtype.timePeriodCoveredEnd.description=The end date of the time period that the data refer to
datasetfieldtype.timePeriodCoveredEnd.watermark=YYYY-MM-DD
+datasetfieldtype.dateOfCollection.title=Date of Collection
+datasetfieldtype.dateOfCollection.description=The dates when the data were collected or generated
datasetfieldtype.dateOfCollection.watermark=
+datasetfieldtype.dateOfCollectionStart.title=Start Date
+datasetfieldtype.dateOfCollectionStart.description=The date when the data collection started
datasetfieldtype.dateOfCollectionStart.watermark=YYYY-MM-DD
+datasetfieldtype.dateOfCollectionEnd.title=End Date
+datasetfieldtype.dateOfCollectionEnd.description=The date when the data collection ended
datasetfieldtype.dateOfCollectionEnd.watermark=YYYY-MM-DD
+datasetfieldtype.kindOfData.title=Data Type
+datasetfieldtype.kindOfData.description=The type of data included in the files (e.g. survey data, clinical data, or machine-readable text)
datasetfieldtype.kindOfData.watermark=
+datasetfieldtype.series.title=Series
+datasetfieldtype.series.description=Information about the dataset series to which the Dataset belongs
datasetfieldtype.series.watermark=
+datasetfieldtype.seriesName.title=Name
+datasetfieldtype.seriesName.description=The name of the dataset series
datasetfieldtype.seriesName.watermark=
+datasetfieldtype.seriesInformation.title=Information
+datasetfieldtype.seriesInformation.description=Can include 1) a history of the series and 2) a summary of features that apply to the series
datasetfieldtype.seriesInformation.watermark=
+datasetfieldtype.software.title=Software
+datasetfieldtype.software.description=Information about the software used to generate the Dataset
datasetfieldtype.software.watermark=
+datasetfieldtype.softwareName.title=Name
+datasetfieldtype.softwareName.description=The name of software used to generate the Dataset
datasetfieldtype.softwareName.watermark=
+datasetfieldtype.softwareVersion.title=Version
+datasetfieldtype.softwareVersion.description=The version of the software used to generate the Dataset, e.g. 4.11
datasetfieldtype.softwareVersion.watermark=
+datasetfieldtype.relatedMaterial.title=Related Material
+datasetfieldtype.relatedMaterial.description=Information, such as a persistent ID or citation, about the material related to the Dataset, such as appendices or sampling information available outside of the Dataset
datasetfieldtype.relatedMaterial.watermark=
+datasetfieldtype.relatedDatasets.title=Related Dataset
+datasetfieldtype.relatedDatasets.description=Information, such as a persistent ID or citation, about a related dataset, such as previous research on the Dataset's subject
datasetfieldtype.relatedDatasets.watermark=
+datasetfieldtype.otherReferences.title=Other Reference
+datasetfieldtype.otherReferences.description=Information, such as a persistent ID or citation, about another type of resource that provides background or supporting material to the Dataset
datasetfieldtype.otherReferences.watermark=
+datasetfieldtype.dataSources.title=Data Source
+datasetfieldtype.dataSources.description=Information, such as a persistent ID or citation, about sources of the Dataset (e.g. a book, article, serial, or machine-readable data file)
datasetfieldtype.dataSources.watermark=
+datasetfieldtype.originOfSources.title=Origin of Historical Sources
+datasetfieldtype.originOfSources.description=For historical sources, the origin and any rules followed in establishing them as sources
datasetfieldtype.originOfSources.watermark=
+datasetfieldtype.characteristicOfSources.title=Characteristic of Sources
+datasetfieldtype.characteristicOfSources.description=Characteristics not already noted elsewhere
datasetfieldtype.characteristicOfSources.watermark=
+datasetfieldtype.accessToSources.title=Documentation and Access to Sources
+datasetfieldtype.accessToSources.description=1) Methods or procedures for accessing data sources and 2) any special permissions needed for access
datasetfieldtype.accessToSources.watermark=
+datasetfieldtype.worked.title=Did it work?
+datasetfieldtype.worked.description=Not only positive analyses are worth sharing; negative results prevent others from making the same mistakes
+datasetfieldtype.worked.watermark=Yes or No
+datasetfieldtype.workedNote.title=Explanation
+datasetfieldtype.workedNote.description=Explanation of the previous answer; why it worked or why it did not
+datasetfieldtype.workedNote.watermark=
+datasetfieldtype.storage.title=Storage
+datasetfieldtype.storage.description=Information about data that could NOT be uploaded into the system
+datasetfieldtype.storage.watermark=
+datasetfieldtype.storageFile.title=Name
+datasetfieldtype.storageFile.description=The name of the file, directory or archive
+datasetfieldtype.storageFile.watermark=
+datasetfieldtype.storageLocation.title=Location
+datasetfieldtype.storageLocation.description=The DNS name, path, or URL of the location where the object is stored
+datasetfieldtype.storageLocation.watermark=
+datasetfieldtype.storageSize.title=Size
+datasetfieldtype.storageSize.description=The approximate size (with unit) of the object
+datasetfieldtype.storageSize.watermark=
controlledvocabulary.subject.agricultural_sciences=Agricultural Sciences
controlledvocabulary.subject.arts_and_humanities=Arts and Humanities
controlledvocabulary.subject.astronomy_and_astrophysics=Astronomy and Astrophysics
@@ -254,8 +281,8 @@ controlledvocabulary.subject.social_sciences=Social Sciences
controlledvocabulary.subject.other=Other
controlledvocabulary.publicationIDType.ark=ark
controlledvocabulary.publicationIDType.arxiv=arXiv
-controlledvocabulary.publicationIDType.cstr=cstr
controlledvocabulary.publicationIDType.bibcode=bibcode
+controlledvocabulary.publicationIDType.cstr=cstr
controlledvocabulary.publicationIDType.doi=doi
controlledvocabulary.publicationIDType.ean13=ean13
controlledvocabulary.publicationIDType.eissn=eissn
diff --git a/src/main/java/propertyFiles/codeMeta20.properties b/src/main/java/propertyFiles/codeMeta20.properties
index c0e7eac6d4a..1e38ae2658a 100644
--- a/src/main/java/propertyFiles/codeMeta20.properties
+++ b/src/main/java/propertyFiles/codeMeta20.properties
@@ -5,7 +5,7 @@ datasetfieldtype.codeVersion.description=Version of the software instance, usual
datasetfieldtype.codeVersion.watermark=e.g. 0.2.1 or 1.3 or 2021.1 etc
datasetfieldtype.developmentStatus.title=Development Status
datasetfieldtype.developmentStatus.description=Description of development status, e.g. work in progress (wip), active, etc. See repostatus.org for more information.
-datasetfieldtype.developmentStatus.watermark= Development Status
+datasetfieldtype.developmentStatus.watermark=
datasetfieldtype.codeRepository.title=Code Repository
datasetfieldtype.codeRepository.description=Link to the repository where the un-compiled, human-readable code and related code is located (SVN, GitHub, CodePlex, institutional GitLab instance, Gitea, etc.).
datasetfieldtype.codeRepository.watermark=e.g. https://github.com/user/project
@@ -39,6 +39,9 @@ datasetfieldtype.softwareRequirements.watermark=e.g. Pandas 1.4.3
datasetfieldtype.softwareRequirementsInfoUrl.title=Info URL
datasetfieldtype.softwareRequirementsInfoUrl.description=Link to required software/library homepage or documentation (ideally also versioned)
datasetfieldtype.softwareRequirementsInfoUrl.watermark=e.g. https://pandas.pydata.org/pandas-docs/version/1.4.3
+datasetfieldtype.softwareRequirementsUrl.title=Download URL
+datasetfieldtype.softwareRequirementsUrl.description=Link to required software/library
+datasetfieldtype.softwareRequirementsUrl.watermark=https://...
datasetfieldtype.softwareSuggestionsItem.title=Software Suggestions
datasetfieldtype.softwareSuggestionsItem.description=Optional dependencies, e.g. for optional features, code development, etc.
datasetfieldtype.softwareSuggestionsItem.watermark=
@@ -48,6 +51,9 @@ datasetfieldtype.softwareSuggestions.watermark=e.g. Sphinx 5.0.2
datasetfieldtype.softwareSuggestionsInfoUrl.title=Info URL
datasetfieldtype.softwareSuggestionsInfoUrl.description=Link to optional software/library homepage or documentation (ideally also versioned)
datasetfieldtype.softwareSuggestionsInfoUrl.watermark=e.g. https://www.sphinx-doc.org
+datasetfieldtype.softwareSuggestionsUrl.title=Download URL
+datasetfieldtype.softwareSuggestionsUrl.description=Link to optional software/library
+datasetfieldtype.softwareSuggestionsUrl.watermark=https://...
datasetfieldtype.memoryRequirements.title=Memory Requirements
datasetfieldtype.memoryRequirements.description=Minimum memory requirements.
datasetfieldtype.memoryRequirements.watermark=
diff --git a/src/main/java/propertyFiles/enzymeML.properties b/src/main/java/propertyFiles/enzymeML.properties
new file mode 100644
index 00000000000..2f2bb19cbda
--- /dev/null
+++ b/src/main/java/propertyFiles/enzymeML.properties
@@ -0,0 +1,157 @@
+metadatablock.name=enzymeML
+metadatablock.displayName=EnzymeML
+datasetfieldtype.enzymeMLVessel.title=Vessels
+datasetfieldtype.enzymeMLVessel.description=This field describes which vessels have been used to carry out the experiment. For example, an 'Eppendorf tube' of size 1 mL that held a constant volume.
+datasetfieldtype.enzymeMLVessel.watermark=
+datasetfieldtype.enzymeMLVesselName.title=Name
+datasetfieldtype.enzymeMLVesselName.description=Specifies the exact product name of the vessel (e.g. Eppendorf Tube)
+datasetfieldtype.enzymeMLVesselName.watermark=
+datasetfieldtype.enzymeMLVesselSize.title=Size
+datasetfieldtype.enzymeMLVesselSize.description=Specifies the volume value of the given vessel (e.g. '1')
+datasetfieldtype.enzymeMLVesselSize.watermark=
+datasetfieldtype.enzymeMLVesselUnits.title=Unit
+datasetfieldtype.enzymeMLVesselUnits.description=Specifies the SI unit corresponding to the given vessel size value (e.g. 'mL')
+datasetfieldtype.enzymeMLVesselUnits.watermark=
+datasetfieldtype.enzymeMLVesselConstant.title=Constant
+datasetfieldtype.enzymeMLVesselConstant.description=Specifies whether the volume is constant. In some instances substances are added over the course of an experiment, in which case this field should be set to 'Not constant'.
+datasetfieldtype.enzymeMLVesselConstant.watermark=
+datasetfieldtype.enzymeMLReactant.title=Reactants
+datasetfieldtype.enzymeMLReactant.description=This field describes the reactants that have been used in the course of the experiment. These should exclusively be small molecules, such as NADH, pyruvate or any other molecule that is not a protein.
+datasetfieldtype.enzymeMLReactant.watermark=
+datasetfieldtype.enzymeMLReactantID.title=Identifier
+datasetfieldtype.enzymeMLReactantID.description=Specifies the internal identifier of the reactant. Please follow the convention of denoting a reactant entity by an 's' followed by an integer. For instance, 's1' is a valid ID.
+datasetfieldtype.enzymeMLReactantID.watermark=
+datasetfieldtype.enzymeMLReactantName.title=Name
+datasetfieldtype.enzymeMLReactantName.description=Specifies the conventional or systematic name of the given reactant. Please note that this field is intentionally not unique, since molecule names vary between fields. Please use either the InChI or SMILES code for unique identification.
+datasetfieldtype.enzymeMLReactantName.watermark=
+datasetfieldtype.enzymeMLReactantVessel.title=Vessel Reference
+datasetfieldtype.enzymeMLReactantVessel.description=Specifies the vessel in which the reactant was given. If the reactant was part of multiple reactions carried out in different vessels, please separate these with a semicolon ';'.
+datasetfieldtype.enzymeMLReactantVessel.watermark=
+datasetfieldtype.enzymeMLReactantInitialConcentration.title=Initial Concentration
+datasetfieldtype.enzymeMLReactantInitialConcentration.description=Specifies the initial concentration value at the beginning of the experiment.
+datasetfieldtype.enzymeMLReactantInitialConcentration.watermark=
+datasetfieldtype.enzymeMLReactantSubstanceUnits.title=Unit
+datasetfieldtype.enzymeMLReactantSubstanceUnits.description=Specifies the SI unit corresponding to the initial concentration value.
+datasetfieldtype.enzymeMLReactantSubstanceUnits.watermark=
+datasetfieldtype.enzymeMLReactantConstant.title=Constant
+datasetfieldtype.enzymeMLReactantConstant.description=Specifies whether the reactant's concentration was constant. If a reactant serves as a substrate/product, concentrations are likely to change and this field should be set to 'Not constant'.
+datasetfieldtype.enzymeMLReactantConstant.watermark=
+datasetfieldtype.enzymeMLReactantInchi.title=InChICode
+datasetfieldtype.enzymeMLReactantInchi.description=Specifies the IUPAC International Chemical Identifier, which is a unique identifier for a molecule.
+datasetfieldtype.enzymeMLReactantInchi.watermark=
+datasetfieldtype.enzymeMLReactantSmiles.title=SmilesCode
+datasetfieldtype.enzymeMLReactantSmiles.description=Specifies the Simplified Molecular Input Line Entry System, which is a unique identifier for a molecule.
+datasetfieldtype.enzymeMLReactantSmiles.watermark=
+datasetfieldtype.enzymeMLReactantSBOTerm.title=SBO Term
+datasetfieldtype.enzymeMLReactantSBOTerm.description=Specifies the ID of the Systems Biology Ontology from branch 'material entity' to define a reactant's role (e.g. SBO:0000240)
+datasetfieldtype.enzymeMLReactantSBOTerm.watermark=SBO:0000xxx
+datasetfieldtype.enzymeMLProtein.title=Proteins
+datasetfieldtype.enzymeMLProtein.description=This field describes the proteins that have been used in the course of the experiment. These should include catalytically active enzymes as well as non-catalytic entities such as inhibitors.
+datasetfieldtype.enzymeMLProtein.watermark=
+datasetfieldtype.enzymeMLProteinID.title=Identifier
+datasetfieldtype.enzymeMLProteinID.description=Specifies the internal identifier of the protein. Please follow the convention of denoting a protein entity by a 'p' followed by an integer. For instance, 'p1' is a valid ID.
+datasetfieldtype.enzymeMLProteinID.watermark=
+datasetfieldtype.enzymeMLProteinName.title=Name
+datasetfieldtype.enzymeMLProteinName.description=Specifies the conventional name of the protein. Please note that this field is intentionally not unique, since protein/enzyme names vary between fields. Use the amino acid sequence, UniProtID and/or EC number for unique identification.
+datasetfieldtype.enzymeMLProteinName.watermark=
+datasetfieldtype.enzymeMLProteinVessel.title=Vessel Reference
+datasetfieldtype.enzymeMLProteinVessel.description=Specifies the name of the vessel in which the protein was given. If the protein was part of multiple reactions carried out in different vessels, please separate these with a semicolon ';'.
+datasetfieldtype.enzymeMLProteinVessel.watermark=
+datasetfieldtype.enzymeMLProteinInitialConcentration.title=Initial Concentration
+datasetfieldtype.enzymeMLProteinInitialConcentration.description=Specifies the initial concentration value at the beginning of the experiment.
+datasetfieldtype.enzymeMLProteinInitialConcentration.watermark=
+datasetfieldtype.enzymeMLProteinSubstanceUnits.title=Unit
+datasetfieldtype.enzymeMLProteinSubstanceUnits.description=Specifies the SI unit corresponding to the initial concentration value.
+datasetfieldtype.enzymeMLProteinSubstanceUnits.watermark=
+datasetfieldtype.enzymeMLProteinConstant.title=Constant
+datasetfieldtype.enzymeMLProteinConstant.description=Specifies whether the protein's concentration was constant. If a protein serves as a substrate, such as in protease reactions, concentrations are likely to change and this field should be set to 'Not constant'.
+datasetfieldtype.enzymeMLProteinConstant.watermark=
+datasetfieldtype.enzymeMLProteinSequence.title=Sequence
+datasetfieldtype.enzymeMLProteinSequence.description=Specifies the amino acid sequence of the protein. For instance 'MGHAGAHHAG…'.
+datasetfieldtype.enzymeMLProteinSequence.watermark=
+datasetfieldtype.enzymeMLProteinOrganism.title=Organism
+datasetfieldtype.enzymeMLProteinOrganism.description=Specifies the host organism that was used to express the protein.
+datasetfieldtype.enzymeMLProteinOrganism.watermark=
+datasetfieldtype.enzymeMLProteinUniProtID.title=UniProtID
+datasetfieldtype.enzymeMLProteinUniProtID.description=Specifies the UniProt Identifier of the protein that is used to query the UniProt database.
+datasetfieldtype.enzymeMLProteinUniProtID.watermark=
+datasetfieldtype.enzymeMLProteinECNumber.title=EC-Number
+datasetfieldtype.enzymeMLProteinECNumber.description=Specifies the EC-Number of the protein that denotes the hierarchical relation within the functional tree of protein families. For instance, EC 1.1.1.1 belongs to the family of alcohol dehydrogenases.
+datasetfieldtype.enzymeMLProteinECNumber.watermark=
+datasetfieldtype.enzymeMLProteinSBOTerm.title=SBO Term
+datasetfieldtype.enzymeMLProteinSBOTerm.description=Specifies the ID of the Systems Biology Ontology from branch 'material entity' to define a protein's role (e.g. SBO:0000240)
+datasetfieldtype.enzymeMLProteinSBOTerm.watermark=SBO:0000xxx
+datasetfieldtype.enzymeMLReaction.title=Reactions
+datasetfieldtype.enzymeMLReaction.description=This field describes the reactions that have been executed in the course of the experiment. Here, the defined reactants/proteins are combined into a meaningful biochemical reaction equation.
+datasetfieldtype.enzymeMLReaction.watermark=
+datasetfieldtype.enzymeMLReactionName.title=Name
+datasetfieldtype.enzymeMLReactionName.description=Specifies the conventional name of the reaction, such as 'Alcohol dehydrogenation'. Please note that this field is intentionally not unique, since names vary between fields and newly acquired reactions might not have a conventional name yet.
+datasetfieldtype.enzymeMLReactionName.watermark=
+datasetfieldtype.enzymeMLReactionTemperatureValue.title=Temperature Value
+datasetfieldtype.enzymeMLReactionTemperatureValue.description=Specifies the temperature value at which the experiment was executed.
+datasetfieldtype.enzymeMLReactionTemperatureValue.watermark=
+datasetfieldtype.enzymeMLReactionTemperatureUnit.title=Temperature Unit
+datasetfieldtype.enzymeMLReactionTemperatureUnit.description=Specifies the SI unit corresponding to the temperature value.
+datasetfieldtype.enzymeMLReactionTemperatureUnit.watermark=
+datasetfieldtype.enzymeMLReactionpH.title=pH Value
+datasetfieldtype.enzymeMLReactionpH.description=Specifies the pH value at which the experiment was executed. Please note that pH values should fall within the interval 0-14.
+datasetfieldtype.enzymeMLReactionpH.watermark=
+datasetfieldtype.enzymeMLReactionEduct.title=Educts
+datasetfieldtype.enzymeMLReactionEduct.description=Specifies the participating reactants/proteins which serve as educts. If multiple educts have been used, separate them with a semicolon ';'.
+datasetfieldtype.enzymeMLReactionEduct.watermark=
+datasetfieldtype.enzymeMLReactionProduct.title=Products
+datasetfieldtype.enzymeMLReactionProduct.description=Specifies the participating reactants/proteins which serve as products. If multiple products have been used, separate them with a semicolon ';'.
+datasetfieldtype.enzymeMLReactionProduct.watermark=
+datasetfieldtype.enzymeMLReactionModifier.title=Modifiers
+datasetfieldtype.enzymeMLReactionModifier.description=Specifies the participating reactants/proteins which serve as modifiers. For instance, catalysing proteins should be entered as modifiers; the same applies to activators/inhibitors. If multiple modifiers have been used, separate them with a semicolon ';'.
+datasetfieldtype.enzymeMLReactionModifier.watermark=
+datasetfieldtype.enzymeMLReactionEquation.title=Equation
+datasetfieldtype.enzymeMLReactionEquation.description=Specifies the reaction equation by separating educts and products via '->', denoting multiple educts/products with a plus sign and stoichiometries by 'Y Molecule'. For instance, the following describes an alcohol dehydrogenation: '1 Ethanol + 1 NAD+ -> 1 Acetaldehyde + 1 NADH + 1 H+'.
+datasetfieldtype.enzymeMLReactionEquation.watermark=
+datasetfieldtype.enzymeMLKineticLaw.title=Kinetic Law
+datasetfieldtype.enzymeMLKineticLaw.description=This field describes the kinetic law that has been used to model the reaction kinetics. It includes the conventional name as well as the mathematical equation.
+datasetfieldtype.enzymeMLKineticLaw.watermark=
+datasetfieldtype.enzymeMLKineticLawName.title=Name
+datasetfieldtype.enzymeMLKineticLawName.description=Specifies the conventional name of the kinetic law that has been used. For instance 'Reversible Michaelis-Menten'.
+datasetfieldtype.enzymeMLKineticLawName.watermark=
+datasetfieldtype.enzymeMLKineticLawReaction.title=Reaction Reference
+datasetfieldtype.enzymeMLKineticLawReaction.description=Specifies the reaction that has been modeled by the given kinetic law.
+datasetfieldtype.enzymeMLKineticLawReaction.watermark=
+datasetfieldtype.enzymeMLKineticLawEquation.title=Kinetic Model
+datasetfieldtype.enzymeMLKineticLawEquation.description=Specifies the mathematical equation of the given kinetic law. For variables that reference entities that are part of this EnzymeML document, please use the given identifier. Parameters will be defined in another field and are referenced by their conventional names. For instance, the following equation denotes a valid kinetic model 'vmax * s1 / ( km + s1 )'.
+datasetfieldtype.enzymeMLKineticLawEquation.watermark=
+datasetfieldtype.enzymeMLKineticParameter.title=Kinetic Parameters
+datasetfieldtype.enzymeMLKineticParameter.description=This field describes the kinetic parameters that have been estimated using the given kinetic law.
+datasetfieldtype.enzymeMLKineticParameter.watermark=
+datasetfieldtype.enzymeMLKineticParameterName.title=Name
+datasetfieldtype.enzymeMLKineticParameterName.description=Specifies the conventional name of the kinetic parameter that has been estimated. For instance, 'vmax' is a valid name for a parameter. Please note that for unique identification the SBO Term should be included.
+datasetfieldtype.enzymeMLKineticParameterName.watermark=
+datasetfieldtype.enzymeMLKineticParameterValue.title=Value
+datasetfieldtype.enzymeMLKineticParameterValue.description=Specifies the numerical value of the estimated kinetic parameter.
+datasetfieldtype.enzymeMLKineticParameterValue.watermark=
+datasetfieldtype.enzymeMLKineticParameterUnit.title=Unit
+datasetfieldtype.enzymeMLKineticParameterUnit.description=Specifies the SI unit of the estimated kinetic parameter.
+datasetfieldtype.enzymeMLKineticParameterUnit.watermark=
+datasetfieldtype.enzymeMLKineticParameterSBOTerm.title=SBO Term
+datasetfieldtype.enzymeMLKineticParameterSBOTerm.description=Specifies the ID of the Systems Biology Ontology from branch 'systems description parameter' to define a kinetic parameter (e.g. SBO:0000545)
+datasetfieldtype.enzymeMLKineticParameterSBOTerm.watermark=SBO:0000xxx
+controlledvocabulary.enzymeMLVesselUnits.ul=ul
+controlledvocabulary.enzymeMLVesselUnits.ml=ml
+controlledvocabulary.enzymeMLVesselUnits.l=l
+controlledvocabulary.enzymeMLVesselConstant.constant=Constant
+controlledvocabulary.enzymeMLVesselConstant.not_constant=Not constant
+controlledvocabulary.enzymeMLReactantSubstanceUnits.nm=nM
+controlledvocabulary.enzymeMLReactantSubstanceUnits.um=uM
+controlledvocabulary.enzymeMLReactantSubstanceUnits.mm=mM
+controlledvocabulary.enzymeMLReactantSubstanceUnits.m=M
+controlledvocabulary.enzymeMLReactantConstant.constant=Constant
+controlledvocabulary.enzymeMLReactantConstant.not_constant=Not constant
+controlledvocabulary.enzymeMLProteinSubstanceUnits.pm=pM
+controlledvocabulary.enzymeMLProteinSubstanceUnits.nm=nM
+controlledvocabulary.enzymeMLProteinSubstanceUnits.um=uM
+controlledvocabulary.enzymeMLProteinSubstanceUnits.mm=mM
+controlledvocabulary.enzymeMLProteinSubstanceUnits.m=M
+controlledvocabulary.enzymeMLProteinConstant.constant=Constant
+controlledvocabulary.enzymeMLProteinConstant.not_constant=Not constant
+controlledvocabulary.enzymeMLReactionTemperatureUnit.celsius=Celsius
+controlledvocabulary.enzymeMLReactionTemperatureUnit.kelvin=Kelvin
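The keys above follow the two naming conventions used throughout these bundles: field labels are keyed as datasetfieldtype.<fieldName>.{title,description,watermark}, and controlled vocabulary labels as controlledvocabulary.<fieldName>.<value>, with spaces in the value encoded as underscores in the key (e.g. not_constant). A minimal shell sketch for inspecting this, run from the repository root after the patch is applied:

    # List the label keys for one field and its vocabulary from the bundle added above.
    grep '^datasetfieldtype\.enzymeMLVesselConstant\.' src/main/java/propertyFiles/enzymeML.properties
    grep '^controlledvocabulary\.enzymeMLVesselConstant\.' src/main/java/propertyFiles/enzymeML.properties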
diff --git a/src/main/java/propertyFiles/privacy.properties b/src/main/java/propertyFiles/privacy.properties
new file mode 100644
index 00000000000..f7ae8a9bbbf
--- /dev/null
+++ b/src/main/java/propertyFiles/privacy.properties
@@ -0,0 +1,25 @@
+metadatablock.name=privacy
+metadatablock.displayName=Privacy Metadata
+datasetfieldtype.privData.title=Personal Data
+datasetfieldtype.privData.description=Does the dataset contain personal data according to Art. 4 GDPR?
+datasetfieldtype.privData.watermark=
+datasetfieldtype.privSpecial.title=Special Categories
+datasetfieldtype.privSpecial.description=Does the dataset contain special categories of personal data according to Art. 9 GDPR?
+datasetfieldtype.privSpecial.watermark=
+datasetfieldtype.privConsent.title=Explicit Consent
+datasetfieldtype.privConsent.description=Did each person whose information appears in the data give explicit permission to share the data?
+datasetfieldtype.privConsent.watermark=
+datasetfieldtype.privTermsOfConsent.title=Terms of Consent
+datasetfieldtype.privTermsOfConsent.description=Does the consent have any restrictions on sharing?
+datasetfieldtype.privTermsOfConsent.watermark=List restrictions or terms under which the consent to share is given
+datasetfieldtype.privMeasures.title=Measures
+datasetfieldtype.privMeasures.description=Which technical and organisational measures are taken to secure the data (e.g. encryption of the data, rights management)?
+datasetfieldtype.privMeasures.watermark=
+controlledvocabulary.privData.no=no
+controlledvocabulary.privData.yes,_but_anonymized=yes, but anonymized
+controlledvocabulary.privData.yes,_but_pseudonymized=yes, but pseudonymized
+controlledvocabulary.privData.yes=yes
+controlledvocabulary.privSpecial.no=no
+controlledvocabulary.privSpecial.yes=yes
+controlledvocabulary.privConsent.no=no
+controlledvocabulary.privConsent.yes=yes
diff --git a/src/main/java/propertyFiles/process.properties b/src/main/java/propertyFiles/process.properties
new file mode 100644
index 00000000000..84272cf49e6
--- /dev/null
+++ b/src/main/java/propertyFiles/process.properties
@@ -0,0 +1,150 @@
+metadatablock.name=process
+metadatablock.displayName=Process Metadata
+datasetfieldtype.processMethods.title=Processing Methods
+datasetfieldtype.processMethods.description=Information about the methods used in the data life cycle.
+datasetfieldtype.processMethods.watermark=
+datasetfieldtype.processMethodsName.title=Name
+datasetfieldtype.processMethodsName.description=Name of the method as free text.
+datasetfieldtype.processMethodsName.watermark=
+datasetfieldtype.processMethodsDescription.title=Description
+datasetfieldtype.processMethodsDescription.description=Description of the method as free text.
+datasetfieldtype.processMethodsDescription.watermark=
+datasetfieldtype.processMethodsPars.title=Parameters
+datasetfieldtype.processMethodsPars.description=List of all parameter names relevant for this method (detailed information about parameters should be given under Method Parameters).
+datasetfieldtype.processMethodsPars.watermark=
+datasetfieldtype.processMethodsPar.title=Method Parameters
+datasetfieldtype.processMethodsPar.description=Parameters relevant for processing methods.
+datasetfieldtype.processMethodsPar.watermark=
+datasetfieldtype.processMethodsParName.title=Name
+datasetfieldtype.processMethodsParName.description=Name of the parameter.
+datasetfieldtype.processMethodsParName.watermark=
+datasetfieldtype.processMethodsParSymbol.title=Symbol
+datasetfieldtype.processMethodsParSymbol.description=The symbol used to describe this parameter.
+datasetfieldtype.processMethodsParSymbol.watermark=
+datasetfieldtype.processMethodsParUnit.title=Unit
+datasetfieldtype.processMethodsParUnit.description=The unit or scale of this parameter.
+datasetfieldtype.processMethodsParUnit.watermark=
+datasetfieldtype.processMethodsParValue.title=Value
+datasetfieldtype.processMethodsParValue.description=The (numerical) value of this parameter.
+datasetfieldtype.processMethodsParValue.watermark=
+datasetfieldtype.processMethodsParTextValue.title=Textual Value
+datasetfieldtype.processMethodsParTextValue.description=The value of this method parameter (for non-numerical values).
+datasetfieldtype.processMethodsParTextValue.watermark=
+datasetfieldtype.processSoftware.title=Software
+datasetfieldtype.processSoftware.description=Information about the software used.
+datasetfieldtype.processSoftware.watermark=
+datasetfieldtype.processSoftwareName.title=Name
+datasetfieldtype.processSoftwareName.description=Name of the software.
+datasetfieldtype.processSoftwareName.watermark=
+datasetfieldtype.processSoftwareVersion.title=Version
+datasetfieldtype.processSoftwareVersion.description=Version of the software.
+datasetfieldtype.processSoftwareVersion.watermark=Number or commit
+datasetfieldtype.processSoftwareIDType.title=ID Type
+datasetfieldtype.processSoftwareIDType.description=The type of digital identifier used for this software (e.g., Digital Object Identifier (DOI)).
+datasetfieldtype.processSoftwareIDType.watermark=
+datasetfieldtype.processSoftwareIDNumber.title=ID Number
+datasetfieldtype.processSoftwareIDNumber.description=The identifier for the selected ID type.
+datasetfieldtype.processSoftwareIDNumber.watermark=
+datasetfieldtype.processSoftwareCitation.title=Citation
+datasetfieldtype.processSoftwareCitation.description=Associated text publication for the software.
+datasetfieldtype.processSoftwareCitation.watermark=
+datasetfieldtype.processSoftwareURL.title=URL
+datasetfieldtype.processSoftwareURL.description=Link to the software, code repository or application.
+datasetfieldtype.processSoftwareURL.watermark=
+datasetfieldtype.processSoftwareLicence.title=License
+datasetfieldtype.processSoftwareLicence.description=The license type of the software.
+datasetfieldtype.processSoftwareLicence.watermark=
+datasetfieldtype.processInstru.title=Instruments
+datasetfieldtype.processInstru.description=A representation of a piece of laboratory or field equipment, used in the execution of an experiment, that produces data.
+datasetfieldtype.processInstru.watermark=
+datasetfieldtype.processInstruName.title=Name
+datasetfieldtype.processInstruName.description=Name of this instrument.
+datasetfieldtype.processInstruName.watermark=
+datasetfieldtype.processInstruDescr.title=Description
+datasetfieldtype.processInstruDescr.description=Description of the instrument, e.g., what it is used for.
+datasetfieldtype.processInstruDescr.watermark=
+datasetfieldtype.processInstruType.title=Type
+datasetfieldtype.processInstruType.description=The type or version of this instrument.
+datasetfieldtype.processInstruType.watermark=
+datasetfieldtype.processInstruPartnum.title=Part Number
+datasetfieldtype.processInstruPartnum.description=A vendor's part number for an instrument or a piece of equipment.
+datasetfieldtype.processInstruPartnum.watermark=
+datasetfieldtype.processInstruSerialnum.title=Serial Number
+datasetfieldtype.processInstruSerialnum.description=A vendor's serial number for an instrument or a piece of equipment.
+datasetfieldtype.processInstruSerialnum.watermark=
+datasetfieldtype.processInstruSoftware.title=Software
+datasetfieldtype.processInstruSoftware.description=Required software for this instrument.
+datasetfieldtype.processInstruSoftware.watermark=
+datasetfieldtype.processInstruLocation.title=Location
+datasetfieldtype.processInstruLocation.description=Location of the instrument.
+datasetfieldtype.processInstruLocation.watermark=
+datasetfieldtype.processEnv.title=Environments
+datasetfieldtype.processEnv.description=(Computational) environments used for the data generation.
+datasetfieldtype.processEnv.watermark=
+datasetfieldtype.processEnvName.title=Name
+datasetfieldtype.processEnvName.description=Name of the environment.
+datasetfieldtype.processEnvName.watermark=
+datasetfieldtype.processEnvCompName.title=Compiler Names and Flags
+datasetfieldtype.processEnvCompName.description=Names and flags of the compilers used.
+datasetfieldtype.processEnvCompName.watermark=
+datasetfieldtype.processEnvNodes.title=Number of Nodes
+datasetfieldtype.processEnvNodes.description=Number of compute nodes inside a high performance cluster environment.
+datasetfieldtype.processEnvNodes.watermark=
+datasetfieldtype.processEnvPPN.title=PPN
+datasetfieldtype.processEnvPPN.description=Processors per node.
+datasetfieldtype.processEnvPPN.watermark=
+datasetfieldtype.processStep.title=Processing Steps
+datasetfieldtype.processStep.description=Specification of the processing steps in the data life cycle.
+datasetfieldtype.processStep.watermark=
+datasetfieldtype.processStepId.title=Id
+datasetfieldtype.processStepId.description=Used to order the processing steps.
+datasetfieldtype.processStepId.watermark=number
+datasetfieldtype.processStepType.title=Type
+datasetfieldtype.processStepType.description=Specifies the position in the data life cycle.
+datasetfieldtype.processStepType.watermark=
+datasetfieldtype.processStepDate.title=Date
+datasetfieldtype.processStepDate.description=Date this step was performed.
+datasetfieldtype.processStepDate.watermark=
+datasetfieldtype.processStepMethods.title=Methods
+datasetfieldtype.processStepMethods.description=List of methods used in this processing step (detailed information about methods should be given under Processing Methods).
+datasetfieldtype.processStepMethods.watermark=
+datasetfieldtype.processStepErrorMethod.title=Error Method
+datasetfieldtype.processStepErrorMethod.description=Method used to measure the errors or uncertainties of this processing step.
+datasetfieldtype.processStepErrorMethod.watermark=
+datasetfieldtype.processStepSoftware.title=Software
+datasetfieldtype.processStepSoftware.description=List of software names used in this processing step (detailed information about software should be given under Software).
+datasetfieldtype.processStepSoftware.watermark=
+datasetfieldtype.processStepHardware.title=Instruments
+datasetfieldtype.processStepHardware.description=List of instrumental hardware used in this processing step (detailed information about instruments should be given under Instruments).
+datasetfieldtype.processStepHardware.watermark=
+datasetfieldtype.processStepEnvironment.title=Environment
+datasetfieldtype.processStepEnvironment.description=Name of the environment used for this processing step (detailed information about the environment should be given under Environments).
+datasetfieldtype.processStepEnvironment.watermark=
+datasetfieldtype.processStepInput.title=Input
+datasetfieldtype.processStepInput.description=List of file names or objects that were the input of this processing step.
+datasetfieldtype.processStepInput.watermark=
+datasetfieldtype.processStepOutput.title=Output
+datasetfieldtype.processStepOutput.description=List of file names or objects that were the output of this processing step.
+datasetfieldtype.processStepOutput.watermark=
+controlledvocabulary.processStepType.generation=Generation
+controlledvocabulary.processStepType.postprocessing=Postprocessing
+controlledvocabulary.processStepType.analysis=Analysis
+controlledvocabulary.processStepType.other=Other
+controlledvocabulary.processSoftwareIDType.ark=ark
+controlledvocabulary.processSoftwareIDType.arxiv=arXiv
+controlledvocabulary.processSoftwareIDType.bibcode=bibcode
+controlledvocabulary.processSoftwareIDType.doi=doi
+controlledvocabulary.processSoftwareIDType.ean13=ean13
+controlledvocabulary.processSoftwareIDType.eissn=eissn
+controlledvocabulary.processSoftwareIDType.handle=handle
+controlledvocabulary.processSoftwareIDType.isbn=isbn
+controlledvocabulary.processSoftwareIDType.issn=issn
+controlledvocabulary.processSoftwareIDType.istc=istc
+controlledvocabulary.processSoftwareIDType.lissn=lissn
+controlledvocabulary.processSoftwareIDType.lsid=lsid
+controlledvocabulary.processSoftwareIDType.pmid=pmid
+controlledvocabulary.processSoftwareIDType.purl=purl
+controlledvocabulary.processSoftwareIDType.swmath=swmath
+controlledvocabulary.processSoftwareIDType.upc=upc
+controlledvocabulary.processSoftwareIDType.url=url
+controlledvocabulary.processSoftwareIDType.urn=urn
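For context: a properties bundle like the ones added in this patch only supplies display labels; the block itself is registered by loading a matching TSV definition through the admin API. A sketch, assuming a local installation at localhost:8080 and a hypothetical process.tsv whose block and field names match the keys above (the TSV is not part of this patch):

    curl http://localhost:8080/api/admin/datasetfield/load -X POST \
         -H "Content-type: text/tab-separated-values" \
         --data-binary @process.tsv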
From d67139fec61858a6069ee52c8bb06a643e707b4f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bj=C3=B6rn=20Selent?=
Date: Wed, 31 Jul 2024 10:34:01 +0200
Subject: [PATCH 2/5] Changed descriptions
---
src/main/java/propertyFiles/citation.properties | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/src/main/java/propertyFiles/citation.properties b/src/main/java/propertyFiles/citation.properties
index 42fc0c4b539..3cd5242d084 100644
--- a/src/main/java/propertyFiles/citation.properties
+++ b/src/main/java/propertyFiles/citation.properties
@@ -85,8 +85,8 @@ datasetfieldtype.topicClassValue.watermark=
datasetfieldtype.topicClassVocab.title=Controlled Vocabulary Name
datasetfieldtype.topicClassVocab.description=The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH)
datasetfieldtype.topicClassVocab.watermark=
-datasetfieldtype.topicClassVocabURI.title=Controlled Vocabulary URL
-datasetfieldtype.topicClassVocabURI.description=The URL where one can access information about the term's controlled vocabulary
+datasetfieldtype.topicClassVocabURI.title=Term URL
+datasetfieldtype.topicClassVocabURI.description=The URL where one can access information about the term, if available. Otherwise the URL of the controlled vocabulary
datasetfieldtype.topicClassVocabURI.watermark=https://
datasetfieldtype.publication.title=Related Publication
datasetfieldtype.publication.description=The article or report that uses the data in the Dataset. The full list of related publications will be displayed on the metadata tab
@@ -149,7 +149,7 @@ datasetfieldtype.grantNumberAgency.title=Agency
datasetfieldtype.grantNumberAgency.description=The agency that provided financial support for the Dataset
datasetfieldtype.grantNumberAgency.watermark=Organization XYZ
datasetfieldtype.grantNumberValue.title=Identifier
-datasetfieldtype.grantNumberValue.description=The grant identifier or contract identifier of the agency that provided financial support for the Dataset
+datasetfieldtype.grantNumberValue.description=For DFG as agency: the grant identifier; otherwise the grant identifier or contract identifier of the agency that provided financial support for the Dataset
datasetfieldtype.grantNumberValue.watermark=
datasetfieldtype.project.title=Project
datasetfieldtype.project.description=Information about the project as context of the data
From 8f27562344467df5b6b3af4e70f0ec6165db6995 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bj=C3=B6rn=20Selent?=
Date: Wed, 31 Jul 2024 13:08:55 +0200
Subject: [PATCH 3/5] Description of topicClassification corrected
---
src/main/java/propertyFiles/citation.properties | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/main/java/propertyFiles/citation.properties b/src/main/java/propertyFiles/citation.properties
index 3cd5242d084..93ab1eaa0ba 100644
--- a/src/main/java/propertyFiles/citation.properties
+++ b/src/main/java/propertyFiles/citation.properties
@@ -85,8 +85,8 @@ datasetfieldtype.topicClassValue.watermark=
datasetfieldtype.topicClassVocab.title=Controlled Vocabulary Name
datasetfieldtype.topicClassVocab.description=The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH)
datasetfieldtype.topicClassVocab.watermark=
-datasetfieldtype.topicClassVocabURI.title=Term URL
-datasetfieldtype.topicClassVocabURI.description=The URL where one can access information about the term, if available. Otherwise the URL of the controlled vocabulary
+datasetfieldtype.topicClassVocabURI.title=Term URI
+datasetfieldtype.topicClassVocabURI.description=A URI that points to the web presence of the Keyword Term
datasetfieldtype.topicClassVocabURI.watermark=https://
datasetfieldtype.publication.title=Related Publication
datasetfieldtype.publication.description=The article or report that uses the data in the Dataset. The full list of related publications will be displayed on the metadata tab
From 265834159fae75d1c5c059f912cddb13dfd67cec Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bj=C3=B6rn=20Selent?=
Date: Thu, 1 Aug 2024 10:37:35 +0200
Subject: [PATCH 4/5] Metadata field descriptions adapted
---
src/main/java/propertyFiles/citation.properties | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/main/java/propertyFiles/citation.properties b/src/main/java/propertyFiles/citation.properties
index 93ab1eaa0ba..7ed48851195 100644
--- a/src/main/java/propertyFiles/citation.properties
+++ b/src/main/java/propertyFiles/citation.properties
@@ -86,7 +86,7 @@ datasetfieldtype.topicClassVocab.title=Controlled Vocabulary Name
datasetfieldtype.topicClassVocab.description=The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH)
datasetfieldtype.topicClassVocab.watermark=
datasetfieldtype.topicClassVocabURI.title=Term URI
-datasetfieldtype.topicClassVocabURI.description=A URI that points to the web presence of the Keyword Term
+datasetfieldtype.topicClassVocabURI.description=A URI that points to the web presence of the Topic Term, if available. Otherwise the URL of the controlled vocabulary
datasetfieldtype.topicClassVocabURI.watermark=https://
datasetfieldtype.publication.title=Related Publication
datasetfieldtype.publication.description=The article or report that uses the data in the Dataset. The full list of related publications will be displayed on the metadata tab
From 812414be135ba37954f48a4709ddf032754c4ef4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bj=C3=B6rn=20Selent?=
Date: Wed, 9 Oct 2024 16:13:54 +0200
Subject: [PATCH 5/5] Merge upstream v6.4 into branch properties
---
.github/ISSUE_TEMPLATE/bug_report.md | 7 +-
.github/ISSUE_TEMPLATE/feature_request.md | 7 +-
.github/ISSUE_TEMPLATE/idea_proposal.md | 40 +
.github/actions/setup-maven/action.yml | 37 +
.github/workflows/check_property_files.yml | 32 +
.github/workflows/container_app_push.yml | 41 +-
.github/workflows/container_base_push.yml | 153 +-
.github/workflows/container_maintenance.yml | 119 +
.github/workflows/maven_unit_test.yml | 8 +
.github/workflows/scripts/maintenance-job.sh | 180 +
.github/workflows/scripts/utils.sh | 108 +
.github/workflows/shellspec.yml | 23 +-
conf/solr/schema.xml | 5 +
doc/release-notes/6.4-release-notes.md | 526 ++
.../_static/api/dataset-create-software.json | 82 +
.../api/dataset-create-software.jsonld | 16 +
.../dataverse-complete-optional-params.json | 65 +
.../source/_static/util/counter_daily.sh | 2 +-
.../source/_templates/navbar.html | 1 -
.../source/admin/dataverses-datasets.rst | 37 +-
.../source/admin/discoverability.rst | 13 +-
.../source/admin/make-data-count.rst | 8 +-
doc/sphinx-guides/source/api/changelog.rst | 7 +
.../source/api/client-libraries.rst | 7 +
doc/sphinx-guides/source/api/native-api.rst | 296 +-
doc/sphinx-guides/source/api/search.rst | 56 +-
doc/sphinx-guides/source/conf.py | 4 +-
.../source/container/base-image.rst | 90 +-
.../source/container/dev-usage.rst | 5 +
.../source/container/running/demo.rst | 2 -
doc/sphinx-guides/source/contributor/code.md | 1 +
.../source/contributor/documentation.md | 23 +-
doc/sphinx-guides/source/contributor/index.md | 2 +-
.../source/developers/big-data-support.rst | 2 +
.../dataset-semantic-metadata-api.rst | 15 +-
.../source/developers/dev-environment.rst | 10 +-
.../source/developers/globus-api.rst | 2 +
.../source/developers/make-data-count.rst | 6 +-
.../developers/making-library-releases.rst | 55 +-
.../source/developers/making-releases.rst | 49 +-
.../source/developers/metadataexport.rst | 9 +-
.../developers/s3-direct-upload-api.rst | 35 +-
doc/sphinx-guides/source/developers/tips.rst | 54 +-
.../source/developers/version-control.rst | 63 +-
.../source/developers/windows.rst | 99 +-
.../source/installation/advanced.rst | 38 +-
.../source/installation/config.rst | 106 +-
.../source/installation/prerequisites.rst | 16 +-
.../source/user/dataset-management.rst | 28 +-
.../source/user/dataverse-management.rst | 3 +-
doc/sphinx-guides/source/versions.rst | 3 +-
docker/compose/demo/compose.yml | 2 +-
modules/container-base/README.md | 20 +-
modules/container-base/pom.xml | 9 +-
.../src/backports/v6.1/001-pom.xml.patch | 26 +
.../src/backports/v6.1/002-Dockerfile.patch | 10 +
.../backports/v6.1/003-parent-pom.xml.patch | 11 +
.../src/backports/v6.2/001-pom.xml.patch | 26 +
.../v6.2/002-Dockerfile.labels.patch | 10 +
.../v6.2/003-Dockerfile.security.patch | 10 +
.../backports/v6.2/004-parent-pom.xml.patch | 11 +
.../src/backports/v6.3/001-pom.xml.patch | 26 +
.../v6.3/002-Dockerfile.labels.patch | 10 +
.../v6.3/003-Dockerfile.security.patch | 7 +
.../backports/v6.3/004-parent-pom.xml.patch | 11 +
.../container-base/src/main/docker/Dockerfile | 89 +-
.../src/main/docker/scripts/entrypoint.sh | 12 +-
.../docker/scripts/init_1_change_passwords.sh | 43 +
.../init_1_generate_deploy_commands.sh | 17 +-
.../init_1_generate_devmode_commands.sh | 69 +-
.../main/docker/scripts/startInForeground.sh | 42 +-
modules/container-configbaker/Dockerfile | 2 +-
modules/dataverse-parent/pom.xml | 28 +-
pom.xml | 5 +-
...dataset-create-new-all-default-fields.json | 6 +
scripts/api/data/metadatablocks/citation.tsv | 8207 ++++++++++++++++-
.../issues/8578/citation_languages_10762.tsv | 7921 ++++++++++++++++
.../8578/citation_languages_develop.tsv | 186 +
scripts/issues/8578/script_check_languages.pl | 189 +
scripts/search/tests/data/dataset-finch3.json | 102 +
src/main/docker/scripts/init_2_configure.sh | 44 +-
.../scripts/init_3_wait_dataverse_db_host.sh | 2 +-
.../harvard/iq/dataverse/DataCitation.java | 5 +-
.../edu/harvard/iq/dataverse/DataFile.java | 19 +
.../iq/dataverse/DataFileServiceBean.java | 20 +-
.../edu/harvard/iq/dataverse/Dataset.java | 20 +-
.../harvard/iq/dataverse/DatasetField.java | 32 +-
.../dataverse/DatasetFieldCompoundValue.java | 9 +
.../iq/dataverse/DatasetFieldConstant.java | 5 +
.../iq/dataverse/DatasetFieldServiceBean.java | 161 +-
.../edu/harvard/iq/dataverse/DatasetPage.java | 45 +
.../iq/dataverse/DatasetRelPublication.java | 29 +-
.../iq/dataverse/DatasetServiceBean.java | 26 +-
.../harvard/iq/dataverse/DatasetVersion.java | 56 +-
.../dataverse/DatasetVersionServiceBean.java | 157 +-
.../iq/dataverse/DatasetVersionUI.java | 46 +-
.../edu/harvard/iq/dataverse/Dataverse.java | 16 +-
.../iq/dataverse/DataverseServiceBean.java | 57 +-
.../harvard/iq/dataverse/DataverseTheme.java | 9 +
.../edu/harvard/iq/dataverse/DvObject.java | 2 -
.../iq/dataverse/DvObjectServiceBean.java | 15 +-
.../iq/dataverse/EditDatafilesPage.java | 14 +-
.../iq/dataverse/EjbDataverseEngine.java | 11 +-
.../ExternalFileUploadInProgress.java | 110 +
.../iq/dataverse/ExternalIdentifier.java | 4 +-
.../FeaturedDataverseServiceBean.java | 7 +-
.../edu/harvard/iq/dataverse/FilePage.java | 10 +
.../edu/harvard/iq/dataverse/GlobalId.java | 7 +
.../iq/dataverse/GuestbookResponsesPage.java | 6 +-
.../harvard/iq/dataverse/MailServiceBean.java | 30 +-
.../iq/dataverse/ManageGuestbooksPage.java | 4 +-
.../harvard/iq/dataverse/SettingsWrapper.java | 12 +
.../java/edu/harvard/iq/dataverse/Shib.java | 7 +-
.../iq/dataverse/TermsOfUseAndAccess.java | 24 +
.../iq/dataverse/ThemeWidgetFragment.java | 140 +-
.../iq/dataverse/ThumbnailServiceWrapper.java | 26 +-
.../iq/dataverse/UserNotification.java | 3 +-
.../iq/dataverse/api/AbstractApiBean.java | 6 +-
.../edu/harvard/iq/dataverse/api/Access.java | 2 +-
.../edu/harvard/iq/dataverse/api/Admin.java | 30 +-
.../iq/dataverse/api/ApiBlockingFilter.java | 6 +-
.../iq/dataverse/api/ApiConstants.java | 4 +
.../iq/dataverse/api/DatasetFields.java | 29 +
.../harvard/iq/dataverse/api/Datasets.java | 180 +-
.../harvard/iq/dataverse/api/Dataverses.java | 172 +-
.../iq/dataverse/api/SavedSearches.java | 19 +-
.../WebApplicationExceptionHandler.java | 11 +-
.../api/imports/ImportGenericServiceBean.java | 7 +-
.../api/imports/ImportServiceBean.java | 10 +-
.../providers/builtin/DataverseUserPage.java | 2 +
.../providers/builtin/PasswordEncryption.java | 6 +-
.../dataaccess/TabularSubsetGenerator.java | 14 -
.../iq/dataverse/dataset/DatasetType.java | 70 +
.../dataset/DatasetTypeServiceBean.java | 79 +
.../iq/dataverse/dataset/DatasetUtil.java | 68 +-
.../datasetutility/AddReplaceFileHelper.java | 10 +-
.../engine/command/CommandContext.java | 3 +
.../impl/AbstractCreateDatasetCommand.java | 15 +-
.../command/impl/AbstractDatasetCommand.java | 80 +-
.../command/impl/CreateDataverseCommand.java | 74 +-
.../CuratePublishedDatasetVersionCommand.java | 125 +-
.../DeleteDatasetLinkingDataverseCommand.java | 11 +-
.../impl/DuraCloudSubmitToArchiveCommand.java | 7 +-
.../FinalizeDatasetPublicationCommand.java | 25 +-
.../GoogleCloudSubmitToArchiveCommand.java | 7 +-
.../impl/ListDataverseInputLevelsCommand.java | 40 +
.../command/impl/ListFacetsCommand.java | 22 +-
.../command/impl/MoveDatasetCommand.java | 11 +-
.../command/impl/PublishDatasetCommand.java | 7 -
.../command/impl/ReservePidCommand.java | 30 +-
.../impl/S3SubmitToArchiveCommand.java | 6 +-
.../impl/UpdateDatasetTargetURLCommand.java | 12 +-
.../impl/UpdateDatasetVersionCommand.java | 5 +-
.../UpdateDataverseInputLevelsCommand.java | 18 +-
.../impl/UpdateDataverseThemeCommand.java | 71 +-
.../UpdateDvObjectPIDMetadataCommand.java | 3 +-
.../iq/dataverse/export/DataCiteExporter.java | 13 +-
.../iq/dataverse/export/JSONExporter.java | 4 +-
.../iq/dataverse/export/OAI_OREExporter.java | 4 +-
.../export/SchemaDotOrgExporter.java | 3 +-
.../dataverse/export/ddi/DdiExportUtil.java | 508 +-
.../dublincore/DublinCoreExportUtil.java | 68 +-
.../export/openaire/OpenAireExportUtil.java | 11 +-
.../dataverse/globus/GlobusServiceBean.java | 762 +-
.../globus/GlobusTaskInProgress.java | 202 +
.../{GlobusTask.java => GlobusTaskState.java} | 6 +-
.../iq/dataverse/globus/GlobusUtil.java | 60 +
.../globus/TaskMonitoringServiceBean.java | 131 +
.../dataverse/ingest/IngestServiceBean.java | 5 +-
.../ingest/IngestableDataChecker.java | 7 +-
.../impl/plugins/dta/DTAFileReader.java | 25 +-
.../impl/plugins/dta/DataReader.java | 19 +-
.../impl/plugins/dta/NewDTAFileReader.java | 11 +-
.../impl/plugins/por/PORFileReader.java | 8 +-
.../impl/plugins/rdata/RDATAFileReader.java | 9 +-
.../impl/plugins/sav/SAVFileReader.java | 17 +-
.../makedatacount/DatasetMetrics.java | 4 +-
.../makedatacount/MakeDataCountUtil.java | 4 +-
.../iq/dataverse/metrics/MetricsUtil.java | 70 +-
.../pidproviders/PidProviderFactoryBean.java | 3 +
.../pidproviders/doi/AbstractDOIProvider.java | 25 +-
.../pidproviders/doi/DoiMetadata.java | 138 +
.../pidproviders/doi/XmlMetadataTemplate.java | 1661 +++-
.../doi/crossref/CrossRefDOIProvider.java | 125 +
.../crossref/CrossRefDOIProviderFactory.java | 43 +
.../crossref/CrossRefDOIRegisterService.java | 319 +
.../doi/crossref/CrossRefRESTfullClient.java | 119 +
.../datacite/DOIDataCiteRegisterService.java | 57 +-
.../doi/datacite/DataCiteDOIProvider.java | 3 +
.../doi/datacite/DataCiteRESTfullClient.java | 8 +-
.../handle/HandlePidProvider.java | 16 +-
.../perma/PermaLinkPidProvider.java | 2 +-
.../perma/PermaLinkProviderFactory.java | 2 +-
.../privateurl/PrivateUrlServiceBean.java | 2 +-
.../provenance/ProvPopupFragmentBean.java | 3 +-
.../rserve/RemoteDataFrameService.java | 3 +-
.../search/IndexBatchServiceBean.java | 42 +-
.../iq/dataverse/search/IndexServiceBean.java | 89 +-
.../iq/dataverse/search/SearchFields.java | 5 +
.../dataverse/search/SearchServiceBean.java | 46 +-
.../iq/dataverse/search/SolrSearchResult.java | 2461 ++---
.../savedsearch/SavedSearchServiceBean.java | 76 +-
.../iq/dataverse/settings/FeatureFlags.java | 14 +
.../iq/dataverse/settings/JvmSettings.java | 12 +-
.../timer/DataverseTimerServiceBean.java | 4 +-
.../harvard/iq/dataverse/util/FileUtil.java | 10 +-
.../harvard/iq/dataverse/util/MailUtil.java | 17 +
.../iq/dataverse/util/PersonOrOrgUtil.java | 2 +-
.../iq/dataverse/util/ShapefileHandler.java | 42 +-
.../harvard/iq/dataverse/util/StringUtil.java | 10 +-
.../iq/dataverse/util/SystemConfig.java | 6 +-
.../iq/dataverse/util/UrlSignerUtil.java | 6 +-
.../iq/dataverse/util/bagit/BagGenerator.java | 9 +-
.../iq/dataverse/util/bagit/OREMap.java | 3 +-
.../iq/dataverse/util/json/JSONLDUtil.java | 12 +-
.../iq/dataverse/util/json/JsonLDTerm.java | 2 +
.../iq/dataverse/util/json/JsonParser.java | 20 +-
.../iq/dataverse/util/json/JsonPrinter.java | 173 +-
.../iq/dataverse/util/xml/XmlValidator.java | 5 +
.../iq/dataverse/util/xml/XmlWriterUtil.java | 167 +
.../validation/JSONDataValidation.java | 190 +
src/main/java/propertyFiles/Bundle.properties | 70 +-
...imeTypeDetectionByFileExtension.properties | 1 +
.../propertyFiles/MimeTypeDisplay.properties | 2 +
.../propertyFiles/MimeTypeFacets.properties | 2 +
.../java/propertyFiles/archive.properties | 3 -
.../java/propertyFiles/citation.properties | 7884 +++++++++++++++-
.../java/propertyFiles/codeMeta20.properties | 3 +-
.../staticSearchFields.properties | 3 +-
src/main/resources/db/migration/V6.3.0.1.sql | 10 +
src/main/resources/db/migration/V6.3.0.2.sql | 2 +
src/main/resources/db/migration/V6.3.0.3.sql | 30 +
.../doi/crossref_metadata_template.xml | 29 +
.../doi/datacite_metadata_template.xml | 18 -
src/main/webapp/404static.xhtml | 13 +-
src/main/webapp/WEB-INF/web.xml | 5 +
src/main/webapp/dataset-license-terms.xhtml | 6 +-
src/main/webapp/dataset.xhtml | 91 +-
.../webapp/datasetLicenseInfoFragment.xhtml | 2 +-
src/main/webapp/dataverse.xhtml | 5 +-
src/main/webapp/dataverseuser.xhtml | 14 +
.../guestbook-terms-popup-fragment.xhtml | 2 +-
src/main/webapp/metadataFragment.xhtml | 16 +-
src/main/webapp/resources/css/structure.css | 14 +-
src/main/webapp/template.xhtml | 2 +-
src/main/webapp/themeAndWidgetsFragment.xhtml | 27 +-
.../iq/dataverse/DatasetFieldTest.java | 64 +
.../iq/dataverse/MetadataBlockTest.java | 49 +-
.../iq/dataverse/api/DatasetFieldsIT.java | 29 +
.../iq/dataverse/api/DatasetTypesIT.java | 267 +
.../harvard/iq/dataverse/api/DatasetsIT.java | 248 +-
.../iq/dataverse/api/DataversesIT.java | 266 +-
.../iq/dataverse/api/MetadataBlocksIT.java | 6 +-
.../iq/dataverse/api/SavedSearchIT.java | 200 +
.../harvard/iq/dataverse/api/SearchIT.java | 105 +-
.../edu/harvard/iq/dataverse/api/SwordIT.java | 2 +-
.../edu/harvard/iq/dataverse/api/UtilIT.java | 191 +-
.../dataaccess/RemoteOverlayAccessIOTest.java | 2 +-
.../dataverse/engine/TestCommandContext.java | 8 +-
.../command/impl/MoveDatasetCommandTest.java | 267 +-
.../export/OpenAireExportUtilTest.java | 2 +-
.../export/SchemaDotOrgExporterTest.java | 4 +-
.../dataverse/export/dataset-all-defaults.txt | 6 +
.../dataverse/feedback/FeedbackUtilTest.java | 8 +-
.../iq/dataverse/metrics/MetricsUtilTest.java | 32 +-
.../iq/dataverse/mocks/MocksFactory.java | 12 +-
.../dataverse/pidproviders/PidUtilTest.java | 28 +-
.../doi/datacite/XmlMetadataTemplateTest.java | 149 +
.../search/IndexServiceBeanTest.java | 5 +
.../search/SolrSearchResultTest.java | 23 +
.../iq/dataverse/util/FileUtilTest.java | 7 +
.../dataverse/util/PersonOrOrgUtilTest.java | 4 +-
.../data/FileDataProviderFactoryTest.java | 6 +-
.../dataverse/util/json/JsonParserTest.java | 9 +-
.../util/shapefile/ShapefileHandlerTest.java | 8 +-
.../validation/JSONDataValidationTest.java | 237 +
src/test/resources/hiddenShapefiles.zip | Bin 0 -> 54468 bytes
tests/check_duplicate_properties.sh | 37 +
tests/integration-tests.txt | 2 +-
tests/shell/spec/spec_helper.sh | 1 +
tests/verify_mdb_properties.sh | 99 +
281 files changed, 36827 insertions(+), 3903 deletions(-)
create mode 100644 .github/ISSUE_TEMPLATE/idea_proposal.md
create mode 100644 .github/actions/setup-maven/action.yml
create mode 100644 .github/workflows/check_property_files.yml
create mode 100644 .github/workflows/container_maintenance.yml
create mode 100755 .github/workflows/scripts/maintenance-job.sh
create mode 100644 .github/workflows/scripts/utils.sh
create mode 100644 doc/release-notes/6.4-release-notes.md
create mode 100644 doc/sphinx-guides/source/_static/api/dataset-create-software.json
create mode 100644 doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld
create mode 100644 doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json
create mode 100644 modules/container-base/src/backports/v6.1/001-pom.xml.patch
create mode 100644 modules/container-base/src/backports/v6.1/002-Dockerfile.patch
create mode 100644 modules/container-base/src/backports/v6.1/003-parent-pom.xml.patch
create mode 100644 modules/container-base/src/backports/v6.2/001-pom.xml.patch
create mode 100644 modules/container-base/src/backports/v6.2/002-Dockerfile.labels.patch
create mode 100644 modules/container-base/src/backports/v6.2/003-Dockerfile.security.patch
create mode 100644 modules/container-base/src/backports/v6.2/004-parent-pom.xml.patch
create mode 100644 modules/container-base/src/backports/v6.3/001-pom.xml.patch
create mode 100644 modules/container-base/src/backports/v6.3/002-Dockerfile.labels.patch
create mode 100644 modules/container-base/src/backports/v6.3/003-Dockerfile.security.patch
create mode 100644 modules/container-base/src/backports/v6.3/004-parent-pom.xml.patch
create mode 100644 modules/container-base/src/main/docker/scripts/init_1_change_passwords.sh
create mode 100644 scripts/issues/8578/citation_languages_10762.tsv
create mode 100644 scripts/issues/8578/citation_languages_develop.tsv
create mode 100755 scripts/issues/8578/script_check_languages.pl
create mode 100644 scripts/search/tests/data/dataset-finch3.json
create mode 100644 src/main/java/edu/harvard/iq/dataverse/ExternalFileUploadInProgress.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/DatasetFields.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/dataset/DatasetTypeServiceBean.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseInputLevelsCommand.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/GlobusTaskInProgress.java
rename src/main/java/edu/harvard/iq/dataverse/globus/{GlobusTask.java => GlobusTaskState.java} (93%)
create mode 100644 src/main/java/edu/harvard/iq/dataverse/globus/TaskMonitoringServiceBean.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/DoiMetadata.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/crossref/CrossRefDOIProvider.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/crossref/CrossRefDOIProviderFactory.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/crossref/CrossRefDOIRegisterService.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/crossref/CrossRefRESTfullClient.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/util/xml/XmlWriterUtil.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/validation/JSONDataValidation.java
create mode 100644 src/main/resources/db/migration/V6.3.0.1.sql
create mode 100644 src/main/resources/db/migration/V6.3.0.2.sql
create mode 100644 src/main/resources/db/migration/V6.3.0.3.sql
create mode 100644 src/main/resources/edu/harvard/iq/dataverse/pidproviders/doi/crossref_metadata_template.xml
delete mode 100644 src/main/resources/edu/harvard/iq/dataverse/pidproviders/doi/datacite_metadata_template.xml
create mode 100644 src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java
create mode 100644 src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldsIT.java
create mode 100644 src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java
create mode 100644 src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java
create mode 100644 src/test/java/edu/harvard/iq/dataverse/pidproviders/doi/datacite/XmlMetadataTemplateTest.java
create mode 100644 src/test/java/edu/harvard/iq/dataverse/validation/JSONDataValidationTest.java
create mode 100644 src/test/resources/hiddenShapefiles.zip
create mode 100755 tests/check_duplicate_properties.sh
create mode 100755 tests/verify_mdb_properties.sh
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 7e6995d76d9..3dba7d52109 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -14,7 +14,7 @@ Thank you for contributing to the Dataverse Project through the creation of a bu
WARNING: If this is a security issue it should be reported privately to security@dataverse.org
More information on bug issues and contributions can be found in the "Contributing to Dataverse" page:
-https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md#bug-reportsissues
+https://guides.dataverse.org/en/latest/contributor/index.html
Please fill out as much of the template as you can.
Start below this comment section.
@@ -44,7 +44,6 @@ Start below this comment section.
**Any related open or closed issues to this bug report?**
-
**Screenshots:**
No matter the issue, screenshots are always welcome.
@@ -53,3 +52,7 @@ To add a screenshot, please use one of the following formats and/or methods desc
* https://help.github.com/en/articles/file-attachments-on-issues-and-pull-requests
*
+
+
+**Are you thinking about creating a pull request for this issue?**
+Help is always welcome. Is this bug something you or your organization plan to fix?
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
index d6248537418..7365cb4317c 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -1,7 +1,7 @@
---
name: Feature request
about: Suggest an idea or new feature for the Dataverse software!
-title: 'Feature Request/Idea:'
+title: 'Feature Request:'
labels: 'Type: Feature'
assignees: ''
@@ -11,7 +11,7 @@ assignees: ''
Thank you for contributing to the Dataverse Project through the creation of a feature request!
More information on ideas/feature requests and contributions can be found in the "Contributing to Dataverse" page:
-https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md#ideasfeature-requests
+https://guides.dataverse.org/en/latest/contributor/index.html
Please fill out as much of the template as you can.
Start below this comment section.
@@ -34,3 +34,6 @@ Start below this comment section.
**Any open or closed issues related to this feature request?**
+
+**Are you thinking about creating a pull request for this feature?**
+Help is always welcome. Is this feature something you or your organization plan to implement?
diff --git a/.github/ISSUE_TEMPLATE/idea_proposal.md b/.github/ISSUE_TEMPLATE/idea_proposal.md
new file mode 100644
index 00000000000..8cb6c7bfafe
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/idea_proposal.md
@@ -0,0 +1,40 @@
+---
+name: Idea proposal
+about: Propose a new idea for discussion to improve the Dataverse software!
+title: 'Suggestion:'
+labels: 'Type: Suggestion'
+assignees: ''
+
+---
+
+
+
+**Overview of the Suggestion**
+
+
+**What kind of user is the suggestion intended for?**
+(Example user roles: API User, Curator, Depositor, Guest, Superuser, Sysadmin)
+
+
+**What inspired this idea?**
+
+
+**What existing behavior do you want changed?**
+
+
+**Any brand new behavior you want to add to Dataverse?**
+
+
+**Any open or closed issues related to this suggestion?**
+
+
+**Are you thinking about creating a pull request for this issue?**
+Help is always welcome. Is this idea something you or your organization plan to implement?
diff --git a/.github/actions/setup-maven/action.yml b/.github/actions/setup-maven/action.yml
new file mode 100644
index 00000000000..4cf09f34231
--- /dev/null
+++ b/.github/actions/setup-maven/action.yml
@@ -0,0 +1,37 @@
+---
+name: "Setup Maven and Caches"
+description: "Determine Java version and setup Maven, including necessary caches."
+inputs:
+ git-reference:
+ description: 'The git reference (branch/tag) to check out'
+ required: false
+ default: '${{ github.ref }}'
+ pom-paths:
+ description: "List of paths to Maven POM(s) for cache dependency setup"
+ required: false
+ default: 'pom.xml'
+runs:
+ using: composite
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ inputs.git-reference }}
+ - name: Determine Java version by reading the Maven property
+ shell: bash
+ run: |
+ echo "JAVA_VERSION=$(grep '' ${GITHUB_WORKSPACE}/modules/dataverse-parent/pom.xml | cut -f2 -d'>' | cut -f1 -d'<')" | tee -a ${GITHUB_ENV}
+ - name: Set up JDK ${{ env.JAVA_VERSION }}
+ id: setup-java
+ uses: actions/setup-java@v4
+ with:
+ java-version: ${{ env.JAVA_VERSION }}
+ distribution: 'temurin'
+ cache: 'maven'
+ cache-dependency-path: ${{ inputs.pom-paths }}
+ - name: Download common cache on branch cache miss
+ if: ${{ steps.setup-java.outputs.cache-hit != 'true' }}
+ uses: actions/cache/restore@v4
+ with:
+ key: dataverse-maven-cache
+ path: ~/.m2/repository
diff --git a/.github/workflows/check_property_files.yml b/.github/workflows/check_property_files.yml
new file mode 100644
index 00000000000..505310aab35
--- /dev/null
+++ b/.github/workflows/check_property_files.yml
@@ -0,0 +1,32 @@
+name: "Properties Check"
+on:
+ pull_request:
+ paths:
+ - "src/**/*.properties"
+ - "scripts/api/data/metadatablocks/*"
+jobs:
+ duplicate_keys:
+ name: Duplicate Keys
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Run duplicates detection script
+ shell: bash
+ run: tests/check_duplicate_properties.sh
+
+ metadata_blocks_properties:
+ name: Metadata Blocks Properties
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Setup GraalVM + Native Image
+ uses: graalvm/setup-graalvm@v1
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ java-version: '21'
+ distribution: 'graalvm-community'
+ - name: Setup JBang
+ uses: jbangdev/setup-jbang@main
+ - name: Run metadata block properties verification script
+ shell: bash
+ run: tests/verify_mdb_properties.sh
diff --git a/.github/workflows/container_app_push.yml b/.github/workflows/container_app_push.yml
index b3e247e376c..3b7ce066d73 100644
--- a/.github/workflows/container_app_push.yml
+++ b/.github/workflows/container_app_push.yml
@@ -5,6 +5,12 @@ on:
# We are deliberately *not* running on push events here to avoid double runs.
# Instead, push events will trigger from the base image and maven unit tests via workflow_call.
workflow_call:
+ inputs:
+ base-image-ref:
+ type: string
+ description: "Reference of the base image to build on in full qualified form [/]/:"
+ required: false
+ default: "gdcc/base:unstable"
pull_request:
branches:
- develop
@@ -16,7 +22,6 @@ on:
env:
IMAGE_TAG: unstable
- BASE_IMAGE_TAG: unstable
REGISTRY: "" # Empty means default to Docker Hub
PLATFORMS: "linux/amd64,linux/arm64"
MASTER_BRANCH_TAG: alpha
@@ -33,20 +38,24 @@ jobs:
if: ${{ github.repository_owner == 'IQSS' }}
steps:
- - name: Checkout repository
- uses: actions/checkout@v3
-
- - name: Set up JDK
- uses: actions/setup-java@v3
+ - name: Checkout and Setup Maven
+ uses: IQSS/dataverse/.github/actions/setup-maven@develop
with:
- java-version: "17"
- distribution: temurin
- cache: maven
+ pom-paths: |
+ pom.xml
+ modules/container-configbaker/pom.xml
+ modules/dataverse-parent/pom.xml
+
+ # TODO: Add a filter step here, that avoids building the image if this is a PR and there are other files touched than declared above.
+ # Use https://github.com/dorny/paths-filter to solve this. This will ensure we do not run this twice if this workflow
+ # will be triggered by the other workflows already (base image or java changes)
+ # To become a part of #10618.
- name: Build app and configbaker container image with local architecture and submodules (profile will skip tests)
run: >
mvn -B -f modules/dataverse-parent
-P ct -pl edu.harvard.iq:dataverse -am
+ $( [[ -n "${{ inputs.base-image-ref }}" ]] && echo "-Dbase.image=${{ inputs.base-image-ref }}" )
install
# TODO: add smoke / integration testing here (add "-Pct -DskipIntegrationTests=false")
@@ -106,11 +115,13 @@ jobs:
if: needs.check-secrets.outputs.available == 'true' &&
( github.event_name != 'push' || ( github.event_name == 'push' && contains(fromJSON('["develop", "master"]'), github.ref_name)))
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-java@v3
+ - name: Checkout and Setup Maven
+ uses: IQSS/dataverse/.github/actions/setup-maven@develop
with:
- java-version: "17"
- distribution: temurin
+ pom-paths: |
+ pom.xml
+ modules/container-configbaker/pom.xml
+ modules/dataverse-parent/pom.xml
# Depending on context, we push to different targets. Login accordingly.
- if: github.event_name != 'pull_request'
@@ -146,11 +157,13 @@ jobs:
run: >
mvn -B -f modules/dataverse-parent
-P ct -pl edu.harvard.iq:dataverse -am
+ $( [[ -n "${{ inputs.base-image-ref }}" ]] && echo "-Dbase.image=${{ inputs.base-image-ref }}" )
install
- name: Deploy multi-arch application and configbaker container image
run: >
mvn
- -Dapp.image.tag=${{ env.IMAGE_TAG }} -Dbase.image.tag=${{ env.BASE_IMAGE_TAG }}
+ -Dapp.image.tag=${{ env.IMAGE_TAG }}
+ $( [[ -n "${{ inputs.base-image-ref }}" ]] && echo "-Dbase.image=${{ inputs.base-image-ref }}" )
${{ env.REGISTRY }} -Ddocker.platforms=${{ env.PLATFORMS }}
-P ct deploy
diff --git a/.github/workflows/container_base_push.yml b/.github/workflows/container_base_push.yml
index b938851f816..c2340576c78 100644
--- a/.github/workflows/container_base_push.yml
+++ b/.github/workflows/container_base_push.yml
@@ -1,99 +1,130 @@
---
-name: Base Container Image
+name: Container Images Releasing
on:
push:
+ tags:
+ - 'v[6-9].**'
branches:
- 'develop'
- - 'master'
+ # "Path filters are not evaluated for pushes of tags" https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#onpushpull_requestpull_request_targetpathspaths-ignore
paths:
- 'modules/container-base/**'
+ - '!modules/container-base/src/backports/**'
+ - '!modules/container-base/README.md'
- 'modules/dataverse-parent/pom.xml'
- '.github/workflows/container_base_push.yml'
- pull_request:
- branches:
- - 'develop'
- - 'master'
- paths:
- - 'modules/container-base/**'
- - 'modules/dataverse-parent/pom.xml'
- - '.github/workflows/container_base_push.yml'
- schedule:
- - cron: '23 3 * * 0' # Run for 'develop' every Sunday at 03:23 UTC
+
+ # These TODOs are left for #10618
+ # TODO: we are missing a workflow_call option here, so we can trigger this flow from pr comments and maven tests (keep the secrets availability in mind!)
+ # TODO: we are missing a pull_request option here (filter for stuff that would trigger the maven runs!) so we can trigger preview builds for them when coming from the main repo (keep the secrets availability in mind!)
env:
- IMAGE_TAG: unstable
PLATFORMS: linux/amd64,linux/arm64
+ DEVELOPMENT_BRANCH: develop
jobs:
build:
- name: Build image
+ name: Base Image
runs-on: ubuntu-latest
permissions:
contents: read
packages: read
- strategy:
- matrix:
- jdk: [ '17' ]
# Only run in upstream repo - avoid unnecessary runs in forks
if: ${{ github.repository_owner == 'IQSS' }}
+ outputs:
+ base-image-ref: ${{ steps.finalize.outputs.base-image-ref }}
steps:
- - name: Checkout repository
- uses: actions/checkout@v3
-
- - name: Set up JDK ${{ matrix.jdk }}
- uses: actions/setup-java@v3
+ - name: Checkout and Setup Maven
+ uses: IQSS/dataverse/.github/actions/setup-maven@develop
with:
- java-version: ${{ matrix.jdk }}
- distribution: 'adopt'
- - name: Cache Maven packages
- uses: actions/cache@v3
- with:
- path: ~/.m2
- key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
- restore-keys: ${{ runner.os }}-m2
-
- - name: Build base container image with local architecture
- run: mvn -f modules/container-base -Pct package
+ pom-paths: modules/container-base/pom.xml
- # Run anything below only if this is not a pull request.
- # Accessing, pushing tags etc. to DockerHub will only succeed in upstream because secrets.
-
- - if: ${{ github.event_name == 'push' && github.ref_name == 'develop' }}
- name: Push description to DockerHub
- uses: peter-evans/dockerhub-description@v3
+ # Note: Accessing, pushing tags etc. to DockerHub will only succeed in upstream and
+ # on events in context of upstream because secrets. PRs run in context of forks by default!
+ - name: Log in to the Container registry
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- repository: gdcc/base
- short-description: "Dataverse Base Container image providing Payara application server and optimized configuration"
- readme-filepath: ./modules/container-base/README.md
- - if: ${{ github.event_name != 'pull_request' }}
- name: Log in to the Container registry
- uses: docker/login-action@v2
+ # In case this is a push to develop, we care about buildtime.
+ # Configure a remote ARM64 build host in addition to the local AMD64 in two steps.
+ - name: Setup SSH agent
+ if: ${{ github.event_name != 'schedule' }}
+ uses: webfactory/ssh-agent@v0.9.0
with:
- registry: ${{ env.REGISTRY }}
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
- - if: ${{ github.event_name != 'pull_request' }}
- name: Set up QEMU for multi-arch builds
- uses: docker/setup-qemu-action@v2
- - name: Re-set image tag based on branch
- if: ${{ github.ref_name == 'master' }}
- run: echo "IMAGE_TAG=alpha" >> $GITHUB_ENV
- - if: ${{ github.event_name != 'pull_request' }}
- name: Deploy multi-arch base container image to Docker Hub
- run: mvn -f modules/container-base -Pct deploy -Dbase.image.tag=${{ env.IMAGE_TAG }} -Ddocker.platforms=${{ env.PLATFORMS }}
+ ssh-private-key: ${{ secrets.BUILDER_ARM64_SSH_PRIVATE_KEY }}
+ - name: Provide the known hosts key and the builder config
+ if: ${{ github.event_name != 'schedule' }}
+ run: |
+ echo "${{ secrets.BUILDER_ARM64_SSH_HOST_KEY }}" > ~/.ssh/known_hosts
+ mkdir -p modules/container-base/target/buildx-state/buildx/instances
+ cat > modules/container-base/target/buildx-state/buildx/instances/maven << EOF
+ { "Name": "maven",
+ "Driver": "docker-container",
+ "Dynamic": false,
+ "Nodes": [{"Name": "maven0",
+ "Endpoint": "unix:///var/run/docker.sock",
+ "Platforms": [{"os": "linux", "architecture": "amd64"}],
+ "DriverOpts": null,
+ "Flags": ["--allow-insecure-entitlement=network.host"],
+ "Files": null},
+ {"Name": "maven1",
+ "Endpoint": "ssh://${{ secrets.BUILDER_ARM64_SSH_CONNECTION }}",
+ "Platforms": [{"os": "linux", "architecture": "arm64"}],
+ "DriverOpts": null,
+ "Flags": ["--allow-insecure-entitlement=network.host"],
+ "Files": null}]}
+ EOF
+
+ # Determine the base image name we are going to use from here on
+ - name: Determine base image name
+ run: |
+ if [[ "${{ github.ref_name }}" = "${{ env.DEVELOPMENT_BRANCH }}" ]]; then
+ echo "BASE_IMAGE=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -q -DforceStdout )" | tee -a "${GITHUB_ENV}"
+ echo "BASE_IMAGE_UPCOMING=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -Dbase.image.tag.suffix="" -q -DforceStdout )" | tee -a "${GITHUB_ENV}"
+ else
+ echo "BASE_IMAGE=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -Dbase.image.tag.suffix="" -q -DforceStdout )" | tee -a "${GITHUB_ENV}"
+ fi
+ - name: Calculate revision number for immutable tag (on release branches only)
+ if: ${{ github.ref_name != env.DEVELOPMENT_BRANCH }}
+ id: revision-tag
+ uses: ./.github/actions/get-image-revision
+ with:
+ image-ref: ${{ env.BASE_IMAGE }}
+ tag-options-prefix: "-Dbase.image.tag.suffix='' -Ddocker.tags.revision="
+ - name: Configure update of "latest" tag for development branch
+ id: develop-tag
+ if: ${{ github.ref_name == env.DEVELOPMENT_BRANCH }}
+ run: |
+ echo "tag-options=-Ddocker.tags.develop=unstable -Ddocker.tags.upcoming=${BASE_IMAGE_UPCOMING#*:}" | tee -a "${GITHUB_OUTPUT}"
+
+ - name: Deploy multi-arch base container image to Docker Hub
+ id: build
+ run: |
+ mvn -f modules/container-base -Pct deploy -Ddocker.noCache -Ddocker.platforms=${{ env.PLATFORMS }} \
+ -Ddocker.imagePropertyConfiguration=override ${{ steps.develop-tag.outputs.tag-options }} ${{ steps.revision-tag.outputs.tag-options }}
+
+ - name: Determine appropriate base image ref for app image
+ id: finalize
+ run: |
+ if [[ "${{ github.ref_name }}" = "${{ env.DEVELOPMENT_BRANCH }}" ]]; then
+ echo "base-image-ref=${BASE_IMAGE_UPCOMING}" | tee -a "$GITHUB_OUTPUT"
+ else
+ echo "base-image-ref=gdcc/base:${{ steps.revision-tag.outputs.revision-tag }}" | tee -a "$GITHUB_OUTPUT"
+ fi
+
push-app-img:
name: "Rebase & Publish App Image"
permissions:
contents: read
packages: write
pull-requests: write
- needs: build
- # We do not release a new base image for pull requests, so do not trigger.
- if: ${{ github.event_name != 'pull_request' }}
- uses: ./.github/workflows/container_app_push.yml
secrets: inherit
+ needs:
+ - build
+ uses: ./.github/workflows/container_app_push.yml
+ with:
+ base-image-ref: ${{ needs.build.outputs.base-image-ref }}
diff --git a/.github/workflows/container_maintenance.yml b/.github/workflows/container_maintenance.yml
new file mode 100644
index 00000000000..986fe25cdf5
--- /dev/null
+++ b/.github/workflows/container_maintenance.yml
@@ -0,0 +1,119 @@
+---
+name: Container Images Scheduled Maintenance
+
+on:
+ # TODO: think about adding a (filtered) push event trigger here in case we change the patches
+ # ---
+ # Allow manual workflow triggers in case we need to repair images on Docker Hub (build and replace)
+ workflow_dispatch:
+ inputs:
+ force_build:
+ type: boolean
+ required: false
+ default: false
+ description: "Build and deploy even if no newer Java images or package updates are found."
+ schedule:
+ - cron: '23 3 * * 0' # Run for 'develop' every Sunday at 03:23 UTC
+
+env:
+ PLATFORMS: linux/amd64,linux/arm64
+ NUM_PAST_RELEASES: 3
+
+jobs:
+ build:
+ name: Base Image Matrix Build
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ packages: read
+ # Only run in upstream repo - avoid unnecessary runs in forks
+ if: ${{ github.repository_owner == 'IQSS' }}
+ outputs:
+ supported_tag_matrix: ${{ steps.execute.outputs.supported_tag_matrix }}
+ rebuilt_base_images: ${{ steps.execute.outputs.rebuilt_base_images }}
+
+ steps:
+ - name: Checkout and Setup Maven
+ uses: IQSS/dataverse/.github/actions/setup-maven@develop
+ with:
+ pom-paths: modules/container-base/pom.xml
+
+ # Note: Accessing, pushing tags etc. to DockerHub will only succeed in upstream and
+ # on events in context of upstream because secrets. PRs run in context of forks by default!
+ - name: Log in to the Container registry
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Set up QEMU for multi-arch builds
+ uses: docker/setup-qemu-action@v3
+ with:
+ platforms: ${{ env.PLATFORMS }}
+
+ # Discover the releases we want to maintain
+ - name: Discover maintained releases
+ id: discover
+ run: |
+ echo "FORCE_BUILD=$( [[ "${{ inputs.force_build }}" = "true" ]] && echo 1 || echo 0 )" | tee -a "$GITHUB_ENV"
+ DEVELOPMENT_BRANCH=$( curl -f -sS https://api.github.com/repos/${{ github.repository }} | jq -r '.default_branch' )
+ echo "DEVELOPMENT_BRANCH=$DEVELOPMENT_BRANCH" | tee -a "$GITHUB_ENV"
+ echo "branches=$( curl -f -sS https://api.github.com/repos/IQSS/dataverse/releases | jq -r " .[0:${{ env.NUM_PAST_RELEASES }}] | .[].tag_name, \"${DEVELOPMENT_BRANCH}\" " | tr "\n" " " )" | tee -a "${GITHUB_OUTPUT}"
+
+ # Execute matrix build for the discovered branches
+ - name: Execute build matrix script
+ id: execute
+ run: |
+ .github/workflows/scripts/maintenance-job.sh ${{ steps.discover.outputs.branches }}
+
+ # TODO: Use the needs.build.outputs.rebuilt_base_images with fromJSON() to create a matrix job.
+ # Must be a single-rank matrix (vector); the branch and base image tag information ships as a "branch=tag" string
+ # Will be part of working on #10618, app image versioned tags.
+ #push-app-img:
+ # name: "Rebase & Publish App Image"
+ # permissions:
+ # contents: read
+ # packages: write
+ # pull-requests: write
+ # secrets: inherit
+ # needs:
+ # - build
+ # strategy:
+ # fail-fast: false
+ # matrix:
+ # branch: ${{ fromJson(needs.discover.outputs.branches) }}
+ # uses: ./.github/workflows/container_app_push.yml
+ # with:
+ # branch: ${{ matrix.branch }}
+
+ hub-description:
+ name: Push description to DockerHub
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ packages: read
+ needs: build
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ - name: Render README
+ id: render
+ run: |
+ TAGS_JSON='${{ needs.build.outputs.supported_tag_matrix }}'
+ echo "$TAGS_JSON" | jq -r 'keys | sort | reverse | .[]' |
+ while IFS= read -r branch; do
+ echo \
+ "- \`$( echo "$TAGS_JSON" | jq --arg v "$branch" -r '.[$v] | join("`, `")' )\`" \
+ "([Dockerfile](https://github.com/IQSS/dataverse/blob/${branch}/modules/container-base/src/main/docker/Dockerfile)," \
+ "[Patches](https://github.com/IQSS/dataverse/blob/develop/modules/container-base/src/backports/${branch}))" \
+ | tee -a "${GITHUB_WORKSPACE}/tags.md"
+ done
+ sed -i -e "/<\!-- TAG BLOCK HERE -->/r ${GITHUB_WORKSPACE}/tags.md" "./modules/container-base/README.md"
+
+ - name: Push description to DockerHub
+ uses: peter-evans/dockerhub-description@v4
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ repository: gdcc/base
+ short-description: "Dataverse Base Container image providing Payara application server and optimized configuration"
+ readme-filepath: ./modules/container-base/README.md
\ No newline at end of file
diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml
index 4ad4798bc64..a94b17a67ba 100644
--- a/.github/workflows/maven_unit_test.yml
+++ b/.github/workflows/maven_unit_test.yml
@@ -30,6 +30,7 @@ jobs:
continue-on-error: ${{ matrix.experimental }}
runs-on: ubuntu-latest
steps:
+ # TODO: As part of #10618 change to setup-maven custom action
# Basic setup chores
- uses: actions/checkout@v3
- name: Set up JDK ${{ matrix.jdk }}
@@ -95,6 +96,7 @@ jobs:
# status: "Experimental"
continue-on-error: ${{ matrix.experimental }}
steps:
+ # TODO: As part of #10618 change to setup-maven custom action
# Basic setup chores
- uses: actions/checkout@v3
- name: Set up JDK ${{ matrix.jdk }}
@@ -128,6 +130,7 @@ jobs:
needs: integration-test
name: Coverage Report Submission
steps:
+ # TODO: As part of #10618 change to setup-maven custom action
# Basic setup chores
- uses: actions/checkout@v3
- uses: actions/setup-java@v3
@@ -156,6 +159,11 @@ jobs:
# NOTE: this may be extended with adding a report to the build output, leave a comment, send to Sonarcloud, ...
+ # TODO: Add a filter step here, that avoids calling the app image release workflow if there are changes to the base image.
+ # Use https://github.com/dorny/paths-filter to solve this. Will require an additional job or adding to integration-test job.
+ # This way we ensure that we're not running the app image flow with a non-matching base image.
+ # To become a part of #10618.
+
push-app-img:
name: Publish App Image
permissions:
diff --git a/.github/workflows/scripts/maintenance-job.sh b/.github/workflows/scripts/maintenance-job.sh
new file mode 100755
index 00000000000..370988b9812
--- /dev/null
+++ b/.github/workflows/scripts/maintenance-job.sh
@@ -0,0 +1,180 @@
+#!/bin/bash
+
+# A matrix-like job to maintain a number of releases as well as the latest snapshot of Dataverse.
+
+# PREREQUISITES:
+# - You have Java, Maven, QEMU and Docker all setup and ready to go
+# - You obviously checked out the develop branch, otherwise you'd not be executing this script
+# - You added all the branch names you want to run maintenance for as arguments
+# Optional, but recommended:
+# - You added a DEVELOPMENT_BRANCH env var to your runner/job env with the name of the development branch
+# - You added a FORCE_BUILD=0|1 env var to indicate if the base image build should be forced
+# - You added a PLATFORMS env var with all the target platforms you want to build for
+
+# NOTE:
+# This script consolidates a number of GitHub Actions steps into a single script.
+# The reason to put all of this in here is the complexity of the workflow and the limitations of
+# matrix support in GitHub Actions, where outputs cannot be aggregated or otherwise used further.
+
+set -euo pipefail
+
+# Get all the inputs
+# If not within a runner, just print to stdout (duplicating the output in case of tee usage, but that's ok for testing)
+GITHUB_OUTPUT=${GITHUB_OUTPUT:-"/proc/self/fd/1"}
+GITHUB_ENV=${GITHUB_ENV:-"/proc/self/fd/1"}
+GITHUB_WORKSPACE=${GITHUB_WORKSPACE:-"$(pwd)"}
+GITHUB_SERVER_URL=${GITHUB_SERVER_URL:-"https://github.com"}
+GITHUB_REPOSITORY=${GITHUB_REPOSITORY:-"IQSS/dataverse"}
+
+MAINTENANCE_WORKSPACE="${GITHUB_WORKSPACE}/maintenance-job"
+
+DEVELOPMENT_BRANCH="${DEVELOPMENT_BRANCH:-"develop"}"
+FORCE_BUILD="${FORCE_BUILD:-"0"}"
+PLATFORMS="${PLATFORMS:-"linux/amd64,linux/arm64"}"
+
+# Setup and validation
+if [[ -z "$*" ]]; then
+ >&2 echo "You must give a list of branch names as arguments"
+ exit 1;
+fi
+
+source "$( dirname "$0" )/utils.sh"
+
+# Delete old stuff if present
+rm -rf "$MAINTENANCE_WORKSPACE"
+mkdir -p "$MAINTENANCE_WORKSPACE"
+
+# Store the image tags we maintain in this array (same order as branches array!)
+# This list will be used to build the support matrix within the Docker Hub image description
+SUPPORTED_ROLLING_TAGS=()
+# Store the tags of base images we are actually rebuilding to base new app images upon
+# Takes the form "branch-name=base-image-ref"
+REBUILT_BASE_IMAGES=()
+
+for BRANCH in "$@"; do
+ echo "::group::Running maintenance for $BRANCH"
+
+ # 0. Determine if this is a development branch and the most current release
+ IS_DEV=0
+ if [[ "$BRANCH" = "$DEVELOPMENT_BRANCH" ]]; then
+ IS_DEV=1
+ fi
+ IS_CURRENT_RELEASE=0
+ if [[ "$BRANCH" = $( curl -f -sS "https://api.github.com/repos/$GITHUB_REPOSITORY/releases" | jq -r '.[0].tag_name' ) ]]; then
+ IS_CURRENT_RELEASE=1
+ fi
+
+ # 1. Let's get the maintained sources
+ git clone -c advice.detachedHead=false --depth 1 --branch "$BRANCH" "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}" "$MAINTENANCE_WORKSPACE/$BRANCH"
+ # Switch context
+ cd "$MAINTENANCE_WORKSPACE/$BRANCH"
+
+ # 2. Now let's apply the patches (we have them checked out in $GITHUB_WORKSPACE, not necessarily in this local checkout)
+ echo "Checking for patches..."
+ if [[ -d ${GITHUB_WORKSPACE}/modules/container-base/src/backports/$BRANCH ]]; then
+ echo "Applying patches now."
+ find "${GITHUB_WORKSPACE}/modules/container-base/src/backports/$BRANCH" -type f -name '*.patch' -print0 | xargs -0 -n1 patch -p1 -s -i
+ fi
+
+ # 3. Determine the base image ref (<namespace>/<repository>:<tag>)
+ BASE_IMAGE_REF=""
+ # For the dev branch we want the full flexible stack tag, to detect stack upgrades requiring a new build
+ if (( IS_DEV )); then
+ BASE_IMAGE_REF=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -q -DforceStdout )
+ else
+ BASE_IMAGE_REF=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -Dbase.image.tag.suffix="" -q -DforceStdout )
+ fi
+ echo "Determined BASE_IMAGE_REF=$BASE_IMAGE_REF from Maven"
+
+ # 4. Check for Temurin image updates
+ JAVA_IMAGE_REF=$( mvn help:evaluate -Pct -f modules/container-base -Dexpression=java.image -q -DforceStdout )
+ echo "Determined JAVA_IMAGE_REF=$JAVA_IMAGE_REF from Maven"
+ NEWER_JAVA_IMAGE=0
+ if check_newer_parent "$JAVA_IMAGE_REF" "$BASE_IMAGE_REF"; then
+ NEWER_JAVA_IMAGE=1
+ fi
+
+ # 5. Check for package updates in base image
+ PKGS="$( grep "ARG PKGS" modules/container-base/src/main/docker/Dockerfile | cut -f2 -d= | tr -d '"' )"
+ echo "Determined installed packages=\"$PKGS\" from Maven"
+ NEWER_PKGS=0
+ # Don't bother with package checks if the java image is newer already
+ if ! (( NEWER_JAVA_IMAGE )); then
+ if check_newer_pkgs "$BASE_IMAGE_REF" "$PKGS"; then
+ NEWER_PKGS=1
+ fi
+ fi
+
+ # 6. Get current immutable revision tag if not on the dev branch
+ REV=$( current_revision "$BASE_IMAGE_REF" )
+ CURRENT_REV_TAG="${BASE_IMAGE_REF#*:}-r$REV"
+ NEXT_REV_TAG="${BASE_IMAGE_REF#*:}-r$(( REV + 1 ))"
+
+ # 7. Let's put together what tags we want added to this build run
+ TAG_OPTIONS=""
+ if ! (( IS_DEV )); then
+ TAG_OPTIONS="-Dbase.image=$BASE_IMAGE_REF -Ddocker.tags.revision=$NEXT_REV_TAG"
+ # In case of the current release, add the "latest" tag as well.
+ if (( IS_CURRENT_RELEASE )); then
+ TAG_OPTIONS="$TAG_OPTIONS -Ddocker.tags.latest=latest"
+ fi
+ else
+ UPCOMING_TAG=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image.tag -Dbase.image.tag.suffix="" -q -DforceStdout )
+ TAG_OPTIONS="-Ddocker.tags.develop=unstable -Ddocker.tags.upcoming=$UPCOMING_TAG"
+
+ # For the dev branch we only have rolling tags and can add them now already
+ SUPPORTED_ROLLING_TAGS+=("[\"unstable\", \"$UPCOMING_TAG\", \"${BASE_IMAGE_REF#*:}\"]")
+ fi
+ echo "Determined these additional Maven tag options: $TAG_OPTIONS"
+
+ # 8. Let's build the base image if necessary
+ NEWER_IMAGE=0
+ if (( NEWER_JAVA_IMAGE + NEWER_PKGS + FORCE_BUILD > 0 )); then
+ mvn -Pct -f modules/container-base deploy -Ddocker.noCache -Ddocker.platforms="${PLATFORMS}" \
+ -Ddocker.imagePropertyConfiguration=override $TAG_OPTIONS
+ NEWER_IMAGE=1
+ # Save the information about the immutable or rolling tag we just built
+ if ! (( IS_DEV )); then
+ REBUILT_BASE_IMAGES+=("$BRANCH=${BASE_IMAGE_REF%:*}:$NEXT_REV_TAG")
+ else
+ REBUILT_BASE_IMAGES+=("$BRANCH=$BASE_IMAGE_REF")
+ fi
+ else
+ echo "No rebuild necessary, we're done here."
+ fi
+
+ # 9. Add list of rolling and immutable tags for release builds
+ if ! (( IS_DEV )); then
+ RELEASE_TAGS_LIST="["
+ if (( IS_CURRENT_RELEASE )); then
+ RELEASE_TAGS_LIST+="\"latest\", "
+ fi
+ RELEASE_TAGS_LIST+="\"${BASE_IMAGE_REF#*:}\", "
+ if (( NEWER_IMAGE )); then
+ RELEASE_TAGS_LIST+="\"$NEXT_REV_TAG\"]"
+ else
+ RELEASE_TAGS_LIST+="\"$CURRENT_REV_TAG\"]"
+ fi
+ SUPPORTED_ROLLING_TAGS+=("${RELEASE_TAGS_LIST}")
+ fi
+
+ echo "::endgroup::"
+done
+
+# Build the output of which base images have actually been rebuilt as JSON
+REBUILT_IMAGES="["
+for IMAGE in "${REBUILT_BASE_IMAGES[@]}"; do
+ REBUILT_IMAGES+=" \"$IMAGE\" "
+done
+REBUILT_IMAGES+="]"
+echo "rebuilt_base_images=${REBUILT_IMAGES// /, }" | tee -a "${GITHUB_OUTPUT}"
+
+# Build the supported rolling tags matrix as JSON
+SUPPORTED_TAGS="{"
+for (( i=0; i < ${#SUPPORTED_ROLLING_TAGS[@]} ; i++ )); do
+ j=$((i+1))
+ SUPPORTED_TAGS+="\"${!j}\": ${SUPPORTED_ROLLING_TAGS[$i]}"
+ (( i < ${#SUPPORTED_ROLLING_TAGS[@]}-1 )) && SUPPORTED_TAGS+=", "
+done
+SUPPORTED_TAGS+="}"
+echo "supported_tag_matrix=$SUPPORTED_TAGS" | tee -a "$GITHUB_OUTPUT"
diff --git a/.github/workflows/scripts/utils.sh b/.github/workflows/scripts/utils.sh
new file mode 100644
index 00000000000..987b58d8bb5
--- /dev/null
+++ b/.github/workflows/scripts/utils.sh
@@ -0,0 +1,108 @@
+#!/bin/bash
+
+set -euo pipefail
+
+function check_newer_parent() {
+ PARENT_IMAGE="$1"
+ # Get namespace, default to "library" if not found
+ PARENT_IMAGE_NS="${PARENT_IMAGE%/*}"
+ if [[ "$PARENT_IMAGE_NS" = "${PARENT_IMAGE}" ]]; then
+ PARENT_IMAGE_NS="library"
+ fi
+ PARENT_IMAGE_REPO="${PARENT_IMAGE%:*}"
+ PARENT_IMAGE_TAG="${PARENT_IMAGE#*:}"
+
+ PARENT_IMAGE_LAST_UPDATE="$( curl -sS "https://hub.docker.com/v2/namespaces/${PARENT_IMAGE_NS}/repositories/${PARENT_IMAGE_REPO}/tags/${PARENT_IMAGE_TAG}" | jq -r .last_updated )"
+ if [[ "$PARENT_IMAGE_LAST_UPDATE" = "null" ]]; then
+ echo "::error title='Invalid PARENT Image'::Could not find ${PARENT_IMAGE} in the registry"
+ exit 1
+ fi
+
+ DERIVED_IMAGE="$2"
+ # Get namespace, default to "library" if not found
+ DERIVED_IMAGE_NS="${DERIVED_IMAGE%/*}"
+ if [[ "${DERIVED_IMAGE_NS}" = "${DERIVED_IMAGE}" ]]; then
+ DERIVED_IMAGE_NS="library"
+ fi
+ DERIVED_IMAGE_REPO="$( echo "${DERIVED_IMAGE%:*}" | cut -f2 -d/ )"
+ DERIVED_IMAGE_TAG="${DERIVED_IMAGE#*:}"
+
+ DERIVED_IMAGE_LAST_UPDATE="$( curl -sS "https://hub.docker.com/v2/namespaces/${DERIVED_IMAGE_NS}/repositories/${DERIVED_IMAGE_REPO}/tags/${DERIVED_IMAGE_TAG}" | jq -r .last_updated )"
+ if [[ "$DERIVED_IMAGE_LAST_UPDATE" = "null" || "$DERIVED_IMAGE_LAST_UPDATE" < "$PARENT_IMAGE_LAST_UPDATE" ]]; then
+ echo "Parent image $PARENT_IMAGE has a newer release ($PARENT_IMAGE_LAST_UPDATE), which is more recent than $DERIVED_IMAGE ($DERIVED_IMAGE_LAST_UPDATE)"
+ return 0
+ else
+ echo "Parent image $PARENT_IMAGE ($PARENT_IMAGE_LAST_UPDATE) is older than $DERIVED_IMAGE ($DERIVED_IMAGE_LAST_UPDATE)"
+ return 1
+ fi
+}
+
+function check_newer_pkgs() {
+ IMAGE="$1"
+ PKGS="$2"
+
+ docker run --rm -u 0 "${IMAGE}" sh -c "apt update >/dev/null 2>&1 && apt install -s ${PKGS}" | tee /proc/self/fd/2 | grep -q "0 upgraded"
+ STATUS=$?
+
+ if [[ $STATUS -eq 0 ]]; then
+ echo "Base image $IMAGE has no updates for our custom installed packages"
+ return 1
+ else
+ echo "Base image $IMAGE needs updates for our custom installed packages"
+ return 0
+ fi
+
+ # TODO: In a future version of this script, we might want to include checking for other security updates,
+ # not just updates to the packages we installed.
+ # grep security /etc/apt/sources.list > /tmp/security.list
+ # apt-get update -oDir::Etc::Sourcelist=/tmp/security.list
+ # apt-get dist-upgrade -y -oDir::Etc::Sourcelist=/tmp/security.list -oDir::Etc::SourceParts=/bin/false -s
+
+}
+
+function current_revision() {
+ IMAGE="$1"
+ IMAGE_NS_REPO="${IMAGE%:*}"
+ IMAGE_TAG="${IMAGE#*:}"
+
+ if [[ "$IMAGE_TAG" = "$IMAGE_NS_REPO" ]]; then
+ >&2 echo "You must provide an image reference in the format [/]:"
+ exit 1
+ fi
+
+ case "$IMAGE_NS_REPO" in
+ */*) :;; # namespace/repository syntax, leave as is
+ *) IMAGE_NS_REPO="library/$IMAGE_NS_REPO";; # bare repository name (docker official image); must convert to namespace/repository syntax
+ esac
+
+ # Without such a token we may run into rate limits
+ # OB 2024-09-16: for some reason using this token stopped working. Let's go without and see if we really fall into rate limits.
+ # token=$( curl -s "https://auth.docker.io/token?service=registry.docker.io&scope=repository:$IMAGE_NS_REPO:pull" )
+
+ ALL_TAGS="$(
+ i=0
+ while [ $? == 0 ]; do
+ i=$((i+1))
+ # OB 2024-09-16: for some reason using this token stopped working. Let's go without and see if we really fall into rate limits.
+ # RESULT=$( curl -s -H "Authorization: Bearer $token" "https://registry.hub.docker.com/v2/repositories/$IMAGE_NS_REPO/tags/?page=$i&page_size=100" )
+ RESULT=$( curl -s "https://registry.hub.docker.com/v2/repositories/$IMAGE_NS_REPO/tags/?page=$i&page_size=100" )
+ if [[ $( echo "$RESULT" | jq '.message' ) != "null" ]]; then
+ # If we run into an error on the first attempt, that means we have a problem.
+ if [[ "$i" == "1" ]]; then
+ >&2 echo "Error when retrieving tag data: $( echo "$RESULT" | jq '.message' )"
+ exit 2
+ # Otherwise it will just mean we reached the last page already
+ else
+ break
+ fi
+ else
+ echo "$RESULT" | jq -r '."results"[]["name"]'
+ # DEBUG:
+ #echo "$RESULT" | >&2 jq -r '."results"[]["name"]'
+ fi
+ done
+ )"
+
+ # Note: if a former tag could not be found, it may simply not exist yet; return -1 so a new series starts at rev 0
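+ # Example (illustrative): for IMAGE_TAG "6.4-noble" and existing tags "6.4-noble-r0" and
+ # "6.4-noble-r2", the pipeline below reduces them to "0" and "2" and prints "2".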
+ echo "$ALL_TAGS" | grep "${IMAGE_TAG}-r" | sed -e "s#${IMAGE_TAG}-r##" | sort -h | tail -n1 || echo "-1"
+}
diff --git a/.github/workflows/shellspec.yml b/.github/workflows/shellspec.yml
index 227a74fa00f..3320d9d08a4 100644
--- a/.github/workflows/shellspec.yml
+++ b/.github/workflows/shellspec.yml
@@ -24,28 +24,11 @@ jobs:
run: |
cd tests/shell
shellspec
- shellspec-centos7:
- name: "CentOS 7"
+ shellspec-rocky9:
+ name: "RockyLinux 9"
runs-on: ubuntu-latest
container:
- image: centos:7
- steps:
- - uses: actions/checkout@v2
- - name: Install shellspec
- run: |
- curl -fsSL https://github.com/shellspec/shellspec/releases/download/${{ env.SHELLSPEC_VERSION }}/shellspec-dist.tar.gz | tar -xz -C /usr/share
- ln -s /usr/share/shellspec/shellspec /usr/bin/shellspec
- - name: Install dependencies
- run: yum install -y ed
- - name: Run shellspec
- run: |
- cd tests/shell
- shellspec
- shellspec-rocky8:
- name: "RockyLinux 8"
- runs-on: ubuntu-latest
- container:
- image: rockylinux/rockylinux:8
+ image: rockylinux/rockylinux:9
steps:
- uses: actions/checkout@v2
- name: Install shellspec
diff --git a/conf/solr/schema.xml b/conf/solr/schema.xml
index 5dde750573d..2aed50e9998 100644
--- a/conf/solr/schema.xml
+++ b/conf/solr/schema.xml
@@ -142,6 +142,7 @@
+
@@ -205,6 +206,7 @@
+
@@ -350,6 +352,7 @@
+
@@ -426,6 +429,7 @@
+
@@ -590,6 +594,7 @@
+
diff --git a/doc/release-notes/6.4-release-notes.md b/doc/release-notes/6.4-release-notes.md
new file mode 100644
index 00000000000..979fd16bf9e
--- /dev/null
+++ b/doc/release-notes/6.4-release-notes.md
@@ -0,0 +1,526 @@
+# Dataverse 6.4
+
+Please note: To read these instructions in full, please go to https://github.com/IQSS/dataverse/releases/tag/v6.4 rather than the list of releases, which will cut them off.
+
+This release brings new features, enhancements, and bug fixes to Dataverse. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project.
+
+## Release Highlights
+
+New features in Dataverse 6.4:
+
+- Enhanced DataCite Metadata, including "Relation Type"
+- All ISO 639-3 languages are now supported
+- There is now a button for "Unlink Dataset"
+- Users will have DOIs/PIDs reserved for their files as part of file upload instead of at publication time
+- Datasets can now have types such as "software" or "workflow"
+- Croissant support
+- RO-Crate support
+- and more! Please see below.
+
+New client library:
+
+- Rust
+
+This release also fixes two important bugs described below and in [a post](https://groups.google.com/g/dataverse-community/c/evn5C-pyrS8/m/JrH9vp47DwAJ) on the mailing list:
+
+- "Update Current Version" can cause metadata loss
+- Publishing breaks designated dataset thumbnail, messes up collection page
+
+Additional details on the above as well as many more features and bug fixes included in the release are described below. Read on!
+
+## Features Added
+
+### Enhanced DataCite Metadata, Including "Relation Type"
+
+Within the "Related Publication" field, a new subfield has been added called "Relation Type" that allows for the most common [values](https://datacite-metadata-schema.readthedocs.io/en/4.5/appendices/appendix-1/relationType/) recommended by DataCite: isCitedBy, Cites, IsSupplementTo, IsSupplementedBy, IsReferencedBy, and References. For existing datasets where no "Relation Type" has been specified, "IsSupplementTo" is assumed.
+
+Dataverse now supports the [DataCite v4.5 schema](http://schema.datacite.org/meta/kernel-4/). Additional metadata is now being sent to DataCite including metadata about related publications and files in the dataset. Improved metadata is being sent including how PIDs (ORCID, ROR, DOIs, etc.), license/terms, geospatial, and other metadata are represented. The enhanced metadata will automatically be sent to DataCite when datasets are created and published. Additionally, after publication, you can inspect what was sent by looking at the DataCite XML export.
+
+The additions are in rough alignment with the OpenAIRE XML export, but there are some minor differences in addition to the Relation Type addition, including an update to the DataCite 4.5 schema. For details see #10632, #10615 and the [design document](https://docs.google.com/document/d/1JzDo9UOIy9dVvaHvtIbOI8tFU6bWdfDfuQvWWpC0tkA/edit?usp=sharing) referenced there.
+
+Multiple backward incompatible changes and bug fixes have been made to API calls (three of four of which were not documented) related to updating PID target URLs and metadata at the provider service:
+- [Update Target URL for a Published Dataset at the PID provider](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-target-url-for-a-published-dataset-at-the-pid-provider)
+- [Update Target URL for all Published Datasets at the PID provider](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-target-url-for-all-published-datasets-at-the-pid-provider)
+- [Update Metadata for a Published Dataset at the PID provider](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-metadata-for-a-published-dataset-at-the-pid-provider)
+- [Update Metadata for all Published Datasets at the PID provider](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-metadata-for-all-published-datasets-at-the-pid-provider)
+
+### Full List of ISO 639-3 Languages Now Supported
+
+The controlled vocabulary values list for the metadata field "Language" in the citation block has now been extended to include roughly 7920 ISO 639-3 values.
+
+Some of the language entries in the pre-6.4 list correspond to "macro languages" in ISO-639-3 and admins/users may wish to update to use the corresponding individual language entries from ISO-639-3. As these cases are expected to be rare (they do not involve major world languages), finding them is not covered in the release notes. Anyone who desires help in this area is encouraged to reach out to the Dataverse community via any of the standard communication channels.
+
+ISO 639-3 codes were downloaded from [sil.org](https://iso639-3.sil.org/code_tables/download_tables#Complete%20Code%20Tables:~:text=iso%2D639%2D3_Code_Tables_20240415.zip) and the file used for merging with the existing citation.tsv was "iso-639-3.tab". See also #8578 and #10762.
+
+### Unlink Dataset Button
+
+A new "Unlink Dataset" button has been added to the dataset page to allow a user to unlink a dataset from a collection. To unlink a dataset the user must have permission to link the dataset. Additionally, the [existing API](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#unlink-a-dataset) for unlinking datasets has been updated to no longer require superuser access as the "Publish Dataset" permission is now enough. See also #10583 and #10689.
+
+### Pre-Publish File DOI Reservation
+
+Dataverse installations using DataCite as a persistent identifier (PID) provider (or other providers that support reserving PIDs) will be able to reserve PIDs for files when they are uploaded (rather than at publication time). Note that reserving file DOIs can slow uploads with large numbers of files so administrators may need to adjust timeouts (specifically any Apache "``ProxyPass / ajp://localhost:8009/ timeout=``" setting in the recommended Dataverse configuration).
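+
+For example, a sketch of such an adjustment (the 600-second value is purely illustrative; choose one that fits your typical upload sizes):
+
+```
+ProxyPass / ajp://localhost:8009/ timeout=600
+```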
+
+### Initial Support for Dataset Types
+
+Out of the box, all datasets now have the type "dataset" but superusers can add additional types. At this time the type of a dataset can only be set at creation time via API. The types "dataset", "software", and "workflow" (just those three, for now) will be sent to DataCite (as `resourceTypeGeneral`) when the dataset is published.
+
+For details see [the guides](https://guides.dataverse.org/en/6.4/user/dataset-management.html#dataset-types), #10517 and #10694. Please note that this feature is highly experimental and is expected to [evolve](https://github.com/IQSS/dataverse-pm/issues/307).
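+
+For example, the types defined in an installation can be listed via the API (a sketch based on the guides linked above; `$SERVER_URL` is a placeholder):
+
+```shell
+curl "$SERVER_URL/api/datasets/datasetTypes"
+```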
+
+### Croissant Support (Metadata Export)
+
+A new metadata export format called [Croissant](https://github.com/mlcommons/croissant) is now available as an external metadata exporter. It is oriented toward making datasets consumable by machine learning.
+
+For more about the Croissant exporter, including installation instructions, see https://github.com/gdcc/exporter-croissant. See also #10341, #10533, and [discussion](https://groups.google.com/g/dataverse-community/c/JI8HPgGarr8/m/DqEIkiwlAgAJ) on the mailing list.
+
+Please note: the Croissant exporter works best with Dataverse 6.2 and higher (where it updates the content of `<head>` as [described](https://guides.dataverse.org/en/6.4/admin/discoverability.html#schema-org-head) in the guides) but can be used with 6.0 and higher (to get the export functionality).
+
+### RO-Crate Support (Metadata Export)
+
+Dataverse now supports [RO-Crate](https://www.researchobject.org/ro-crate/) as a metadata export format. This functionality is not available out of the box, but you can enable one or more RO-Crate exporters from the [list of external exporters](https://guides.dataverse.org/en/6.4/installation/advanced.html#inventory-of-external-exporters). See also #10744 and #10796.
+
+### Rust API Client Library
+
+A Dataverse API client library for the Rust programming language is now available at https://github.com/gdcc/rust-dataverse and has been added to the [list of client libraries](https://guides.dataverse.org/en/6.4/api/client-libraries.html) in the API Guide. See also #10758.
+
+### Collection Thumbnail Logo for Featured Collections
+
+Collections can now have a thumbnail logo that is displayed when the collection is configured as a featured collection. If present, this thumbnail logo is shown. Otherwise, the collection logo is shown. Configuration is done under the "Theme" for a collection as explained in [the guides](https://guides.dataverse.org/en/6.4/user/dataverse-management.html#theme). See also #10291 and #10433.
+
+### Saved Searches Can Be Deleted
+
+Saved searches can now be deleted via API. See the [Saved Search](https://guides.dataverse.org/en/6.4/api/native-api.html#saved-search) section of the API Guide, #9317 and #10198.
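+
+A minimal sketch of such a deletion, assuming a saved search with database id `$ID` (the admin API is typically reachable only from localhost; see the linked guide for the authoritative syntax):
+
+```shell
+curl -X DELETE "http://localhost:8080/api/admin/savedsearches/$ID"
+```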
+
+### Notification Email Improvement
+
+When notification emails are sent the part of the closing that says "contact us for support at" will now show the support email address (`dataverse.mail.support-email`), when configured, instead of the default system email address. Using the system email address here was particularly problematic when it was a "noreply" address. See also #10287 and #10504.
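+
+For example, a sketch of configuring the support address as a JVM option (the address is a placeholder):
+
+```shell
+./asadmin create-jvm-options '-Ddataverse.mail.support-email=support@example.edu'
+```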
+
+### Ability to Disable Automatic Thumbnail Selection
+
+It is now possible to turn off the feature that automatically selects one of the image datafiles to serve as the thumbnail of the parent dataset. An admin can turn it off by enabling the [feature flag](https://guides.dataverse.org/en/6.4/installation/config.html#feature-flags) `dataverse.feature.disable-dataset-thumbnail-autoselect`. When the feature is disabled, a user can still manually pick a thumbnail image, or upload a dedicated thumbnail image. See also #10820.
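+
+For example (a sketch using Payara's asadmin; a restart is required for the flag to take effect):
+
+```shell
+./asadmin create-jvm-options '-Ddataverse.feature.disable-dataset-thumbnail-autoselect=true'
+```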
+
+### More Flexible PermaLinks
+
+The configuration setting `dataverse.pid.*.permalink.base-url`, which is used for PermaLinks, has been updated to support greater flexibility. Previously, the string `/citation?persistentId=` was automatically appended to the configured base URL. With this update, the base URL will now be used exactly as configured, without any automatic additions. See also #10775.
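+
+In other words, installations that relied on the automatic suffix should now include it in the configured value themselves. A sketch (`perma1` is a hypothetical provider id, the URL a placeholder; note the escaped colon required by asadmin):
+
+```shell
+./asadmin create-jvm-options '-Ddataverse.pid.perma1.permalink.base-url=https\://example.org/citation?persistentId='
+```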
+
+### Globus Async Framework
+
+A new alternative implementation of Globus polling during upload data transfers has been added in this release. This experimental framework does not rely on the instance staying up continuously for the duration of the transfer and saves the state information about Globus upload requests in the database. See `globus-use-experimental-async-framework` under [Feature Flags](https://guides.dataverse.org/en/6.4/installation/config.html#feature-flags) and [dataverse.files.globus-monitoring-server](https://guides.dataverse.org/en/6.4/installation/config.html#dataverse-files-globus-monitoring-server) in the Installation Guide. See also #10623 and #10781.
+
+### CVoc (Controlled Vocabulary): Allow ORCID and ROR to Be Used Together in Author Field
+
+Changes in Dataverse and updates to the ORCID and ROR external vocabulary scripts support deploying these for the citation block author field (and others). See also #10711, #10712, and https://github.com/gdcc/dataverse-external-vocab-support.
+
+### Development on Windows
+
+New instructions have been added for developers on Windows trying to run a Dataverse development environment using Windows Subsystem for Linux (WSL). See [the guides](https://guides.dataverse.org/en/6.4/developers/windows.html), #10606, and #10608.
+
+### Experimental Crossref PID (DOI) Provider
+
+Crossref can now be used as a PID (DOI) provider, but this feature is experimental. Please provide feedback through the usual channels. See also the [guides](https://guides.dataverse.org/en/6.4/installation/config.html#crossref-specific-settings), #8581, and #10806.
+
+### Improved JSON Schema Validation for Datasets
+
+JSON Schema validation has been enhanced with checks for required and allowed child objects as well as type checking for field types including `primitive`, `compound` and `controlledVocabulary`. More user-friendly error messages help pinpoint the issues in the dataset JSON. See [Retrieve a Dataset JSON Schema for a Collection](https://guides.dataverse.org/en/6.4/api/native-api.html#retrieve-a-dataset-json-schema-for-a-collection) in the API Guide, #10169, and #10543.
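+
+A sketch of retrieving the schema for a collection (`root` is an example alias):
+
+```shell
+curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/root/datasetSchema"
+```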
+
+### Counter Processor 1.05 Support (Make Data Count)
+
+Counter Processor 1.05 is now supported for use with Make Data Count. If you are running Counter Processor, you should reinstall/reconfigure it as described in the latest guides. Note that Counter Processor 1.05 requires Python 3, so you will need to follow the full Counter Processor install. Also note that if you configure the new version the same way, it will reprocess the days in the current month when it is first run. This is normal and will not affect the metrics in Dataverse. See also #10479.
+
+### Version Tags for Container Base Images
+
+With this release we introduce a detailed maintenance workflow for our container images. As an output of the [Containerization Working Group](https://ct.gdcc.io), the community takes another step towards production-ready containers available directly from the core project.
+
+The maintenance workflow regularly updates the [Container Base Image](https://guides.dataverse.org/en/6.4/container/base-image.html), which contains the operating system, Java, Payara, and tools and libraries required by the Dataverse application. Shipping these rolling releases as well as immutable revisions is the foundation for secure and reliable [Dataverse Application Container](https://guides.dataverse.org/en/6.4/container/app-image.html) images. See also #10478 and #10827.
+
+## Bugs Fixed
+
+### Update Current Version
+
+A significant bug in the superuser-only [Update Current Version](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#make-metadata-updates-without-changing-dataset-version) publication option was fixed. If the "Update Current Version" option was used when changes had been made to the dataset terms (rather than to dataset metadata), or if the PID provider service was down or returned an error, the update would fail, rendering the dataset unusable and requiring restoration from a backup. The fix in this release allows the update to succeed in both of these cases and redesigns the functionality so that unknown issues should not make the dataset unusable (i.e., the error is reported and the dataset remains in its current state, with the last-published version as it was and the changes still in the draft version).
+
+If you do not plan to upgrade to Dataverse 6.4 right away, you are encouraged to alert your superusers to this issue (see [this post](https://groups.google.com/g/dataverse-community/c/evn5C-pyrS8/m/JrH9vp47DwAJ)). Here are some workarounds for pre-6.4 versions:
+
+* Change the "dataset.updateRelease" entry in the Bundle.properties file (or local language version) to "Do Not Use" or similar (this doesn't disable the button but alerts superusers to the issue), or
+* Edit the dataset.xhtml file to remove the lines below, delete the contents of the generated and osgi-cache directories in the Dataverse Payara domain, and restart the Payara server. This will remove the "Update Current Version" from the UI.
+
+```
+
+
+
+```
+
+Again, the workarounds above are only for pre-6.4 versions. The bug has been fixed in Dataverse 6.4. See also #10797.
+
+### Broken Thumbnails
+
+Dataverse 6.3 introduced a bug where publishing would break the dataset thumbnail, which in turn broke the rendering of the parent collection (dataverse) page.
+
+This bug has been fixed but any existing broken thumbnails must be fixed manually. See "clearThumbnailFailureFlag" in the upgrade instructions below.
+
+Additionally, it is now possible to turn off the feature that automatically selects one of the image datafiles to serve as the thumbnail of the parent dataset. An admin can turn it off by raising the feature flag `-Ddataverse.feature.disable-dataset-thumbnail-autoselect=true`. When the feature is disabled, a user can still manually pick a thumbnail image or upload a dedicated thumbnail image.
+
+See also #10819, #10820, and [the post](https://groups.google.com/g/dataverse-community/c/evn5C-pyrS8/m/JrH9vp47DwAJ) on the mailing list.
+
+### No License, No Terms of Use
+
+When datasets have neither a license nor custom terms of use, the dataset page will now indicate this. Also, these datasets will no longer be indexed as having custom terms. See also #8796, #10513, and #10614.
+
+### CC0 License Bug Fix
+
+At a high level, some datasets have been mislabeled as "Custom License" when they should have been "CC0 1.0". This has been corrected.
+
+In Dataverse 5.10, datasets with only "CC0 Waiver" in the "termsofuse" field were converted to "Custom License" (instead of the CC0 1.0 license) through a SQL migration script (see #10634). On deployment of Dataverse 6.4, a new SQL migration script will run automatically to correct this, changing these datasets to CC0. You can review the script in #10634; it only affects datasets meeting all of the following criteria:
+
+- The existing "Terms of Use" must be equal to "This dataset is made available under a Creative Commons CC0 license with the following additional/modified terms and conditions: CC0 Waiver" (this was set in #10634).
+- The following terms fields must be empty: Confidentiality Declaration, Special Permissions, Restrictions, Citation Requirements, Depositor Requirements, Conditions, and Disclaimer.
+- The license ID must not be assigned.
+
+The script will set the license ID to that of the CC0 1.0 license and remove the contents of "termsofuse" field. See also #9081 and #10634.
+
+### Remap oai_dc Export and Harvesting Format Fields: dc:type and dc:date
+
+The `oai_dc` export and harvesting format has had the following fields remapped:
+
+- dc:type was mapped to the field "Kind of Data". Now it is hard-coded to the word "Dataset".
+- dc:date was mapped to the field "Production Date" when available and otherwise to "Publication Date". Now it is mapped to the field "Publication Date" or, if set, to the field used for the citation date (see [Set Citation Date Field Type for a Dataset](https://guides.dataverse.org/en/6.4/api/native-api.html#set-citation-date-field-type-for-a-dataset)).
+
+In order for these changes to be reflected in existing datasets, a [reexport all](https://guides.dataverse.org/en/6.4/admin/metadataexport.html#batch-exports-through-the-api) should be run (mentioned below). See #8129 and #10737.
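+
+A sketch of triggering that batch reexport via the admin API (typically reachable only from localhost):
+
+```shell
+curl "http://localhost:8080/api/admin/metadata/reExportAll"
+```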
+
+### Zip File No Longer Misdetected as Shapefile (Hidden Directories)
+
+When detecting file types, Dataverse would previously detect a zip file as a shapefile if it contained [markers of a shapefile](https://guides.dataverse.org/en/6.4/developers/geospatial.html) in hidden directories. These hidden directories are now ignored when deciding whether a zip file is a shapefile. See also #8945 and #10627.
+
+### External Controlled Vocabulary
+
+This release fixes a bug (introduced in v6.3) in the external controlled vocabulary mechanism that could cause indexing to fail (with a NullPointerException) when a script is configured for one child field and no other child fields were managed. See also #10869 and #10870.
+
+### Valid JSON in Error Response
+
+When any `ApiBlockingFilter` policy applies to a request, the JSON in the body of the error response is now valid JSON. See also #10085.
+
+### Docker Container Base Image Security and Compatibility
+
+- Switch "wait-for" to "wait4x", aligned with the Configbaker Image
+- Update "jattach" to v2.2
+- Install AMD64 / ARM64 versions of tools as necessary
+- Run base image as unprivileged user by default instead of `root` - this was an oversight from OpenShift changes
+- Linux User, Payara Admin and Domain Master passwords:
+ - Print hints when the default, publicly known passwords are still in place
+ - Enable replacing these passwords at container boot time
+- Enable building with an updated Temurin JRE image based on Ubuntu 24.04 LTS
+- Fix entrypoint script troubles with pre- and postboot script files
+- Unify location of files at CONFIG_DIR=/opt/payara/config, avoid writing to other places
+
+See also #10508, #10672 and #10722.
+
+### Cleanup of Temp Directories
+
+In this release we addressed an issue where copies of files uploaded via the UI were left in one specific temp directory (`.../domain1/uploads` by default). We would like to remind all installation admins that it is strongly recommended to have automated (and aggressive) cleanup mechanisms in place for all the temp directories used by Dataverse. For example, at Harvard/IQSS we use the following configuration for the PrimeFaces uploads directory mentioned above. (Note that, even with this fix in place, PrimeFaces will still leave a large number of small log files in that location.)
+
+Instead of the default location (`.../domain1/uploads`) we use a directory on a dedicated partition, outside of the filesystem where Dataverse is installed, via the following JVM option:
+
+```
+-Ddataverse.files.uploads=/uploads/web
+```
+
+and we have a dedicated cronjob that runs every 30 minutes and deletes everything older than 2 hours in that directory:
+
+```
+15,45 * * * * /bin/find /uploads/web/ -mmin +119 -type f -name "upload*" -exec rm -f {} \; > /dev/null 2>&1
+```
+
+### Trailing Commas in Author Name Now Permitted
+
+When an author name ended in a comma (e.g. `Smith,` or `Smith, `), the dataset page was broken after publishing (a "500" error page was presented to the user). The underlying issue causing the JSON-LD Schema.org output on the page to break was fixed. See #10343 and #10776.
+
+## API Updates
+
+### Search API: affiliation, parentDataverseName, image_url, etc.
+
+The Search API (`/api/search`) response now includes additional fields, depending on the type.
+
+For collections (dataverses):
+
+- "affiliation"
+- "parentDataverseName"
+- "parentDataverseIdentifier"
+- "image_url" (optional)
+
+```javascript
+"items": [
+ {
+ "name": "Darwin's Finches",
+ ...
+ "affiliation": "Dataverse.org",
+ "parentDataverseName": "Root",
+ "parentDataverseIdentifier": "root",
+ "image_url":"/api/access/dvCardImage/{identifier}"
+(etc, etc)
+```
+
+For datasets:
+
+- "image_url" (optional)
+
+```javascript
+"items": [
+ {
+ ...
+ "image_url": "http://localhost:8080/api/datasets/2/logo"
+ ...
+(etc, etc)
+```
+
+For files:
+
+- "releaseOrCreateDate"
+- "image_url" (optional)
+
+```javascript
+"items": [
+ {
+ "name": "test.png",
+ ...
+ "releaseOrCreateDate": "2016-05-10T12:53:39Z",
+ "image_url":"/api/access/datafile/42?imageThumb=true"
+(etc, etc)
+```
+
+These examples are also shown in the [Search API](https://guides.dataverse.org/en/6.4/api/search.html) section of the API Guide.
+
+The image_url field was already part of the SolrSearchResult JSON (and incorrectly appeared in Search API documentation), but it wasn't returned by the API because it was appended only after the Solr query was executed in the SearchIncludeFragment of JSF (the old/current UI framework). Now, the field is set in SearchServiceBean, ensuring it is always returned by the API when an image is available.
+
+The Solr schema.xml file has been updated to include a new field called "dvParentAlias" for supporting the new response field "parentDataverseIdentifier". See upgrade instructions below.
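+
+As a sketch, the added declaration looks roughly like the following (the attribute values here are assumptions; the schema.xml shipped with this release is authoritative):
+
+```
+<field name="dvParentAlias" type="text_en" stored="true" indexed="true" multiValued="false"/>
+```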
+
+See also #10810 and #10811.
+
+### Search API: publicationStatuses
+
+The Search API (`/api/search`) response will now include publicationStatuses in the JSON response as long as the list is not empty.
+
+Example:
+
+```javascript
+"items": [
+ {
+ "name": "Darwin's Finches",
+ ...
+ "publicationStatuses": [
+ "Unpublished",
+ "Draft"
+ ],
+(etc, etc)
+```
+
+See also #10733 and #10738.
+
+### Search Facet Information Exposed
+
+A new endpoint `/api/datasetfields/facetables` lists all facetable dataset fields defined in the installation, as described in [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#list-all-facetable-dataset-fields).
+
+A new optional query parameter "returnDetails" has been added to the `/api/dataverses/{identifier}/facets/` endpoint to include detailed information about each DataverseFacet, as described in [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#list-facets-configured-for-a-dataverse-collection). See also #10726 and #10727.
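+
+Sketches of both calls (`root` is an example collection alias):
+
+```shell
+curl "$SERVER_URL/api/datasetfields/facetables"
+curl "$SERVER_URL/api/dataverses/root/facets?returnDetails=true"
+```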
+
+### User Permissions on Collections
+
+A new endpoint at `/api/dataverses/{identifier}/userPermissions` for obtaining the user permissions on a collection (dataverse) has been added. See also [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#get-user-permissions-on-a-dataverse), #10749 and #10751.
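+
+A sketch (an API token identifies the user whose permissions are returned; `root` is an example alias):
+
+```shell
+curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/root/userPermissions"
+```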
+
+### addDataverse Extended
+
+The addDataverse (`/api/dataverses/{identifier}`) API endpoint has been extended to allow adding metadata blocks, input levels and facet IDs at creation time, as the Dataverse page in create mode does in JSF. See also [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#create-a-dataverse-collection), #10633 and #10644.
+
+### Metadata Blocks and Display on Create
+
+The `/api/dataverses/{identifier}/metadatablocks` endpoint has been fixed to not return fields marked as displayOnCreate=true if there is an input level with include=false, when query parameters returnDatasetFieldTypes=true and onlyDisplayedOnCreate=true are set. See also #10741 and #10767.
+
+The fields "depositor" and "dateOfDeposit" in the citation.tsv metadata block file have been updated to have the property "displayOnCreate" set to TRUE. In practice, only the API is affected because the UI has special logic that already shows these fields when datasets are created. See also and #10850 and #10884.
+
+### Feature Flags Can Be Listed
+
+It is now possible to list all feature flags and see if they are enabled or not. See also [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#list-all-feature-flags) and #10732.
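+
+A quick sketch (assuming, as elsewhere in these notes, that the admin API is reachable on localhost):
+
+```shell
+# List all feature flags and whether each is enabled
+curl "http://localhost:8080/api/admin/featureFlags"
+```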
+
+## Settings Added
+
+The following settings have been added (a configuration sketch follows the list):
+
+- dataverse.feature.disable-dataset-thumbnail-autoselect
+- dataverse.feature.globus-use-experimental-async-framework
+- dataverse.files.globus-monitoring-server
+- dataverse.pid.*.crossref.url
+- dataverse.pid.*.crossref.rest-api-url
+- dataverse.pid.*.crossref.username
+- dataverse.pid.*.crossref.password
+- dataverse.pid.*.crossref.depositor
+- dataverse.pid.*.crossref.depositor-email
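+
+As a hedged sketch, MicroProfile Config settings like these can be set as JVM options via `asadmin`; the provider id `crossref1` and the values below are placeholders, not defaults (note that colons in values must be escaped for `asadmin`):
+
+```shell
+# Hypothetical example: configure a CrossRef PID provider named "crossref1"
+./asadmin create-jvm-options "-Ddataverse.pid.crossref1.crossref.url=https\://doi.crossref.org"
+./asadmin create-jvm-options "-Ddataverse.pid.crossref1.crossref.depositor=exampledepositor"
+```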
+
+## Backward Incompatible Changes
+
+- The oai_dc export format has changed. See the "Remap oai_dc" section above.
+- Several APIs related to DataCite have changed. See "More and Better Data Sent to DataCite" above.
+
+## Complete List of Changes
+
+For the complete list of code changes in this release, see the [6.4 milestone](https://github.com/IQSS/dataverse/issues?q=milestone%3A6.4+is%3Aclosed) in GitHub.
+
+## Getting Help
+
+For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/g/dataverse-community) or email support@dataverse.org.
+
+## Installation
+
+If this is a new installation, please follow our [Installation Guide](https://guides.dataverse.org/en/latest/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/latest/installation/intro.html#getting-help) if you need it!
+
+Once you are in production, we would be delighted to update our [map of Dataverse installations](https://dataverse.org/installations) around the world to include yours! Please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org to join the club!
+
+You are also very welcome to join the [Global Dataverse Community Consortium](https://www.gdcc.io/) (GDCC).
+
+## Upgrade Instructions
+
+Upgrading requires a maintenance window and downtime. Please plan accordingly, create backups of your database, etc.
+
+These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.3.
+
+0\. These instructions assume that you are upgrading from the immediate previous version. If you are running an earlier version, the only supported way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to this version.
+
+If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user.
+
+In the following commands, we assume that Payara 6 is installed in `/usr/local/payara6`. If not, adjust as needed.
+
+```shell
+export PAYARA=/usr/local/payara6
+```
+
+(or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell)
+
+1\. Undeploy the previous version
+
+```shell
+$PAYARA/bin/asadmin undeploy dataverse-6.3
+```
+
+2\. Stop and start Payara
+
+```shell
+service payara stop
+service payara start
+```
+
+3\. Deploy this version
+
+```shell
+$PAYARA/bin/asadmin deploy dataverse-6.4.war
+```
+
+Note: if you have any trouble deploying, stop Payara, remove the following directories, start Payara, and try to deploy again.
+
+```shell
+service payara stop
+rm -rf $PAYARA/glassfish/domains/domain1/generated
+rm -rf $PAYARA/glassfish/domains/domain1/osgi-cache
+rm -rf $PAYARA/glassfish/domains/domain1/lib/databases
+```
+
+4\. For installations with internationalization:
+
+Please remember to update translations via [Dataverse language packs](https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs).
+
+5\. Restart Payara
+
+```shell
+service payara stop
+service payara start
+```
+
+6\. Update metadata blocks
+
+These changes reflect incremental improvements made to the handling of core metadata fields.
+
+```shell
+wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/scripts/api/data/metadatablocks/citation.tsv
+
+curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file citation.tsv
+```
+
+7\. Update the Solr schema.xml file. Start with the standard v6.4 schema.xml, then, if your installation uses any custom or experimental metadata blocks, update it to include the extra fields (step 7a).
+
+Stop Solr (usually `service solr stop`, but this depends on your Solr installation/OS; see the [Installation Guide](https://guides.dataverse.org/en/6.4/installation/prerequisites.html#solr-init-script)).
+
+```shell
+service solr stop
+```
+
+Replace schema.xml
+
+```shell
+wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/conf/solr/schema.xml
+cp schema.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf
+```
+
+Start Solr (but if you use any custom metadata blocks, perform step 7a first).
+
+```shell
+service solr start
+```
+
+7a\. For installations with custom or experimental metadata blocks:
+
+Before starting Solr, update the schema to include all the extra metadata fields that your installation uses. We do this by collecting the output of the Dataverse schema API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed to reflect the names of the directories, if different):
+
+```shell
+wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/conf/solr/update-fields.sh
+chmod +x update-fields.sh
+curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-9.4.1/server/solr/collection1/conf/schema.xml
+```
+
+Now start Solr.
+
+8\. Reindex Solr
+
+Below is the simplest way to reindex Solr:
+
+```shell
+curl http://localhost:8080/api/admin/index
+```
+
+The API above rebuilds the existing index "in place". If you want to be absolutely sure that your index is up to date and consistent, you may consider wiping it clean and reindexing everything from scratch (see [the guides](https://guides.dataverse.org/en/latest/admin/solr-search-index.html)). Just note that, depending on the size of your database, a full reindex may take a while, and users will see incomplete search results during that window.
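+
+If you do opt for a full reindex, a sketch of the clear-and-reindex sequence (see the linked guide for details and caveats):
+
+```shell
+# Wipe the existing index, then rebuild it from scratch
+curl http://localhost:8080/api/admin/index/clear
+curl http://localhost:8080/api/admin/index
+```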
+
+9\. Run reExportAll to update dataset metadata exports
+
+This step is necessary because of changes described above for the `Datacite` and `oai_dc` export formats.
+
+Below is the simplest way to reexport all dataset metadata. For more advanced usage, please see [the guides](http://guides.dataverse.org/en/6.4/admin/metadataexport.html#batch-exports-through-the-api).
+
+```shell
+curl http://localhost:8080/api/admin/metadata/reExportAll
+```
+
+10\. Pushing updated metadata to DataCite
+
+(If you don't use DataCite, you can skip this.)
+
+Above you updated the citation metadata block and Solr with the new "relationType" field. With these two changes, the "Relation Type" fields will be available and creation/publication of datasets will result in the expanded XML being sent to DataCite. You've also already run "reExportAll" to update the `Datacite` metadata export format.
+
+Entries at DataCite for published datasets can be updated by a superuser using an API call (newly [documented](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-metadata-for-all-published-datasets-at-the-pid-provider)):
+
+`curl -X POST -H "X-Dataverse-key:$API_TOKEN" http://localhost:8080/api/datasets/modifyRegistrationPIDMetadataAll`
+
+This will loop through all published datasets (and released files with PIDs). As long as the loop completes, the call will return a 200/OK response. Any PIDs for which the update fails can be found using the following command:
+
+`grep 'Failure for id' server.log`
+
+Failures may occur if PIDs were never registered, or if they were never made findable. Any such cases can be fixed manually in DataCite Fabrica or via the API: use the [Reserve a PID](https://guides.dataverse.org/en/6.4/api/native-api.html#reserve-a-pid) call for PIDs that were never registered and the newly documented `/api/datasets/{id}/modifyRegistration` call for PIDs that were never made findable. See also [the guides](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#send-dataset-metadata-to-pid-provider). Please reach out with any questions.
+
+PIDs can also be updated by a superuser on a per-dataset basis using:
+
+`curl -X POST -H "X-Dataverse-key:$API_TOKEN" http://localhost:8080/api/datasets/{id}/modifyRegistrationMetadata`
+
+### Additional Upgrade Steps
+
+11\. If there are broken thumbnails
+
+To restore any broken thumbnails caused by the bug described above, you can call the `http://localhost:8080/api/admin/clearThumbnailFailureFlag` API, which will attempt to clear the flag on all files (regardless of whether the flag was set by this bug or by some other problem with the file), or `http://localhost:8080/api/admin/clearThumbnailFailureFlag/$FILE_ID` to clear the flag for an individual file. Calling the former, batch API is recommended.
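+
+A minimal sketch of the batch call (assuming the endpoint is invoked with DELETE, as with similar admin flag-clearing calls; verify the method against the guides for your version):
+
+```shell
+# Clear the thumbnail failure flag on all files
+curl -X DELETE http://localhost:8080/api/admin/clearThumbnailFailureFlag
+```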
+
+12\. PermaLinks with custom base-url
+
+If you currently use PermaLinks with a custom `base-url`, you must manually append `/citation?persistentId=` to the base URL to maintain functionality.
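+
+For example (a sketch assuming a PermaLink provider id of `perma1` and that the base URL is set via a JVM option; colons are escaped as `asadmin` requires):
+
+```shell
+# Before: -Ddataverse.pid.perma1.permalink.base-url=https\://example.org
+# After, with /citation?persistentId= appended to keep landing-page URLs working:
+./asadmin create-jvm-options "-Ddataverse.pid.perma1.permalink.base-url=https\://example.org/citation?persistentId="
+```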
+
+If you use PermaLinks without a configured `base-url`, no changes are required.
diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software.json b/doc/sphinx-guides/source/_static/api/dataset-create-software.json
new file mode 100644
index 00000000000..4c649bff0aa
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/api/dataset-create-software.json
@@ -0,0 +1,87 @@
+{
+ "datasetType": "software",
+ "datasetVersion": {
+ "license": {
+ "name": "CC0 1.0",
+ "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+ },
+ "metadataBlocks": {
+ "citation": {
+ "fields": [
+ {
+ "value": "pyDataverse",
+ "typeClass": "primitive",
+ "multiple": false,
+ "typeName": "title"
+ },
+ {
+ "value": [
+ {
+ "authorName": {
+ "value": "Range, Jan",
+ "typeClass": "primitive",
+ "multiple": false,
+ "typeName": "authorName"
+ },
+ "authorAffiliation": {
+ "value": "University of Stuttgart",
+ "typeClass": "primitive",
+ "multiple": false,
+ "typeName": "authorAffiliation"
+ }
+ }
+ ],
+ "typeClass": "compound",
+ "multiple": true,
+ "typeName": "author"
+ },
+      {
+        "value": [
+          {
+            "datasetContactEmail": {
+              "typeClass": "primitive",
+              "multiple": false,
+              "typeName": "datasetContactEmail",
+              "value": "jan@mailinator.com"
+            },
+            "datasetContactName": {
+              "typeClass": "primitive",
+              "multiple": false,
+              "typeName": "datasetContactName",
+              "value": "Range, Jan"
+            }
+          }
+        ],
+        "typeClass": "compound",
+        "multiple": true,
+        "typeName": "datasetContact"
+      },
+      {
+        "value": [
+          {
+            "dsDescriptionValue": {
+              "value": "A Python module for Dataverse.",
+              "multiple": false,
+              "typeClass": "primitive",
+              "typeName": "dsDescriptionValue"
+            }
+          }
+        ],
+        "typeClass": "compound",
+        "multiple": true,
+        "typeName": "dsDescription"
+      },
+ {
+ "value": [
+ "Computer and Information Science"
+ ],
+ "typeClass": "controlledVocabulary",
+ "multiple": true,
+ "typeName": "subject"
+ }
+ ],
+ "displayName": "Citation Metadata"
+ }
+ }
+ }
+}
diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld b/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld
new file mode 100644
index 00000000000..6f072967dc8
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld
@@ -0,0 +1,16 @@
+{
+ "http://purl.org/dc/terms/title": "Darwin's Finches",
+ "http://purl.org/dc/terms/subject": "Medicine, Health and Life Sciences",
+ "http://purl.org/dc/terms/creator": {
+ "https://dataverse.org/schema/citation/authorName": "Finch, Fiona",
+ "https://dataverse.org/schema/citation/authorAffiliation": "Birds Inc."
+ },
+ "https://dataverse.org/schema/citation/datasetContact": {
+ "https://dataverse.org/schema/citation/datasetContactEmail": "finch@mailinator.com",
+ "https://dataverse.org/schema/citation/datasetContactName": "Finch, Fiona"
+ },
+ "https://dataverse.org/schema/citation/dsDescription": {
+ "https://dataverse.org/schema/citation/dsDescriptionValue": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds."
+ },
+ "https://dataverse.org/schema/core#datasetType": "software"
+}
diff --git a/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json b/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json
new file mode 100644
index 00000000000..fef32aa1e2c
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json
@@ -0,0 +1,65 @@
+{
+ "name": "Scientific Research",
+ "alias": "science",
+ "dataverseContacts": [
+ {
+ "contactEmail": "pi@example.edu"
+ },
+ {
+ "contactEmail": "student@example.edu"
+ }
+ ],
+ "affiliation": "Scientific Research University",
+ "description": "We do all the science.",
+ "dataverseType": "LABORATORY",
+ "metadataBlocks": {
+ "metadataBlockNames": [
+ "citation", "geospatial"
+ ],
+ "inputLevels": [
+ {
+ "datasetFieldTypeName": "geographicCoverage",
+ "include": true,
+ "required": true
+ },
+ {
+ "datasetFieldTypeName": "country",
+ "include": true,
+ "required": true
+ },
+ {
+ "datasetFieldTypeName": "geographicUnit",
+ "include": false,
+ "required": false
+ },
+ {
+ "datasetFieldTypeName": "geographicBoundingBox",
+ "include": false,
+ "required": false
+ },
+ {
+ "datasetFieldTypeName": "westLongitude",
+ "include": false,
+ "required": false
+ },
+ {
+ "datasetFieldTypeName": "eastLongitude",
+ "include": false,
+ "required": false
+ },
+ {
+ "datasetFieldTypeName": "northLatitude",
+ "include": false,
+ "required": false
+ },
+ {
+ "datasetFieldTypeName": "southLatitude",
+ "include": false,
+ "required": false
+ }
+ ],
+ "facetIds": [
+ "authorName", "authorAffiliation"
+ ]
+ }
+}
diff --git a/doc/sphinx-guides/source/_static/util/counter_daily.sh b/doc/sphinx-guides/source/_static/util/counter_daily.sh
index 674972b18f2..5095a83b7e2 100644
--- a/doc/sphinx-guides/source/_static/util/counter_daily.sh
+++ b/doc/sphinx-guides/source/_static/util/counter_daily.sh
@@ -1,6 +1,6 @@
#! /bin/bash
-COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-0.1.04"
+COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-1.05"
MDC_LOG_DIRECTORY="/usr/local/payara6/glassfish/domains/domain1/logs/mdc"
# counter_daily.sh
diff --git a/doc/sphinx-guides/source/_templates/navbar.html b/doc/sphinx-guides/source/_templates/navbar.html
index c7b81dcb937..d88306be8ae 100644
--- a/doc/sphinx-guides/source/_templates/navbar.html
+++ b/doc/sphinx-guides/source/_templates/navbar.html
@@ -25,7 +25,6 @@
About
-
+
-
-
+
+
diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml
index 936d354e9d7..6de0f00e94e 100644
--- a/src/main/webapp/dataset.xhtml
+++ b/src/main/webapp/dataset.xhtml
@@ -86,7 +86,7 @@
-
+
@@ -522,6 +522,16 @@
+
+
+
+ #{bundle['dataset.unlinkBtn']}
+
+
+
@@ -585,13 +595,13 @@
-
+
- )
-
+
@@ -631,6 +641,7 @@
or !empty DatasetPage.datasetVersionUI.keywordDisplay
or !empty DatasetPage.datasetVersionUI.subject.value
or !empty DatasetPage.datasetVersionUI.relPublicationCitation
+ or !empty DatasetPage.datasetVersionUI.relPublicationUrl
or !empty DatasetPage.datasetVersionUI.notes.value) and !empty DatasetPage.datasetSummaryFields}">