From 91ddec7faf61e50bb5b9432ee54ec492d8a8ca25 Mon Sep 17 00:00:00 2001
From: seeland
Date: Fri, 17 Aug 2018 16:37:03 +0200
Subject: [PATCH 01/56] added endpoint as config parameter

---
 .../iq/dataverse/dataaccess/S3AccessIO.java | 25 +++++++++++++++----
 1 file changed, 20 insertions(+), 5 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java
index 980b3f97672..5154dca8382 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java
@@ -7,6 +7,7 @@
 import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.AWSStaticCredentialsProvider;
 import com.amazonaws.auth.profile.ProfileCredentialsProvider;
+import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
 import com.amazonaws.regions.Regions;
 import com.amazonaws.services.s3.AmazonS3;
 import com.amazonaws.services.s3.AmazonS3ClientBuilder;
@@ -78,13 +79,27 @@ public S3AccessIO(T dvObject) {
     public S3AccessIO(T dvObject, DataAccessRequest req) {
         super(dvObject, req);
         this.setIsLocalFile(false);
-        try {
-            s3 = AmazonS3ClientBuilder.standard().defaultClient();
-        } catch (Exception e) {
-            throw new AmazonClientException(
-                    "Cannot instantiate a S3 client using; check your AWS credentials and region",
+        String endpoint = System.getProperty("dataverse.files.s3-endpoint");
+        if (endpoint == null) {
+            try {
+                s3 = AmazonS3ClientBuilder.standard().defaultClient();
+            } catch (Exception e) {
+                throw new AmazonClientException(
+                        "Cannot instantiate a S3 client using AWS SDK defaults for credentials and region",
                     e);
+            }
+        } else {
+            try {
+                s3 = AmazonS3ClientBuilder.standard().enablePathStyleAccess().
+                        withEndpointConfiguration(new EndpointConfiguration(
+                                endpoint, "")).build();
+            } catch (Exception e) {
+                throw new AmazonClientException(
+                        "Cannot instantiate a S3 client using specified endpoint configuration", e);
+            }
         }
+    }
 
     public static String S3_IDENTIFIER_PREFIX = "s3";
 

From bc09b08b089a05836ae8f5ced4395d310d997ae6 Mon Sep 17 00:00:00 2001
From: seeland
Date: Fri, 11 Jan 2019 15:32:24 +0100
Subject: [PATCH 02/56] added custom dir for branding pics

---
 src/main/webapp/WEB-INF/glassfish-web.xml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/main/webapp/WEB-INF/glassfish-web.xml b/src/main/webapp/WEB-INF/glassfish-web.xml
index 1d35ebb5867..1de297f87e9 100644
--- a/src/main/webapp/WEB-INF/glassfish-web.xml
+++ b/src/main/webapp/WEB-INF/glassfish-web.xml
@@ -10,6 +10,7 @@
+

From 709b3678308c830ef93b54fbb57dba996edda455 Mon Sep 17 00:00:00 2001
From: Isabella Kutger
Date: Mon, 20 May 2019 11:15:32 +0200
Subject: [PATCH 03/56] Generate a one-time URL for uploading big data
 directly to S3 storage. Only the URL is generated and returned; the URL is
 not stored yet.
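A minimal sketch, not part of the diff below, of generating a time-limited pre-signed PUT URL
with the AWS SDK v1, the same GeneratePresignedUrlRequest API the S3AccessIO change in this
patch uses; the bucket and key names are placeholders:

    // Minimal sketch, assuming an already configured AmazonS3 client; bucket and key are placeholders.
    import java.net.URL;
    import java.util.Date;
    import com.amazonaws.HttpMethod;
    import com.amazonaws.services.s3.AmazonS3;
    import com.amazonaws.services.s3.AmazonS3ClientBuilder;
    import com.amazonaws.services.s3.model.GeneratePresignedUrlRequest;

    public class PresignedPutSketch {

        // Build a PUT URL that stays valid for one hour, matching the expiration used in S3AccessIO.
        public static URL presign(AmazonS3 s3, String bucket, String key) {
            Date expiration = new Date(System.currentTimeMillis() + 60L * 60L * 1000L);
            GeneratePresignedUrlRequest request = new GeneratePresignedUrlRequest(bucket, key)
                    .withMethod(HttpMethod.PUT)
                    .withExpiration(expiration);
            return s3.generatePresignedUrl(request);
        }

        public static void main(String[] args) {
            AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
            System.out.println(presign(s3, "placeholder-bucket", "placeholder-storage-identifier"));
        }
    }

A client that receives such a URL from the new getOneTimeUrl endpoint then uploads the file with
a plain HTTP PUT to that URL before it expires.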
--- .../harvard/iq/dataverse/api/Datasets.java | 81 +++++++++++++++++++ .../iq/dataverse/dataaccess/S3AccessIO.java | 16 ++++ .../iq/dataverse/dataaccess/StorageIO.java | 7 ++ 3 files changed, 104 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 69a346080bd..39b35671140 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -29,6 +29,8 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode; +import edu.harvard.iq.dataverse.dataaccess.DataAccess; +import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil; import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse; import edu.harvard.iq.dataverse.dataset.DatasetThumbnail; @@ -77,14 +79,28 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.EjbUtil; +import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.search.IndexServiceBean; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; + +import com.amazonaws.AmazonClientException; +import com.amazonaws.HttpMethod; +import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.client.builder.AwsClientBuilder; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3Client; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; +import com.amazonaws.services.s3.model.GeneratePresignedUrlRequest; + import java.io.IOException; import java.io.InputStream; import java.io.StringReader; +import java.net.URL; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Date; @@ -1191,6 +1207,71 @@ public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBo } } + /** + * Get a pre-signed URL to upload data too large for add-method to S3 + * + * @param idSupplied + * @param jsonData + * @return + */ + @POST + @Path("{id}/getOneTimeUrl") + @Consumes(MediaType.MULTIPART_FORM_DATA) + public Response getOneTimeUrl(@PathParam("id")String idSupplied, @FormDataParam("jsonData") String jsonData){ + // ------------------------------------- + // (1) Get the user from the API key + // ------------------------------------- + User authUser; + try { + authUser = findUserOrDie(); + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, ResourceBundle.getBundle("Bundle").getString("file.addreplace" + + ".error.auth")); + } + // ------------------------------------- + // (1a) Find dataset + // ------------------------------------- + Dataset dataset; + try { + dataset = findDatasetOrDie(idSupplied); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + // ------------------------------------- + // (2) Generate pre-signed URL for uploading file to S3 + // ------------------------------------- + + //Todo: Remove Logging below; just for checking if s3 configuration is correct + String endpoint = 
System.getProperty("dataverse.files.s3-endpoint"); + Logger.getLogger(Files.class.getName()).info("Endpoint is: " + endpoint); + String bucketName = System.getProperty("dataverse.files.s3-bucket-name"); + Logger.getLogger(Files.class.getName()).info("Bucketname is: " + bucketName); + + + StorageIO dataAccess = null; + DataFile emptyFile = new DataFile(); + emptyFile.setOwner(dataset); + try { + dataAccess = DataAccess.createNewStorageIO(emptyFile, "s3"); + FileUtil.generateStorageIdentifier(emptyFile); + } catch (IOException e) { + Logger.getLogger(Files.class.getName()).warning("Failed to save the file, storage id " + emptyFile.getStorageIdentifier() + " (" + e.getMessage() + ")"); + } + + URL url = dataAccess.generateS3PreSignedUrl(emptyFile.getStorageIdentifier()); + + // ------------------------------------- + // (3) //todo: Save the pre-signed URL and provided JSON-Data in database + // ------------------------------------- + + // ------------------------------------- + // (4) Return the pre-signed URL + // ------------------------------------- + + return Response.ok().entity(url).build(); + } + /** * Add a File to an existing Dataset * diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index db1eb566629..f8fcb05eeb1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -39,6 +39,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; +import java.util.Date; import java.util.List; import java.util.Random; import java.util.logging.Logger; @@ -677,6 +678,21 @@ private String getMainFileKey() throws IOException { return key; } + + @Override + public URL generateS3PreSignedUrl(String objectKey) { + Date expiration = new Date(); + long expirationAfter = expiration.getTime(); + expirationAfter += 1000 * 60 * 60; + expiration.setTime(expirationAfter); + + GeneratePresignedUrlRequest urlRequest = + new GeneratePresignedUrlRequest(bucketName, objectKey). 
withMethod(HttpMethod.PUT).withExpiration(expiration); + + URL url = s3.generatePresignedUrl(urlRequest); + + return url; + } public String generateTemporaryS3Url() throws IOException { //Questions: diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index fda93b3f557..fc49493fe16 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -30,6 +30,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.net.URL; import java.nio.channels.Channel; import java.nio.channels.ReadableByteChannel; import java.nio.channels.WritableByteChannel; @@ -225,6 +226,9 @@ public boolean canWrite() { private GetMethod method = null; private Header[] responseHeaders; + // For S3 + private URL s3PreSignedUrl; + // getters: public Channel getChannel() { @@ -360,6 +364,9 @@ public boolean noVarHeader() { return noVarHeader; } + public URL generateS3PreSignedUrl(String objectKey) {return s3PreSignedUrl;} + + // setters: public void setDvObject(T f) { dvObject = f; From 396059a7e9a0fbcf3bfd5994ee42db2ed4756aa6 Mon Sep 17 00:00:00 2001 From: Isabella Kutger Date: Fri, 24 May 2019 15:27:04 +0200 Subject: [PATCH 04/56] Endpoint for bigDataUpload ready --- .../harvard/iq/dataverse/S3BigDataUpload.java | 166 ++++++++++++++++++ .../dataverse/S3BigDataUploadServiceBean.java | 68 +++++++ .../harvard/iq/dataverse/api/Datasets.java | 19 +- 3 files changed, 252 insertions(+), 1 deletion(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/S3BigDataUpload.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java diff --git a/src/main/java/edu/harvard/iq/dataverse/S3BigDataUpload.java b/src/main/java/edu/harvard/iq/dataverse/S3BigDataUpload.java new file mode 100644 index 00000000000..7c4a86f8e79 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/S3BigDataUpload.java @@ -0,0 +1,166 @@ +package edu.harvard.iq.dataverse; + +import com.google.gson.annotations.Expose; + +import org.hibernate.validator.constraints.NotBlank; + +import java.io.Serializable; +import java.sql.Timestamp; +import java.util.logging.Logger; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.Table; +import javax.validation.constraints.Pattern; + +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.User; + +@Entity +@Table +public class S3BigDataUpload implements Serializable { + private static final Logger logger = Logger.getLogger(S3BigDataUpload.class.getCanonicalName()); + + private static final long serialVersionID = 1L; + + public S3BigDataUpload() { + } + + public S3BigDataUpload(String preSignedUrl, User user, String jsonData, String datasetId, String storageId, + String fileName, String checksum, String checksumType, String contentType, Timestamp creationTime) { + this.preSignedUrl = preSignedUrl; + this.user = user; + this.jsonData = jsonData; + this.datasetId = datasetId; + this.storageId = storageId; + this.fileName = fileName; + this.checksum = checksum; + this.checksumType = checksumType; + this.contentType = contentType; + this.creationTime = creationTime; + } + + @Expose + @Id + @Column(nullable = false, length = 1024) + private String preSignedUrl; + + @Expose + @JoinColumn + 
private User user; + + @Expose + @Column(nullable = true) + private String jsonData; + + @Expose + @Column (nullable = false) + private String datasetId; + + @Expose + @Column + private String storageId; + + @Expose + @Column (nullable = false) + private String fileName; + + @Expose + @Column(nullable = false, length = 1024) + private String checksum; + + @Expose + @Column(nullable = false) + private String checksumType; + + @Expose + @NotBlank + @Column(nullable = false) + private String contentType; + + @Expose + @Column (nullable = false) + private Timestamp creationTime; + + public String getPreSignedUrl() { + return preSignedUrl; + } + + public void setPreSignedUrl(String preSignedUrl) { + this.preSignedUrl = preSignedUrl; + } + + public User getUser() { + return user; + } + + public void setUser(User user) { + this.user = user; + } + + public String getJsonData() { + return jsonData; + } + + public void setJsonData(String jsonData) { + this.jsonData = jsonData; + } + + public String getDatasetId() { + return datasetId; + } + + public void setDatasetId(String datasetId) { + this.datasetId = datasetId; + } + + public String getStorageId() { + return storageId; + } + + public void setStorageId(String storageId) { + this.storageId = storageId; + } + + public String getFileName() { + return fileName; + } + + public void setFileName(String fileName) { + this.fileName = fileName; + } + + public String getChecksum() { + return checksum; + } + + public void setChecksum(String checksum) { + this.checksum = checksum; + } + + public String getChecksumType() { + return checksumType; + } + + public void setChecksumType(String checksumType) { + this.checksumType = checksumType; + } + + public String getContentType() { + return contentType; + } + + public void setContentType(String contentType) { + this.contentType = contentType; + } + + public Timestamp getCreationTime() { + return creationTime; + } + + public void setCreationTime(Timestamp creationTime) { + this.creationTime = creationTime; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java new file mode 100644 index 00000000000..c5588362d2b --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java @@ -0,0 +1,68 @@ +package edu.harvard.iq.dataverse; + +import java.io.Serializable; +import java.sql.Timestamp; +import java.util.List; +import java.util.logging.Logger; + +import javax.annotation.PreDestroy; +import javax.ejb.Stateless; +import javax.inject.Named; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.Query; +import javax.validation.ConstraintViolationException; + +import edu.harvard.iq.dataverse.authorization.users.User; + +@Stateless +@Named +public class S3BigDataUploadServiceBean { + + private static final Logger logger = Logger.getLogger(S3BigDataUploadServiceBean.class.getCanonicalName()); + + @PersistenceContext(name = "VDCNet-ejbPU") + private EntityManager em; + + + public void addS3BigDataUpload(String preSignedUrl, User user, String jsonData, String datasetId, String stroageId, + String fileName, String checksum, String checksumType, String contentType, Timestamp creationTime) { + + try { + S3BigDataUpload bigData = new S3BigDataUpload(); + bigData.setPreSignedUrl(preSignedUrl); + bigData.setUser(user); + bigData.setJsonData(jsonData); + bigData.setDatasetId(datasetId); + bigData.setStorageId(stroageId); + 
bigData.setFileName(fileName); + bigData.setChecksum(checksum); + bigData.setChecksumType(checksumType); + bigData.setContentType(contentType); + bigData.setCreationTime(creationTime); + em.persist(bigData); + } catch (ConstraintViolationException e) { + logger.warning("Exception: "); + e.getConstraintViolations().forEach(err->logger.warning(err.toString())); + } + } + + public S3BigDataUpload getS3BigDataUploadByUrl(String preSignedUrl) { + try { + //todo: Query als named Query auslagern + Query query = em.createQuery("SELECT s FROM S3BigDataUpload s WHERE s.preSignedUrl = :preSignedUrl"); + query.setParameter("preSignedUrl", preSignedUrl); + return (S3BigDataUpload) query.getSingleResult(); + } catch (ConstraintViolationException e) { + logger.warning("Exception: "); + e.getConstraintViolations().forEach(err->logger.warning(err.toString())); + } + return null; + } + + @PreDestroy + public void destruct() { + em.close(); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 39b35671140..f16841be9eb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -19,6 +19,8 @@ import edu.harvard.iq.dataverse.MetadataBlock; import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.PermissionServiceBean; +import edu.harvard.iq.dataverse.S3BigDataUpload; +import edu.harvard.iq.dataverse.S3BigDataUploadServiceBean; import edu.harvard.iq.dataverse.UserNotification; import edu.harvard.iq.dataverse.UserNotificationServiceBean; import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; @@ -101,6 +103,7 @@ import java.io.InputStream; import java.io.StringReader; import java.net.URL; +import java.sql.Time; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Date; @@ -124,6 +127,7 @@ import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; +import javax.ws.rs.FormParam; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; @@ -182,6 +186,9 @@ public class Datasets extends AbstractApiBean { @EJB S3PackageImporter s3PackageImporter; + + @EJB + S3BigDataUploadServiceBean s3BigDataUploadServiceBean; /** * Used to consolidate the way we parse and handle dataset versions. 
@@ -1217,7 +1224,10 @@ public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBo @POST @Path("{id}/getOneTimeUrl") @Consumes(MediaType.MULTIPART_FORM_DATA) - public Response getOneTimeUrl(@PathParam("id")String idSupplied, @FormDataParam("jsonData") String jsonData){ + public Response getOneTimeUrl(@PathParam("id")String idSupplied, + @FormDataParam("FileName") String fileName, @FormDataParam("jsonData") String jsonData, + @FormDataParam("checksum") String checksum, @FormDataParam("checksumType") String checksumType, + @FormDataParam("contentType") String contentType){ // ------------------------------------- // (1) Get the user from the API key // ------------------------------------- @@ -1260,10 +1270,17 @@ public Response getOneTimeUrl(@PathParam("id")String idSupplied, @FormDataParam( } URL url = dataAccess.generateS3PreSignedUrl(emptyFile.getStorageIdentifier()); + Timestamp creationTime = new Timestamp(System.currentTimeMillis()); // ------------------------------------- // (3) //todo: Save the pre-signed URL and provided JSON-Data in database // ------------------------------------- + s3BigDataUploadServiceBean.addS3BigDataUpload(url.toString(), authUser, jsonData, idSupplied, emptyFile.getStorageIdentifier(), + fileName, checksum, checksumType, contentType, creationTime); + //Todo: wieder raus, ist nur für mich zum Test + S3BigDataUpload test = s3BigDataUploadServiceBean.getS3BigDataUploadByUrl(url.toString()); + logger.info("From Database Url is: " + test.getPreSignedUrl()); + logger.info("From Database File name is: " + test.getFileName()); // ------------------------------------- // (4) Return the pre-signed URL From af1622bb9c452b71e0e20ec69648927361fa3463 Mon Sep 17 00:00:00 2001 From: Isabella Kutger Date: Mon, 27 May 2019 09:47:26 +0200 Subject: [PATCH 05/56] logs for debugging removed --- .../iq/dataverse/S3BigDataUploadServiceBean.java | 1 - .../edu/harvard/iq/dataverse/api/Datasets.java | 14 +------------- 2 files changed, 1 insertion(+), 14 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java index c5588362d2b..df3b5832d17 100644 --- a/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java @@ -49,7 +49,6 @@ public void addS3BigDataUpload(String preSignedUrl, User user, String jsonData, public S3BigDataUpload getS3BigDataUploadByUrl(String preSignedUrl) { try { - //todo: Query als named Query auslagern Query query = em.createQuery("SELECT s FROM S3BigDataUpload s WHERE s.preSignedUrl = :preSignedUrl"); query.setParameter("preSignedUrl", preSignedUrl); return (S3BigDataUpload) query.getSingleResult(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index f16841be9eb..c60b5d10453 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1251,14 +1251,6 @@ public Response getOneTimeUrl(@PathParam("id")String idSupplied, // ------------------------------------- // (2) Generate pre-signed URL for uploading file to S3 // ------------------------------------- - - //Todo: Remove Logging below; just for checking if s3 configuration is correct - String endpoint = System.getProperty("dataverse.files.s3-endpoint"); - Logger.getLogger(Files.class.getName()).info("Endpoint is: " + endpoint); - String 
bucketName = System.getProperty("dataverse.files.s3-bucket-name"); - Logger.getLogger(Files.class.getName()).info("Bucketname is: " + bucketName); - - StorageIO dataAccess = null; DataFile emptyFile = new DataFile(); emptyFile.setOwner(dataset); @@ -1273,14 +1265,10 @@ public Response getOneTimeUrl(@PathParam("id")String idSupplied, Timestamp creationTime = new Timestamp(System.currentTimeMillis()); // ------------------------------------- - // (3) //todo: Save the pre-signed URL and provided JSON-Data in database + // (3) Save the pre-signed URL and provided JSON-Data in database // ------------------------------------- s3BigDataUploadServiceBean.addS3BigDataUpload(url.toString(), authUser, jsonData, idSupplied, emptyFile.getStorageIdentifier(), fileName, checksum, checksumType, contentType, creationTime); - //Todo: wieder raus, ist nur für mich zum Test - S3BigDataUpload test = s3BigDataUploadServiceBean.getS3BigDataUploadByUrl(url.toString()); - logger.info("From Database Url is: " + test.getPreSignedUrl()); - logger.info("From Database File name is: " + test.getFileName()); // ------------------------------------- // (4) Return the pre-signed URL From e55ec43a42a83749c1288052400e2b14d42035d9 Mon Sep 17 00:00:00 2001 From: Isabella Kutger Date: Tue, 28 May 2019 11:11:53 +0200 Subject: [PATCH 06/56] OneTimeUrls will be deleted after 90 min --- pom.xml | 5 ++++ .../dataverse/S3BigDataUploadServiceBean.java | 24 +++++++++++++++---- .../harvard/iq/dataverse/api/Datasets.java | 7 +++++- 3 files changed, 31 insertions(+), 5 deletions(-) diff --git a/pom.xml b/pom.xml index c2e4f8a82a2..f30369be36b 100644 --- a/pom.xml +++ b/pom.xml @@ -485,6 +485,11 @@ unirest-java 1.4.9 + + org.quartz-scheduler + quartz + 2.3.1 + diff --git a/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java index df3b5832d17..6fe1eb9452a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java @@ -1,8 +1,6 @@ package edu.harvard.iq.dataverse; -import java.io.Serializable; import java.sql.Timestamp; -import java.util.List; import java.util.logging.Logger; import javax.annotation.PreDestroy; @@ -25,22 +23,24 @@ public class S3BigDataUploadServiceBean { private EntityManager em; - public void addS3BigDataUpload(String preSignedUrl, User user, String jsonData, String datasetId, String stroageId, + public void addS3BigDataUpload(String preSignedUrl, User user, String jsonData, String datasetId, String storageId, String fileName, String checksum, String checksumType, String contentType, Timestamp creationTime) { try { + em.getTransaction().begin(); S3BigDataUpload bigData = new S3BigDataUpload(); bigData.setPreSignedUrl(preSignedUrl); bigData.setUser(user); bigData.setJsonData(jsonData); bigData.setDatasetId(datasetId); - bigData.setStorageId(stroageId); + bigData.setStorageId(storageId); bigData.setFileName(fileName); bigData.setChecksum(checksum); bigData.setChecksumType(checksumType); bigData.setContentType(contentType); bigData.setCreationTime(creationTime); em.persist(bigData); + em.getTransaction().commit(); } catch (ConstraintViolationException e) { logger.warning("Exception: "); e.getConstraintViolations().forEach(err->logger.warning(err.toString())); @@ -59,6 +59,22 @@ public S3BigDataUpload getS3BigDataUploadByUrl(String preSignedUrl) { return null; } + public boolean deleteS3BigDataUploadByUrl(String preSignedUrl) { + try { 
+ em.getTransaction().begin(); + Query query = em.createQuery("DELETE FROM S3BigDataUpload s WHERE s.preSignedUrl = :preSignedUrl"); + query.setParameter("preSignedUrl", preSignedUrl); + int i = query.executeUpdate(); + em.getTransaction().commit(); + if (i >= 1) { + return true; + } + } catch (Exception e) { + logger.warning(e.getMessage()); + } + return false; + } + @PreDestroy public void destruct() { em.close(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index c60b5d10453..233d164c2f6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -15,6 +15,7 @@ import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.DataverseSession; +import edu.harvard.iq.dataverse.DeleteOneTimeUrlTask; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.MetadataBlock; import edu.harvard.iq.dataverse.MetadataBlockServiceBean; @@ -1225,7 +1226,7 @@ public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBo @Path("{id}/getOneTimeUrl") @Consumes(MediaType.MULTIPART_FORM_DATA) public Response getOneTimeUrl(@PathParam("id")String idSupplied, - @FormDataParam("FileName") String fileName, @FormDataParam("jsonData") String jsonData, + @FormDataParam("fileName") String fileName, @FormDataParam("jsonData") String jsonData, @FormDataParam("checksum") String checksum, @FormDataParam("checksumType") String checksumType, @FormDataParam("contentType") String contentType){ // ------------------------------------- @@ -1270,6 +1271,10 @@ public Response getOneTimeUrl(@PathParam("id")String idSupplied, s3BigDataUploadServiceBean.addS3BigDataUpload(url.toString(), authUser, jsonData, idSupplied, emptyFile.getStorageIdentifier(), fileName, checksum, checksumType, contentType, creationTime); + DeleteOneTimeUrlTask task = new DeleteOneTimeUrlTask(url.toString(), 90 * 60 * 1000, + s3BigDataUploadServiceBean); + task.schedule(); + // ------------------------------------- // (4) Return the pre-signed URL // ------------------------------------- From b7f0dbb96d7f2701cc5837f0cd75c2027ef6c7e6 Mon Sep 17 00:00:00 2001 From: Isabella Kutger Date: Fri, 31 May 2019 09:15:04 +0200 Subject: [PATCH 07/56] Quartz scheduler for deleting expired urls --- .../iq/dataverse/DeleteOneTimeUrlTask.java | 101 ++++++++++++++++++ 1 file changed, 101 insertions(+) create mode 100644 src/main/java/edu/harvard/iq/dataverse/DeleteOneTimeUrlTask.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DeleteOneTimeUrlTask.java b/src/main/java/edu/harvard/iq/dataverse/DeleteOneTimeUrlTask.java new file mode 100644 index 00000000000..2cc7b50a4a5 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/DeleteOneTimeUrlTask.java @@ -0,0 +1,101 @@ +package edu.harvard.iq.dataverse; + +import org.quartz.Job; +import org.quartz.JobExecutionContext; +import org.quartz.JobExecutionException; +import org.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.quartz.impl.JobDetailImpl; +import org.quartz.impl.StdSchedulerFactory; +import org.quartz.impl.triggers.SimpleTriggerImpl; + +import java.util.Date; +import java.util.logging.Logger; + +public class DeleteOneTimeUrlTask { + + private Scheduler quartz; + + private S3BigDataUploadServiceBean s3BigDataUploadService; + + private final String onTimeUrl; + + private int schedulerPeriod; + + + Logger log = 
Logger.getLogger(DeleteOneTimeUrlTask.class.getCanonicalName()); + + public DeleteOneTimeUrlTask(String onTimeUrl, int schedulerPeriod, S3BigDataUploadServiceBean s3BigDataUploadService) { + this.onTimeUrl = onTimeUrl; + this.s3BigDataUploadService = s3BigDataUploadService; + this.schedulerPeriod = schedulerPeriod; + + try { + + //Creating scheduler + quartz = new StdSchedulerFactory().getScheduler(); + + + //Creating job and link to Runner + JobDetailImpl job = new JobDetailImpl(); + job.setName("DeleteExpired"); + job.setJobClass(Runner.class); + job.setDurability(true); + job.getJobDataMap().put("task", this); + + //Strating scheduler + quartz.start(); + quartz.addJob(job,true); + + } catch (SchedulerException e) { + log.warning(e.getLocalizedMessage()); + } + + + } + + public void schedule() { + if (quartz == null || schedulerPeriod <= 0) { + log.info("Cancel scheduling of deleting task due to invalid scheduling period"); + return; + } + try { + //Creating schedule with trigger, only one execution + SimpleTriggerImpl trigger = new SimpleTriggerImpl(); + trigger.setStartTime(new Date(System.currentTimeMillis() + schedulerPeriod)); + trigger.setRepeatCount(0); + trigger.setRepeatInterval(0L); + trigger.setJobName("DeleteExpired"); + trigger.setName("DeleteExpiredTrigger"); + quartz.scheduleJob(trigger); + } catch (SchedulerException e) { + log.warning("Error scheduling Quartz Job"); + } + } + + public S3BigDataUploadServiceBean getS3BigDataUploadService() { + return s3BigDataUploadService; + } + + public String getOnTimeUrl() { + return onTimeUrl; + } + + public static class Runner implements Job { + + @Override + public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException { + try { + execute((DeleteOneTimeUrlTask) jobExecutionContext.getJobDetail().getJobDataMap().get("task")); + } catch (Exception e) { + throw new JobExecutionException("An error occurred while getting new Series"); + } + } + + private void execute(DeleteOneTimeUrlTask task) { + task.getS3BigDataUploadService().deleteS3BigDataUploadByUrl(task.getOnTimeUrl()); + } + } + + +} From 712ea83af3b26f2dc01aa003c35feb0f2840c6b3 Mon Sep 17 00:00:00 2001 From: Isabella Kutger Date: Mon, 1 Jul 2019 10:06:22 +0200 Subject: [PATCH 08/56] Endpoint for S3 to notify when data is uploaded --- .../dataverse/S3BigDataUploadServiceBean.java | 14 +- .../harvard/iq/dataverse/api/Datasets.java | 96 +++++++++- .../datasetutility/AddReplaceFileHelper.java | 181 +++++++++++++++++- .../dataverse/ingest/IngestServiceBean.java | 47 +++++ .../harvard/iq/dataverse/util/FileUtil.java | 41 +++- 5 files changed, 366 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java index 6fe1eb9452a..7504575975b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/S3BigDataUploadServiceBean.java @@ -27,7 +27,6 @@ public void addS3BigDataUpload(String preSignedUrl, User user, String jsonData, String fileName, String checksum, String checksumType, String contentType, Timestamp creationTime) { try { - em.getTransaction().begin(); S3BigDataUpload bigData = new S3BigDataUpload(); bigData.setPreSignedUrl(preSignedUrl); bigData.setUser(user); @@ -40,7 +39,6 @@ public void addS3BigDataUpload(String preSignedUrl, User user, String jsonData, bigData.setContentType(contentType); bigData.setCreationTime(creationTime); em.persist(bigData); - 
em.getTransaction().commit(); } catch (ConstraintViolationException e) { logger.warning("Exception: "); e.getConstraintViolations().forEach(err->logger.warning(err.toString())); @@ -59,6 +57,18 @@ public S3BigDataUpload getS3BigDataUploadByUrl(String preSignedUrl) { return null; } + public S3BigDataUpload getS3BigDataUploadByStorageId(String storageId) { + try { + Query query = em.createQuery("SELECT s FROM S3BigDataUpload s WHERE s.storageId = :storageId"); + query.setParameter("storageId", storageId); + return (S3BigDataUpload) query.getSingleResult(); + } catch (ConstraintViolationException e) { + logger.warning("Exception: "); + e.getConstraintViolations().forEach(err->logger.warning(err.toString())); + } + return null; + } + public boolean deleteS3BigDataUploadByUrl(String preSignedUrl) { try { em.getTransaction().begin(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 233d164c2f6..e95639fe7f2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -130,17 +130,21 @@ import javax.ws.rs.DELETE; import javax.ws.rs.FormParam; import javax.ws.rs.GET; +import javax.ws.rs.HeaderParam; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; +import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; import org.glassfish.jersey.media.multipart.FormDataParam; +import org.json.JSONArray; +import org.json.JSONObject; @Path("datasets") public class Datasets extends AbstractApiBean { @@ -190,6 +194,9 @@ public class Datasets extends AbstractApiBean { @EJB S3BigDataUploadServiceBean s3BigDataUploadServiceBean; + + @Context + protected HttpServletRequest httpRequest; /** * Used to consolidate the way we parse and handle dataset versions. 
@@ -1234,19 +1241,31 @@ public Response getOneTimeUrl(@PathParam("id")String idSupplied, // ------------------------------------- User authUser; try { - authUser = findUserOrDie(); - } catch (WrappedResponse ex) { + String apiKey = httpRequest.getHeader("X-Dataverse-key"); + if (apiKey == null) { + apiKey = httpRequest.getParameter("key"); + } + authUser = authSvc.lookupUser(apiKey); + } catch (Exception ex) { + ex.printStackTrace(); return error(Response.Status.FORBIDDEN, ResourceBundle.getBundle("Bundle").getString("file.addreplace" + ".error.auth")); } // ------------------------------------- // (1a) Find dataset // ------------------------------------- - Dataset dataset; + Dataset dataset = null; + String datasetId = null; try { - dataset = findDatasetOrDie(idSupplied); - } catch (WrappedResponse wr) { - return wr.getResponse(); + if (idSupplied.equals(":persistentId")) { + datasetId = httpRequest.getParameter("persistentId"); + dataset = datasetService.findByGlobalId(datasetId); + logger.info("Dataset Identifier: " + dataset.getIdentifier()); + logger.info("Dataset Id: " + dataset.getId()); + } + } catch (Exception e) { + e.printStackTrace(); + return error(Response.Status.NOT_FOUND, "dataset not found"); } // ------------------------------------- @@ -1258,6 +1277,7 @@ public Response getOneTimeUrl(@PathParam("id")String idSupplied, try { dataAccess = DataAccess.createNewStorageIO(emptyFile, "s3"); FileUtil.generateStorageIdentifier(emptyFile); + logger.info("StorageId is: " + emptyFile.getStorageIdentifier()); } catch (IOException e) { Logger.getLogger(Files.class.getName()).warning("Failed to save the file, storage id " + emptyFile.getStorageIdentifier() + " (" + e.getMessage() + ")"); } @@ -1268,10 +1288,11 @@ public Response getOneTimeUrl(@PathParam("id")String idSupplied, // ------------------------------------- // (3) Save the pre-signed URL and provided JSON-Data in database // ------------------------------------- - s3BigDataUploadServiceBean.addS3BigDataUpload(url.toString(), authUser, jsonData, idSupplied, emptyFile.getStorageIdentifier(), + s3BigDataUploadServiceBean.addS3BigDataUpload(url.toString(), authUser, jsonData, datasetId, + emptyFile.getStorageIdentifier(), fileName, checksum, checksumType, contentType, creationTime); - DeleteOneTimeUrlTask task = new DeleteOneTimeUrlTask(url.toString(), 90 * 60 * 1000, + DeleteOneTimeUrlTask task = new DeleteOneTimeUrlTask(url.toString(), 2880 * 60 * 1000, s3BigDataUploadServiceBean); task.schedule(); @@ -1282,6 +1303,65 @@ public Response getOneTimeUrl(@PathParam("id")String idSupplied, return Response.ok().entity(url).build(); } + @POST + @Path("/notifyS3upload") + @Consumes(MediaType.MULTIPART_FORM_DATA) + public Response registerFileFromS3(@HeaderParam("x-amz-sns-message-type") String messageType, + @HeaderParam("x-amz-sns-message-id") String messageId, @FormDataParam("message") String message) { + if (!messageType.equals("Notification")) { + return Response.status(418).build(); + } + logger.info(message); + JSONObject messageBody = new JSONObject(message); + String eventType = messageBody.getJSONArray("Records").getJSONObject(0).getString("eventName"); + String requestId = + messageBody.getJSONArray("Records").getJSONObject(0).getJSONObject("responseElements").getString("x" + + "-amz-request-id"); + + if(eventType.equals("ObjectCreated:Put")) { + String storageId = + messageBody.getJSONArray("Records").getJSONObject(0).getJSONObject("s3").getJSONObject("object").getString("key"); + logger.info("StorageId: " + storageId); + + 
S3BigDataUpload s3BigDataUpload = s3BigDataUploadServiceBean.getS3BigDataUploadByStorageId(storageId); + if(s3BigDataUpload != null) { + try { + logger.info("DatasetId in DB: " + s3BigDataUpload.getDatasetId()); + DataverseRequest dvRequest = createDataverseRequest(s3BigDataUpload.getUser()); + AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest, ingestService, + datasetService, fileService, permissionService, commandEngine, systemConfig); + + Dataset dataset = datasetService.findByGlobalId(s3BigDataUpload.getDatasetId()); + + OptionalFileParams optionalFileParams = new OptionalFileParams(s3BigDataUpload.getJsonData()); + + addFileHelper.runAddFileS3BigData(dataset, s3BigDataUpload.getFileName(), + s3BigDataUpload.getContentType(), optionalFileParams, storageId, + s3BigDataUpload.getChecksumType(), s3BigDataUpload.getChecksum(), + messageBody.getJSONArray("Records").getJSONObject(0).getJSONObject("s3").getJSONObject( + "object").getInt("size")); + + } catch (DataFileTagException e) { + e.printStackTrace(); + return error(Response.Status.INTERNAL_SERVER_ERROR, " Error while processing request occured"); + } + } + } + + StringBuilder response = new StringBuilder(""); + response.append("" ); + response.append("" + messageId + " "); + response.append(""); + response.append(""); + response.append("" + requestId + ""); + response.append(" "); + response.append("\n" + " \n"); + response.toString(); + + + return Response.ok(response.toString()).build(); + } + /** * Add a File to an existing Dataset * diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 9d9a8486675..1fee4ccc1f7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -18,7 +18,6 @@ import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.impl.AbstractCreateDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.RestrictFileCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; @@ -413,6 +412,184 @@ private boolean runAddReplaceFile(Dataset dataset, } + public boolean runAddFileS3BigData(Dataset dataset, String filename, String contentType, + OptionalFileParams optionalFileParams, String storageId, String checksumType, String checksum, + int fileSize) { + this.currentOperation = FILE_ADD_OPERATION; + + if (!step_001_loadDataset(dataset)) { + return false; + } + + boolean phase1Success = runAddFileS3BigDataPhase1(dataset, filename, contentType, optionalFileParams, + storageId, checksumType, checksum, fileSize); + + + if (!phase1Success) { + return false; + } + + return runAddFileS3BigDataPhase2(); + } + + + + private boolean runAddFileS3BigDataPhase1(Dataset dataset, String filename, String contentType, + OptionalFileParams optionalFileParams, String storageId, String checksumType, String checksum, + int fileSize) { + + msgt("step_001_loadDataset"); + if (!this.step_001_loadDataset(dataset)){ + return false; + } + + msgt("step_010_VerifyUserAndPermissions"); + if (!this.step_010_VerifyUserAndPermissions()){ + return false; + + } + + 
msgt("step_020_loadNewFile"); + if (!this.step_020_loadNewFileBigDataS3(filename, contentType)){ + return false; + } + + msgt("step_030_createNewFilesViaIngestBigDataS3"); + if (!this.step_030_createNewFilesViaIngestBigDataS3(storageId, checksumType, checksum, fileSize)) { + return false; + } + + msgt("step_050_checkForConstraintViolations"); + if (!this.step_050_checkForConstraintViolations()){ + return false; + } + + msgt("step_055_loadOptionalFileParams"); + if (!this.step_055_loadOptionalFileParams(optionalFileParams)){ + return false; + } + + return true; + } + + private boolean step_020_loadNewFileBigDataS3(String fileName, String fileContentType){ + + if (this.hasError()){ + return false; + } + + if (fileName == null){ + this.addErrorSevere(getBundleErr("filename_undetermined")); + return false; + + } + + if (fileContentType == null){ + this.addErrorSevere(getBundleErr("file_content_type_undetermined")); + return false; + + } + + newFileName = fileName; + newFileContentType = fileContentType; + + return true; + } + + private boolean step_030_createNewFilesViaIngestBigDataS3(String storageId, String checksumType, String checksum, + int fileSize) { + if (this.hasError()){ + return false; + } + + // Load the working version of the Dataset + workingVersion = dataset.getEditVersion(); + + try { + initialFileList = FileUtil.createDataFilesWithoutTempFile(this.newFileName, this.newFileContentType, + storageId, checksumType, checksum, fileSize); + + } catch (Exception ex) { + if (!Strings.isNullOrEmpty(ex.getMessage())) { + this.addErrorSevere(getBundleErr("ingest_create_file_err") + " " + ex.getMessage()); + } else { + this.addErrorSevere(getBundleErr("ingest_create_file_err")); + } + logger.severe(ex.toString()); + this.runMajorCleanup(); + return false; + } + + if (initialFileList.isEmpty()){ + this.addErrorSevere(getBundleErr("initial_file_list_empty")); + this.runMajorCleanup(); + return false; + } + + if (this.step_040_auto_checkForDuplicates()){ + return true; + } + + + return false; + } + + private boolean runAddFileS3BigDataPhase2() { + + if (this.hasError()){ + return false; // possible to have errors already... 
+ } + + if ((finalFileList == null)||(finalFileList.isEmpty())){ + addError(getBundleErr("phase2_S3BigData called_early_no_new_files")); + return false; + } + + msgt("step_060_addFilesViaIngestServiceS3BigData"); + if (!this.step_060_addFilesViaIngestServiceS3BigData()){ + return false; + + } + + msgt("step_070_run_update_dataset_command"); + if (!this.step_070_run_update_dataset_command()){ + return false; + } + + msgt("step_090_notifyUser"); + if (!this.step_090_notifyUser()){ + return false; + } + + + return true; + } + + private boolean step_060_addFilesViaIngestServiceS3BigData() { + if (this.hasError()){ + return false; + } + + if (finalFileList.isEmpty()){ + this.addErrorSevere(getBundleErr("final_file_list_empty")); + return false; + } + + int nFiles = finalFileList.size(); + finalFileList = ingestService.addFileToDatasetS3BigData(workingVersion, finalFileList); + + if (nFiles != finalFileList.size()) { + if (nFiles == 1) { + addError("Failed to save the content of the uploaded file."); + } else { + addError("Failed to save the content of at least one of the uploaded files."); + } + return false; + } + + return true; + } + /** * Note: UI replace is always a "force replace" which means * the replacement file can have a different content type @@ -946,7 +1123,7 @@ private boolean step_020_loadNewFile(String fileName, String fileContentType, In /** * Optional: old file to replace * - * @param oldFile + * @param * @return */ private boolean step_005_loadFileToReplaceById(Long dataFileId){ diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 59ef459b306..88ddcc5b077 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -1805,6 +1805,52 @@ public static void main(String[] args) { } } + + public List addFileToDatasetS3BigData(DatasetVersion workingVersion, List fileList) { + IngestUtil.checkForDuplicateFileNamesFinal(workingVersion, fileList); + Dataset dataset = workingVersion.getDataset(); + + List result = new ArrayList<>(); + + if (fileList != null && fileList.size() > 0) { + + for (DataFile file : fileList) { + FileMetadata fileMetadata = file.getFileMetadatas().get(0); + String fileName = fileMetadata.getLabel(); + + if(file.getOwner() == null) { + file.setOwner(dataset); + + workingVersion.getFileMetadatas().add(file.getFileMetadata()); + file.getFileMetadata().setDatasetVersion(workingVersion); + dataset.getFiles().add(file); + + if(file.getFileMetadata().getCategories() != null) { + ListIterator dfcIt = file.getFileMetadata().getCategories().listIterator(); + + while (dfcIt.hasNext()) { + DataFileCategory dataFileCategory = dfcIt.next(); + + if (dataFileCategory.getDataset() == null) { + DataFileCategory newCategory = dataset.getCategoryByName(dataFileCategory.getName()); + if (newCategory != null) { + newCategory.addFileMetadata(file.getFileMetadata()); + dfcIt.set(newCategory); + } else { + dfcIt.remove(); + } + } + } + } + } + + result.add(file); + } + + } + return result; + } + /* private class InternalIngestException extends Exception { @@ -1814,4 +1860,5 @@ public class IngestServiceException extends Exception { } */ + } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index dcf6584fb51..6173ba4b07c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -945,7 +945,46 @@ public static List createDataFiles(DatasetVersion version, InputStream return null; } // end createDataFiles - + + + public static List createDataFilesWithoutTempFile( String fileName, + String contentType, String storageId, String checksumType, String checksum, int fileSize) { + try { + DataFile datafile = createSingleDataFileWithoutTempFile(fileName, contentType, storageId, checksumType, + checksum, fileSize); + if (datafile != null) { + List dataFiles = new ArrayList<>(); + dataFiles.add(datafile); + return dataFiles; + } + } catch (Exception e) { + logger.severe(e.getMessage()); + } + return null; + } + + private static DataFile createSingleDataFileWithoutTempFile(String fileName, String contentType, String storageId, String checksumType, + String checksum, int fileSize) { + DataFile dataFile = new DataFile(contentType); + dataFile.setModificationTime(new Timestamp(new Date().getTime())); + dataFile.setPermissionModificationTime(new Timestamp(new Date().getTime())); + dataFile.setStorageIdentifier(storageId); + dataFile.setFilesize(fileSize); + + FileMetadata fmd = new FileMetadata(); + fmd.setLabel(fileName); + + fmd.setDataFile(dataFile); + dataFile.getFileMetadatas().add(fmd); + + try { + dataFile.setChecksumType(ChecksumType.fromString(checksumType)); + dataFile.setChecksumValue(checksum); + } catch (Exception e) { + logger.severe(e.getMessage()); + } + return dataFile; + } private static File saveInputStreamInTempFile(InputStream inputStream, Long fileSizeLimit) throws IOException, FileExceedsMaxSizeException { From a215aac8193785dfb941ab86268822bcecc116a4 Mon Sep 17 00:00:00 2001 From: Isabella Kutger Date: Fri, 2 Aug 2019 14:47:58 +0200 Subject: [PATCH 09/56] Bug fixes; now everything should work --- .../harvard/iq/dataverse/api/Datasets.java | 104 ++++++++++-------- 1 file changed, 57 insertions(+), 47 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index e95639fe7f2..3c09b104fdb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -103,17 +103,21 @@ import java.io.IOException; import java.io.InputStream; import java.io.StringReader; +import java.io.UnsupportedEncodingException; import java.net.URL; +import java.net.URLDecoder; import java.sql.Time; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Date; +import java.util.Enumeration; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.ResourceBundle; import java.util.Set; +import java.util.UUID; import java.util.concurrent.Future; import java.util.logging.Level; import java.util.logging.Logger; @@ -1305,61 +1309,67 @@ public Response getOneTimeUrl(@PathParam("id")String idSupplied, @POST @Path("/notifyS3upload") - @Consumes(MediaType.MULTIPART_FORM_DATA) - public Response registerFileFromS3(@HeaderParam("x-amz-sns-message-type") String messageType, - @HeaderParam("x-amz-sns-message-id") String messageId, @FormDataParam("message") String message) { - if (!messageType.equals("Notification")) { - return Response.status(418).build(); - } - logger.info(message); - JSONObject messageBody = new JSONObject(message); - String eventType = messageBody.getJSONArray("Records").getJSONObject(0).getString("eventName"); - String requestId = - 
messageBody.getJSONArray("Records").getJSONObject(0).getJSONObject("responseElements").getString("x" - + "-amz-request-id"); - - if(eventType.equals("ObjectCreated:Put")) { - String storageId = - messageBody.getJSONArray("Records").getJSONObject(0).getJSONObject("s3").getJSONObject("object").getString("key"); - logger.info("StorageId: " + storageId); - - S3BigDataUpload s3BigDataUpload = s3BigDataUploadServiceBean.getS3BigDataUploadByStorageId(storageId); - if(s3BigDataUpload != null) { - try { - logger.info("DatasetId in DB: " + s3BigDataUpload.getDatasetId()); - DataverseRequest dvRequest = createDataverseRequest(s3BigDataUpload.getUser()); - AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest, ingestService, - datasetService, fileService, permissionService, commandEngine, systemConfig); + @Consumes(MediaType.APPLICATION_FORM_URLENCODED) + public Response registerFileFromS3(@FormParam("Action") String action, @FormParam("Message") String message, + @FormParam("TopicArn") String topic, @FormParam("Version") String version) { + if (message != null && message.startsWith("Storage")) { + + StringBuilder response = new StringBuilder(""); + response.append("" ); + response.append("" + UUID.randomUUID() + " "); + response.append(""); + response.append(""); + response.append("" + UUID.randomUUID() + ""); + response.append(" "); + response.append("\n" + " \n"); + response.toString(); + + return Response.ok(response.toString()).build(); + } else if (message != null && message.startsWith("{")) { + + JSONObject messageBody = new JSONObject(message); + String eventType = messageBody.getJSONArray("Records").getJSONObject(0).getString("eventName"); + String requestId = messageBody.getJSONArray("Records").getJSONObject(0).getJSONObject("responseElements") + .getString("x" + "-amz-request-id"); + + if (eventType.equals("ObjectCreated:Put")) { + String storageId = messageBody.getJSONArray("Records").getJSONObject(0).getJSONObject("s3").getJSONObject("object").getString("key"); + + S3BigDataUpload s3BigDataUpload = s3BigDataUploadServiceBean.getS3BigDataUploadByStorageId(storageId); + if (s3BigDataUpload != null) { + try { + DataverseRequest dvRequest = createDataverseRequest(s3BigDataUpload.getUser()); + AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest, ingestService, + datasetService, fileService, permissionService, commandEngine, systemConfig); - Dataset dataset = datasetService.findByGlobalId(s3BigDataUpload.getDatasetId()); + Dataset dataset = datasetService.findByGlobalId(s3BigDataUpload.getDatasetId()); - OptionalFileParams optionalFileParams = new OptionalFileParams(s3BigDataUpload.getJsonData()); + OptionalFileParams optionalFileParams = new OptionalFileParams(s3BigDataUpload.getJsonData()); - addFileHelper.runAddFileS3BigData(dataset, s3BigDataUpload.getFileName(), - s3BigDataUpload.getContentType(), optionalFileParams, storageId, - s3BigDataUpload.getChecksumType(), s3BigDataUpload.getChecksum(), - messageBody.getJSONArray("Records").getJSONObject(0).getJSONObject("s3").getJSONObject( - "object").getInt("size")); + addFileHelper.runAddFileS3BigData(dataset, s3BigDataUpload.getFileName(), s3BigDataUpload.getContentType(), optionalFileParams, storageId, + s3BigDataUpload.getChecksumType(), s3BigDataUpload.getChecksum(), + messageBody.getJSONArray("Records").getJSONObject(0).getJSONObject("s3").getJSONObject("object").getInt("size")); - } catch (DataFileTagException e) { - e.printStackTrace(); - return error(Response.Status.INTERNAL_SERVER_ERROR, " Error 
while processing request occured"); + } catch (DataFileTagException e) { + e.printStackTrace(); + return error(Response.Status.INTERNAL_SERVER_ERROR, " Error while processing request occured"); + } } } + StringBuilder response = new StringBuilder(""); + response.append("" ); + response.append("" + requestId + " "); + response.append(""); + response.append(""); + response.append("" + requestId + ""); + response.append(" "); + response.append("\n" + " \n"); + response.toString(); + + return Response.ok(response.toString()).build(); } - StringBuilder response = new StringBuilder(""); - response.append("" ); - response.append("" + messageId + " "); - response.append(""); - response.append(""); - response.append("" + requestId + ""); - response.append(" "); - response.append("\n" + " \n"); - response.toString(); - - - return Response.ok(response.toString()).build(); + return Response.status(Response.Status.BAD_REQUEST).build(); } /** From 2ab14791957678c2fe286677dd7af4270d8dc414 Mon Sep 17 00:00:00 2001 From: seeland Date: Thu, 15 Aug 2019 11:00:00 +0200 Subject: [PATCH 10/56] Revert "added custom dir for branding pics" Since user logos are all stored in glassfish and need to be backuped anyway, our branding logo goes also to glassfish. This reverts commit bc09b08b089a05836ae8f5ced4395d310d997ae6. --- src/main/webapp/WEB-INF/glassfish-web.xml | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/webapp/WEB-INF/glassfish-web.xml b/src/main/webapp/WEB-INF/glassfish-web.xml index 1de297f87e9..1d35ebb5867 100644 --- a/src/main/webapp/WEB-INF/glassfish-web.xml +++ b/src/main/webapp/WEB-INF/glassfish-web.xml @@ -10,7 +10,6 @@ - From 0dc44355859ce59a88377e210ca59b6b3b5689f7 Mon Sep 17 00:00:00 2001 From: Anett Seeland Date: Thu, 15 Aug 2019 16:49:29 +0200 Subject: [PATCH 11/56] replaced logo and text properties of cc0 to ccby; CC0 is NOT changed in the DB! --- src/main/java/propertyFiles/Bundle.properties | 10 +++++----- src/main/webapp/dataset-license-terms.xhtml | 2 +- src/main/webapp/resources/images/ccby.png | Bin 0 -> 1468 bytes 3 files changed, 6 insertions(+), 6 deletions(-) create mode 100644 src/main/webapp/resources/images/ccby.png diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 6ef42104d53..5e95e2da3f3 100755 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1528,16 +1528,16 @@ file.dataFilesTab.terms.editTermsBtn=Edit Terms Requirements file.dataFilesTab.terms.list.termsOfUse.header=Terms of Use file.dataFilesTab.terms.list.termsOfUse.waiver=Waiver file.dataFilesTab.terms.list.termsOfUse.waiver.title=The waiver informs data downloaders how they can use this dataset. -file.dataFilesTab.terms.list.termsOfUse.waiver.txt=CC0 - "Public Domain Dedication" -file.dataFilesTab.terms.list.termsOfUse.waiver.description=Datasets will default to a CC0 public domain dedication . CC0 facilitates reuse and extensibility of research data. Our Community Norms as well as good scientific practices expect that proper credit is given via citation. If you are unable to give datasets a CC0 waiver you may enter custom Terms of Use for datasets. +file.dataFilesTab.terms.list.termsOfUse.waiver.txt=CC BY - "Attribution" +file.dataFilesTab.terms.list.termsOfUse.waiver.description=Datasets will default to a CC BY Attribution. If you are unable to give datasets a CC BY waiver you may enter custom Terms of Use for datasets. 
file.dataFilesTab.terms.list.termsOfUse.no.waiver.txt=No waiver has been selected for this dataset. file.dataFilesTab.terms.list.termsOfUse.waiver.txt.description=Our Community Norms as well as good scientific practices expect that proper credit is given via citation. Please use the data citation above, generated by the Dataverse. -file.dataFilesTab.terms.list.termsOfUse.waiver.select.CCO=Yes, apply CC0 - "Public Domain Dedication" -file.dataFilesTab.terms.list.termsOfUse.waiver.select.notCCO=No, do not apply CC0 - "Public Domain Dedication" +file.dataFilesTab.terms.list.termsOfUse.waiver.select.CCO=Yes, apply CC BY - "Attribution" +file.dataFilesTab.terms.list.termsOfUse.waiver.select.notCCO=No, do not apply CC BY - "Attribution" file.dataFilesTab.terms.list.termsOfUse.waiver.select.tip=This is what end users will see displayed on this dataset file.dataFilesTab.terms.list.termsOfUse.termsOfUse=Terms of Use file.dataFilesTab.terms.list.termsOfUse.termsOfUse.title=Outlines how this data can be used once downloaded. -file.dataFilesTab.terms.list.termsOfUse.termsOfUse.description=If you are unable to use CC0 for datasets you are able to set custom terms of use. Here is an example of a Data Usage Agreement for datasets that have de-identified human subject data. +file.dataFilesTab.terms.list.termsOfUse.termsOfUse.description=If you are unable to use CC BY for datasets you are able to set custom terms of use. Here is an example of a Data Usage Agreement for datasets that have de-identified human subject data. file.dataFilesTab.terms.list.termsOfUse.addInfo=Additional Information file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration=Confidentiality Declaration file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration.title=Indicates whether signing of a confidentiality declaration is needed to access a resource. diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index 8b83998af30..888d2ea5775 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -50,7 +50,7 @@ -

#{bundle['file.dataFilesTab.terms.list.termsOfUse.waiver.txt']} CC0

+

#{bundle['file.dataFilesTab.terms.list.termsOfUse.waiver.txt']} CC0

diff --git a/src/main/webapp/resources/images/ccby.png b/src/main/webapp/resources/images/ccby.png new file mode 100644 index 0000000000000000000000000000000000000000..44671b21b30cfd36384918279174ea35d21deba5 GIT binary patch literal 1468 zcmV;t1w;CYP)1^aqe1kcfX!)LSH~ zpomn8R0&zGqUO+0skbJu#^%Fccnxb~d)M}QA>_(W;3vG*^RC}zc3JQT&KeU&8nND) zogJQi=6OG6Vnh^=#bR_{M_-BXr~6|kzfF6^UCNcRR6Hn9$tcl5wM<6cpd+(N^=6G4 ztp-_blbW_g%}&#;wndh0lG$o-AM~x%4SrTPN<3Dsl;g4Sj!ZU9TYD)=?P#>Qy+Iq9 zPh3BxKhnpo|L#mxfW81Xujgq0ut?>KPDbsJ!D_VX3}D-7QRl=a=W~aSPmk&3i(7H+ ze0I1EeJtDJXYee>C|7kJ6JrNK)^;{MkOA-=1v%yiw_0Wf)~p({6`%ps)idSCTMzkx^LY<-^8B@eijk}Hdj56i=JF;=Q5Pub3q;ji<5sP z>Cx;XZyxjW^R%?I#1+?aAAQg_Qh=e8kcA6yo6N+8XA3^?rlzK7adDAWR#r&Yb*{L^ zJ+!HP@oc4LP|I;a29qHdt|n(`x3J@viW{(a{ofB1kH_8jE-cX5*%<{3?x78R&^HQ@ zJ%C}*pqG#ZFgFM;zypxy=jU`?SzllG0S&eQWPC7uIJ^UfuTaiYc7M;6iVzkL`i}^6 z@uAhCSaaVTWp0rHjO7cX1kJ_LR>7T{n+s*cRRtdb&!|ksn9yXr<3cJF7VoA=+uj^X zCZMar+%Ko6Jpo1$CVy>B_5e0m8tQD&pt&lzy;$21$k1dMK9-8QL3+hyF>7(&)nbu} zK>-$nHV6sOR}5by05f}Slchln8kV|k+caze$RZaQ6Jx`L6BZW_To$Vy#V|4<1}#`1 z7xICF(t#p|FA{*Ek1eOgu9!>;ntRhCdWQ-Z4l6{oxYTvqRGAnSV0a3_5)xJU3Z97! zU_@y4iOs&Ouve7^cni=ie05Y6{9xPK+M4el5f)=?XfYzbOEQtmxZZ@K!*>c`7&k0o z2_n=b1pveRU4W@+T4r$t3Fxr6Yj;v2kWV;1b-W3wEEHok zGc&`+0f6Hg%U{~W(*WSo7A6x2tD8O@F8JKFVF13)$X&(W$c-F)U*r{JLI5#1$uLds zN3YctfOo(rJ~EF+2C(X;^$T$1ZrUURJZPXH+8b%0(XT|n@WFM%H9%D*BG?v{L`+1e ze4UZI&XDzEvfK~0 zp@7k0op7%xlp(5 z0a~wZd+c2Bwmr^Aw9Uw6p;x-{R!D?ezyIMo($X5caPX6D@8F4}96WLOo_g>kmC@+Q zPfx~iROvQJLvMhk0<5=xyrp8fs2rW@Rvevbo7{#z;{bMI8GK(yP>JZlJrG~-bmjlX WBQ!IAXsN6K0000 Date: Thu, 17 Oct 2019 12:23:04 +0200 Subject: [PATCH 12/56] :fix: exporting and importing metadata showed/allowed CC0. Now CC0 throuws an error and CC BY is exported instead of CC0. --- .../edu/harvard/iq/dataverse/DatasetVersion.java | 2 +- .../api/datadeposit/SwordServiceBean.java | 11 ++++++++--- .../iq/dataverse/api/dto/DatasetVersionDTO.java | 6 ++++++ .../export/openaire/OpenAireExportUtil.java | 4 ++-- .../harvard/iq/dataverse/util/bagit/OREMap.java | 2 +- .../iq/dataverse/util/json/JsonParser.java | 6 ++++-- .../iq/dataverse/util/json/JsonPrinter.java | 16 ++++++++++++++-- src/main/webapp/dataset-license-terms.xhtml | 2 +- .../harvard/iq/dataverse/DatasetVersionTest.java | 4 ++-- .../dataverse/export/OpenAireExportUtilTest.java | 4 ++-- .../export/SchemaDotOrgExporterTest.java | 4 ++-- 11 files changed, 43 insertions(+), 18 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index ea463e21702..d61bf760a6f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -1775,7 +1775,7 @@ public String getJsonLd() { JsonObjectBuilder license = Json.createObjectBuilder().add("@type", "Dataset"); if (TermsOfUseAndAccess.License.CC0.equals(terms.getLicense())) { - license.add("text", "CC0").add("url", "https://creativecommons.org/publicdomain/zero/1.0/"); + license.add("text", "CC BY").add("url", "https://creativecommons.org/licenses/by/4.0/"); } else { String termsOfUse = terms.getTermsOfUse(); // Terms of use can be null if you create the dataset with JSON. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java index 4daaad76978..5c25ad68243 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java @@ -174,6 +174,11 @@ public void setDatasetLicenseAndTermsOfUse(DatasetVersion datasetVersionToMutate if (StringUtils.isBlank(licenseProvided)) { throw new SwordError("License provided was blank."); } + if (licenseProvided != null && licenseProvided.equalsIgnoreCase("CC0")) { + throw new SwordError("Error parsing license: CC0 is not supported at the moment. Contact the support!"); + } else if (licenseProvided != null && (licenseProvided.equalsIgnoreCase("CC BY") || licenseProvided.equalsIgnoreCase("CC-BY"))) { + licenseProvided = "CC0"; + } TermsOfUseAndAccess.License licenseToSet; try { licenseToSet = TermsOfUseAndAccess.License.valueOf(licenseProvided); @@ -189,15 +194,15 @@ private void setTermsOfUse(DatasetVersion datasetVersionToMutate, Map listOfRightsProvided = dcterms.get("rights"); if (listOfRightsProvided != null) { int numRightsProvided = listOfRightsProvided.size(); - if (providedLicense.equals(DatasetVersion.License.CC0)) { + if (providedLicense.equals(TermsOfUseAndAccess.License.CC0)) { if (numRightsProvided > 0) { - throw new SwordError("Terms of Use (dcterms:rights) can not be specified in combination with the license \"" + TermsOfUseAndAccess.License.CC0 + "\". A license of \"" + TermsOfUseAndAccess.License.NONE + "\" can be used instead."); + throw new SwordError("Terms of Use (dcterms:rights) can not be specified in combination with the license \"CC BY\". A license of \"" + TermsOfUseAndAccess.License.NONE + "\" can be used instead."); } } else { if (numRightsProvided != 1) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetVersionDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetVersionDTO.java index 8ade19949d4..126c600eb9b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetVersionDTO.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetVersionDTO.java @@ -56,6 +56,9 @@ public void setInReview(boolean inReview) { } public String getTermsOfUse() { + if (termsOfUse != null && termsOfUse.contains("CC0")) { + termsOfUse = termsOfUse.replace("CC0", "CC BY"); + } return termsOfUse; } @@ -307,6 +310,9 @@ public void setArchiveTime(String archiveTime) { } public String getLicense() { + if (license != null && license.contains("CC0")) { + license = license.replace("CC0", "CC BY"); + } return license; } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index 5984f705f7f..a930b28c80f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -1129,8 +1129,8 @@ public static void writeAccessRightsElement(XMLStreamWriter xmlw, DatasetVersion // check if getTermsOfUse() method starts with http:// writeRightsHeader(xmlw, language); if (StringUtils.isNotBlank(datasetVersionDTO.getLicense())) { - if (StringUtils.containsIgnoreCase(datasetVersionDTO.getLicense(), "cc0")) { - xmlw.writeAttribute("rightsURI", "https://creativecommons.org/publicdomain/zero/1.0/"); + if 
(StringUtils.containsIgnoreCase(datasetVersionDTO.getLicense(), "cc by")) { + xmlw.writeAttribute("rightsURI", "https://creativecommons.org/licenses/by/4.0/"); if (StringUtils.isNotBlank(datasetVersionDTO.getTermsOfUse())) { xmlw.writeCharacters(datasetVersionDTO.getTermsOfUse()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java index 5520de3954e..2882d87b234 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java @@ -138,7 +138,7 @@ public JsonObject getOREMap() throws Exception { TermsOfUseAndAccess terms = version.getTermsOfUseAndAccess(); if (terms.getLicense() == TermsOfUseAndAccess.License.CC0) { aggBuilder.add(JsonLDTerm.schemaOrg("license").getLabel(), - "https://creativecommons.org/publicdomain/zero/1.0/"); + "https://creativecommons.org/licenses/by/4.0/"); } else { addIfNotNull(aggBuilder, JsonLDTerm.termsOfUse, terms.getTermsOfUse()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 3b8efa17513..bf8c984d2f6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -337,9 +337,11 @@ public DatasetVersion parseDatasetVersion(JsonObject obj, DatasetVersion dsv) th } } - private License parseLicense(String inString) { - if (inString != null && inString.equalsIgnoreCase("CC0")) { + private License parseLicense(String inString) throws JsonParseException { + if (inString != null && (inString.equalsIgnoreCase("CC BY") || inString.equalsIgnoreCase("CC-BY"))) { return TermsOfUseAndAccess.License.CC0; + } else if (inString != null && inString.equalsIgnoreCase("CC0")) { + throw new JsonParseException("Error parsing license: CC0 is not a valid license at the moment. Contact the support!"); } return TermsOfUseAndAccess.License.NONE; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index f8a1246b464..dd3b285ab6c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -324,7 +324,7 @@ public static JsonObjectBuilder json(DatasetVersion dsv) { .add("lastUpdateTime", format(dsv.getLastUpdateTime())) .add("releaseTime", format(dsv.getReleaseTime())) .add("createTime", format(dsv.getCreateTime())) - .add("license", dsv.getTermsOfUseAndAccess().getLicense() != null ? dsv.getTermsOfUseAndAccess().getLicense().toString() : null) + .add("license", getLicense(dsv)) .add("termsOfUse", getLicenseInfo(dsv)) .add("confidentialityDeclaration", dsv.getTermsOfUseAndAccess().getConfidentialityDeclaration() != null ? dsv.getTermsOfUseAndAccess().getConfidentialityDeclaration() : null) .add("availabilityStatus", dsv.getTermsOfUseAndAccess().getAvailabilityStatus() != null ? 
dsv.getTermsOfUseAndAccess().getAvailabilityStatus() : null) @@ -385,11 +385,23 @@ private static String getRootDataverseNameforCitation(Dataset dataset) { private static String getLicenseInfo(DatasetVersion dsv) { if (dsv.getTermsOfUseAndAccess().getLicense() != null && dsv.getTermsOfUseAndAccess().getLicense().equals(TermsOfUseAndAccess.License.CC0)) { - return "CC0 Waiver"; + return "CC BY Waiver"; } return dsv.getTermsOfUseAndAccess().getTermsOfUse(); } + private static String getLicense(DatasetVersion dsv) { + if (dsv.getTermsOfUseAndAccess().getLicense() != null) { + if (dsv.getTermsOfUseAndAccess().getLicense().equals(TermsOfUseAndAccess.License.CC0)) { + return "CC BY"; + } else { + return dsv.getTermsOfUseAndAccess().getLicense().toString(); + } + } else { + return null; + } + } + /** * Export formats such as DDI require the citation to be included. See * https://github.com/IQSS/dataverse/issues/2579 for more on DDI export. diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index 888d2ea5775..9015893ab7b 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -50,7 +50,7 @@ -

#{bundle['file.dataFilesTab.terms.list.termsOfUse.waiver.txt']} CC0
+ #{bundle['file.dataFilesTab.terms.list.termsOfUse.waiver.txt']} CC BY
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java index 3ca69fa71d2..7ce101a5502 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java @@ -132,8 +132,8 @@ public void testGetJsonLd() throws ParseException { assertEquals("https://doi.org/10.5072/FK2/LK0D1H", obj.getString("identifier")); assertEquals(null, obj.getString("schemaVersion", null)); assertEquals("Dataset", obj.getJsonObject("license").getString("@type")); - assertEquals("CC0", obj.getJsonObject("license").getString("text")); - assertEquals("https://creativecommons.org/publicdomain/zero/1.0/", obj.getJsonObject("license").getString("url")); + assertEquals("CC BY", obj.getJsonObject("license").getString("text")); + assertEquals("https://creativecommons.org/licenses/by/4.0/", obj.getJsonObject("license").getString("url")); assertEquals("1955-11-05", obj.getString("dateModified")); assertEquals("1955-11-05", obj.getString("datePublished")); assertEquals("1", obj.getString("version")); diff --git a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java index dfcf399bcd8..6593125edc1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java @@ -904,8 +904,8 @@ public void testWriteAccessRightElement() throws XMLStreamException, FileNotFoun xmlw.close(); Assert.assertEquals("" + "" - + "" - + "CC0 Waiver", + + "" + + "CC BY Waiver", sw.toString()); } diff --git a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java index 1ae0b9b12ca..cc7c911925e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java @@ -318,8 +318,8 @@ public void testExportDataset() throws Exception { assertEquals("2001-10-01/2015-11-15", json2.getJsonArray("temporalCoverage").getString(1)); assertEquals(null, json2.getString("schemaVersion", null)); assertEquals("Dataset", json2.getJsonObject("license").getString("@type")); - assertEquals("CC0", json2.getJsonObject("license").getString("text")); - assertEquals("https://creativecommons.org/publicdomain/zero/1.0/", json2.getJsonObject("license").getString("url")); + assertEquals("CC BY", json2.getJsonObject("license").getString("text")); + assertEquals("https://creativecommons.org/licenses/by/4.0/", json2.getJsonObject("license").getString("url")); assertEquals("DataCatalog", json2.getJsonObject("includedInDataCatalog").getString("@type")); assertEquals("LibraScholar", json2.getJsonObject("includedInDataCatalog").getString("name")); assertEquals("https://librascholar.org", json2.getJsonObject("includedInDataCatalog").getString("url")); From 98bc1806b5280f4e0b642453d4e94c0c588dc5f8 Mon Sep 17 00:00:00 2001 From: Anett Seeland Date: Thu, 17 Oct 2019 19:05:46 +0200 Subject: [PATCH 13/56] added the advice to prefix a dataverse identifier with the institutional abbreviation --- src/main/java/propertyFiles/Bundle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 98395de74bc..3568da217de 100755 --- 
a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -672,7 +672,7 @@ notification.email.apiTokenGenerated.subject=API Token was generated dataverse.title=The project, department, university, professor, or journal this dataverse will contain data for. dataverse.enterName=Enter name... dataverse.host.title=The dataverse which contains this data. -dataverse.identifier.title=Short name used for the URL of this dataverse. +dataverse.identifier.title=Short name used for the URL of this dataverse. Use your institutional abbreviation as prefix. dataverse.affiliation.title=The organization with which this dataverse is affiliated. dataverse.category=Category dataverse.category.title=The type that most closely reflects this dataverse. From 6c29c1f0977ac9366924f0291de30c31b6c6910f Mon Sep 17 00:00:00 2001 From: seeland Date: Tue, 21 Apr 2020 14:28:05 +0200 Subject: [PATCH 14/56] preparation for update --- .github/PULL_REQUEST_TEMPLATE.md | 15 + .gitignore | 1 + CODE_OF_CONDUCT.md | 76 + PULL_REQUEST_TEMPLATE.md | 23 - README.md | 5 +- conf/docker-aio/0prep_deps.sh | 6 +- conf/docker-aio/1prep.sh | 6 +- conf/docker-aio/c7.dockerfile | 8 +- conf/docker-aio/configure_doi.bash | 5 + conf/docker-aio/entrypoint.bash | 2 +- conf/docker-aio/run-test-suite.sh | 2 +- conf/docker-aio/testscripts/install | 2 +- conf/docker/dataverse-glassfish/Dockerfile | 2 +- conf/solr/{7.3.1 => 7.7.2}/readme.md | 0 conf/solr/{7.3.1 => 7.7.2}/schema.xml | 14 +- .../{7.3.1 => 7.7.2}/schema_dv_mdb_copies.xml | 0 .../{7.3.1 => 7.7.2}/schema_dv_mdb_fields.xml | 0 conf/solr/{7.3.1 => 7.7.2}/solrconfig.xml | 0 conf/solr/{7.3.1 => 7.7.2}/updateSchemaMDB.sh | 10 +- doc/Architecture/update-user-account-info.png | Bin 34757 -> 33229 bytes doc/mergeParty/readme.md | 8 +- doc/release-notes/4.16-release-notes.md | 1 + doc/release-notes/4.17-release-notes.md | 2 +- doc/release-notes/4.18-release-notes.md | 122 + doc/release-notes/4.18.1-release-notes.md | 45 + doc/release-notes/4.19-release-notes.md | 125 + doc/release-notes/4.20-release-notes | 224 ++ doc/release-notes/6545-solr-var-meta.md | 2 + doc/release-notes/6650-export-import-mismatch | 3 + .../admin/dataverse-external-tools.tsv | 2 +- .../source/_static/api/file-provenance.json | 1 + .../installation/files/etc/init.d/solr | 5 +- .../files/etc/shibboleth/attribute-map.xml | 168 +- .../etc/shibboleth/dataverse-idp-metadata.xml | 242 +- .../etc/shibboleth/shibGroupSAMLtest.json | 5 + .../etc/shibboleth/shibGroupTestShib.json | 5 - .../files/etc/systemd/solr.service | 6 +- .../files/root/auth-providers/microsoft.json | 8 + .../dataverse/branding/analytics-code.html | 2 +- .../source/admin/dataverses-datasets.rst | 26 +- .../source/admin/external-tools.rst | 9 +- .../source/admin/harvestclients.rst | 29 +- .../source/admin/harvestserver.rst | 6 +- doc/sphinx-guides/source/admin/index.rst | 2 +- .../source/admin/integrations.rst | 58 +- .../source/admin/make-data-count.rst | 13 +- .../source/admin/metadatacustomization.rst | 63 +- .../source/admin/metadataexport.rst | 2 +- doc/sphinx-guides/source/admin/monitoring.rst | 2 + ...ls.rst => reporting-tools-and-queries.rst} | 9 +- doc/sphinx-guides/source/admin/timers.rst | 2 +- .../source/admin/troubleshooting.rst | 29 +- .../source/admin/user-administration.rst | 2 +- doc/sphinx-guides/source/api/apps.rst | 17 +- .../source/api/client-libraries.rst | 2 +- doc/sphinx-guides/source/api/dataaccess.rst | 1 + .../source/api/external-tools.rst | 10 +- .../source/api/getting-started.rst | 7 + 
doc/sphinx-guides/source/api/native-api.rst | 2020 ++++++++++++++--- doc/sphinx-guides/source/api/search.rst | 148 +- doc/sphinx-guides/source/conf.py | 4 +- .../source/developers/big-data-support.rst | 55 +- .../source/developers/deployment.rst | 15 +- .../source/developers/dev-environment.rst | 24 +- .../source/developers/geospatial.rst | 2 +- doc/sphinx-guides/source/developers/intro.rst | 2 +- .../source/developers/make-data-count.rst | 4 +- .../source/developers/remote-users.rst | 2 +- .../source/developers/testing.rst | 33 +- doc/sphinx-guides/source/developers/tools.rst | 2 + .../source/developers/troubleshooting.rst | 4 +- .../source/developers/version-control.rst | 59 + .../source/installation/advanced.rst | 6 +- .../source/installation/config.rst | 338 ++- .../source/installation/geoconnect.rst | 2 +- .../source/installation/index.rst | 1 + .../source/installation/installation-main.rst | 17 +- .../source/installation/oauth2.rst | 16 +- .../source/installation/oidc.rst | 91 + .../source/installation/prep.rst | 2 +- .../source/installation/prerequisites.rst | 79 +- .../installation/r-rapache-tworavens.rst | 6 +- .../source/installation/shibboleth.rst | 83 +- doc/sphinx-guides/source/user/account.rst | 20 +- .../source/user/dataset-management.rst | 53 +- .../source/user/dataverse-management.rst | 76 +- .../source/user/find-use-data.rst | 10 +- .../user/tabulardataingest/ingestprocess.rst | 2 +- .../source/user/tabulardataingest/stata.rst | 5 - .../tabulardataingest/supportedformats.rst | 4 +- doc/sphinx-guides/source/versions.rst | 6 +- downloads/download.sh | 4 +- pom.xml | 137 +- scripts/api/data/metadatablocks/citation.tsv | 7 +- .../api/data/metadatablocks/geospatial.tsv | 512 ++--- scripts/api/data/role-editor.json | 2 +- scripts/database/reference_data.sql | 2 +- scripts/deploy/phoenix.dataverse.org/post | 2 +- scripts/installer/Makefile | 22 +- scripts/installer/README.txt | 2 +- scripts/installer/README_python.txt | 69 + scripts/installer/default.config | 34 + scripts/installer/ec2-create-instance.sh | 107 +- scripts/installer/glassfish-setup.sh | 15 +- scripts/installer/install | 44 +- scripts/installer/install.py | 639 ++++++ scripts/installer/installConfig.py | 15 + scripts/installer/installGlassfish.py | 53 + scripts/installer/installUtils.py | 75 + scripts/installer/interactive.config | 47 + .../installer/pgdriver/postgresql-42.2.2.jar | Bin 790405 -> 0 bytes .../installer/pgdriver/postgresql-42.2.9.jar | Bin 0 -> 914037 bytes scripts/installer/requirements.txt | 2 + scripts/issues/6510/PRE-RELEASE-INFO.txt | 22 + .../issues/6510/check_datafiles_6522_6510.sh | 131 ++ scripts/issues/6522/PRE-RELEASE-INFO.txt | 23 + scripts/issues/6522/find_duplicates.sh | 77 + scripts/vagrant/setup-solr.sh | 12 +- scripts/vagrant/setup.sh | 2 +- .../harvard/iq/dataverse/ApiTokenPage.java | 43 +- .../iq/dataverse/ConfigureFragmentBean.java | 3 +- .../ControlledVocabularyValueConverter.java | 5 +- .../harvard/iq/dataverse/DataCitation.java | 4 +- .../edu/harvard/iq/dataverse/DataFile.java | 37 +- .../iq/dataverse/DataFileConverter.java | 5 +- .../iq/dataverse/DataFileServiceBean.java | 15 +- .../edu/harvard/iq/dataverse/Dataset.java | 31 + .../iq/dataverse/DatasetConverter.java | 5 +- .../dataverse/DatasetFieldCompoundValue.java | 35 +- .../iq/dataverse/DatasetFieldConstant.java | 2 +- .../edu/harvard/iq/dataverse/DatasetPage.java | 145 +- .../iq/dataverse/DatasetServiceBean.java | 57 +- .../harvard/iq/dataverse/DatasetVersion.java | 83 +- .../iq/dataverse/DatasetVersionConverter.java | 7 
+- .../dataverse/DatasetVersionDifference.java | 31 +- .../edu/harvard/iq/dataverse/Dataverse.java | 33 + .../iq/dataverse/DataverseConverter.java | 5 +- .../harvard/iq/dataverse/DataversePage.java | 121 +- .../iq/dataverse/DataverseServiceBean.java | 102 +- .../edu/harvard/iq/dataverse/DvObject.java | 2 +- .../iq/dataverse/EditDatafilesPage.java | 637 +++--- .../harvard/iq/dataverse/FacetConverter.java | 5 +- .../iq/dataverse/FileDownloadHelper.java | 53 +- .../iq/dataverse/FileDownloadServiceBean.java | 19 +- .../harvard/iq/dataverse/FileMetadata.java | 56 +- .../edu/harvard/iq/dataverse/FilePage.java | 118 +- .../iq/dataverse/FileVersionDifference.java | 33 +- .../GuestbookResponseServiceBean.java | 34 +- .../iq/dataverse/HarvestingClientsPage.java | 64 +- .../dataverse/IngestUpdatePushResource.java | 53 - .../edu/harvard/iq/dataverse/LoginPage.java | 26 +- .../harvard/iq/dataverse/MailServiceBean.java | 52 +- .../dataverse/ManageFilePermissionsPage.java | 55 +- .../iq/dataverse/ManagePermissionsPage.java | 11 +- .../iq/dataverse/MetadataBlockConverter.java | 5 +- .../iq/dataverse/RoleAssigneeConverter.java | 7 +- .../iq/dataverse/S3PackageImporter.java | 4 +- .../harvard/iq/dataverse/SettingsWrapper.java | 15 +- .../java/edu/harvard/iq/dataverse/Shib.java | 11 +- .../iq/dataverse/TemplateConverter.java | 5 +- .../iq/dataverse/ThemeWidgetFragment.java | 8 +- .../iq/dataverse/UserNotification.java | 2 +- .../UserNotificationServiceBean.java | 18 +- .../iq/dataverse/api/AbstractApiBean.java | 8 + .../edu/harvard/iq/dataverse/api/Access.java | 107 +- .../edu/harvard/iq/dataverse/api/Admin.java | 148 +- .../iq/dataverse/api/BuiltinUsers.java | 10 +- .../harvard/iq/dataverse/api/Datasets.java | 187 +- .../iq/dataverse/api/DownloadInstance.java | 2 +- .../dataverse/api/DownloadInstanceWriter.java | 22 +- .../edu/harvard/iq/dataverse/api/EditDDI.java | 49 +- .../iq/dataverse/api/MakeDataCountApi.java | 58 +- .../edu/harvard/iq/dataverse/api/Search.java | 30 +- .../edu/harvard/iq/dataverse/api/Users.java | 77 + .../datadeposit/MediaResourceManagerImpl.java | 2 +- .../datadeposit/SwordConfigurationImpl.java | 7 +- .../dataverse/api/datadeposit/UrlManager.java | 64 +- .../ServiceUnavailableExceptionHandler.java | 43 + .../api/imports/ImportDDIServiceBean.java | 262 ++- .../authorization/AuthenticationProvider.java | 1 + .../AuthenticationServiceBean.java | 148 +- .../authorization/DataverseRole.java | 4 +- .../providers/builtin/DataverseUserPage.java | 5 + .../providers/builtin/PasswordEncryption.java | 6 +- .../AbstractOAuth2AuthenticationProvider.java | 149 +- .../OAuth2AuthenticationProviderFactory.java | 6 +- .../oauth2/OAuth2FirstLoginPage.java | 8 +- .../oauth2/OAuth2LoginBackingBean.java | 218 +- .../providers/oauth2/OAuth2TokenData.java | 12 - .../providers/oauth2/impl/GitHubOAuth2AP.java | 4 +- .../providers/oauth2/impl/GoogleOAuth2AP.java | 7 +- .../oauth2/impl/MicrosoftOAuth2AP.java | 60 + .../providers/oauth2/impl/OrcidOAuth2AP.java | 103 +- .../oauth2/oidc/OIDCAuthProvider.java | 263 +++ .../OIDCAuthenticationProviderFactory.java | 47 + .../shib/ShibAuthenticationProvider.java | 5 + .../users/AuthenticatedUser.java | 37 + .../filesystem/FileRecordJobListener.java | 10 + .../importer/filesystem/FileRecordReader.java | 10 + .../iq/dataverse/dataaccess/DataAccess.java | 200 +- .../iq/dataverse/dataaccess/FileAccessIO.java | 111 +- .../dataaccess/ImageThumbConverter.java | 13 +- .../iq/dataverse/dataaccess/S3AccessIO.java | 268 ++- .../iq/dataverse/dataaccess/StorageIO.java | 22 +- 
.../dataverse/dataaccess/SwiftAccessIO.java | 81 +- .../iq/dataverse/dataset/DatasetUtil.java | 2 +- .../datasetutility/AddReplaceFileHelper.java | 69 +- .../datasetutility/FileReplacePageHelper.java | 22 +- .../datasetutility/FileSizeChecker.java | 111 +- .../datasetutility/OptionalFileParams.java | 82 +- .../datavariable/VariableMetadataUtil.java | 31 + .../impl/AbstractCreateDatasetCommand.java | 11 +- .../impl/AbstractSubmitToArchiveCommand.java | 3 +- .../command/impl/CreateNewDatasetCommand.java | 2 +- .../FinalizeDatasetPublicationCommand.java | 7 +- .../impl/GetDatasetStorageSizeCommand.java | 84 + .../impl/GetDataverseStorageSizeCommand.java | 2 +- .../impl/ImportFromFileSystemCommand.java | 12 + .../command/impl/ListRoleAssignments.java | 7 + .../impl/SubmitDatasetForReviewCommand.java | 2 +- .../impl/UpdateDatasetVersionCommand.java | 11 +- .../iq/dataverse/export/DDIExporter.java | 12 +- .../iq/dataverse/export/ExportService.java | 102 +- .../iq/dataverse/export/OAI_DDIExporter.java | 17 +- .../dataverse/export/ddi/DdiExportUtil.java | 252 +- .../dataverse/externaltools/ExternalTool.java | 43 + .../externaltools/ExternalToolHandler.java | 15 +- .../ExternalToolServiceBean.java | 40 +- .../harvest/client/HarvestingClient.java | 2 +- .../harvest/client/oai/OaiHandler.java | 22 +- .../server/web/servlet/OAIServlet.java | 6 +- .../iq/dataverse/ingest/IngestMessage.java | 13 +- .../dataverse/ingest/IngestMessageBean.java | 55 +- .../dataverse/ingest/IngestServiceBean.java | 517 +++-- .../DatasetExternalCitationsServiceBean.java | 5 +- .../FakePidProviderServiceBean.java | 6 +- .../ProvEntityFileDataConverter.java | 5 +- .../iq/dataverse/search/IndexServiceBean.java | 255 ++- .../iq/dataverse/search/SearchFields.java | 7 + .../search/SearchIncludeFragment.java | 2 +- .../dataverse/search/SearchServiceBean.java | 11 +- .../iq/dataverse/search/SolrSearchResult.java | 109 +- .../savedsearch/SavedSearchServiceBean.java | 5 +- .../settings/SettingsServiceBean.java | 67 +- .../harvard/iq/dataverse/util/BundleUtil.java | 61 +- .../harvard/iq/dataverse/util/ClockUtil.java | 26 + .../harvard/iq/dataverse/util/FileUtil.java | 833 +++---- .../harvard/iq/dataverse/util/MailUtil.java | 4 + .../iq/dataverse/util/SystemConfig.java | 10 +- .../iq/dataverse/util/json/JsonParser.java | 3 + .../iq/dataverse/util/json/JsonPrinter.java | 6 + .../workflow/WorkflowServiceBean.java | 3 +- src/main/java/propertyFiles/Bundle.properties | 108 +- .../V4.17.0.1__5991-update-scribejava.sql | 1 + .../V4.17.0.2__3578-file-page-preview.sql | 5 + .../V4.18.1.1__6459-contenttype-nullable.sql | 2 + .../migration/V4.19.0.1__6485_multistore.sql | 3 + ....19.0.2__6644-update-editor-role-alias.sql | 2 + src/main/webapp/contactFormFragment.xhtml | 8 +- src/main/webapp/dataset-citation.xhtml | 2 +- src/main/webapp/dataset-license-terms.xhtml | 18 +- src/main/webapp/dataset-widgets.xhtml | 2 +- src/main/webapp/dataset.xhtml | 1118 +++++---- .../webapp/datasetFieldForEditFragment.xhtml | 21 +- src/main/webapp/dataverse.xhtml | 849 +++---- src/main/webapp/dataverse_footer.xhtml | 14 +- src/main/webapp/dataverse_header.xhtml | 80 +- src/main/webapp/dataverse_template.xhtml | 6 +- src/main/webapp/dataverseuser.xhtml | 109 +- src/main/webapp/editFilesFragment.xhtml | 84 +- src/main/webapp/editdatafiles.xhtml | 22 +- .../webapp/explicitGroup-new-dialog.xhtml | 2 +- .../file-download-button-fragment.xhtml | 29 - src/main/webapp/file-info-fragment.xhtml | 3 +- src/main/webapp/file.xhtml | 485 ++-- src/main/webapp/filesFragment.xhtml 
| 3 +- src/main/webapp/guestbook-responses.xhtml | 3 +- src/main/webapp/harvestclients.xhtml | 29 +- src/main/webapp/loginpage.xhtml | 50 +- src/main/webapp/manage-templates.xhtml | 1 + src/main/webapp/metadataFragment.xhtml | 413 ++-- src/main/webapp/mydata_fragment.xhtml | 35 +- src/main/webapp/permissions-configure.xhtml | 2 +- .../webapp/permissions-manage-files.xhtml | 291 +-- src/main/webapp/permissions-manage.xhtml | 2 +- .../webapp/provenance-popups-fragment.xhtml | 4 +- src/main/webapp/resources/css/structure.css | 136 +- src/main/webapp/resources/iqbs/messages.xhtml | 8 +- .../resources/js/dv_rebind_bootstrap_ui.js | 15 +- src/main/webapp/resources/js/fileupload.js | 379 +++- .../webapp/resources/js/shib/idpselect.js | 31 +- .../resources/js/shib/idpselect_config.js | 93 - src/main/webapp/resources/js/widgets.js | 2 + src/main/webapp/search-include-fragment.xhtml | 44 +- .../iq/dataverse/DataCitationTest.java | 12 +- .../edu/harvard/iq/dataverse/LocaleTest.java | 30 + .../harvard/iq/dataverse/api/AccessIT.java | 20 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 103 +- .../edu/harvard/iq/dataverse/api/FilesIT.java | 54 +- .../iq/dataverse/api/InReviewWorkflowIT.java | 10 +- .../harvard/iq/dataverse/api/SearchIT.java | 186 +- .../edu/harvard/iq/dataverse/api/UsersIT.java | 162 +- .../edu/harvard/iq/dataverse/api/UtilIT.java | 86 +- ...tractOAuth2AuthenticationProviderTest.java | 26 + .../oauth2/OAuth2LoginBackingBeanTest.java | 258 +++ .../oauth2/impl/GitHubOAuth2APTest.java | 7 + .../dataverse/dataaccess/DataAccessTest.java | 6 + .../dataaccess/FileAccessIOTest.java | 12 +- .../dataverse/dataaccess/S3AccessIOTest.java | 10 +- .../dataverse/dataaccess/StorageIOTest.java | 5 +- .../dataaccess/SwiftAccessIOTest.java | 5 +- .../iq/dataverse/dataset/DatasetUtilTest.java | 6 +- .../datasetutility/FileSizeCheckerTest.java | 67 - .../export/ddi/DdiExportUtilTest.java | 12 + .../dataset-create-new-all-ddi-fields.json | 1067 +++++++++ .../dataverse/export/ddi/dataset-finch1.xml | 40 +- .../iq/dataverse/export/ddi/exportfull.xml | 194 ++ .../ExternalToolServiceBeanTest.java | 12 + tests/jenkins/ec2/Jenkinsfile | 40 + tests/jenkins/groupvars.yml | 163 ++ 320 files changed, 15794 insertions(+), 5911 deletions(-) create mode 100644 .github/PULL_REQUEST_TEMPLATE.md create mode 100644 CODE_OF_CONDUCT.md delete mode 100644 PULL_REQUEST_TEMPLATE.md rename conf/solr/{7.3.1 => 7.7.2}/readme.md (100%) rename conf/solr/{7.3.1 => 7.7.2}/schema.xml (98%) rename conf/solr/{7.3.1 => 7.7.2}/schema_dv_mdb_copies.xml (100%) rename conf/solr/{7.3.1 => 7.7.2}/schema_dv_mdb_fields.xml (100%) rename conf/solr/{7.3.1 => 7.7.2}/solrconfig.xml (100%) rename conf/solr/{7.3.1 => 7.7.2}/updateSchemaMDB.sh (93%) create mode 100644 doc/release-notes/4.18-release-notes.md create mode 100644 doc/release-notes/4.18.1-release-notes.md create mode 100644 doc/release-notes/4.19-release-notes.md create mode 100644 doc/release-notes/4.20-release-notes create mode 100644 doc/release-notes/6545-solr-var-meta.md create mode 100644 doc/release-notes/6650-export-import-mismatch create mode 100644 doc/sphinx-guides/source/_static/api/file-provenance.json create mode 100644 doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/shibGroupSAMLtest.json delete mode 100644 doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/shibGroupTestShib.json create mode 100644 doc/sphinx-guides/source/_static/installation/files/root/auth-providers/microsoft.json rename doc/sphinx-guides/source/admin/{reporting-tools.rst 
=> reporting-tools-and-queries.rst} (57%) create mode 100644 doc/sphinx-guides/source/installation/oidc.rst create mode 100644 scripts/installer/README_python.txt create mode 100644 scripts/installer/default.config create mode 100644 scripts/installer/install.py create mode 100644 scripts/installer/installConfig.py create mode 100644 scripts/installer/installGlassfish.py create mode 100644 scripts/installer/installUtils.py create mode 100644 scripts/installer/interactive.config delete mode 100644 scripts/installer/pgdriver/postgresql-42.2.2.jar create mode 100644 scripts/installer/pgdriver/postgresql-42.2.9.jar create mode 100644 scripts/installer/requirements.txt create mode 100644 scripts/issues/6510/PRE-RELEASE-INFO.txt create mode 100755 scripts/issues/6510/check_datafiles_6522_6510.sh create mode 100644 scripts/issues/6522/PRE-RELEASE-INFO.txt create mode 100755 scripts/issues/6522/find_duplicates.sh delete mode 100644 src/main/java/edu/harvard/iq/dataverse/IngestUpdatePushResource.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ServiceUnavailableExceptionHandler.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/MicrosoftOAuth2AP.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/util/ClockUtil.java create mode 100644 src/main/resources/db/migration/V4.17.0.1__5991-update-scribejava.sql create mode 100644 src/main/resources/db/migration/V4.17.0.2__3578-file-page-preview.sql create mode 100644 src/main/resources/db/migration/V4.18.1.1__6459-contenttype-nullable.sql create mode 100644 src/main/resources/db/migration/V4.19.0.1__6485_multistore.sql create mode 100644 src/main/resources/db/migration/V4.19.0.2__6644-update-editor-role-alias.sql create mode 100644 src/test/java/edu/harvard/iq/dataverse/LocaleTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProviderTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json create mode 100644 src/test/java/edu/harvard/iq/dataverse/export/ddi/exportfull.xml create mode 100644 tests/jenkins/ec2/Jenkinsfile create mode 100644 tests/jenkins/groupvars.yml diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000000..d0e8bc2fec1 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,15 @@ +**What this PR does / why we need it**: + +**Which issue(s) this PR closes**: + +Closes # + +**Special notes for your reviewer**: + +**Suggestions on how to test this**: + +**Does this PR introduce a user interface change?**: + +**Is there a release notes update needed for this change?**: + +**Additional documentation**: diff --git a/.gitignore b/.gitignore index 2904bc578f2..580855ee8c7 100644 --- a/.gitignore +++ b/.gitignore @@ -60,3 +60,4 @@ scripts/installer/default.config tests/node_modules tests/package-lock.json venv +/build/ diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file 
mode 100644 index 00000000000..4204a1fc85e --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,76 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at support at dataverse dot org. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. 
+ +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index c3675b96bc5..00000000000 --- a/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,23 +0,0 @@ -## New Contributors - -Welcome! New contributors should at least glance at [CONTRIBUTING.md](/CONTRIBUTING.md), especially the section on pull requests where we encourage you to reach out to other developers before you start coding. Also, please note that we measure code coverage and prefer you write unit tests. Pull requests can still be reviewed without tests or completion of the checklist outlined below. Note that we use the "closes" syntax below to trigger Github's automation to close the corresponding issue once the pull request is merged. - -Thanks for your contribution to Dataverse! - -## Related Issues - -- closes #ISSUE_NUMBER: ISSUE_TITLE - -## Pull Request Checklist - -- [ ] Unit [tests][] completed -- [ ] Integration [tests][]: None -- [ ] Deployment requirements, [SQL updates][], [Solr updates][], etc.: None -- [ ] [Documentation][docs] completed -- [ ] Merged latest from "develop" [branch][] and resolved conflicts - -[tests]: http://guides.dataverse.org/en/latest/developers/testing.html -[SQL updates]: http://guides.dataverse.org/en/latest/developers/sql-upgrade-scripts.html -[Solr updates]: https://github.com/IQSS/dataverse/blob/develop/conf/solr/7.3.0/schema.xml -[docs]: http://guides.dataverse.org/en/latest/developers/documentation.html -[branch]: http://guides.dataverse.org/en/latest/developers/branching-strategy.html diff --git a/README.md b/README.md index d72e8704f1c..f52a6e20f83 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,10 @@ Dataverse is a trademark of President and Fellows of Harvard College and is regi [![Dataverse Project logo](src/main/webapp/resources/images/dataverseproject_logo.jpg?raw=true "Dataverse Project")](http://dataverse.org) -[![Build Status](https://travis-ci.org/IQSS/dataverse.svg?branch=develop)](https://travis-ci.org/IQSS/dataverse) [![Coverage Status](https://coveralls.io/repos/IQSS/dataverse/badge.svg?branch=develop&service=github)](https://coveralls.io/github/IQSS/dataverse?branch=develop) +[![API Test Status](https://jenkins.dataverse.org/buildStatus/icon?job=IQSS-dataverse-develop&subject=API%20Test%20Status)](https://jenkins.dataverse.org/job/IQSS-dataverse-develop/) +[![API Test Coverage](https://img.shields.io/jenkins/coverage/jacoco?jobUrl=https%3A%2F%2Fjenkins.dataverse.org%2Fjob%2FIQSS-dataverse-develop&label=API%20Test%20Coverage)](https://jenkins.dataverse.org/job/IQSS-dataverse-develop/) +[![Unit Test Status](https://img.shields.io/travis/IQSS/dataverse?label=Unit%20Test%20Status)](https://travis-ci.org/IQSS/dataverse) +[![Unit Test Coverage](https://img.shields.io/coveralls/github/IQSS/dataverse?label=Unit%20Test%20Coverage)](https://coveralls.io/github/IQSS/dataverse?branch=develop) [dataverse.org]: https://dataverse.org [demo.dataverse.org]: https://demo.dataverse.org diff --git a/conf/docker-aio/0prep_deps.sh b/conf/docker-aio/0prep_deps.sh index 2170631e645..9059439948c 100755 --- a/conf/docker-aio/0prep_deps.sh +++ b/conf/docker-aio/0prep_deps.sh @@ -8,7 +8,7 @@ if [ ! 
-e dv/deps/glassfish4dv.tgz ]; then mkdir -p /tmp/dv-prep/gf cd /tmp/dv-prep/gf wget http://download.java.net/glassfish/4.1/release/glassfish-4.1.zip - wget http://search.maven.org/remotecontent?filepath=org/jboss/weld/weld-osgi-bundle/2.2.10.Final/weld-osgi-bundle-2.2.10.Final-glassfish4.jar -O weld-osgi-bundle-2.2.10.Final-glassfish4.jar + wget https://search.maven.org/remotecontent?filepath=org/jboss/weld/weld-osgi-bundle/2.2.10.Final/weld-osgi-bundle-2.2.10.Final-glassfish4.jar -O weld-osgi-bundle-2.2.10.Final-glassfish4.jar unzip glassfish-4.1.zip rm glassfish4/glassfish/modules/weld-osgi-bundle.jar mv weld-osgi-bundle-2.2.10.Final-glassfish4.jar glassfish4/glassfish/modules @@ -17,12 +17,12 @@ if [ ! -e dv/deps/glassfish4dv.tgz ]; then # assuming that folks usually have /tmp auto-clean as needed fi -if [ ! -e dv/deps/solr-7.3.1dv.tgz ]; then +if [ ! -e dv/deps/solr-7.7.2dv.tgz ]; then echo "solr dependency prep" # schema changes *should* be the only ones... cd dv/deps/ #wget https://archive.apache.org/dist/lucene/solr/7.3.0/solr-7.3.0.tgz -O solr-7.3.0dv.tgz - wget https://archive.apache.org/dist/lucene/solr/7.3.1/solr-7.3.1.tgz -O solr-7.3.1dv.tgz + wget https://archive.apache.org/dist/lucene/solr/7.7.2/solr-7.7.2.tgz -O solr-7.7.2dv.tgz cd ../../ fi diff --git a/conf/docker-aio/1prep.sh b/conf/docker-aio/1prep.sh index 1dc95f8d45c..a2f2956532a 100755 --- a/conf/docker-aio/1prep.sh +++ b/conf/docker-aio/1prep.sh @@ -4,9 +4,9 @@ # this was based off the phoenix deployment; and is likely uglier and bulkier than necessary in a perfect world mkdir -p testdata/doc/sphinx-guides/source/_static/util/ -cp ../solr/7.3.1/schema*.xml testdata/ -cp ../solr/7.3.1/solrconfig.xml testdata/ -cp ../solr/7.3.1/updateSchemaMDB.sh testdata/ +cp ../solr/7.7.2/schema*.xml testdata/ +cp ../solr/7.7.2/solrconfig.xml testdata/ +cp ../solr/7.7.2/updateSchemaMDB.sh testdata/ cp ../jhove/jhove.conf testdata/ cp ../jhove/jhoveConfig.xsd testdata/ cd ../../ diff --git a/conf/docker-aio/c7.dockerfile b/conf/docker-aio/c7.dockerfile index 7436b73664c..c5663daa3ec 100644 --- a/conf/docker-aio/c7.dockerfile +++ b/conf/docker-aio/c7.dockerfile @@ -17,7 +17,7 @@ COPY testdata/sushi_sample_logs.json /tmp/ COPY disableipv6.conf /etc/sysctl.d/ RUN rm /etc/httpd/conf/* COPY httpd.conf /etc/httpd/conf -RUN cd /opt ; tar zxf /tmp/dv/deps/solr-7.3.1dv.tgz +RUN cd /opt ; tar zxf /tmp/dv/deps/solr-7.7.2dv.tgz RUN cd /opt ; tar zxf /tmp/dv/deps/glassfish4dv.tgz # this copy of domain.xml is the result of running `asadmin set server.monitoring-service.module-monitoring-levels.jvm=LOW` on a default glassfish installation (aka - enable the glassfish REST monitir endpoint for the jvm` @@ -28,9 +28,9 @@ RUN sudo -u postgres /usr/pgsql-9.6/bin/initdb -D /var/lib/pgsql/data # copy configuration related files RUN cp /tmp/dv/pg_hba.conf /var/lib/pgsql/data/ -RUN cp -r /opt/solr-7.3.1/server/solr/configsets/_default /opt/solr-7.3.1/server/solr/collection1 -RUN cp /tmp/dv/schema*.xml /opt/solr-7.3.1/server/solr/collection1/conf/ -RUN cp /tmp/dv/solrconfig.xml /opt/solr-7.3.1/server/solr/collection1/conf/solrconfig.xml +RUN cp -r /opt/solr-7.7.2/server/solr/configsets/_default /opt/solr-7.7.2/server/solr/collection1 +RUN cp /tmp/dv/schema*.xml /opt/solr-7.7.2/server/solr/collection1/conf/ +RUN cp /tmp/dv/solrconfig.xml /opt/solr-7.7.2/server/solr/collection1/conf/solrconfig.xml # skipping glassfish user and solr user (run both as root) diff --git a/conf/docker-aio/configure_doi.bash b/conf/docker-aio/configure_doi.bash index 
24ed6005b95..3aa29605037 100755 --- a/conf/docker-aio/configure_doi.bash +++ b/conf/docker-aio/configure_doi.bash @@ -17,3 +17,8 @@ if [ ! -z "${doi_baseurl}" ]; then doi_baseurl_esc=`echo ${doi_baseurl} | sed -e 's/:/\\:/'` bin/asadmin create-jvm-options "\"-Ddoi.baseurlstring=${doi_baseurl_esc}\"" fi +if [ ! -z "${doi_mdcbaseurl}" ]; then + bin/asadmin delete-jvm-options "-Ddoi.mdcbaseurlstring=https\://api.test.datacite.org" + doi_mdcbaseurl_esc=`echo ${doi_mdcbaseurl} | sed -e 's/:/\\:/'` + bin/asadmin create-jvm-options "\"-Ddoi.mdcbaseurlstring=${doi_mdcbaseurl_esc}\"" +fi diff --git a/conf/docker-aio/entrypoint.bash b/conf/docker-aio/entrypoint.bash index da01ee56153..60f99cf2259 100755 --- a/conf/docker-aio/entrypoint.bash +++ b/conf/docker-aio/entrypoint.bash @@ -2,7 +2,7 @@ export LANG=en_US.UTF-8 #sudo -u postgres /usr/bin/postgres -D /var/lib/pgsql/data & sudo -u postgres /usr/pgsql-9.6/bin/postgres -D /var/lib/pgsql/data & -cd /opt/solr-7.3.1/ +cd /opt/solr-7.7.2/ # TODO: Run Solr as non-root and remove "-force". bin/solr start -force bin/solr create_core -c collection1 -d server/solr/collection1/conf -force diff --git a/conf/docker-aio/run-test-suite.sh b/conf/docker-aio/run-test-suite.sh index c1fca242389..811bc579c6d 100755 --- a/conf/docker-aio/run-test-suite.sh +++ b/conf/docker-aio/run-test-suite.sh @@ -8,4 +8,4 @@ fi # Please note the "dataverse.test.baseurl" is set to run for "all-in-one" Docker environment. # TODO: Rather than hard-coding the list of "IT" classes here, add a profile to pom.xml. -mvn test -Dtest=DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT -Ddataverse.test.baseurl=$dvurl +mvn test -Dtest=DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT -Ddataverse.test.baseurl=$dvurl diff --git a/conf/docker-aio/testscripts/install b/conf/docker-aio/testscripts/install index a994fe2920d..b886ea8e4ad 100755 --- a/conf/docker-aio/testscripts/install +++ b/conf/docker-aio/testscripts/install @@ -15,7 +15,7 @@ export SMTP_SERVER=localhost export MEM_HEAP_SIZE=2048 export GLASSFISH_DOMAIN=domain1 cd scripts/installer -cp pgdriver/postgresql-42.2.2.jar $GLASSFISH_ROOT/glassfish/lib +cp pgdriver/postgresql-42.2.9.jar $GLASSFISH_ROOT/glassfish/lib #cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf cp /opt/dv/testdata/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf cp /opt/dv/testdata/jhoveConfig.xsd $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhoveConfig.xsd diff --git a/conf/docker/dataverse-glassfish/Dockerfile b/conf/docker/dataverse-glassfish/Dockerfile index 367a9ca127c..57284d3f58b 100644 --- a/conf/docker/dataverse-glassfish/Dockerfile +++ b/conf/docker/dataverse-glassfish/Dockerfile @@ -70,7 +70,7 @@ RUN /tmp/dvinstall/glassfish-setup.sh ###glassfish-setup will handle everything in Dockerbuild ##install jdbc driver -RUN cp /tmp/dvinstall/pgdriver/postgresql-42.2.2.jar /usr/local/glassfish4/glassfish/domains/domain1/lib +RUN cp /tmp/dvinstall/pgdriver/postgresql-42.2.9.jar /usr/local/glassfish4/glassfish/domains/domain1/lib # Customized persistence xml to avoid database recreation #RUN mkdir -p 
/tmp/WEB-INF/classes/META-INF/ diff --git a/conf/solr/7.3.1/readme.md b/conf/solr/7.7.2/readme.md similarity index 100% rename from conf/solr/7.3.1/readme.md rename to conf/solr/7.7.2/readme.md diff --git a/conf/solr/7.3.1/schema.xml b/conf/solr/7.7.2/schema.xml similarity index 98% rename from conf/solr/7.3.1/schema.xml rename to conf/solr/7.7.2/schema.xml index fd307a32f07..da40a8e99fa 100644 --- a/conf/solr/7.3.1/schema.xml +++ b/conf/solr/7.7.2/schema.xml @@ -171,6 +171,12 @@ + + + + + + @@ -229,6 +235,12 @@ + + + + + + @@ -281,7 +293,7 @@ - + diff --git a/conf/solr/7.3.1/schema_dv_mdb_copies.xml b/conf/solr/7.7.2/schema_dv_mdb_copies.xml similarity index 100% rename from conf/solr/7.3.1/schema_dv_mdb_copies.xml rename to conf/solr/7.7.2/schema_dv_mdb_copies.xml diff --git a/conf/solr/7.3.1/schema_dv_mdb_fields.xml b/conf/solr/7.7.2/schema_dv_mdb_fields.xml similarity index 100% rename from conf/solr/7.3.1/schema_dv_mdb_fields.xml rename to conf/solr/7.7.2/schema_dv_mdb_fields.xml diff --git a/conf/solr/7.3.1/solrconfig.xml b/conf/solr/7.7.2/solrconfig.xml similarity index 100% rename from conf/solr/7.3.1/solrconfig.xml rename to conf/solr/7.7.2/solrconfig.xml diff --git a/conf/solr/7.3.1/updateSchemaMDB.sh b/conf/solr/7.7.2/updateSchemaMDB.sh similarity index 93% rename from conf/solr/7.3.1/updateSchemaMDB.sh rename to conf/solr/7.7.2/updateSchemaMDB.sh index e4446083442..0044f15c7cd 100755 --- a/conf/solr/7.3.1/updateSchemaMDB.sh +++ b/conf/solr/7.7.2/updateSchemaMDB.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash set -euo pipefail # This script updates the and schema configuration necessary to properly @@ -61,6 +61,12 @@ echo "Retrieve schema data from ${DATAVERSE_URL}/api/admin/index/solr/schema" TMPFILE=`mktemp` curl -f -sS "${DATAVERSE_URL}/api/admin/index/solr/schema${UNBLOCK_KEY}" > $TMPFILE +### Fail gracefull if Dataverse is not ready yet. +if [[ "`wc -l ${TMPFILE}`" < "3" ]]; then + echo "Dataverse responded with empty file. When running on K8s: did you bootstrap yet?" 
+ exit 123 +fi + ### Processing echo "Writing ${TARGET}/schema_dv_mdb_fields.xml" echo "" > ${TARGET}/schema_dv_mdb_fields.xml @@ -76,4 +82,4 @@ rm ${TMPFILE}* ### Reloading echo "Triggering Solr RELOAD at ${SOLR_URL}/solr/admin/cores?action=RELOAD&core=collection1" -curl -f -sS "${SOLR_URL}/solr/admin/cores?action=RELOAD&core=collection1" \ No newline at end of file +curl -f -sS "${SOLR_URL}/solr/admin/cores?action=RELOAD&core=collection1" diff --git a/doc/Architecture/update-user-account-info.png b/doc/Architecture/update-user-account-info.png index a372104438cbaab499dac987bbad0b6122332c5d..aa7d5f881f1680d1c928eb7872eadc3126ffdc7b 100644 GIT binary patch literal 33229 zcmc$`c{r5s-#(5enpD&j3fYaFDEpqJ&=680S}c)$-`A9VDNC}JJ*3DULm^qR?`s(Q z5|N$nYf|se=Xsvbar~a+`RC_2%9y$D>%Q*mzOL8nJkQs8O@JCk@%YhmM+pcBj$geZ zr%pgX^dA0Y9U+2um?g%8;TOA|{4Ki&mR8PYhDLS-iiQ@3Huvld4e0MX(?7Jcvl8Xy zwKBVBVP|h{#`D0^{3M?U4Ya^yqIt{i@B0LV(2P^O;j?R&{nF%5mN+-1f^GQK7y1#5 zrcr{(R1=wSD!jh(>gqQ2jsEgecp(x|otR~d+r+vIu!|gf!NGy+Hcs7x)=k9k?S*OwsA#?jG zi|Z77NBTadX5VT~AH{@5hSv=-yHlcft*U2qO%D$t+Aq$n70gjng~W`m&{9Z#%Sh(T zws|c3Fr}uh?D_0Fq@}Q1LCyONlO)r~=gU2U1Z4Rq-#l|2GkCr>&s8!bziy|w$ZQfY z*Z00<%RlO54?V`iu6*d>-66;4tk+G2I=j+6H5%5&IuD;+)36(~AqnyoENQvRk-Gai zkz&edS2*DLv}lL}+x39>5bH*B6Mvq(qi>q;7Vexr-W%DNu$K4aWeA6j#?xa)BYjds z)xvYpL`$Uv1fG0X&cTRKSU_pSKinEwy@cp5F5;V>RHqF zt|)Jwfbq9@oU`PV^YB#c^LxM0FdeIsJYQZ%zzZ)F{6~Pn|L6A|Q5>3Zt@m{5a$i)* zTGdhpk9t15pN>P2wJ#+m7Bc>b;)!dz{ZO^s)G=&#J$;^pjQu_|@oWi!t*s%h9NUaR zI4P{|PE0=?Ke0}8enZLF@Oz?y`+lo#N+c`-rF_(&sDnecJN3-Qdf}|?ZdLmH{KEzI zHqW(1m!XLvFOH?xhv9-O-N?m#2SriWnU%0(RJ+rA*0c<2>D!K>kL_rf=jL;Iek!c2 zefaPpCx=T3R$H>1h|Fq*P0(QSgmCreCRd$> zp|lJvZN+DQ@=Sfk`j;NOSl1t;)h}L9g(y6K`7)3K4vj>huk_nrDcjtAyPL>?Xh;>e zDH(E=x3YTo`ml+K$!Q4!0_pdTK0iltVkGx!YD`M)d_GG&ZQP=+Yw9mDk9&Ri>eZ{q z&Ok$_UD3i4kJ46`GmJc8yec43!^RFxjIuzOP+Auk`m!3_|Owrfr?MSDr{}+ zt0YFik<;4&IarU0)voND+gn?U!xdtztgM$VMYWR<5U>qn!b{e?R+kxQNpS5*hss|Y zjv;PSZfH#Mu`_aVa?C6&*SDa}+5zPG)R7_eI{Shf*<#kNNgRkCGP6?*y}GYHr(cV! 
zY5k@Uxp59IoSr}}@!PZ9E-l)R9QgC!IzOgai4g$C4Kd$X+dSg!`J;}O?Ig#1iu$iZ zZ+gqO6Dr2<#`(Bb7issL+F9=$LbPmNau#BEEi^y3!|pbA+pnFi`Morev=WwA3Byx6 zN2Dp=-Su;uTeNx?y~R2H>5wx)Stu0w5tA=H(x_@am& zO!4+GH{?D4L?!>e6kfy>2iuQQ+d_GRsnN^fuhX5MESg#Ej1@S(8TLH&>esbZ8p5$P zafh-%)QABAcamYXu# zaBCq%*I~!hi4Qb`tuC-M3hy4W7pGk<^vH&7aid5CQChGPu7peu_p%}KO5Hv=v;Ac_ z8c%f3xxaf+oAYqm{!1RpdaYL9>so~c%u}kdV*?+;pBsT45W8GR#-=bKJ+fT zMie$VPI?f@Kj0?m1_}f5Ea^O?7chRoi4#A!B}!l2-}H!d58VFqLF0Y8gN&t-m${=l Sc?Hs1_oymqDdsC!Km30WAJYZ^ diff --git a/doc/mergeParty/readme.md b/doc/mergeParty/readme.md index f97b17f7430..061673fffa0 100644 --- a/doc/mergeParty/readme.md +++ b/doc/mergeParty/readme.md @@ -1,5 +1,5 @@ -# Merge Party Readme -Welcome to the merge party! This document is intended to give a short overview of why we need this party, when was changed and how to change it. There's much work to do, so we'll keep it short. Hopefully. +# Merge Party +Welcome to the merge party! This document is intended to give a short overview of why we need this party, when was it changed and how to change it. There's much work to do, so we'll keep it short, hopefully. ## What Just Happened In order to allow users to log into Dataverse using credentials from other systems (e.g. institutional Shibboleth server), we had to refactor out the internal user management sub-system (formerly known as "DataverseUser") and introduce a new user system. The existing system was taken out of Dataverse but kept in the .war file, as we also need to support standalone instances. @@ -16,7 +16,7 @@ From a merge standpoint, this means that code that previously referenced `Datave Most of these changes have been done by Michael/Phil - otherwise, the `auth` branch would not compile. -Since the guest user does not live in the database, it does not have an id. Moreover, JPA classes cannot link directly to it\*. But have no fear - all users (and, really, all `RoleAssignee`s, which are users or groups) have an identifier. When you need to reference a user (and later, a group) just use the identifier (it's of type `String`). When needing to convert an identifier to a user, call `RoleAssigneeServiceBean.getRoleAssignee( identifier )` in the general case, or `AuthenticationServiceBean.getAuthenticatedUser(identifier)` if you're certain the identifier is of an authenticated user. +The guest user does not live in the database so it does not have an id. Moreover, JPA classes cannot link directly to it\*. But have no fear - all users (and, really, all `RoleAssignee`s, which are users or groups) have an identifier. When you need to reference a user (and later, a group) just use the identifier (it's of type `String`). When needing to convert an identifier to a user, call `RoleAssigneeServiceBean.getRoleAssignee( identifier )` in the general case, or `AuthenticationServiceBean.getAuthenticatedUser(identifier)` if you're certain the identifier is of an authenticated user. \* We have debated this for a while, since we could have created a dummy record, like we've done so far. We went with this solution, as it is cleaner, can't be messed up by SQL scripts, and will make even more sense once groups arrive. @@ -84,4 +84,4 @@ A new script that sets up the users and the dataverses, sets the system up for b ## Undoing the undoing the merge When merging back to master, we need to undo commit 8ae3e6a482b87b52a1745bb06f340875803d2c5b (a.k.a 8ae3e6a), which is the commit that undid the erroneous merge. 
-More at http://www.christianengvall.se/undo-pushed-merge-git/
\ No newline at end of file
+More at http://www.christianengvall.se/undo-pushed-merge-git/
diff --git a/doc/release-notes/4.16-release-notes.md b/doc/release-notes/4.16-release-notes.md
index 66241a42777..8feb263d2ab 100644
--- a/doc/release-notes/4.16-release-notes.md
+++ b/doc/release-notes/4.16-release-notes.md
@@ -91,6 +91,7 @@ If this is a new installation, please see our
 Building External Tools section of the API Guide.
+For more information, check out the new Building External Tools section of the API Guide.
 
 ## Complete List of Changes
diff --git a/doc/release-notes/4.18-release-notes.md b/doc/release-notes/4.18-release-notes.md
new file mode 100644
index 00000000000..97ab25aa0fc
--- /dev/null
+++ b/doc/release-notes/4.18-release-notes.md
@@ -0,0 +1,122 @@
+# Dataverse 4.18
+
+**Note: There is an issue in 4.18 with the display of validation messages on the dataset page (#6380) and we recommend using 4.18.1 for any production environments.**
+
+This release brings new features, enhancements, and bug fixes to Dataverse. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project.
+
+## Release Highlights
+
+### File Page Previews and Previewers
+
+File-level External Tools can now be configured to display in a "Preview Mode" designed for embedding within the file landing page.
+
+While not technically part of this release, previewers have been made available for several common file types. The previewers support spreadsheet, image, text, document, audio, video, HTML files, and more. These previewers can be found in the Qualitative Data Repository GitHub Repository. The spreadsheet viewer was contributed by the [Dataverse SSHOC][] project.
+
+[Dataverse SSHOC]: https://www.sshopencloud.eu/news/developing-sshoc-dataverse
+
+### Microsoft Login
+
+Users can now create Dataverse accounts and log in using self-provisioned Microsoft accounts such as live.com and outlook.com. Users can also use Microsoft accounts managed by their institutions. This new feature not only makes it easier to log in to Dataverse but will also streamline the interaction with any external tools that utilize Azure services that require login.
+
+### Add Data and Host Dataverse
+
+More workflows to add data have been added across the UI, including a new button on the My Data tab of the Account page, as well as a link in the Dataverse navbar, which will display on every page. This will provide users much easier access to start depositing data. By default, the Host Dataverse will be the installation root dataverse for these new Add Data workflows, but there is now a dropdown component allowing creators to select any dataverse in which they have the proper permissions to create a new dataverse or dataset.
+
+### Primefaces 7
+
+Primefaces, the open source UI framework upon which the Dataverse front end is built, has been updated to the most recent version. This provides security updates and bug fixes and will also allow Dataverse developers to take advantage of new features and enhancements.
+
+### Integration Test Pipeline and Test Health Reporting
+
+As part of the Dataverse Community's ongoing efforts to provide more robust automated testing infrastructure, and in support of the project's desire to have the develop branch constantly in a "release ready" state, API-based integration tests are now run every time a branch is merged to develop. The status of the last test run is available as a badge at the bottom of the README.md file that serves as the homepage of the Dataverse GitHub Repository.
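As a rough sketch of the file-level "Preview Mode" described above: a tool opts in through its manifest and is registered with the admin external tools endpoint. Everything here other than the `hasPreviewMode` parameter itself (the tool URL, content type, and the exact set of manifest fields) is illustrative and should be checked against the Building External Tools section of the API Guide.

```bash
# Hypothetical previewer manifest; field names besides "hasPreviewMode" are assumptions.
cat > examplePreviewer.json <<'EOF'
{
  "displayName": "Example Previewer",
  "description": "Embedded preview of tab-separated files.",
  "scope": "file",
  "type": "explore",
  "hasPreviewMode": "true",
  "contentType": "text/tab-separated-values",
  "toolUrl": "https://example.org/previewer.html",
  "toolParameters": {
    "queryParameters": [
      { "fileid": "{fileId}" },
      { "siteUrl": "{siteUrl}" }
    ]
  }
}
EOF
# Register the tool with the external tools admin endpoint.
curl -X POST -H "Content-type: application/json" \
  --upload-file examplePreviewer.json http://localhost:8080/api/admin/externalTools
```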
+
+### Make Data Count Metrics Updates
+
+A new configuration option has been added that allows Make Data Count metrics to be collected, but not reflected in the front end. This option was designed to allow installations to collect and verify metrics for a period before turning on the display to users.
+
+### Search API Enhancements
+
+The Dataverse Search API will now display unpublished content when an API token is passed (and appropriate permissions exist).
+
+### Additional Dataset Author Identifiers
+
+The following dataset author identifiers are now supported:
+
+- DAI: https://en.wikipedia.org/wiki/Digital_Author_Identifier
+- ResearcherID: http://researcherid.com
+- ScopusID: https://www.scopus.com
+
+## Major Use Cases
+
+Newly-supported use cases in this release include:
+
+- Users can view previews of several common file types, eliminating the need to download or explore a file just to get a quick look.
+- Users can log in using self-provisioned Microsoft accounts and also can log in using Microsoft accounts managed by an organization.
+- Dataverse administrators can now revoke and regenerate API tokens with an API call.
+- Users will receive notifications when their ingests complete, and will be informed if the ingest was a success or failure.
+- Dataverse developers will receive feedback about the health of the develop branch after their pull request is merged.
+- Dataverse tool developers will be able to query the Dataverse API for unpublished data as well as published data.
+- Dataverse administrators will be able to collect Make Data Count metrics without turning on the display for users.
+- Users with a DAI, ResearcherID, or ScopusID can use these author identifiers in their datasets.
+
+## Notes for Dataverse Installation Administrators
+
+### API Token Management
+
+- You can now delete a user's API token, recreate a user's API token, and find a token's expiration date. See the Native API guide for more information.
+
+### New JVM Options
+
+[:mdcbaseurlstring](http://guides.dataverse.org/en/4.18/installation/config.html#mdcbaseurlstring) allows dataverse administrators to use a test base URL for Make Data Count.
+
+### New Database Settings
+
+[:DisplayMDCMetrics](http://guides.dataverse.org/en/4.18/installation/config.html#DisplayMDCMetrics) can be set to false to disable display of MDC metrics.
+
+## Notes for Tool Developers and Integrators
+
+### Preview Mode
+
+Tool Developers can now add the `hasPreviewMode` parameter to their file-level external tools. This setting provides an embedded, simplified view of the tool on the file pages for any installation that installs the tool. See Building External Tools for more information.
+
+### API Token Management
+
+If your tool writes content back to Dataverse, you can now take advantage of administrative endpoints that delete and re-create API tokens. You can also use an endpoint that provides the expiration date of a specific API token. See the Native API guide for more information.
+
+### View Unpublished Data Using Search API
+
+If you pass a token, the search API output will include unpublished content.
+
+## Complete List of Changes
+
+For the complete list of code changes in this release, see the 4.18 milestone in GitHub.
+
+For help with upgrading, installing, or general questions, please post to the Dataverse Google Group or email support@dataverse.org.
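As a quick illustration of the Search API change noted above (a sketch only; the server URL and token are placeholders), the same query can be run with and without an API token. The token-authenticated request can also return unpublished content that the token's user is permitted to see.

```bash
SERVER_URL=https://demo.dataverse.org                  # placeholder installation
API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx         # placeholder token

# Anonymous search: published content only.
curl "$SERVER_URL/api/search?q=finch&type=dataset"

# Authenticated search: may also include unpublished drafts visible to this user.
curl -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/search?q=finch&type=dataset"
```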
+
+## Installation
+
+If this is a new installation, please see our Installation Guide.
+
+## Upgrade
+
+1. Undeploy the previous version.
+
+- <glassfish install path>/glassfish4/bin/asadmin list-applications
+- <glassfish install path>/glassfish4/bin/asadmin undeploy dataverse
+
+2. Stop glassfish, remove the generated directory, and start glassfish again.
+
+- service glassfish stop
+- remove the generated directory: rm -rf <glassfish install path>glassfish4/glassfish/domains/domain1/generated
+- service glassfish start
+
+3. Deploy this version.
+
+- <glassfish install path>/glassfish4/bin/asadmin deploy <path>dataverse-4.18.war
+
+4. Restart glassfish.
+
+5. Update Citation Metadata Block
+
+- `wget https://github.com/IQSS/dataverse/releases/download/v4.18/citation.tsv`
+- `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"`
diff --git a/doc/release-notes/4.18.1-release-notes.md b/doc/release-notes/4.18.1-release-notes.md
new file mode 100644
index 00000000000..99db66464a8
--- /dev/null
+++ b/doc/release-notes/4.18.1-release-notes.md
@@ -0,0 +1,45 @@
+# Dataverse 4.18.1
+
+This release provides a fix for a regression introduced in 4.18 and implements a few other small changes.
+
+## Release Highlights
+
+### Proper Validation Messages
+
+When creating or editing dataset metadata, users were not receiving field-level indications about what entries failed validation and were only receiving a message at the top of the page. This fix restores field-level indications.
+
+## Major Use Cases
+
+Use cases in this release include:
+
+- Users will receive the proper messaging when dataset metadata entries are not valid.
+- Users can now view the expiration date of an API token and revoke a token on the API Token tab of the account page.
+
+## Complete List of Changes
+
+For the complete list of code changes in this release, see the 4.18.1 milestone in GitHub.
+
+For help with upgrading, installing, or general questions, please post to the Dataverse Google Group or email support@dataverse.org.
+
+## Installation
+
+If this is a new installation, please see our Installation Guide.
+
+## Upgrade
+
+1. Undeploy the previous version.
+
+- <glassfish install path>/glassfish4/bin/asadmin list-applications
+- <glassfish install path>/glassfish4/bin/asadmin undeploy dataverse
+
+2. Stop glassfish, remove the generated directory, and start glassfish again.
+
+- service glassfish stop
+- remove the generated directory: rm -rf <glassfish install path>glassfish4/glassfish/domains/domain1/generated
+- service glassfish start
+
+3. Deploy this version.
+
+- <glassfish install path>/glassfish4/bin/asadmin deploy <path>dataverse-4.18.1.war
+
+4. Restart glassfish.
diff --git a/doc/release-notes/4.19-release-notes.md b/doc/release-notes/4.19-release-notes.md
new file mode 100644
index 00000000000..70c8711582c
--- /dev/null
+++ b/doc/release-notes/4.19-release-notes.md
@@ -0,0 +1,125 @@
+# Dataverse 4.19
+
+This release brings new features, enhancements, and bug fixes to Dataverse. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project.
+
+## Release Highlights
+
+### OpenID Connect Support
+
+Dataverse now provides basic support for any OpenID Connect (OIDC) compliant authentication provider.
+
+Prior to supporting this standard, new authentication methods needed to be added by pull request. OIDC support provides a standardized way to handle authentication, share user information, and more. You are able to use any compliant provider just by loading a configuration file, without touching the codebase. While the usual prominent providers like Google and others feature OIDC support, there are plenty of other options to easily attach your installation to a custom authentication provider using enterprise-grade software.
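To make the "loading a configuration file" workflow above concrete, here is a sketch assuming the usual admin API for authentication providers. All values are placeholders, and the exact JSON fields (in particular the `factoryData` string) are assumptions that should be checked against the OpenID Connect Login Options documentation linked just below.

```bash
# Hypothetical OIDC provider definition; every value here is a placeholder.
cat > oidc-provider.json <<'EOF'
{
  "id": "oidc-example",
  "factoryAlias": "oidc",
  "title": "Example OIDC Login",
  "subtitle": "",
  "factoryData": "type: oidc | issuer: https://idp.example.org/realms/example | clientId: dataverse | clientSecret: CHANGE-ME",
  "enabled": true
}
EOF
# Load it via the admin API (assumed endpoint; keep admin endpoints blocked from public access).
curl -X POST -H "Content-type: application/json" \
  --upload-file oidc-provider.json http://localhost:8080/api/admin/authenticationProviders
```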
You can use any compliant provider just by loading a configuration file, without touching the codebase. While prominent providers like Google offer OIDC support, there are also plenty of options for attaching your installation to a custom, enterprise-grade authentication provider. + +See the [OpenID Connect Login Options documentation](http://guides.dataverse.org/en/4.19/installation/oidc.html) in the Installation Guide for more details. + +Support for attribute mapping, group syncing, and more is planned for future versions of the code. + +### Python Installer + +We are introducing a new installer script, written in Python. It is intended to eventually replace the old installer (written in Perl). For now it is being offered as an (experimental) alternative. + +See [README_python.txt](https://github.com/IQSS/dataverse/blob/v4.19/scripts/installer/README_python.txt) in scripts/installer and/or in the installer bundle for more information. + +## Major Use Cases + +Newly-supported use cases in this release include: + +- Dataverse installation administrators will be able to experiment with a Python Installer (Issue #3937, PR #6484) +- Dataverse installation administrators will be able to set up OIDC-compliant login options by editing a configuration file, with no need for a code change (Issue #6432, PR #6433) +- Following setup by a Dataverse administrator, users will be able to log in using OIDC-compliant methods (Issue #6432, PR #6433) +- Users of the Search API will see additional fields in the JSON output (Issues #6300, #6396, PR #6441) +- Users loading the support form will now be presented with the math challenge as expected and will be able to successfully send an email to support (Issue #6307, PR #6462) +- Users of https://mybinder.org can now spin up Jupyter Notebooks and other computational environments from Dataverse DOIs (Issue #4714, PR #6453) + +## Notes for Dataverse Installation Administrators + +### Security vulnerability in Solr + +A serious security issue has recently been identified in multiple versions of the Solr search engine, including v7.3, which Dataverse currently uses. Follow the instructions below to verify that your installation is safe from a potential attack. You can also consult the following link for a detailed description of the issue: + +RCE in Solr via Velocity Template. + +The vulnerability allows an intruder to execute arbitrary code on the system running Solr. Fortunately, it can only be exploited if the Solr API access point is open to direct access from public networks (aka "the outside world"), which is NOT needed in a Dataverse installation. + +We have always recommended having Solr (port 8983) firewalled off from public access in our installation guides. But we recommend that you double-check your firewall settings and verify that the port is not accessible from outside networks. The simplest quick test is to try the following URL in your browser: + + `http://:8983` + +and confirm that you get "access denied" or that it times out, etc. + +In most cases, when Solr runs on the same server as the Dataverse web application, you will only want the port accessible from localhost. We also recommend that you add the following argument to the Solr startup command: `-j jetty.host=127.0.0.1`. This will make Solr accept connections from localhost only, adding redundancy in case of a firewall failure.
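For a concrete check (the hostname is a placeholder, and the Solr install path below matches the init scripts shipped with Dataverse; adjust it to your Solr version):

    # From a machine outside your network: a refusal or timeout is the result you want
    curl --max-time 10 http://your.dataverse.host:8983/solr/

    # Start Solr so that it only accepts connections from localhost
    /usr/local/solr/solr-7.3.1/bin/solr start -m 1g -j "jetty.host=127.0.0.1"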
+ +In a case where Solr needs to run on a different host, make sure that the firewall limits access to the port only to the Dataverse web host(s), by specific ip address(es). + +We would also like to reiterate that it is simply never a good idea to run Solr as root! Running the process as a non-privileged user would substantially minimize any potential damage even in the event that the instance is compromised. + +### Citation and Geospatial Metadata Block Updates + +We updated two metadata blocks in this release. Updating these metadata blocks is mentioned in the step-by-step upgrade instructions below. + +### Run ReExportall + +We made changes to the JSON Export in this release (#6246). If you'd like these changes to reflected in your JSON exports, you should run ReExportall as part of the upgrade process. We've included this in the step-by-step instructions below. + +### BinderHub + +https://mybinder.org now supports spinning up Jupyter Notebooks and other computational environments from Dataverse DOIs. + +### Widgets update for OpenScholar + +We updated the code for widgets so that they will keep working in OpenScholar sites after the upcoming upgrade OpenScholar upgrade to Drupal 8. If users of your dataverse have embedded widgets on an Openscholar site that upgrades to Drupal 8, you will need to run this Dataverse version (or later) for the widgets to keep working. + +### Payara tech preview + +Dataverse 4 has always run on Glassfish 4.1 but changes in this release (PR #6523) should open the door to upgrading to Payara 5 eventually. Production installations of Dataverse should remain on Glassfish 4.1 but feedback from any experiments running Dataverse on Payara 5 is welcome via the [usual channels](https://dataverse.org/contact). + +## Notes for Tool Developers and Integrators + +### Search API + +The boolean parameter `query_entities` has been removed from the Search API. The former "true" behavior of "whether entities are queried via direct database calls (for developer use)" is now always true. + +Additional fields are now available via the Search API, mostly related to information about specific dataset versions. + +## Complete List of Changes + +For the complete list of code changes in this release, see the 4.19 milestone in Github. + +For help with upgrading, installing, or general questions please post to the Dataverse Google Group or email support@dataverse.org. + +## Installation + +If this is a new installation, please see our Installation Guide. + +## Upgrade + +1. Undeploy the previous version. + +- <glassfish install path>/glassfish4/bin/asadmin list-applications +- <glassfish install path>/glassfish4/bin/asadmin undeploy dataverse + +2. Stop glassfish and remove the generated directory, start. + +- service glassfish stop +- remove the generated directory: rm -rf <glassfish install path>glassfish4/glassfish/domains/domain1/generated +- service glassfish start + +3. Deploy this version. + +- <glassfish install path>/glassfish4/bin/asadmin deploy <path>dataverse-4.19.war + +4. Restart glassfish. + +5. Update Citation Metadata Block + +- `wget https://github.com/IQSS/dataverse/releases/download/v4.19/citation.tsv` +- `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"` + +6. 
Update Geospatial Metadata Block + +- `wget https://github.com/IQSS/dataverse/releases/download/v4.19/geospatial.tsv` +- `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @geospatial.tsv -H "Content-type: text/tab-separated-values"` + +7. (Optional) Run ReExportall to update JSON Exports + + diff --git a/doc/release-notes/4.20-release-notes b/doc/release-notes/4.20-release-notes new file mode 100644 index 00000000000..e29953db101 --- /dev/null +++ b/doc/release-notes/4.20-release-notes @@ -0,0 +1,224 @@ +# Dataverse 4.20 + +This release brings new features, enhancements, and bug fixes to Dataverse. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. + +## Release Highlights + +### Multiple Store Support + +Dataverse can now be configured to store files in more than one place at the same time (multiple file, s3, and/or swift stores). + +General information about this capability can be found below and in the Configuration Guide - File Storage section. + +### S3 Direct Upload support + +S3 stores can now optionally be configured to support direct upload of files, as one option for supporting upload of larger files. In the current implementation, each file is uploaded in a single HTTP call. For AWS, this limits file size to 5 GB. With Minio the theoretical limit should be 5 TB and 50+ GB file uploads have been tested successfully. (In practice other factors such as network timeouts may prevent a successful upload a multi-TB file and minio instances may be configured with a < 5 TB single HTTP call limit.) No other S3 service providers have been tested yet. Their limits should be the lower of the maximum object size allowed and any single HTTP call upload limit. + +General information about this capability can be found in the Big Data Support Guide with specific information about how to enable it in the Configuration Guide - File Storage section. + +To support large data uploads, installations can now configure direct upload to S3, bypassing the application server. This will allow for larger uploads over a more resilient transfer method. + +General information about this capability can be found below and in the Configuration Guide. + +### Integration Test Coverage Reporting + +The percentage of code covered by the API-based integration tests is now shown on a badge at the bottom of the README.md file that serves as the homepage of Dataverse Github Repository. + +### New APIs + +New APIs for Role Management and Dataset Size have been added. Previously, managing roles at the dataset and file level was only possible through the UI. API users can now also retrieve the size of a dataset through an API call, with specific parameters depending on the type of information needed. + +More information can be found in the API Guide. + +## Major Use Cases + +Newly-supported use cases in this release include: + +- Users will now be able to see the number of linked datasets and dataverses accurately reflected in the facet counts on the Dataverse search page. (Issue #6564, PR #6262) +- Users will be able to upload large files directly to S3. (Issue #6489, PR #6490) +- Users will be able to see the PIDs of datasets and files in the Guestbook export. 
(Issue #6534, PR #6628) +- Administrators will be able to configure multiple stores per Dataverse installation, which allow dataverse-level setting of storage location, upload size limits, and supported data transfer methods (Issue #6485, PR #6488) +- Administrators and integrators will be able to manage roles using a new API. (Issue #6290, PR #6622) +- Administrators and integrators will be able to determine a dataset's size. (Issue #6524, PR #6609) +- Integrators will now be able to retrieve the number of files in a dataset as part of a single API call instead of needing to count the number of files in the response. (Issue #6601, PR #6623) + +## Notes for Dataverse Installation Administrators + +### Potential Data Integrity Issue + +We recently discovered a *potential* data integrity issue in Dataverse databases. One manifests itself as duplicate DataFile objects created for the same uploaded file (https://github.com/IQSS/dataverse/issues/6522); the other as duplicate DataTable (tabular metadata) objects linked to the same DataFile (https://github.com/IQSS/dataverse/issues/6510). This issue impacted approximately .03% of datasets in Harvard's Dataverse. + +To see if any datasets in your installation have been impacted by this data integrity issue, we've provided a diagnostic script here: + +https://github.com/IQSS/dataverse/raw/develop/scripts/issues/6510/check_datafiles_6522_6510.sh + +The script relies on the PostgreSQL utility psql to access the database. You will need to edit the credentials at the top of the script to match your database configuration. + +If neither of the two issues is present in your database, you will see a message "... no duplicate DataFile objects in your database" and "no tabular files affected by this issue in your database". + +If either, or both kinds of duplicates are detected, the script will provide further instructions. We will need you to send us the produced output. We will then assist you in resolving the issues in your database. + +### Multiple Store Support Changes + +**Existing installations will need to make configuration changes to adopt this version, regardless of whether additional stores are to be added or not.** + +Multistore support requires that each store be assigned a label, id, and type - see the Configuration Guide for a more complete explanation. For an existing store, the recommended upgrade path is to assign the store id based on it's type, i.e. a 'file' store would get id 'file', an 's3' store would have the id 's3'. + +With this choice, no manual changes to datafile 'storageidentifier' entries are needed in the database. If you do not name your existing store using this convention, you will need to edit the database to maintain access to existing files. 
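If you would like to spot-check what is currently recorded before settling on store ids, the storage identifiers can be inspected with psql. This is only a sketch: the database and role names below are the installation-guide defaults, and the table holding `storageidentifier` is an assumption to verify against your own schema:

    # dvndb / dvnapp are placeholders for your database and role names
    psql -U dvnapp -d dvndb -c "SELECT storageidentifier FROM dvobject WHERE storageidentifier IS NOT NULL LIMIT 20;"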
+ +The following set of commands to change the Glassfish JVM options will adapt an existing file or s3 store for this upgrade: +For a file store: + + ./asadmin create-jvm-options "\-Ddataverse.files.file.type=file" + ./asadmin create-jvm-options "\-Ddataverse.files.file.label=file" + ./asadmin create-jvm-options "\-Ddataverse.files.file.directory=" + +For a s3 store: + + ./asadmin create-jvm-options "\-Ddataverse.files.s3.type=s3" + ./asadmin create-jvm-options "\-Ddataverse.files.s3.label=s3" + ./asadmin delete-jvm-options "-Ddataverse.files.s3-bucket-name=" + ./asadmin create-jvm-options "-Ddataverse.files.s3.bucket-name=" + +Any additional S3 options you have set will need to be replaced as well, following the pattern in the last two lines above - delete the option including a '-' after 's3' and creating the same option with the '-' replaced by a '.', using the same value you currently have configured. + +Once these options are set, restarting the Glassfish service is all that is needed to complete the change. + +Note that the "\-Ddataverse.files.directory", if defined, continues to control where temporary files are stored (in the /temp subdir of that directory), independent of the location of any 'file' store defined above. + +Also note that the :MaxFileUploadSizeInBytes property has a new option to provide independent limits for each store instead of a single value for the whole installation. The default is to apply any existing limit defined by this property to all stores. + +### Direct S3 Upload Changes + +Direct upload to S3 is enabled per store by one new jvm option: + + ./asadmin create-jvm-options "\-Ddataverse.files..upload-redirect=true" + +The existing :MaxFileUploadSizeInBytes property and ```dataverse.files..url-expiration-minutes``` jvm option for the same store also apply to direct upload. + +Direct upload via the Dataverse web interface is transparent to the user and handled automatically by the browser. Some minor differences in file upload exist: directly uploaded files are not unzipped and Dataverse does not scan their content to help in assigning a MIME type. Ingest of tabular files and metadata extraction from FITS files will occur, but can be turned off for files above a specified size limit through the new dataverse.files..ingestsizelimit jvm option. + +API calls to support direct upload also exist, and, if direct upload is enabled for a store in Dataverse, the latest DVUploader (v1.0.8) provides a'-directupload' flag that enables its use. + +### Solr Update + +With this release we upgrade to the latest available stable release in the Solr 7.x branch. We recommend a fresh installation of Solr 7.7.2 (the index will be empty) +followed by an "index all". + +Before you start the "index all", Dataverse will appear to be empty because +the search results come from Solr. As indexing progresses, results will appear +until indexing is complete. + +### Dataverse Linking Fix + +The fix implemented for #6262 will display the datasets contained in linked dataverses in the linking dataverse. The full reindex described above will correct these counts. Going forward, this will happen automatically whenever a dataverse is linked. + +### Google Analytics Download Tracking Bug + +The button tracking capability discussed in the installation guide (http://guides.dataverse.org/en/4.20/installation/config.html#id88) relies on an analytics-code.html file that must be configured using the :WebAnalyticsCode setting. 
The example file provided in the installation guide is no longer compatible with recent Dataverse releases (>v4.16). Installations using this feature should update their analytics-code.html file by following the installation instructions using the updated example file. Alternately, sites can modify their existing files to include the one-line change made in the example file at line 120. + +### Run ReExportall + +We made changes to the JSON Export in this release (Issue 6650, PR #6669). If you'd like these changes to reflected in your JSON exports, you should run ReExportall as part of the upgrade process. We've included this in the step-by-step instructions below. + +### New JVM Options and Database Settings + +## New JVM Options for file storage drivers + +- The JVM option dataverse.files.file.directory= controls where temporary files are stored (in the /temp subdir of the defined directory), independent of the location of any 'file' store defined above. +- The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset to the S3 bucket. (S3 stores only!) +- The JVM option dataverse.files..MaxFileUploadSizeInBytes controls the maximum size of file uploads allowed for the given file store. +- The JVM option dataverse.files..ingestsizelimit controls the maximum size of files for which ingest will be attempted, for the given file store. + +## New Database Settings for Shibboleth + +- The database setting :ShibAffiliationAttribute can now be set to prevent affiliations for Shibboleth users from being reset upon each log in. + +## Notes for Tool Developers and Integrators + +### Integration Test Coverage Reporting + +API-based integration tests are run every time a branch is merged to develop and the percentage of code covered by these integration tests is now shown on a badge at the bottom of the README.md file that serves as the homepage of Dataverse Github Repository. + +### Guestbook Column Changes + +Users of downloaded guestbooks should note that two new columns have been added: + +- Dataset PID +- File PID + +If you are expecting column in the CSV file to be in a particular order, you will need to make adjustments. + +Old columns: Guestbook, Dataset, Date, Type, File Name, File Id, User Name, Email, Institution, Position, Custom Questions + +New columns: Guestbook, Dataset, Dataset PID, Date, Type, File Name, File Id, File PID, User Name, Email, Institution, Position, Custom Questions + +### API Changes + +As reported in #6570, the affiliation for dataset contacts has been wrapped in parentheses in the JSON output from the Search API. These parentheses have now been removed. This is a backward incompatible change but it's expected that this will not cause issues for integrators. + +### Role Name Change + +The role alias provided in API responses has changed, so if anything was hard-coded to "editor" instead of "contributor" it will need to be updated. + +## Complete List of Changes + +For the complete list of code changes in this release, see the 4.20 milestone in Github. + +For help with upgrading, installing, or general questions please post to the Dataverse Google Group or email support@dataverse.org. + +## Installation + +If this is a new installation, please see our Installation Guide. + +## Upgrade + +1. Undeploy the previous version. + +- <glassfish install path>/glassfish4/bin/asadmin list-applications +- <glassfish install path>/glassfish4/bin/asadmin undeploy dataverse + +2. Stop glassfish and remove the generated directory, start. 
+ +- service glassfish stop +- remove the generated directory: rm -rf <glassfish install path>glassfish4/glassfish/domains/domain1/generated +- service glassfish start + +3. Install and configure Solr v7.7.2 + +See http://guides.dataverse.org/en/4.20/installation/prerequisites.html#installing-solr + +4. Deploy this version. + +- <glassfish install path>/glassfish4/bin/asadmin deploy <path>dataverse-4.20.war + +5. The following set of commands to change the Glassfish JVM options will adapt an existing file or s3 store for this upgrade: +For a file store: + + ./asadmin create-jvm-options "\-Ddataverse.files.file.type=file" + ./asadmin create-jvm-options "\-Ddataverse.files.file.label=file" + ./asadmin create-jvm-options "\-Ddataverse.files.file.directory=" + +For a s3 store: + + ./asadmin create-jvm-options "\-Ddataverse.files.s3.type=s3" + ./asadmin create-jvm-options "\-Ddataverse.files.s3.label=s3" + ./asadmin delete-jvm-options "-Ddataverse.files.s3-bucket-name=" + ./asadmin create-jvm-options "-Ddataverse.files.s3.bucket-name=" + +Any additional S3 options you have set will need to be replaced as well, following the pattern in the last two lines above - delete the option including a '-' after 's3' and creating the same option with the '-' replaced by a '.', using the same value you currently have configured. + +6. Restart glassfish. + +7. Update Citation Metadata Block + +- `wget https://github.com/IQSS/dataverse/releases/download/4.20/citation.tsv` +- `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"` + +8. Kick off full reindex + +http://guides.dataverse.org/en/4.20/admin/solr-search-index.html + +9. (Recommended) Run ReExportall to update JSON Exports + + diff --git a/doc/release-notes/6545-solr-var-meta.md b/doc/release-notes/6545-solr-var-meta.md new file mode 100644 index 00000000000..5e4a0c417c1 --- /dev/null +++ b/doc/release-notes/6545-solr-var-meta.md @@ -0,0 +1,2 @@ +File schema.xml for solr search was changed. New fields such as literalQuestion, interviewInstruction, postQuestion, variableUniverse, variableNotes were added. +Full reindexing is needed if one wants to search and see updates to variable level metadata before this change. Otherwise were is no need to reindex, new updates with DCT will be authomaticaly indexed. diff --git a/doc/release-notes/6650-export-import-mismatch b/doc/release-notes/6650-export-import-mismatch new file mode 100644 index 00000000000..0ab2999a603 --- /dev/null +++ b/doc/release-notes/6650-export-import-mismatch @@ -0,0 +1,3 @@ +Run ReExportall to update JSON Exports + +http://guides.dataverse.org/en/4.19/admin/metadataexport.html?highlight=export#batch-exports-through-the-api \ No newline at end of file diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index 5de06df2bd3..556a17ef0eb 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -1,5 +1,5 @@ TwoRavens explore file A system of interlocking statistical tools for data exploration, analysis, and meta-analysis: http://2ra.vn. See the :doc:`/user/data-exploration/tworavens` section of the User Guide for more information on TwoRavens from the user perspective and the :doc:`/installation/r-rapache-tworavens` section of the Installation Guide. 
Data Explorer explore file A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Explorer for the instructions on adding Data Explorer to your Dataverse; and the :doc:`/installation/prerequisites` section of the Installation Guide for the instructions on how to set up **basic R configuration required** (specifically, Dataverse uses R to generate .prep metadata files that are needed to run Data Explorer). Whole Tale explore dataset A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_. -File Previewers explore file A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, text, video - allowing them to be viewed without downloading. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. https://github.com/QualitativeDataRepository/dataverse-previewers +File Previewers explore file A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, text, video, tabular data, and spreadsheets - allowing them to be viewed without downloading. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreadsheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/GlobalDataverseCommunityConsortium/dataverse-previewers Data Curation Tool configure file A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions.
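All of the tools in this table are registered the same way, via the external tools admin API. As a sketch (the manifest filename is a placeholder; each tool's README provides the real one):

    # Register a tool from its JSON manifest
    curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools --upload-file fabulousFileTool.json

    # List the registered tools, or remove one by its database id
    curl http://localhost:8080/api/admin/externalTools
    curl -X DELETE http://localhost:8080/api/admin/externalTools/1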
diff --git a/doc/sphinx-guides/source/_static/api/file-provenance.json b/doc/sphinx-guides/source/_static/api/file-provenance.json new file mode 100644 index 00000000000..6c823cdb5f3 --- /dev/null +++ b/doc/sphinx-guides/source/_static/api/file-provenance.json @@ -0,0 +1 @@ +{"prefix": {"pre_0": "http://www.w3.org/2001/XMLSchema", "s-prov": "http://s-prov/ns/#", "provone": "http://purl.dataone.org/provone/2015/01/15/ontology#", "vargen": "http://openprovenance.org/vargen#", "foaf": "http://xmlns.com/foaf/0.1/", "dcterms": "http://purl.org/dc/terms/", "tmpl": "http://openprovenance.org/tmpl#", "var": "http://openprovenance.org/var#", "vcard": "http://www.w3.org/2006/vcard/ns#", "swirrl": "http://project-dare.eu/ns#"}, "bundle": {"vargen:SessionSnapshot": {"prefix": {"s-prov": "http://s-prov/ns/#", "provone": "http://purl.dataone.org/provone/2015/01/15/ontology#", "vargen": "http://openprovenance.org/vargen#", "tmpl": "http://openprovenance.org/tmpl#", "var": "http://openprovenance.org/var#", "vcard": "http://www.w3.org/2006/vcard/ns#", "swirrl": "http://project-dare.eu/ns#"}, "entity": {"vargen:inData": {"swirrl:volumeId": {"$": "var:rawVolumeId", "type": "prov:QUALIFIED_NAME"}, "prov:type": {"$": "provone:Data", "type": "prov:QUALIFIED_NAME"}}, "vargen:inFile": {"prov:atLocation": {"$": "var:atLocation", "type": "prov:QUALIFIED_NAME"}, "s-prov:format": {"$": "var:format", "type": "prov:QUALIFIED_NAME"}, "s-prov:checksum": {"$": "var:checksum", "type": "prov:QUALIFIED_NAME"}}, "vargen:WorkData": {"swirrl:volumeId": {"$": "var:workVolumeId", "type": "prov:QUALIFIED_NAME"}, "prov:type": {"$": "provone:Data", "type": "prov:QUALIFIED_NAME"}}, "var:JupSnapshot": {"prov:generatedAt": {"$": "var:generatedAt", "type": "prov:QUALIFIED_NAME"}, "prov:atLocation": {"$": "var:repoUrl", "type": "prov:QUALIFIED_NAME"}, "s-prov:description": {"$": "var:description", "type": "prov:QUALIFIED_NAME"}, "prov:type": {"$": "swirrl:NotebookSnapshot", "type": "prov:QUALIFIED_NAME"}, "swirrl:sessionId": {"$": "var:sessionId", "type": "prov:QUALIFIED_NAME"}}}, "used": {"_:id1": {"prov:activity": "vargen:snapshot", "prov:entity": "var:Jupyter"}, "_:id2": {"prov:activity": "vargen:snapshot", "prov:entity": "vargen:WorkData"}, "_:id3": {"prov:activity": "vargen:snapshot", "prov:entity": "vargen:inData"}}, "wasDerivedFrom": {"_:id4": {"prov:usedEntity": "var:Jupyter", "prov:generatedEntity": "var:JupSnapshot"}}, "wasAssociatedWith": {"_:id5": {"prov:activity": "vargen:snapshot", "prov:agent": "var:snapAgent"}}, "actedOnBehalfOf": {"_:id6": {"prov:delegate": "var:snapAgent", "prov:responsible": "var:user"}}, "activity": {"vargen:snapshot": {"prov:atLocation": {"$": "var:method_path", "type": "prov:QUALIFIED_NAME"}, "tmpl:startTime": {"$": "var:startTime", "type": "prov:QUALIFIED_NAME"}, "tmpl:endTime": {"$": "var:endTime", "type": "prov:QUALIFIED_NAME"}}}, "wasGeneratedBy": {"_:id7": {"prov:activity": "vargen:snapshot", "prov:entity": "var:JupSnapshot"}}, "agent": {"var:user": {"vcard:uid": {"$": "var:name", "type": "prov:QUALIFIED_NAME"}, "swirrl:authMode": {"$": "var:authmode", "type": "prov:QUALIFIED_NAME"}, "swirrl:group": {"$": "var:group", "type": "prov:QUALIFIED_NAME"}, "prov:type": {"$": "prov:Person", "type": "prov:QUALIFIED_NAME"}}, "var:snapAgent": {"vcard:uid": {"$": "var:name_api", "type": "prov:QUALIFIED_NAME"}, "prov:type": {"$": "prov:SoftwareAgent", "type": "prov:QUALIFIED_NAME"}}}, "hadMember": {"_:id8": {"prov:collection": "vargen:inData", "prov:entity": "vargen:inFile"}}}}} \ No newline at end of file 
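This sample PROV-JSON document is the kind of input the file provenance API works with. As a sketch, assuming provenance collection is enabled in your installation and using a placeholder file id and token (see the Native API guide for the exact parameters, especially for uploads):

    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx

    # Read any provenance JSON already attached to file 42 (uploads use a POST to the same path)
    curl -H "X-Dataverse-key: $API_TOKEN" http://localhost:8080/api/files/42/prov-json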
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr index 0c6f27ccf40..d351c544a65 100755 --- a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr +++ b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr @@ -5,9 +5,9 @@ # chkconfig: 35 92 08 # description: Starts and stops Apache Solr -SOLR_DIR="/usr/local/solr/solr-7.3.1" +SOLR_DIR="/usr/local/solr/solr-7.7.2" SOLR_COMMAND="bin/solr" -SOLR_ARGS="-m 1g" +SOLR_ARGS="-m 1g -j jetty.host=127.0.0.1" SOLR_USER=solr case $1 in @@ -33,4 +33,3 @@ case $1 in exit 1 ;; esac - diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/attribute-map.xml b/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/attribute-map.xml index f6386b620f5..0633f055222 100644 --- a/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/attribute-map.xml +++ b/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/attribute-map.xml @@ -6,67 +6,65 @@ few exceptions for newer attributes where the name is the same for both versions. You will usually want to uncomment or map the names for both SAML versions as a unit. --> - - - - - + + + + + + + + + + + + + + - + - - + + - - - - - - + + - - + + - - - - - - - - - - + - + - - + + - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + @@ -110,32 +143,27 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - + + diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/dataverse-idp-metadata.xml b/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/dataverse-idp-metadata.xml index 07970b26c2d..861829aedb5 100644 --- a/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/dataverse-idp-metadata.xml +++ b/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/dataverse-idp-metadata.xml @@ -1,124 +1,122 @@ - - - - - - - - - - - - - - - - - testshib.org - - TestShib Test IdP - TestShib IdP. Use this as a source of attributes for your test SP. 
- https://idp.testshib.org/idp/images/logo.jpg - - - - - - - MIIEDjCCAvagAwIBAgIBADANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJVUzEV - MBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1cmdoMREwDwYD - VQQKEwhUZXN0U2hpYjEZMBcGA1UEAxMQaWRwLnRlc3RzaGliLm9yZzAeFw0wNjA4 - MzAyMTEyMjVaFw0xNjA4MjcyMTEyMjVaMGcxCzAJBgNVBAYTAlVTMRUwEwYDVQQI - EwxQZW5uc3lsdmFuaWExEzARBgNVBAcTClBpdHRzYnVyZ2gxETAPBgNVBAoTCFRl - c3RTaGliMRkwFwYDVQQDExBpZHAudGVzdHNoaWIub3JnMIIBIjANBgkqhkiG9w0B - AQEFAAOCAQ8AMIIBCgKCAQEArYkCGuTmJp9eAOSGHwRJo1SNatB5ZOKqDM9ysg7C - yVTDClcpu93gSP10nH4gkCZOlnESNgttg0r+MqL8tfJC6ybddEFB3YBo8PZajKSe - 3OQ01Ow3yT4I+Wdg1tsTpSge9gEz7SrC07EkYmHuPtd71CHiUaCWDv+xVfUQX0aT - NPFmDixzUjoYzbGDrtAyCqA8f9CN2txIfJnpHE6q6CmKcoLADS4UrNPlhHSzd614 - kR/JYiks0K4kbRqCQF0Dv0P5Di+rEfefC6glV8ysC8dB5/9nb0yh/ojRuJGmgMWH - gWk6h0ihjihqiu4jACovUZ7vVOCgSE5Ipn7OIwqd93zp2wIDAQABo4HEMIHBMB0G - A1UdDgQWBBSsBQ869nh83KqZr5jArr4/7b+QazCBkQYDVR0jBIGJMIGGgBSsBQ86 - 9nh83KqZr5jArr4/7b+Qa6FrpGkwZzELMAkGA1UEBhMCVVMxFTATBgNVBAgTDFBl - bm5zeWx2YW5pYTETMBEGA1UEBxMKUGl0dHNidXJnaDERMA8GA1UEChMIVGVzdFNo - aWIxGTAXBgNVBAMTEGlkcC50ZXN0c2hpYi5vcmeCAQAwDAYDVR0TBAUwAwEB/zAN - BgkqhkiG9w0BAQUFAAOCAQEAjR29PhrCbk8qLN5MFfSVk98t3CT9jHZoYxd8QMRL - I4j7iYQxXiGJTT1FXs1nd4Rha9un+LqTfeMMYqISdDDI6tv8iNpkOAvZZUosVkUo - 93pv1T0RPz35hcHHYq2yee59HJOco2bFlcsH8JBXRSRrJ3Q7Eut+z9uo80JdGNJ4 - /SJy5UorZ8KazGj16lfJhOBXldgrhppQBb0Nq6HKHguqmwRfJ+WkxemZXzhediAj - Geka8nz8JjwxpUjAiSWYKLtJhGEaTqCYxCCX2Dw+dOTqUzHOZ7WKv4JXPK5G/Uhr - 8K/qhmFT2nIQi538n6rVYLeWj8Bbnl+ev0peYzxFyF5sQA== + + + + + + + + + samltest.id + + + + SAMLtest IdP + A free and basic IdP for testing SAML deployments + https://samltest.id/saml/logo.png + + + + + + + +MIIDETCCAfmgAwIBAgIUZRpDhkNKl5eWtJqk0Bu1BgTTargwDQYJKoZIhvcNAQEL +BQAwFjEUMBIGA1UEAwwLc2FtbHRlc3QuaWQwHhcNMTgwODI0MjExNDEwWhcNMzgw +ODI0MjExNDEwWjAWMRQwEgYDVQQDDAtzYW1sdGVzdC5pZDCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAJrh9/PcDsiv3UeL8Iv9rf4WfLPxuOm9W6aCntEA +8l6c1LQ1Zyrz+Xa/40ZgP29ENf3oKKbPCzDcc6zooHMji2fBmgXp6Li3fQUzu7yd ++nIC2teejijVtrNLjn1WUTwmqjLtuzrKC/ePoZyIRjpoUxyEMJopAd4dJmAcCq/K +k2eYX9GYRlqvIjLFoGNgy2R4dWwAKwljyh6pdnPUgyO/WjRDrqUBRFrLQJorR2kD +c4seZUbmpZZfp4MjmWMDgyGM1ZnR0XvNLtYeWAyt0KkSvFoOMjZUeVK/4xR74F8e +8ToPqLmZEg9ZUx+4z2KjVK00LpdRkH9Uxhh03RQ0FabHW6UCAwEAAaNXMFUwHQYD +VR0OBBYEFJDbe6uSmYQScxpVJhmt7PsCG4IeMDQGA1UdEQQtMCuCC3NhbWx0ZXN0 +LmlkhhxodHRwczovL3NhbWx0ZXN0LmlkL3NhbWwvaWRwMA0GCSqGSIb3DQEBCwUA +A4IBAQBNcF3zkw/g51q26uxgyuy4gQwnSr01Mhvix3Dj/Gak4tc4XwvxUdLQq+jC +cxr2Pie96klWhY/v/JiHDU2FJo9/VWxmc/YOk83whvNd7mWaNMUsX3xGv6AlZtCO +L3JhCpHjiN+kBcMgS5jrtGgV1Lz3/1zpGxykdvS0B4sPnFOcaCwHe2B9SOCWbDAN +JXpTjz1DmJO4ImyWPJpN1xsYKtm67Pefxmn0ax0uE2uuzq25h0xbTkqIQgJzyoE/ +DPkBFK1vDkMfAW11dQ0BXatEnW7Gtkc0lh2/PIbHWj4AzxYMyBf5Gy6HSVOftwjC +voQR2qr2xJBixsg+MIORKtmKHLfU - - - - - - - - - - - - urn:mace:shibboleth:1.0:nameIdentifier - urn:oasis:names:tc:SAML:2.0:nameid-format:transient - - - - - - - - - - - MIIEDjCCAvagAwIBAgIBADANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJVUzEV - MBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1cmdoMREwDwYD - VQQKEwhUZXN0U2hpYjEZMBcGA1UEAxMQaWRwLnRlc3RzaGliLm9yZzAeFw0wNjA4 - MzAyMTEyMjVaFw0xNjA4MjcyMTEyMjVaMGcxCzAJBgNVBAYTAlVTMRUwEwYDVQQI - EwxQZW5uc3lsdmFuaWExEzARBgNVBAcTClBpdHRzYnVyZ2gxETAPBgNVBAoTCFRl - c3RTaGliMRkwFwYDVQQDExBpZHAudGVzdHNoaWIub3JnMIIBIjANBgkqhkiG9w0B - AQEFAAOCAQ8AMIIBCgKCAQEArYkCGuTmJp9eAOSGHwRJo1SNatB5ZOKqDM9ysg7C - yVTDClcpu93gSP10nH4gkCZOlnESNgttg0r+MqL8tfJC6ybddEFB3YBo8PZajKSe - 3OQ01Ow3yT4I+Wdg1tsTpSge9gEz7SrC07EkYmHuPtd71CHiUaCWDv+xVfUQX0aT - 
NPFmDixzUjoYzbGDrtAyCqA8f9CN2txIfJnpHE6q6CmKcoLADS4UrNPlhHSzd614 - kR/JYiks0K4kbRqCQF0Dv0P5Di+rEfefC6glV8ysC8dB5/9nb0yh/ojRuJGmgMWH - gWk6h0ihjihqiu4jACovUZ7vVOCgSE5Ipn7OIwqd93zp2wIDAQABo4HEMIHBMB0G - A1UdDgQWBBSsBQ869nh83KqZr5jArr4/7b+QazCBkQYDVR0jBIGJMIGGgBSsBQ86 - 9nh83KqZr5jArr4/7b+Qa6FrpGkwZzELMAkGA1UEBhMCVVMxFTATBgNVBAgTDFBl - bm5zeWx2YW5pYTETMBEGA1UEBxMKUGl0dHNidXJnaDERMA8GA1UEChMIVGVzdFNo - aWIxGTAXBgNVBAMTEGlkcC50ZXN0c2hpYi5vcmeCAQAwDAYDVR0TBAUwAwEB/zAN - BgkqhkiG9w0BAQUFAAOCAQEAjR29PhrCbk8qLN5MFfSVk98t3CT9jHZoYxd8QMRL - I4j7iYQxXiGJTT1FXs1nd4Rha9un+LqTfeMMYqISdDDI6tv8iNpkOAvZZUosVkUo - 93pv1T0RPz35hcHHYq2yee59HJOco2bFlcsH8JBXRSRrJ3Q7Eut+z9uo80JdGNJ4 - /SJy5UorZ8KazGj16lfJhOBXldgrhppQBb0Nq6HKHguqmwRfJ+WkxemZXzhediAj - Geka8nz8JjwxpUjAiSWYKLtJhGEaTqCYxCCX2Dw+dOTqUzHOZ7WKv4JXPK5G/Uhr - 8K/qhmFT2nIQi538n6rVYLeWj8Bbnl+ev0peYzxFyF5sQA== + + + + + + + + +MIIDEjCCAfqgAwIBAgIVAMECQ1tjghafm5OxWDh9hwZfxthWMA0GCSqGSIb3DQEB +CwUAMBYxFDASBgNVBAMMC3NhbWx0ZXN0LmlkMB4XDTE4MDgyNDIxMTQwOVoXDTM4 +MDgyNDIxMTQwOVowFjEUMBIGA1UEAwwLc2FtbHRlc3QuaWQwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQC0Z4QX1NFKs71ufbQwoQoW7qkNAJRIANGA4iM0 +ThYghul3pC+FwrGv37aTxWXfA1UG9njKbbDreiDAZKngCgyjxj0uJ4lArgkr4AOE +jj5zXA81uGHARfUBctvQcsZpBIxDOvUUImAl+3NqLgMGF2fktxMG7kX3GEVNc1kl +bN3dfYsaw5dUrw25DheL9np7G/+28GwHPvLb4aptOiONbCaVvh9UMHEA9F7c0zfF +/cL5fOpdVa54wTI0u12CsFKt78h6lEGG5jUs/qX9clZncJM7EFkN3imPPy+0HC8n +spXiH/MZW8o2cqWRkrw3MzBZW3Ojk5nQj40V6NUbjb7kfejzAgMBAAGjVzBVMB0G +A1UdDgQWBBQT6Y9J3Tw/hOGc8PNV7JEE4k2ZNTA0BgNVHREELTArggtzYW1sdGVz +dC5pZIYcaHR0cHM6Ly9zYW1sdGVzdC5pZC9zYW1sL2lkcDANBgkqhkiG9w0BAQsF +AAOCAQEASk3guKfTkVhEaIVvxEPNR2w3vWt3fwmwJCccW98XXLWgNbu3YaMb2RSn +7Th4p3h+mfyk2don6au7Uyzc1Jd39RNv80TG5iQoxfCgphy1FYmmdaSfO8wvDtHT +TNiLArAxOYtzfYbzb5QrNNH/gQEN8RJaEf/g/1GTw9x/103dSMK0RXtl+fRs2nbl +D1JJKSQ3AdhxK/weP3aUPtLxVVJ9wMOQOfcy02l+hHMb6uAjsPOpOVKqi3M8XmcU +ZOpx4swtgGdeoSpeRyrtMvRwdcciNBp9UZome44qZAYH1iqrpmmjsfI9pJItsgWu +3kXPjhSfj1AJGR1l9JGvJrHki1iHTA== + + + + + + + + + +MIIDEjCCAfqgAwIBAgIVAPVbodo8Su7/BaHXUHykx0Pi5CFaMA0GCSqGSIb3DQEB +CwUAMBYxFDASBgNVBAMMC3NhbWx0ZXN0LmlkMB4XDTE4MDgyNDIxMTQwOVoXDTM4 +MDgyNDIxMTQwOVowFjEUMBIGA1UEAwwLc2FtbHRlc3QuaWQwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCQb+1a7uDdTTBBFfwOUun3IQ9nEuKM98SmJDWa +MwM877elswKUTIBVh5gB2RIXAPZt7J/KGqypmgw9UNXFnoslpeZbA9fcAqqu28Z4 +sSb2YSajV1ZgEYPUKvXwQEmLWN6aDhkn8HnEZNrmeXihTFdyr7wjsLj0JpQ+VUlc +4/J+hNuU7rGYZ1rKY8AA34qDVd4DiJ+DXW2PESfOu8lJSOteEaNtbmnvH8KlwkDs +1NvPTsI0W/m4SK0UdXo6LLaV8saIpJfnkVC/FwpBolBrRC/Em64UlBsRZm2T89ca +uzDee2yPUvbBd5kLErw+sC7i4xXa2rGmsQLYcBPhsRwnmBmlAgMBAAGjVzBVMB0G +A1UdDgQWBBRZ3exEu6rCwRe5C7f5QrPcAKRPUjA0BgNVHREELTArggtzYW1sdGVz +dC5pZIYcaHR0cHM6Ly9zYW1sdGVzdC5pZC9zYW1sL2lkcDANBgkqhkiG9w0BAQsF +AAOCAQEABZDFRNtcbvIRmblnZItoWCFhVUlq81ceSQddLYs8DqK340//hWNAbYdj +WcP85HhIZnrw6NGCO4bUipxZXhiqTA/A9d1BUll0vYB8qckYDEdPDduYCOYemKkD +dmnHMQWs9Y6zWiYuNKEJ9mf3+1N8knN/PK0TYVjVjXAf2CnOETDbLtlj6Nqb8La3 +sQkYmU+aUdopbjd5JFFwbZRaj6KiHXHtnIRgu8sUXNPrgipUgZUOVhP0C0N5OfE4 +JW8ZBrKgQC/6vJ2rSa9TlzI6JAa5Ww7gMXMP9M+cJUNQklcq+SBnTK8G+uBHgPKR +zBDsMIEzRtQZm4GIoHJae4zmnCekkQ== - - - - - - - - - - - - urn:mace:shibboleth:1.0:nameIdentifier - urn:oasis:names:tc:SAML:2.0:nameid-format:transient - - - TestShib Two Identity Provider - TestShib Two - http://www.testshib.org/testshib-two/ - - - Nate - Klingenstein - ndk@internet2.edu - - - + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/shibGroupSAMLtest.json 
b/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/shibGroupSAMLtest.json new file mode 100644 index 00000000000..8cfe2285e40 --- /dev/null +++ b/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/shibGroupSAMLtest.json @@ -0,0 +1,5 @@ +{ + "name": "All samltest.id Shibboleth Users", + "attribute": "Shib-Identity-Provider", + "pattern": "https://samltest.id/saml/idp" +} diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/shibGroupTestShib.json b/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/shibGroupTestShib.json deleted file mode 100644 index 01b2bd51b2d..00000000000 --- a/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/shibGroupTestShib.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "All testshib.org Shibboleth Users", - "attribute": "Shib-Identity-Provider", - "pattern": "https://idp.testshib.org/idp/shibboleth" -} diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service index 8960e708fff..06eacc68ca2 100644 --- a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service +++ b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service @@ -5,9 +5,9 @@ After = syslog.target network.target remote-fs.target nss-lookup.target [Service] User = solr Type = forking -WorkingDirectory = /usr/local/solr/solr-7.3.1 -ExecStart = /usr/local/solr/solr-7.3.1/bin/solr start -m 1g -ExecStop = /usr/local/solr/solr-7.3.1/bin/solr stop +WorkingDirectory = /usr/local/solr/solr-7.7.2 +ExecStart = /usr/local/solr/solr-7.7.2/bin/solr start -m 1g -j "jetty.host=127.0.0.1" +ExecStop = /usr/local/solr/solr-7.7.2/bin/solr stop LimitNOFILE=65000 LimitNPROC=65000 Restart=on-failure diff --git a/doc/sphinx-guides/source/_static/installation/files/root/auth-providers/microsoft.json b/doc/sphinx-guides/source/_static/installation/files/root/auth-providers/microsoft.json new file mode 100644 index 00000000000..8c555735577 --- /dev/null +++ b/doc/sphinx-guides/source/_static/installation/files/root/auth-providers/microsoft.json @@ -0,0 +1,8 @@ +{ + "id":"microsoft", + "factoryAlias":"oauth2", + "title":"Microsoft", + "subtitle":"", + "factoryData":"type: microsoft | userEndpoint: NONE | clientId: FIXME | clientSecret: FIXME", + "enabled":true +} diff --git a/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html b/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html index 4e6a01f2d5d..ca703dddf11 100644 --- a/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html +++ b/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html @@ -117,7 +117,7 @@ var row = target.parents('tr')[0]; if(row != null) { //finds the file id/DOI in the Dataset page - label = $(row).find('td.col-file-metadata > a').attr('href'); + label = $(row).find('div.file-metadata-block > a').attr('href'); } else { //finds the file id/DOI in the file page label = $('#fileForm').attr('action'); diff --git a/doc/sphinx-guides/source/admin/dataverses-datasets.rst b/doc/sphinx-guides/source/admin/dataverses-datasets.rst index 7b5c5fbd4a0..a4bea9f53e7 100644 --- a/doc/sphinx-guides/source/admin/dataverses-datasets.rst +++ b/doc/sphinx-guides/source/admin/dataverses-datasets.rst @@ -22,7 +22,7 @@ Moves a dataverse whose id is passed to a new 
dataverse whose id is passed. The Link a Dataverse ^^^^^^^^^^^^^^^^ -Creates a link between a dataverse and another dataverse (see the Linked Dataverses + Linked Datasets section of the :doc:`/user/dataverse-management` guide for more information). Only accessible to superusers. :: +Creates a link between a dataverse and another dataverse (see the :ref:`dataverse-linking` section of the User Guide for more information). Only accessible to superusers. :: curl -H "X-Dataverse-key: $API_TOKEN" -X PUT http://$SERVER/api/dataverses/$linked-dataverse-alias/link/$linking-dataverse-alias @@ -38,7 +38,27 @@ Add Dataverse RoleAssignments to Child Dataverses Recursively assigns the users and groups having a role(s),that are in the set configured to be inheritable via the :InheritParentRoleAssignments setting, on a specified dataverse to have the same role assignments on all of the dataverses that have been created within it. The response indicates success or failure and lists the individuals/groups and dataverses involved in the update. Only accessible to superusers. :: - curl -H "X-Dataverse-key: $API_TOKEN" http://$SERVER/api/admin/dataverse/$dataverse-alias//addRoleAssignmentsToChildren + curl -H "X-Dataverse-key: $API_TOKEN" http://$SERVER/api/admin/dataverse/$dataverse-alias/addRoleAssignmentsToChildren + +Configure a Dataverse to store all new files in a specific file store +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To direct new files (uploaded when datasets are created or edited) for all datasets in a given dataverse, the store can be specified via the API as shown below, or by editing the 'General Information' for a Dataverse on the Dataverse page. Only accessible to superusers. :: + + curl -H "X-Dataverse-key: $API_TOKEN" -X PUT -d $storageDriverLabel http://$SERVER/api/admin/dataverse/$dataverse-alias/storageDriver + +The current driver can be seen using: + + curl -H "X-Dataverse-key: $API_TOKEN" http://$SERVER/api/admin/dataverse/$dataverse-alias/storageDriver + +and can be reset to the default store with: + + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE http://$SERVER/api/admin/dataverse/$dataverse-alias/storageDriver + +The available drivers can be listed with: + + curl -H "X-Dataverse-key: $API_TOKEN" http://$SERVER/api/admin/storageDrivers + Datasets -------- @@ -55,7 +75,7 @@ Moves a dataset whose id is passed to a dataverse whose alias is passed. If the Link a Dataset ^^^^^^^^^^^^^^ -Creates a link between a dataset and a dataverse (see the Linked Dataverses + Linked Datasets section of the :doc:`/user/dataverse-management` guide for more information). :: +Creates a link between a dataset and a dataverse (see the :ref:`dataset-linking` section of the User Guide for more information). :: curl -H "X-Dataverse-key: $API_TOKEN" -X PUT http://$SERVER/api/datasets/$linked-dataset-id/link/$linking-dataverse-alias diff --git a/doc/sphinx-guides/source/admin/external-tools.rst b/doc/sphinx-guides/source/admin/external-tools.rst index 6054334e4a6..405c710d07e 100644 --- a/doc/sphinx-guides/source/admin/external-tools.rst +++ b/doc/sphinx-guides/source/admin/external-tools.rst @@ -37,6 +37,8 @@ In the curl command below, replace the placeholder "fabulousFileTool.json" place curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools --upload-file fabulousFileTool.json +Note that some tools will provide a preview mode, which provides an embedded, simplified view of the tool on the file pages of your installation. 
This is controlled by the `hasPreviewMode` parameter. + Listing All External Tools in Dataverse +++++++++++++++++++++++++++++++++++++++ @@ -76,10 +78,15 @@ Once you have added an external tool to your installation of Dataverse, you will File Level Explore Tools ++++++++++++++++++++++++ -File level explore tools are specific to the file type (content type or MIME type) of the file. For example, there is a tool for exploring PDF files in the "File Previewers" set of tools. +File level explore tools are specific to the file type (content type or MIME type) of the file. For example, Data Explorer is tool for exploring tabular data files. An "Explore" button will appear (on both the dataset page and the file landing page) for files that match the type that the tool has been built for. When there are multiple explore tools for a filetype, the button becomes a dropdown. +File Level Preview Tools +++++++++++++++++++++++++ + +File level explore tools can be set up to display in preview mode, which is a simplified view of an explore tool designed specifically for embedding in the file page. + File Level Configure Tools ++++++++++++++++++++++++++ diff --git a/doc/sphinx-guides/source/admin/harvestclients.rst b/doc/sphinx-guides/source/admin/harvestclients.rst index cd841aeba85..9dcdb13814e 100644 --- a/doc/sphinx-guides/source/admin/harvestclients.rst +++ b/doc/sphinx-guides/source/admin/harvestclients.rst @@ -3,36 +3,27 @@ Managing Harvesting Clients .. contents:: |toctitle| :local: - + Your Dataverse as a Metadata Harvester -------------------------------------- -Harvesting is a process of exchanging metadata with other repositories. As a harvesting *client*, your Dataverse can -gather metadata records from remote sources. These can be other Dataverse instances or other archives that support OAI-PMH, the standard harvesting protocol. Harvested metadata records will be indexed and made searchable by your users. Clicking on a harvested dataset in the search results takes the user to the original repository. Harvested datasets cannot be edited in your Dataverse installation. +Harvesting is a process of exchanging metadata with other repositories. As a harvesting *client*, your Dataverse can gather metadata records from remote sources. These can be other Dataverse instances or other archives that support OAI-PMH, the standard harvesting protocol. Harvested metadata records will be indexed and made searchable by your users. Clicking on a harvested dataset in the search results takes the user to the original repository. Harvested datasets cannot be edited in your Dataverse installation. -Harvested records can be kept in sync with the original repository through scheduled incremental updates, daily or weekly. -Alternatively, harvests can be run on demand, by the Admin. +Harvested records can be kept in sync with the original repository through scheduled incremental updates, daily or weekly. +Alternatively, harvests can be run on demand, by the Admin. Managing Harvesting Clients --------------------------- -To start harvesting metadata from a remote OAI repository, you first create and configure a *Harvesting Client*. +To start harvesting metadata from a remote OAI repository, you first create and configure a *Harvesting Client*. Clients are managed on the "Harvesting Clients" page accessible via the :doc:`dashboard`. Click on the *Add Client* button to get started. -The process of creating a new, or editing an existing client, is largely self-explanatory. 
It is split into logical steps, in a way that allows the user to go back and correct the entries made earlier. The process is interactive and guidance text is provided. For example, the user is required to enter the URL of the remote OAI server. When they click *Next*, the application will try to establish a connection to the server in order to verify that it is working, and to obtain the information about the sets of metadata records and the metadata formats it supports. The choices offered to the user on the next page will be based on this extra information. If the application fails to establish a connection to the remote archive at the address specified, or if an invalid response is received, the user is given an opportunity to check and correct the URL they entered. - -New in Dataverse 4, vs. DVN 3 ------------------------------ - - -- Note that when creating a client you will need to select an existing local dataverse to host the datasets harvested. In DVN 3, a dedicated "harvesting dataverse" would be created specifically for each remote harvesting source. In Dataverse 4, harvested content can be added to *any dataverse*. This means that a dataverse can now contain datasets harvested from multiple sources and/or a mix of local and harvested datasets. - - -- An extra "Archive Type" pull down menu is added to the Create and Edit dialogs. This setting, selected from the choices such as "Dataverse 4", "DVN, v2-3", "Generic OAI", etc. is used to properly format the harvested metadata as they are shown in the search results. It is **very important** to select the type that best describes this remote server, as failure to do so can result in information missing from the search results, and, a **failure to redirect the user to the archival source** of the data! - - It is, however, **very easy to correct** a mistake like this. For example, let's say you have created a client to harvest from the XYZ Institute and specified the archive type as "Dataverse 4". You have been able to harvest content, the datasets appear in search result, but clicking on them results in a "Page Not Found" error on the remote site. At which point you realize that the XYZ Institute admins have not yet upgraded to Dataverse 4, still running DVN v3.1.2 instead. All you need to do is go back to the Harvesting Clients page, and change the setting to "DVN, v2-3". This will fix the redirects **without having to re-harvest** the datasets. +The process of creating a new, or editing an existing client, is largely self-explanatory. It is split into logical steps, in a way that allows the user to go back and correct the entries made earlier. The process is interactive and guidance text is provided. For example, the user is required to enter the URL of the remote OAI server. When they click *Next*, the application will try to establish a connection to the server in order to verify that it is working, and to obtain the information about the sets of metadata records and the metadata formats it supports. The choices offered to the user on the next page will be based on this extra information. If the application fails to establish a connection to the remote archive at the address specified, or if an invalid response is received, the user is given an opportunity to check and correct the URL they entered. -- Another extra entry, "Archive Description", is added to the *Edit Harvesting Client* dialog. This description appears at the bottom of each search result card for a harvested dataset or datafile. 
By default, this text reads "This Dataset is harvested from our partners. Clicking the link will take you directly to the archival source of the data." Here it can be customized to be more descriptive, for example, "This Dataset is harvested from our partners at the XYZ Institute..." +What if a Run Fails? +~~~~~~~~~~~~~~~~~~~~ +Each harvesting client run logs a separate file per run to Glassfish's default logging directory (``/usr/local/glassfish4/glassfish/domains/domain1/logs/`` unless you've changed it). Look for filenames in the format ``harvest_TARGET_YYYY_MM_DD_timestamp.log`` to get a better idea of what's going wrong. +Note that you'll want to run a minimum Dataverse version of 4.6, optimally 4.18, for the best OAI-PMH interoperability. diff --git a/doc/sphinx-guides/source/admin/harvestserver.rst b/doc/sphinx-guides/source/admin/harvestserver.rst index c24968e0c9b..f1436926ea2 100644 --- a/doc/sphinx-guides/source/admin/harvestserver.rst +++ b/doc/sphinx-guides/source/admin/harvestserver.rst @@ -22,6 +22,8 @@ You might consider adding your OAI-enabled production instance of Dataverse to `this shared list `_ of such instances. +The email portion of :ref:`systemEmail` will be visible via OAI-PMH (from the "Identify" verb). + How does it work? ----------------- @@ -52,7 +54,7 @@ be used to create an OAI set. Sets can overlap local dataverses, and can include as few or as many of your local datasets as you wish. A good way to master the Dataverse search query language is to experiment with the Advanced Search page. We also recommend that you -consult the Search API section of the Dataverse User Guide. +consult the :doc:`/api/search` section of the API Guide. Once you have entered the search query and clicked *Next*, the number of search results found will be shown on the next screen. This way, if @@ -136,7 +138,7 @@ runs every night (at 2AM, by default). This export timer is created and activated automatically every time the application is deployed or restarted. Once again, this is new in Dataverse 4, and unlike DVN v3, where export jobs had to be scheduled and activated by the admin -user. See the "Export" section of the Admin guide, for more information on the automated metadata exports. +user. See the :doc:`/admin/metadataexport` section of the Admin Guide, for more information on the automated metadata exports. 
It is still possible however to make changes like this be immediately reflected in the OAI server, by going to the *Harvesting Server* page diff --git a/doc/sphinx-guides/source/admin/index.rst b/doc/sphinx-guides/source/admin/index.rst index 39b4f5748d3..6ff611cb55f 100755 --- a/doc/sphinx-guides/source/admin/index.rst +++ b/doc/sphinx-guides/source/admin/index.rst @@ -27,7 +27,7 @@ This guide documents the functionality only available to superusers (such as "da solr-search-index ip-groups monitoring - reporting-tools + reporting-tools-and-queries maintenance backups troubleshooting diff --git a/doc/sphinx-guides/source/admin/integrations.rst b/doc/sphinx-guides/source/admin/integrations.rst index 2ed8581eb50..527ec6fe563 100644 --- a/doc/sphinx-guides/source/admin/integrations.rst +++ b/doc/sphinx-guides/source/admin/integrations.rst @@ -19,9 +19,9 @@ If your researchers have data on Dropbox, you can make it easier for them to get Open Science Framework (OSF) ++++++++++++++++++++++++++++ -The Center for Open Science's Open Science Framework (OSF) is an open source software project that facilitates open collaboration in science research across the lifespan of a scientific project. +The Center for Open Science's Open Science Framework (OSF) is an open source software project that facilitates open collaboration in science research across the lifespan of a scientific project. -For instructions on depositing data from OSF to your installation of Dataverse, your researchers can visit http://help.osf.io/m/addons/l/863978-connect-dataverse-to-a-project +For instructions on depositing data from OSF to your installation of Dataverse, your researchers can visit http://help.osf.io/m/addons/l/863978-connect-dataverse-to-a-project RSpace ++++++ @@ -41,13 +41,40 @@ As of this writing only OJS 2.x is supported and instructions for getting starte If you are interested in OJS 3.x supporting deposit from Dataverse, please leave a comment on https://github.com/pkp/pkp-lib/issues/1822 +Renku ++++++ + +Renku is a platform that enables collaborative, reproducible and reusable +(data)science. It allows researchers to automatically record the provenance of +their research results and retain links to imported and exported data. Users +can organize their data in "Datasets", which can be exported to Dataverse via +the command-line interface (CLI). + +Renku dataset documentation: https://renku-python.readthedocs.io/en/latest/commands.html#module-renku.cli.dataset + +Flagship deployment of the Renku platform: https://renkulab.io + +Renku discourse: https://renku.discourse.group/ + + +Embedding Data on Websites +-------------------------- + +OpenScholar ++++++++++++ + +`OpenScholar `_ is oriented toward hosting websites for academic institutions and offers `Dataverse Widgets `_ that can be added to web pages. See also: + +- :ref:`openscholar-dataverse-level` (dataverse level) +- :ref:`openscholar-dataset-level` (dataset level) + Analysis and Computation ------------------------ Data Explorer +++++++++++++ -Data Explorer is a GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. +Data Explorer is a GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. For installation instructions, see the :doc:`external-tools` section. @@ -77,6 +104,27 @@ Whole Tale `import data from Dataverse `_ via identifier (e.g., DOI, URI, etc) or through the External Tools integration. 
For installation instructions, see the :doc:`external-tools` section or the `Integration `_ section of the Whole Tale User Guide. +Binder +++++++ + +Researchers can launch Jupyter Notebooks, RStudio, and other computational environments by entering the DOI of a Dataverse dataset on https://mybinder.org + +Institutions can self host BinderHub. Dataverse is one of the supported `repository providers `_. + +Renku ++++++ + +Researchers can import Dataverse datasets into their Renku projects via the +command-line interface (CLI) by using the Dataverse DOI. See the `renku Dataset +documentation +`_ +for details. Currently Dataverse ``>=4.8.x`` is required for the import to work. If you need +support for an earlier version of Dataverse, please get in touch with the Renku team at +`Discourse `_ or `GitHub `_. +The UI implementation of the import is in progress and will be +completed in Q12020. + + Discoverability --------------- @@ -90,7 +138,7 @@ Dataverse supports a protocol called OAI-PMH that facilitates harvesting dataset SHARE +++++ -`SHARE `_ is building a free, open, data set about research and scholarly activities across their life cycle. It's possible to add and installation of Dataverse as one of the `sources `_ they include if you contact the SHARE team. +`SHARE `_ is building a free, open, data set about research and scholarly activities across their life cycle. It's possible to add an installation of Dataverse as one of the `sources `_ they include if you contact the SHARE team. Research Data Preservation -------------------------- @@ -98,7 +146,7 @@ Research Data Preservation Archivematica +++++++++++++ -`Archivematica `_ is an integrated suite of open-source tools for processing digital objects for long-term preservation, developed and maintained by Artefactual Systems Inc. Its configurable workflow is designed to produce system-independent, standards-based Archival Information Packages (AIPs) suitable for long-term storage and management. +`Archivematica `_ is an integrated suite of open-source tools for processing digital objects for long-term preservation, developed and maintained by Artefactual Systems Inc. Its configurable workflow is designed to produce system-independent, standards-based Archival Information Packages (AIPs) suitable for long-term storage and management. Sponsored by the `Ontario Council of University Libraries (OCUL) `_, this technical integration enables users of Archivematica to select datasets from connected Dataverse instances and process them for long-term access and digital preservation. For more information and list of known issues, please refer to Artefactual's `release notes `_, `integration documentation `_, and the `project wiki `_. diff --git a/doc/sphinx-guides/source/admin/make-data-count.rst b/doc/sphinx-guides/source/admin/make-data-count.rst index 9840b5ce1af..d6e9828a872 100644 --- a/doc/sphinx-guides/source/admin/make-data-count.rst +++ b/doc/sphinx-guides/source/admin/make-data-count.rst @@ -61,10 +61,17 @@ If you haven't already, follow the steps for installing Counter Processor in the Enable Logging for Make Data Count ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -To make Dataverse log dataset usage (views and downloads) for Make Data Count, you must set the ``:MDCLogPath`` database setting. See :ref:`MDCLogPath` for details. +To make Dataverse log dataset usage (views and downloads) for Make Data Count, you must set the ``:MDCLogPath`` database setting. See :ref:`:MDCLogPath` for details. 
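For example, assuming the default Glassfish directory layout and the standard admin settings API on localhost, ``:MDCLogPath`` could be set with a call like this sketch (the log directory shown is an assumption; point it wherever you want the MDC logs written):

.. code-block:: bash

   # Create a directory for the MDC logs and register it as :MDCLogPath.
   mkdir -p /usr/local/glassfish4/glassfish/domains/domain1/logs/mdc

   curl -X PUT -d '/usr/local/glassfish4/glassfish/domains/domain1/logs/mdc' \
     http://localhost:8080/api/admin/settings/:MDCLogPath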
+
+If you wish to start logging in advance of setting up other components, or wish to log without displaying MDC metrics for any other reason, you can set the optional ``:DisplayMDCMetrics`` database setting to false. See :ref:`:DisplayMDCMetrics` for details.
 
 After you have your first day of logs, you can process them the next day.
 
+Enable or Disable Display of Make Data Count Metrics
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+By default, when MDC logging is enabled (when ``:MDCLogPath`` is set), Dataverse will display MDC metrics instead of its internal (legacy) metrics. You can avoid this (e.g. to collect MDC metrics for some period of time before starting to display them) by setting ``:DisplayMDCMetrics`` to false.
+
 Configure Counter Processor
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -128,11 +135,15 @@ Once you are satisfied with your testing, you should contact support@datacite.org
 
 - Views ("investigations" in COUNTER)
 - Downloads ("requests" in COUNTER)
 
+.. _MDC-updateCitationsForDataset:
+
 Configuring Dataverse for Make Data Count Citations
 ---------------------------------------------------
 
 Please note: as explained in the note above about limitations, this feature is not available to installations of Dataverse that use Handles.
 
+To configure Dataverse to pull citations from the test vs. production DataCite server see :ref:`doi.mdcbaseurlstring` in the Installation Guide.
+
 Please note that in the curl example, Bash environment variables are used with the idea that you can set a few environment variables and copy and paste the examples as is. For example, "$DOI" could become "doi:10.5072/FK2/BL2IBM" by issuing the following export command from Bash:
 
 ``export DOI="doi:10.5072/FK2/BL2IBM"``
 
diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst
index 1a41d329b3b..bf89805007b 100644
--- a/doc/sphinx-guides/source/admin/metadatacustomization.rst
+++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst
@@ -9,18 +9,17 @@ Dataverse has a flexible data-driven metadata system powered by "metadata blocks"
 
 Introduction
 ------------
 
-Before you embark on customizing metadata in Dataverse you should make sure you are aware of the modest amount of customization that is available with the Dataverse web interface. It's possible to hide fields and make field required by clicking "Edit" at the dataverse level, clicking "General Information" and making adjustments under "Metadata Fields" as described in the context of dataset templates in the :doc:`/user/dataverse-management` section of the User Guide.
+Before you embark on customizing metadata in Dataverse you should make sure you are aware of the modest amount of customization that is available with the Dataverse web interface. It's possible to hide fields and make fields required by clicking "Edit" at the dataverse level, clicking "General Information" and making adjustments under "Metadata Fields" as described in the :ref:`create-dataverse` section of the Dataverse Management page in the User Guide.
 
 Much more customization of metadata is possible, but this is an advanced topic so feedback on what is written below is very welcome.
The possibilities for customization include: - Editing and adding metadata fields -- Editing and adding instructional text (field label tooltips and text - box watermarks) +- Editing and adding instructional text (field label tooltips and text box watermarks) - Editing and adding controlled vocabularies -- Changing which fields depositors must use in order to save datasets (see also "dataset templates" in the :doc:`/user/dataverse-management` section of the User Guide.) +- Changing which fields depositors must use in order to save datasets (see also :ref:`dataset-templates` section of the User Guide.) - Changing how saved metadata values are displayed in the UI @@ -38,10 +37,8 @@ tab-separated value (TSV). [1]_\ :sup:`,`\ [2]_ While it is technically possible to define more than one metadata block in a TSV file, it is good organizational practice to define only one in each file. -The metadata block TSVs shipped with Dataverse are in `this folder in -the Dataverse github -repo `__ and the corresponding ResourceBundle property files are `here `__. -Human-readable copies are available in `this Google Sheets +The metadata block TSVs shipped with Dataverse are in `/tree/develop/scripts/api/data/metadatablocks +`__ and the corresponding ResourceBundle property files `/tree/develop/src/main/java `__ of the Dataverse GitHub repo. Human-readable copies are available in `this Google Sheets document `__ but they tend to get out of sync with the TSV files, which should be considered authoritative. The Dataverse installation process operates on the TSVs, not the Google spreadsheet. About the metadata block TSV @@ -120,6 +117,17 @@ Each of the three main sections own sets of properties: | | | cause display | | | | problems. | +-----------------------+-----------------------+-----------------------+ +| blockURI | Associates the | The citation | +| | properties in a block | #metadataBlock has | +| | with an external URI. | the blockURI | +| | Properties will be | https://dataverse.org | +| | assigned the global | /schema/citation/ | +| | identifier | which assigns a | +| | blockURI in the | global URI to terms | +| | OAI_ORE metadata | such as 'https:// | +| | and archival Bags | dataverse.org/schema/ | +| | | citation/subtitle' | ++-----------------------+-----------------------+-----------------------+ #datasetField (field) properties ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -316,6 +324,19 @@ Each of the three main sections own sets of properties: | | | existing metadata | | | | block.) | +-----------------------+-----------------------+------------------------+ +| termURI | Specify a global URI | For example, the | +| | identifying this term | existing citation | +| | in an external | #metadataBlock | +| | community vocabulary. | defines the property | +| | | names 'title' | +| | This value overrides | as http://purl.org/dc/ | +| | the default created | terms/title - i.e. | +| | by appending the | indicating that it can | +| | property name to the | be interpreted as the | +| | blockURI defined | Dublin Core term | +| | for the | 'title' | +| | #metadataBlock | | ++-----------------------+-----------------------+------------------------+ #controlledVocabulary (enumerated) properties ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -391,12 +412,10 @@ FieldType definitions | | newlines. While any HTML is | | | permitted, only a subset of HTML | | | tags will be rendered in the UI. | -| | A `list of supported tags is | -| | included in the Dataverse User | -| | Guide `__ | -| | . 
| +| | See the | +| | :ref:`supported-html-fields` | +| | section of the Dataset + File | +| | Management page in the User Guide.| +-----------------------------------+-----------------------------------+ | url | If not empty, field must contain | | | a valid URL. | @@ -504,10 +523,10 @@ Setting Up a Dev Environment for Testing You have several options for setting up a dev environment for testing metadata block changes: -- Vagrant: See the :doc:`/developers/tools` section of the Dev Guide. +- Vagrant: See the :doc:`/developers/tools` section of the Developer Guide. - docker-aio: See https://github.com/IQSS/dataverse/tree/develop/conf/docker-aio -- AWS deployment: See the :doc:`/developers/deployment` section of the Dev Guide. -- Full dev environment: See the :doc:`/developers/dev-environment` section of the Dev Guide. +- AWS deployment: See the :doc:`/developers/deployment` section of the Developer Guide. +- Full dev environment: See the :doc:`/developers/dev-environment` section of the Developer Guide. To get a clean environment in Vagrant, you'll be running ``vagrant destroy``. In Docker, you'll use ``docker rm``. For a full dev environment or AWS installation, you might find ``rebuild`` and related scripts at ``scripts/deploy/phoenix.dataverse.org`` useful. @@ -586,7 +605,7 @@ controlledvocabulary.language.marathi_(marathi)=Marathi (Mar\u0101\u1E6Dh\u012B) Enabling a Metadata Block ~~~~~~~~~~~~~~~~~~~~~~~~~ -Running a curl command like "load" example above should make the new custom metadata block available within the system but in order to start using the fields you must either enable it from the GUI (see "General Information" in the :doc:`/user/dataverse-management` section of the User Guide) or by running a curl command like the one below using a superuser API token. In the example below we are enabling the "journal" and "geospatial" metadata blocks for the root dataverse: +Running a curl command like "load" example above should make the new custom metadata block available within the system but in order to start using the fields you must either enable it from the UI (see :ref:`general-information` section of Dataverse Management in the User Guide) or by running a curl command like the one below using a superuser API token. In the example below we are enabling the "journal" and "geospatial" metadata blocks for the root dataverse: ``curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-type:application/json" -d "[\"journal\",\"geospatial\"]" http://localhost:8080/api/dataverses/:root/metadatablocks`` @@ -601,7 +620,7 @@ configuration, including any enabled metadata schemas: ``curl http://localhost:8080/api/admin/index/solr/schema`` -For convenience and automation you can download and consider running :download:`updateSchemaMDB.sh <../../../../conf/solr/7.3.1/updateSchemaMDB.sh>`. It uses the API endpoint above and writes schema files to the filesystem (so be sure to run it on the Solr server itself as the Unix user who owns the Solr files) and then triggers a Solr reload. +For convenience and automation you can download and consider running :download:`updateSchemaMDB.sh <../../../../conf/solr/7.7.2/updateSchemaMDB.sh>`. It uses the API endpoint above and writes schema files to the filesystem (so be sure to run it on the Solr server itself as the Unix user who owns the Solr files) and then triggers a Solr reload. By default, it will download from Dataverse at `http://localhost:8080` and reload Solr at `http://localhost:8983`. 
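For instance, a typical run on the Solr server itself might look like the sketch below; the flags are described in the table that follows, and the paths are assumptions for a default install:

.. code-block:: bash

   # Run as the Unix user who owns the Solr files, on the Solr server itself.
   # -t points at the config directory of the Solr core to be updated.
   ./updateSchemaMDB.sh -t /usr/local/solr/solr-7.7.2/server/solr/collection1/conf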
You may use the following environment variables with this script or mix'n'match with options: @@ -614,13 +633,13 @@ Environment variable Option Description E `UNBLOCK_KEY` `-u` If your installation has a blocked admin API *xyz* or */secrets/unblock.key* endpoint, you can provide either the key itself or a path to a keyfile -`TARGET` `-t` Provide the config directory of your Solr core */usr/local/solr/solr-7.3.1/server/solr/collection1/conf* +`TARGET` `-t` Provide the config directory of your Solr core */usr/local/solr/solr-7.7.2/server/solr/collection1/conf* "collection1" ==================== ====== =============================================== ========================================================= See the :doc:`/installation/prerequisites/` section of the Installation Guide for a suggested location on disk for the Solr schema file. -Please note that if you are going to make a pull request updating ``conf/solr/7.3.1/schema.xml`` with fields you have added, you should first load all the custom metadata blocks in ``scripts/api/data/metadatablocks`` (including ones you don't care about) to create a complete list of fields. +Please note that if you are going to make a pull request updating ``conf/solr/7.7.2/schema.xml`` with fields you have added, you should first load all the custom metadata blocks in ``scripts/api/data/metadatablocks`` (including ones you don't care about) to create a complete list of fields. Reloading a Metadata Block -------------------------- @@ -631,7 +650,7 @@ As mentioned above, changes to metadata blocks that ship with Dataverse will be Great care must be taken when reloading a metadata block. Matching is done on field names (or identifiers and then names in the case of controlled vocabulary values) so it's easy to accidentally create duplicate fields. -The ability to reload metadata blocks means that SQL update scripts don't need to be written for these changes. See also the :doc:`/developers/sql-upgrade-scripts` section of the Dev Guide. +The ability to reload metadata blocks means that SQL update scripts don't need to be written for these changes. See also the :doc:`/developers/sql-upgrade-scripts` section of the Developer Guide. Tips from the Dataverse Community --------------------------------- diff --git a/doc/sphinx-guides/source/admin/metadataexport.rst b/doc/sphinx-guides/source/admin/metadataexport.rst index 1d1deb37a2f..b9036363cac 100644 --- a/doc/sphinx-guides/source/admin/metadataexport.rst +++ b/doc/sphinx-guides/source/admin/metadataexport.rst @@ -9,7 +9,7 @@ Automatic Exports Publishing a dataset automatically starts a metadata export job, that will run in the background, asynchronously. Once completed, it will make the dataset metadata exported and cached in all the supported formats listed under :ref:`Supported Metadata Export Formats ` in the :doc:`/user/dataset-management` section of the User Guide. -A scheduled timer job that runs nightly will attempt to export any published datasets that for whatever reason haven't been exported yet. This timer is activated automatically on the deployment, or restart, of the application. So, again, no need to start or configure it manually. (See the "Application Timers" section of this guide for more information) +A scheduled timer job that runs nightly will attempt to export any published datasets that for whatever reason haven't been exported yet. This timer is activated automatically on the deployment, or restart, of the application. So, again, no need to start or configure it manually. 
(See the :doc:`timers` section of this Admin Guide for more information.) Batch exports through the API ----------------------------- diff --git a/doc/sphinx-guides/source/admin/monitoring.rst b/doc/sphinx-guides/source/admin/monitoring.rst index 84d6f31e6d7..a901a357907 100644 --- a/doc/sphinx-guides/source/admin/monitoring.rst +++ b/doc/sphinx-guides/source/admin/monitoring.rst @@ -103,6 +103,8 @@ actionlogrecord There is a database table called ``actionlogrecord`` that captures events that may be of interest. See https://github.com/IQSS/dataverse/issues/2729 for more discussion around this table. +.. _edit-draft-versions-logging: + Edit Draft Versions Logging --------------------------- diff --git a/doc/sphinx-guides/source/admin/reporting-tools.rst b/doc/sphinx-guides/source/admin/reporting-tools-and-queries.rst similarity index 57% rename from doc/sphinx-guides/source/admin/reporting-tools.rst rename to doc/sphinx-guides/source/admin/reporting-tools-and-queries.rst index c309744be63..197339d767d 100644 --- a/doc/sphinx-guides/source/admin/reporting-tools.rst +++ b/doc/sphinx-guides/source/admin/reporting-tools-and-queries.rst @@ -1,18 +1,19 @@ .. role:: fixedwidthplain -Reporting Tools -=============== +Reporting Tools and Common Queries +================================== -Reporting tools created by members of the Dataverse community. +Reporting tools and queries created by members of the Dataverse community. .. contents:: Contents: :local: * Matrix (): Collaboration Matrix is a visualization showing the connectedness and collaboration between authors and their affiliations. Visit https://rin.lipi.go.id/matrix/ to play with a production installation. - * Dataverse Web Report (): Creates interactive charts showing data extracted from the Dataverse Excel Report * Dataverse Reports for Texas Digital Library (): A python3-based tool to generate and email statistical reports from Dataverse (https://dataverse.org/) using the native API and database queries. * dataverse-metrics (): Aggregates and visualizes metrics for installations of Dataverse around the world or a single Dataverse installation. + +* Useful queries from the Dataverse Community (): A community-generated and maintained document of postgresql queries for getting information about users and dataverses/datasets/files in your Dataverse installation. If you are trying to find out some information from Dataverse, chances are that someone else has had the same questions and it's now listed in this document. If it's not listed, please feel free to add it to the document. \ No newline at end of file diff --git a/doc/sphinx-guides/source/admin/timers.rst b/doc/sphinx-guides/source/admin/timers.rst index 3c1ff40f935..733dd7fbc1c 100644 --- a/doc/sphinx-guides/source/admin/timers.rst +++ b/doc/sphinx-guides/source/admin/timers.rst @@ -24,7 +24,7 @@ The following JVM option instructs the application to act as the dedicated timer **IMPORTANT:** Note that this option is automatically set by the Dataverse installer script. That means that when **configuring a multi-server cluster**, it will be the responsibility of the installer to remove the option from the :fixedwidthplain:`domain.xml` of every node except the one intended to be the timer server. We also recommend that the following entry in the :fixedwidthplain:`domain.xml`: ```` is changed back to ```` on all the non-timer server nodes. Similarly, this option is automatically set by the installer script. 
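On a node that should *not* act as the dedicated timer server, you can check for and remove the option with ``asadmin`` along the lines of this sketch. The option name shown is an assumption recalled from standard Dataverse installs; confirm it against your own :fixedwidthplain:`domain.xml` before deleting anything:

.. code-block:: bash

   # List the JVM options currently set and look for the timer-server flag.
   ./asadmin list-jvm-options | grep -i timer

   # Remove it on non-timer nodes (option name assumed: dataverse.timerServer).
   ./asadmin delete-jvm-options "-Ddataverse.timerServer=true"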
Changing it back to the default setting on a server that doesn't need to run the timer will prevent a potential race condition, where multiple servers try to get a lock on the timer database.
 
-**Note** that for the timer to work, the version of the PostgreSQL JDBC driver your instance is using must match the version of your PostgreSQL database. See the 'Timer not working' section of the :doc:`/admin/troubleshooting` guide.
+**Note** that for the timer to work, the version of the PostgreSQL JDBC driver your instance is using must match the version of your PostgreSQL database. See the :ref:`timer-not-working` section of Troubleshooting in the Admin Guide.
 
 Harvesting Timers
 -----------------
diff --git a/doc/sphinx-guides/source/admin/troubleshooting.rst b/doc/sphinx-guides/source/admin/troubleshooting.rst
index 1b22a58555b..bf0ffb508a6 100644
--- a/doc/sphinx-guides/source/admin/troubleshooting.rst
+++ b/doc/sphinx-guides/source/admin/troubleshooting.rst
@@ -32,12 +32,28 @@ A User Needs Their Account to Be Converted From Institutional (Shibboleth), ORCI
 
 See :ref:`converting-shibboleth-users-to-local` and :ref:`converting-oauth-users-to-local`.
 
+.. _troubleshooting-glassfish:
+
 Glassfish
 ---------
 
-``server.log`` is the main place to look when you encounter problems. Hopefully an error message has been logged. If there's a stack trace, it may be of interest to developers, especially they can trace line numbers back to a tagged version.
+.. _glassfish-log:
+
+Finding the Glassfish Log File
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``/usr/local/glassfish4/glassfish/domains/domain1/logs/server.log`` is the main place to look when you encounter problems (assuming you installed Glassfish in the default directory). Hopefully an error message has been logged. If there's a stack trace, it may be of interest to developers, especially if they can trace line numbers back to a tagged version or commit. Send more of the stack trace (the entire file if possible) to developers who can help (see "Getting Help", below) and be sure to say which version of Dataverse you are running.
+
+.. _increase-glassfish-logging:
+
+Increasing Glassfish Logging
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-For debugging purposes, you may find it helpful to increase logging levels as mentioned in the :doc:`/developers/debugging` section of the Developer Guide.
+For debugging purposes, you may find it helpful to temporarily increase logging levels. Here's an example of increasing logging for the Java class behind the "datasets" API endpoints:
+
+``./asadmin set-log-levels edu.harvard.iq.dataverse.api.Datasets=FINE``
+
+For more on setting log levels, see the :doc:`/developers/debugging` section of the Developer Guide.
 
 Our guides focus on using the command line to manage Glassfish but you might be interested in an admin GUI at http://localhost:4848
 
@@ -62,7 +78,9 @@ Note that it may or may not work on your system, so it is provided as an example
 
 .. literalinclude:: ../_static/util/clear_timer.sh
 
-Timer not working
+.. _timer-not-working:
+
+Timer Not Working
 -----------------
 
 Dataverse relies on EJB timers to perform scheduled tasks: harvesting from remote servers, updating the local OAI sets and running metadata exports. (See :doc:`timers` for details.) If these scheduled jobs are not running on your server, this may be the result of the incompatibility between the version of PostgreSQL database you are using, and PostgreSQL JDBC driver in use by your instance of Glassfish.
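Before walking through the symptoms below, a quick way to compare the two versions is a sketch like this (the install path and the local ``postgres`` superuser are assumptions for a default setup):

.. code-block:: bash

   # Report the version of the PostgreSQL server itself.
   sudo -u postgres psql -c 'SELECT version();'

   # Locate the PostgreSQL JDBC driver jar that Glassfish loads, assuming
   # Glassfish was installed under /usr/local/glassfish4.
   find /usr/local/glassfish4 -name 'postgresql-*.jar'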
The symptoms: @@ -100,3 +118,8 @@ Many Files with a File Type of "Unknown", "Application", or "Binary" -------------------------------------------------------------------- From the home page of a Dataverse installation you can get a count of files by file type by clicking "Files" and then scrolling down to "File Type". If you see a lot of files that are "Unknown", "Application", or "Binary" you can have Dataverse attempt to redetect the file type by using the :ref:`Redetect File Type ` API endpoint. + +Getting Help +------------ + +If the troubleshooting advice above didn't help, contact any of the support channels mentioned in the :ref:`support` section of the Installation Guide. diff --git a/doc/sphinx-guides/source/admin/user-administration.rst b/doc/sphinx-guides/source/admin/user-administration.rst index 764de6977ab..d9907a94f43 100644 --- a/doc/sphinx-guides/source/admin/user-administration.rst +++ b/doc/sphinx-guides/source/admin/user-administration.rst @@ -53,7 +53,7 @@ The app will send a standard welcome email with a URL the user can click, which, Should users' URL token expire, they will see a "Verify Email" button on the account information page to send another URL. -Sysadmins can determine which users have verified their email addresses by looking for the presence of the value ``emailLastConfirmed`` in the JSON output from listing users (see the "Admin" section of the :doc:`/api/native-api`). As mentioned in the :doc:`/user/account` section of the User Guide, the email addresses for Shibboleth users are re-confirmed on every login. +Sysadmins can determine which users have verified their email addresses by looking for the presence of the value ``emailLastConfirmed`` in the JSON output from listing users (see :ref:`admin` section of Native API in the API Guide). As mentioned in the :doc:`/user/account` section of the User Guide, the email addresses for Shibboleth users are re-confirmed on every login. Deleting an API Token --------------------- diff --git a/doc/sphinx-guides/source/api/apps.rst b/doc/sphinx-guides/source/api/apps.rst index de5ace23e86..6fca5891202 100755 --- a/doc/sphinx-guides/source/api/apps.rst +++ b/doc/sphinx-guides/source/api/apps.rst @@ -30,7 +30,7 @@ File Previewers File Previewers are tools that display the content of files - including audio, html, Hypothes.is annotations, images, PDF, text, video - allowing them to be viewed without downloading. -https://github.com/QualitativeDataRepository/dataverse-previewers +https://github.com/GlobalDataverseCommunityConsortium/dataverse-previewers TwoRavens ~~~~~~~~~ @@ -93,6 +93,14 @@ Archivematica is an integrated suite of open-source tools for processing digital https://github.com/artefactual/archivematica/tree/v1.9.2/src/MCPClient/lib/clientScripts +repo2docker +~~~~~~~~~~~ + +repo2docker is a command line tool that allows you to create and start a +Docker image from a code repository that follows the [reproducible executable environment specification](https://repo2docker.readthedocs.io/en/latest/specification.html). repo2docker supports Dataverse DOIs to find and retrieve datasets. + +https://github.com/jupyter/repo2docker/blob/master/repo2docker/contentproviders/dataverse.py + Java ---- @@ -121,3 +129,10 @@ OJS The Open Journal Systems (OJS) Dataverse Plugin adds data sharing and preservation to the OJS publication process. 
https://github.com/pkp/ojs/tree/ojs-stable-2_4_8/plugins/generic/dataverse + +OpenScholar +~~~~~~~~~~~ + +The Dataverse module from OpenScholar allows Dataverse widgets to be easily embedded in its web pages: + +https://github.com/openscholar/openscholar/tree/SCHOLAR-3.x/openscholar/modules/os_features/os_dataverse diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst index ef3671256e6..64726c77f58 100755 --- a/doc/sphinx-guides/source/api/client-libraries.rst +++ b/doc/sphinx-guides/source/api/client-libraries.rst @@ -29,7 +29,7 @@ R https://github.com/IQSS/dataverse-client-r is the official R package for Dataverse APIs. The latest release can be installed from `CRAN `_. -It was created by `Thomas Leeper `_ whose dataverse can be found at https://dataverse.harvard.edu/dataverse/leeper +The package is currently maintained by `Will Beasley `_. It was created by `Thomas Leeper `_ whose dataverse can be found at https://dataverse.harvard.edu/dataverse/leeper Java ---- diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index eca43ba1c5e..0e2e338404d 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -112,6 +112,7 @@ Value Description ID Exports file with specific file metadata ``ID``. ============== =========== +.. _data-variable-metadata-access: Data Variable Metadata Access ----------------------------- diff --git a/doc/sphinx-guides/source/api/external-tools.rst b/doc/sphinx-guides/source/api/external-tools.rst index fc5adf00c2b..5e58e68c853 100644 --- a/doc/sphinx-guides/source/api/external-tools.rst +++ b/doc/sphinx-guides/source/api/external-tools.rst @@ -29,7 +29,13 @@ Note: This is the same list that appears in the :doc:`/admin/external-tools` sec How External Tools Are Presented to Users ----------------------------------------- -In short, an external tool appears under an "Explore" or "Configure" button either on a dataset landing page or a file landing page. See also the :ref:`testing-external-tools` section of the Admin Guide for some perspective on how installations of Dataverse will expect to test your tool before announcing it to their users. +An external tool can appear in Dataverse in one of three ways: + +- under an "Explore" or "Configure" button either on a dataset landing page +- under an "Explore" or "Configure" button on a file landing page +- as an embedded preview on the file landing page + +See also the :ref:`testing-external-tools` section of the Admin Guide for some perspective on how installations of Dataverse will expect to test your tool before announcing it to their users. Creating an External Tool Manifest ---------------------------------- @@ -75,6 +81,8 @@ Terminology type Whether the external tool is an **explore** tool or a **configure** tool. Configure tools require an API token because they make changes to data files (files within datasets). Configure tools are currently not supported at the dataset level (no "Configure" button appears in the GUI for datasets). toolUrl The **base URL** of the tool before query parameters are added. + + hasPreviewMode A boolean that indicates whether tool has a preview mode which can be embedded in the File Page. Since this view is designed for embedding within Dataverse, the preview mode for a tool will typically be a view without headers or other options that may be included with a tool that is designed to be launched in a new window. 
Sometimes, a tool will exist solely to preview files in Dataverse and the preview mode will be the same as the regular view. contentType File level tools operate on a specific **file type** (content type or MIME type such as "application/pdf") and this must be specified. Dataset level tools do not use contentType. diff --git a/doc/sphinx-guides/source/api/getting-started.rst b/doc/sphinx-guides/source/api/getting-started.rst index c0d6515dc64..3c9bfd9b137 100644 --- a/doc/sphinx-guides/source/api/getting-started.rst +++ b/doc/sphinx-guides/source/api/getting-started.rst @@ -94,6 +94,13 @@ A quick example search for the word "data" is https://demo.dataverse.org/api/sea See the :doc:`search` section for details. +Finding Recently Published Dataverses, Datasets, and Files +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +See :ref:`search-date-range`. + +It's also possible to find recently published datasets via OAI-PMH. + Downloading Files ~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 774ff0aa88c..67b4317e1de 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -53,9 +53,9 @@ Next you need to figure out the alias or database id of the "parent" dataverse i .. code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - export PARENT=root export SERVER_URL=https://demo.dataverse.org - + export PARENT=root + curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$PARENT --upload-file dataverse-complete.json The fully expanded example above (without environment variables) looks like this: @@ -64,42 +64,83 @@ The fully expanded example above (without environment variables) looks like this curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root --upload-file dataverse-complete.json -You should expect a 201 ("CREATED") response and JSON indicating the database id that has been assigned to your newly created dataverse. +You should expect an HTTP 200 response and JSON beginning with "status":"OK" followed by a representation of the newly-created dataverse. .. _view-dataverse: View a Dataverse ~~~~~~~~~~~~~~~~ -|CORS| View data about the dataverse identified by ``$id``. ``$id`` can be the id number of the dataverse, its identifier (a.k.a. alias), or the special value ``:root`` for the root dataverse. +|CORS| View a JSON representation of the dataverse identified by ``$id``. ``$id`` can be the database ID of the dataverse, its alias, or the special value ``:root`` for the root dataverse. + +To view a published dataverse: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl $SERVER_URL/api/dataverses/$ID + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl https://demo.dataverse.org/api/dataverses/root + +To view an unpublished dataverse: -``curl $SERVER_URL/api/dataverses/$id`` +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID + +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root Delete a Dataverse ~~~~~~~~~~~~~~~~~~ -In order to delete a dataverse you must first delete or move all of its contents elsewhere. +Before you may delete a dataverse you must first delete or move all of its contents elsewhere. + +Deletes the dataverse whose database ID or alias is given: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H X-Dataverse-key:$API_TOKEN -X DELETE $SERVER_URL/api/dataverses/$ID + +The fully expanded example above (without environment variables) looks like this: -Deletes the dataverse whose ID is given: +.. code-block:: bash -``curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/dataverses/$id`` + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X DELETE https://demo.dataverse.org/api/dataverses/root .. _show-contents-of-a-dataverse-api: Show Contents of a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -|CORS| Lists all the dataverses and datasets directly under a dataverse (direct children only). You must specify the "alias" of a dataverse or its database id. If you specify your API token and have access, unpublished dataverses and datasets will be included in the listing. +|CORS| Lists all the dataverses and datasets directly under a dataverse (direct children only, not recursive) specified by database id or alias. If you pass your API token and have access, unpublished dataverses and datasets will be included in the response. .. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. .. code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - export ALIAS=root export SERVER_URL=https://demo.dataverse.org - - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ALIAS/contents + export ID=root + + curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/contents The fully expanded example above (without environment variables) looks like this: @@ -110,45 +151,104 @@ The fully expanded example above (without environment variables) looks like this Report the data (file) size of a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Shows the combined size in bytes of all the files uploaded into the dataverse ``id``. :: +Shows the combined size in bytes of all the files uploaded into the dataverse ``id``: -``curl -H "X-Dataverse-key:$API_TOKEN" http://$SERVER_URL/api/dataverses/$id/storagesize`` +.. code-block:: bash -Both published and unpublished files will be counted, in the dataverse specified, and in all its sub-dataverses, recursively. -By default, only the archival files are counted - i.e., the files uploaded by users (plus the tab-delimited versions generated for tabular data files on ingest). If the optional argument ``includeCached=true`` is specified, the API will also add the sizes of all the extra files generated and cached by Dataverse - the resized thumbnail versions for image files, the metadata exports for published datasets, etc. + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/storagesize + +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/storagesize +The size of published and unpublished files will be summed both in the dataverse specified and beneath all its sub-dataverses, recursively. +By default, only the archival files are counted - i.e., the files uploaded by users (plus the tab-delimited versions generated for tabular data files on ingest). If the optional argument ``includeCached=true`` is specified, the API will also add the sizes of all the extra files generated and cached by Dataverse - the resized thumbnail versions for image files, the metadata exports for published datasets, etc. List Roles Defined in a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -All the roles defined directly in the dataverse identified by ``id``:: +All the roles defined directly in the dataverse identified by ``id``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/roles + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - GET http://$SERVER/api/dataverses/$id/roles?key=$apiKey + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/roles List Facets Configured for a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -|CORS| List all the facets for a given dataverse ``id``. :: +|CORS| List all the facets for a given dataverse ``id``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/facets + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - GET http://$SERVER/api/dataverses/$id/facets?key=$apiKey + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/facets Set Facets for a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~ -Assign search facets for a given dataverse with alias ``$alias`` +Assign search facets for a given dataverse identified by ``id``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/dataverses/$ID/facets --upload-file facets.json + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash -``curl -H "X-Dataverse-key: $apiKey" -X POST http://$server/api/dataverses/$alias/facets --upload-file facets.json`` + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/facets --upload-file facets.json Where ``facets.json`` contains a JSON encoded list of metadata keys (e.g. ``["authorName","authorAffiliation"]``). Create a New Role in a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Creates a new role under dataverse ``id``. Needs a json file with the role description:: +Creates a new role under dataverse ``id``. Needs a json file with the role description: - POST http://$SERVER/api/dataverses/$id/roles?key=$apiKey - -POSTed JSON example:: +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$ID/roles --upload-file roles.json + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/roles --upload-file roles.json + +Where ``roles.json`` looks like this:: { "alias": "sys1", @@ -164,29 +264,66 @@ POSTed JSON example:: List Role Assignments in a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -List all the role assignments at the given dataverse:: +List all the role assignments at the given dataverse: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/assignments + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/assignments - GET http://$SERVER/api/dataverses/$id/assignments?key=$apiKey - Assign Default Role to User Creating a Dataset in a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Assign a default role to a user creating a dataset in a dataverse ``id`` where ``roleAlias`` is the database alias of the role to be assigned:: +Assign a default role to a user creating a dataset in a dataverse ``id`` where ``roleAlias`` is the database alias of the role to be assigned: - PUT http://$SERVER/api/dataverses/$id/defaultContributorRole/$roleAlias?key=$apiKey - -Note: You may use "none" as the ``roleAlias``. This will prevent a user who creates a dataset from having any role on that dataset. It is not recommended for dataverses with human contributors. +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + export ROLE_ALIAS=curator + + curl -H X-Dataverse-key:$API_TOKEN -X PUT $SERVER_URL/api/dataverses/$ID/defaultContributorRole/$ROLE_ALIAS + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X PUT https://demo.dataverse.org/api/dataverses/root/defaultContributorRole/curator + +Note: You may use "none" as the ``ROLE_ALIAS``. This will prevent a user who creates a dataset from having any role on that dataset. It is not recommended for dataverses with human contributors. .. _assign-role-on-a-dataverse-api: Assign a New Role on a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Assigns a new role, based on the POSTed JSON. :: +Assigns a new role, based on the POSTed JSON: - POST http://$SERVER/api/dataverses/$id/assignments?key=$apiKey +.. code-block:: bash -POSTed JSON example:: + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-Type: application/json" $SERVER_URL/api/dataverses/$ID/assignments --upload-file role.json + +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-Type: application/json" https://demo.dataverse.org/api/dataverses/root/assignments --upload-file role.json + +POSTed JSON example (the content of ``role.json`` file):: { "assignee": "@uma", @@ -198,14 +335,27 @@ POSTed JSON example:: Delete Role Assignment from a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Delete the assignment whose id is ``$id``:: +Delete the assignment whose id is ``$id``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + export ASSIGNMENT_ID=6 + + curl -H X-Dataverse-key:$API_TOKEN -X DELETE $SERVER_URL/api/dataverses/$ID/assignments/$ASSIGNMENT_ID + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - DELETE http://$SERVER/api/dataverses/$id/assignments/$id?key=$apiKey + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X DELETE https://demo.dataverse.org/api/dataverses/root/assignments/6 List Metadata Blocks Defined on a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -|CORS| Get the metadata blocks defined on a dataverse which determine which field are available to authors when they create and edit datasets within that dataverse. This feature is described under "General Information" in the :doc:`/user/dataverse-management` section of the User Guide. +|CORS| Get the metadata blocks defined on a dataverse which determine which field are available to authors when they create and edit datasets within that dataverse. This feature is described in :ref:`general-information` section of Dataverse Management of the User Guide. Please note that an API token is only required if the dataverse has not been published. @@ -214,10 +364,10 @@ Please note that an API token is only required if the dataverse has not been pub .. code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - export ALIAS=root export SERVER_URL=https://demo.dataverse.org + export ID=root - curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ALIAS/metadatablocks + curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/metadatablocks The fully expanded example above (without environment variables) looks like this: @@ -239,10 +389,10 @@ The metadata blocks that are available with a default installation of Dataverse .. code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - export ALIAS=root export SERVER_URL=https://demo.dataverse.org - - curl -H X-Dataverse-key:$API_TOKEN -X POST -H \"Content-type:application/json\" --upload-file define-metadatablocks.json $SERVER_URL/api/dataverses/$ALIAS/metadatablocks + export ID=root + + curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$ID/metadatablocks -H \"Content-type:application/json\" --upload-file define-metadatablocks.json The fully expanded example above (without environment variables) looks like this: @@ -253,19 +403,43 @@ The fully expanded example above (without environment variables) looks like this Determine if a Dataverse Inherits Its Metadata Blocks from Its Parent ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Get whether the dataverse is a metadata block root, or does it uses its parent blocks:: +Get whether the dataverse is a metadata block root, or does it uses its parent blocks: + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/metadatablocks/isRoot + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - GET http://$SERVER/api/dataverses/$id/metadatablocks/isRoot?key=$apiKey + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/metadatablocks/isRoot Configure a Dataverse to Inherit Its Metadata Blocks from Its Parent ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Set whether the dataverse is a metadata block root, or does it uses its parent blocks. Possible -values are ``true`` and ``false`` (both are valid JSON expressions). :: +values are ``true`` and ``false`` (both are valid JSON expressions): + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H X-Dataverse-key:$API_TOKEN -X PUT $SERVER_URL/api/dataverses/$ID/metadatablocks/isRoot + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - PUT http://$SERVER/api/dataverses/$id/metadatablocks/isRoot?key=$apiKey + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X PUT https://demo.dataverse.org/api/dataverses/root/metadatablocks/isRoot -.. note:: Previous endpoints ``GET http://$SERVER/api/dataverses/$id/metadatablocks/:isRoot?key=$apiKey`` and ``POST http://$SERVER/api/dataverses/$id/metadatablocks/:isRoot?key=$apiKey`` are deprecated, but supported. +.. note:: Previous endpoints ``$SERVER/api/dataverses/$id/metadatablocks/:isRoot`` and ``POST http://$SERVER/api/dataverses/$id/metadatablocks/:isRoot?key=$apiKey`` are deprecated, but supported. .. _create-dataset-command: @@ -279,6 +453,7 @@ To create a dataset, you must supply a JSON file that contains at least the foll - Title - Author +- Contact - Description - Subject @@ -296,24 +471,37 @@ Next you need to figure out the alias or database id of the "parent" dataverse i export PARENT=root export SERVER_URL=https://demo.dataverse.org - curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$PARENT/datasets --upload-file dataset-finch1.json + curl -H X-Dataverse-key:$API_TOKEN -X POST "$SERVER_URL/api/dataverses/$PARENT/datasets" --upload-file dataset-finch1.json The fully expanded example above (without the environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/datasets --upload-file dataset-finch1.json + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/root/datasets" --upload-file "dataset-finch1.json" -You should expect a 201 ("CREATED") response and JSON indicating the database ID and Persistent ID (PID such as DOI or Handle) that has been assigned to your newly created dataset. +You should expect an HTTP 200 ("OK") response and JSON indicating the database ID and Persistent ID (PID such as DOI or Handle) that has been assigned to your newly created dataset. Import a Dataset into a Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. note:: This action requires a Dataverse account with super-user permissions. 
-To import a dataset with an existing persistent identifier (PID), the dataset's metadata should be prepared in Dataverse's native JSON format. The PID is provided as a parameter at the URL. The following line imports a dataset with the PID ``PERSISTENT_IDENTIFIER`` to Dataverse, and then releases it:: +To import a dataset with an existing persistent identifier (PID), the dataset's metadata should be prepared in Dataverse's native JSON format. The PID is provided as a parameter at the URL. The following line imports a dataset with the PID ``PERSISTENT_IDENTIFIER`` to Dataverse, and then releases it: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export DATAVERSE_ID=root + export PERSISTENT_IDENTIFIER=doi:ZZ7/MOSEISLEYDB94 + + curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$DATAVERSE_ID/datasets/:import?pid=$PERSISTENT_IDENTIFIER&release=yes --upload-file dataset.json + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - curl -H "X-Dataverse-key: $API_TOKEN" -X POST $SERVER_URL/api/dataverses/$DV_ALIAS/datasets/:import?pid=$PERSISTENT_IDENTIFIER&release=yes --upload-file dataset.json + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/datasets/:import?pid=doi:ZZ7/MOSEISLEYDB94&release=yes --upload-file dataset.json The ``pid`` parameter holds a persistent identifier (such as a DOI or Handle). The import will fail if no PID is provided, or if the provided PID fails validation. @@ -339,9 +527,22 @@ Import a Dataset into a Dataverse with a DDI file .. note:: This action requires a Dataverse account with super-user permissions. -To import a dataset with an existing persistent identifier (PID), you have to provide the PID as a parameter at the URL. The following line imports a dataset with the PID ``PERSISTENT_IDENTIFIER`` to Dataverse, and then releases it:: +To import a dataset with an existing persistent identifier (PID), you have to provide the PID as a parameter at the URL. The following line imports a dataset with the PID ``PERSISTENT_IDENTIFIER`` to Dataverse, and then releases it: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export DATAVERSE_ID=root + export PERSISTENT_IDENTIFIER=doi:ZZ7/MOSEISLEYDB94 + + curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$DATAVERSE_ID/datasets/:importddi?pid=$PERSISTENT_IDENTIFIER&release=yes --upload-file ddi_dataset.xml + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - curl -H "X-Dataverse-key: $API_TOKEN" -X POST $SERVER_URL/api/dataverses/$DV_ALIAS/datasets/:importddi?pid=$PERSISTENT_IDENTIFIER&release=yes --upload-file ddi_dataset.xml + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/datasets/:importddi?pid=doi:ZZ7/MOSEISLEYDB94&release=yes --upload-file ddi_dataset.xml The optional ``pid`` parameter holds a persistent identifier (such as a DOI or Handle). The import will fail if the provided PID fails validation. @@ -366,10 +567,10 @@ In order to publish a dataverse, you must know either its "alias" (which the GUI .. 
code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - export ALIAS=root export SERVER_URL=https://demo.dataverse.org + export ID=root - curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$ALIAS/actions/:publish + curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$ID/actions/:publish The fully expanded example above (without environment variables) looks like this: @@ -397,49 +598,159 @@ Get JSON Representation of a Dataset .. note:: Datasets can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the dataset is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. - Example: Getting the dataset whose DOI is *10.5072/FK2/J8SJZB* :: +Example: Getting the dataset whose DOI is *10.5072/FK2/J8SJZB*: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB + + curl $SERVER_URL/api/datasets/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl https://demo.dataverse.org/api/datasets/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB + +Getting its draft version: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB + + curl http://$SERVER/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - curl http://$SERVER/api/datasets/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB + curl https://demo.dataverse.org/api/datasets/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/J8SJZB - fully expanded:: - curl http://localhost:8080/api/datasets/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB +|CORS| Show the dataset whose id is passed: - Getting its draft version:: +.. code-block:: bash - curl http://$SERVER/api/datasets/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/J8SJZB + export SERVER_URL=https://demo.dataverse.org + export ID=408730 - fully expanded:: + curl $SERVER_URL/api/datasets/$ID - curl http://localhost:8080/api/datasets/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/J8SJZB +The fully expanded example above (without environment variables) looks like this: +.. code-block:: bash -|CORS| Show the dataset whose id is passed:: + curl https://demo.dataverse.org/api/datasets/408730 - GET http://$SERVER/api/datasets/$id?key=$apiKey +The dataset id can be extracted from the response retrieved from the API which uses the persistent identifier (``/api/datasets/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER``). List Versions of a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~ -|CORS| List versions of the dataset:: +|CORS| List versions of the dataset: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl $SERVER_URL/api/dataverses/$ID/versions + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl https://demo.dataverse.org/api/datasets/24/versions + +It returns a list of versions with their metadata, and file list: + +.. 
code-block:: bash + + { + "status": "OK", + "data": [ + { + "id": 7, + "datasetId": 24, + "datasetPersistentId": "doi:10.5072/FK2/U6AEZM", + "storageIdentifier": "file://10.5072/FK2/U6AEZM", + "versionNumber": 2, + "versionMinorNumber": 0, + "versionState": "RELEASED", + "lastUpdateTime": "2015-04-20T09:58:35Z", + "releaseTime": "2015-04-20T09:58:35Z", + "createTime": "2015-04-20T09:57:32Z", + "license": "CC0", + "termsOfUse": "CC0 Waiver", + "termsOfAccess": "You need to request for access.", + "fileAccessRequest": true, + "metadataBlocks": {...}, + "files": [...] + }, + { + "id": 6, + "datasetId": 24, + "datasetPersistentId": "doi:10.5072/FK2/U6AEZM", + "storageIdentifier": "file://10.5072/FK2/U6AEZM", + "versionNumber": 1, + "versionMinorNumber": 0, + "versionState": "RELEASED", + "UNF": "UNF:6:y4dtFxWhBaPM9K/jlPPuqg==", + "lastUpdateTime": "2015-04-20T09:56:34Z", + "releaseTime": "2015-04-20T09:56:34Z", + "createTime": "2015-04-20T09:43:45Z", + "license": "CC0", + "termsOfUse": "CC0 Waiver", + "termsOfAccess": "You need to request for access.", + "fileAccessRequest": true, + "metadataBlocks": {...}, + "files": [...] + } + ] + } - GET http://$SERVER/api/datasets/$id/versions?key=$apiKey Get Version of a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~ -|CORS| Show a version of the dataset. The Dataset also include any metadata blocks the data might have:: +|CORS| Show a version of the dataset. The output includes any metadata blocks the dataset might have: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export VERSION=1.0 + + curl $SERVER_URL/api/datasets/$ID/versions/$VERSION + +The fully expanded example above (without environment variables) looks like this: - GET http://$SERVER/api/datasets/$id/versions/$versionNumber?key=$apiKey +.. code-block:: bash + + curl https://demo.dataverse.org/api/datasets/24/versions/1.0 .. _export-dataset-metadata-api: Export Metadata of a Dataset in Various Formats ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -|CORS| Export the metadata of the current published version of a dataset in various formats see Note below:: +|CORS| Export the metadata of the current published version of a dataset in various formats see Note below: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB + export METADATA_FORMAT=ddi + + curl $SERVER_URL/api/datasets/export?exporter=$METADATA_FORMAT&persistentId=PERSISTENT_IDENTIFIER + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - GET http://$SERVER/api/datasets/export?exporter=ddi&persistentId=$persistentId + curl https://demo.dataverse.org/api/datasets/export?exporter=ddi&persistentId=doi:10.5072/FK2/J8SJZB .. note:: Supported exporters (export formats) are ``ddi``, ``oai_ddi``, ``dcterms``, ``oai_dc``, ``schema.org`` , ``OAI_ORE`` , ``Datacite``, ``oai_datacite`` and ``dataverse_json``. Descriptive names can be found under :ref:`metadata-export-formats` in the User Guide. @@ -457,38 +768,99 @@ Both forms are valid according to Google's Structured Data Testing Tool at https List Files in a Dataset ~~~~~~~~~~~~~~~~~~~~~~~ -|CORS| Lists all the file metadata, for the given dataset and version:: +|CORS| Lists all the file metadata, for the given dataset and version: - GET http://$SERVER/api/datasets/$id/versions/$versionId/files?key=$apiKey +.. 
code-block:: bash -List All Metadata Blocks for a Dataset -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export VERSION=1.0 -|CORS| Lists all the metadata blocks and their content, for the given dataset and version:: + curl $SERVER_URL/api/datasets/$ID/versions/$VERSION/files - GET http://$SERVER/api/datasets/$id/versions/$versionId/metadata?key=$apiKey +The fully expanded example above (without environment variables) looks like this: -List Single Metadata Block for a Dataset -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. code-block:: bash -|CORS| Lists the metadata block block named `blockname`, for the given dataset and version:: + curl https://demo.dataverse.org/api/datasets/24/versions/1.0/files - GET http://$SERVER/api/datasets/$id/versions/$versionId/metadata/$blockname?key=$apiKey +List All Metadata Blocks for a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Update Metadata For a Dataset -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +|CORS| Lists all the metadata blocks and their content, for the given dataset and version: -Updates the metadata for a dataset. If a draft of the dataset already exists, the metadata of that draft is overwritten; otherwise, a new draft is created with this metadata. +.. code-block:: bash -You must download a JSON representation of the dataset, edit the JSON you download, and then send the updated JSON to the Dataverse server. + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export VERSION=1.0 -For example, after making your edits, your JSON file might look like :download:`dataset-update-metadata.json <../_static/api/dataset-update-metadata.json>` which you would send to Dataverse like this:: + curl $SERVER_URL/api/datasets/$ID/versions/$VERSION/metadata - curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/versions/:draft?persistentId=$PID --upload-file dataset-update-metadata.json +The fully expanded example above (without environment variables) looks like this: -Note that in the example JSON file above, there is a single JSON object with ``metadataBlocks`` as a key. When you download a representation of your dataset in JSON format, the ``metadataBlocks`` object you need is nested inside another object called ``json``. To extract just the ``metadataBlocks`` key when downloading a JSON representation, you can use a tool such as ``jq`` like this:: +.. code-block:: bash + + curl https://demo.dataverse.org/api/datasets/24/versions/1.0/metadata + +List Single Metadata Block for a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +|CORS| Lists the metadata block named `METADATA_BLOCK`, for the given dataset and version: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export VERSION=1.0 + export METADATA_BLOCK=citation + + curl $SERVER_URL/api/datasets/$ID/versions/$VERSION/metadata/$METADATA_BLOCK + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl https://demo.dataverse.org/api/datasets/24/versions/1.0/metadata/citation + +Update Metadata For a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Updates the metadata for a dataset. If a draft of the dataset already exists, the metadata of that draft is overwritten; otherwise, a new draft is created with this metadata. + +You must download a JSON representation of the dataset, edit the JSON you download, and then send the updated JSON to the Dataverse server. 
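+As a rough sketch of that round trip (a hypothetical example assuming ``jq`` is installed and reusing the placeholder server and persistent identifier from the examples below), the download, edit, and upload steps can be combined like this; each step is described in more detail below:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z
+
+  # download the latest version and keep only the metadataBlocks key
+  curl -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/versions/:latest?persistentId=$PERSISTENT_IDENTIFIER" | jq '.data | {metadataBlocks: .metadataBlocks}' > dataset-update-metadata.json
+
+  # edit dataset-update-metadata.json in your editor of choice, then send it back as the draft version
+  curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER" --upload-file dataset-update-metadata.json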
+ +For example, after making your edits, your JSON file might look like :download:`dataset-update-metadata.json <../_static/api/dataset-update-metadata.json>` which you would send to Dataverse like this: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z + + curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER --upload-file dataset-update-metadata.json + +The fully expanded example above (without environment variables) looks like this: - curl -H "X-Dataverse-key: $API_TOKEN" $SERVER_URL/api/datasets/:persistentId/versions/:latest?persistentId=$PID | jq '.data | {metadataBlocks: .metadataBlocks}' > dataset-update-metadata.json +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/BCCP9Z --upload-file dataset-update-metadata.json + +Note that in the example JSON file above, there is a single JSON object with ``metadataBlocks`` as a key. When you download a representation of your dataset in JSON format, the ``metadataBlocks`` object you need is nested inside another object called ``json``. To extract just the ``metadataBlocks`` key when downloading a JSON representation, you can use a tool such as ``jq`` like this: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z + + curl -H "X-Dataverse-key: $API_TOKEN" $SERVER_URL/api/datasets/:persistentId/versions/:latest?persistentId=$PERSISTENT_IDENTIFIER | jq '.data | {metadataBlocks: .metadataBlocks}' > dataset-update-metadata.json + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/datasets/:persistentId/versions/:latest?persistentId=doi:10.5072/FK2/BCCP9Z | jq '.data | {metadataBlocks: .metadataBlocks}' > dataset-update-metadata.json Now that the resulting JSON file only contains the ``metadataBlocks`` key, you can edit the JSON such as with ``vi`` in the example below:: @@ -499,25 +871,60 @@ Now that you've made edits to the metadata in your JSON file, you can send it to Edit Dataset Metadata ~~~~~~~~~~~~~~~~~~~~~ -Alternatively to replacing an entire dataset version with its JSON representation you may add data to dataset fields that are blank or accept multiple values with the following :: +Alternatively to replacing an entire dataset version with its JSON representation you may add data to dataset fields that are blank or accept multiple values with the following: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z + + curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/editMetadata/?persistentId=$PERSISTENT_IDENTIFIER --upload-file dataset-add-metadata.json - curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/editMetadata/?persistentId=$PID --upload-file dataset-add-metadata.json +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash -You may also replace existing metadata in dataset fields with the following (adding the parameter replace=true) :: + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/:persistentId/editMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z --upload-file dataset-add-metadata.json + +You may also replace existing metadata in dataset fields with the following (adding the parameter replace=true): + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z + + curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/editMetadata?persistentId=$PERSISTENT_IDENTIFIER&replace=true --upload-file dataset-update-metadata.json + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/:persistentId/editMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z&replace=true --upload-file dataset-update-metadata.json - curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/editMetadata?persistentId=$PID&replace=true --upload-file dataset-update-metadata.json - For these edits your JSON file need only include those dataset fields which you would like to edit. A sample JSON file may be downloaded here: :download:`dataset-edit-metadata-sample.json <../_static/api/dataset-edit-metadata-sample.json>` Delete Dataset Metadata ~~~~~~~~~~~~~~~~~~~~~~~ -You may delete some of the metadata of a dataset version by supplying a file with a JSON representation of dataset fields that you would like to delete with the following :: +You may delete some of the metadata of a dataset version by supplying a file with a JSON representation of dataset fields that you would like to delete with the following: - curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/deleteMetadata/?persistentId=$PID --upload-file dataset-delete-author-metadata.json - -For these deletes your JSON file must include an exact match of those dataset fields which you would like to delete. A sample JSON file may be downloaded here: :download:`dataset-delete-author-metadata.json <../_static/api/dataset-delete-author-metadata.json>` +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z + + curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/deleteMetadata/?persistentId=$PERSISTENT_IDENTIFIER --upload-file dataset-delete-author-metadata.json + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/:persistentId/deleteMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z --upload-file dataset-delete-author-metadata.json +For these deletes your JSON file must include an exact match of those dataset fields which you would like to delete. A sample JSON file may be downloaded here: :download:`dataset-delete-author-metadata.json <../_static/api/dataset-delete-author-metadata.json>` .. 
_publish-dataset-api: @@ -537,13 +944,13 @@ If this is the first version of the dataset, its version number will be set to ` export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB export MAJOR_OR_MINOR=major - curl -H X-Dataverse-key:$API_TOKEN -X POST \""$SERVER_URL/api/datasets/:persistentId/actions/:publish?persistentId=$PERSISTENT_ID&type=$MAJOR_OR_MINOR"\" + curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/datasets/:persistentId/actions/:publish?persistentId=$PERSISTENT_ID&type=$MAJOR_OR_MINOR" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST "https://demo.dataverse.org/api/datasets/:persistentId/actions/:publish?persistentId=doi:10.5072/FK2/J8SJZB&type=major" + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/:persistentId/actions/:publish?persistentId=doi:10.5072/FK2/J8SJZB&type=major" The quotes around the URL are required because there is more than one query parameter separated by an ampersand (``&``), which has special meaning to Unix shells such as Bash. Putting the ``&`` in quotes ensures that "type" is interpreted as one of the query parameters. @@ -554,53 +961,190 @@ You should expect JSON output and a 200 ("OK") response in most cases. If you re Delete Dataset Draft ~~~~~~~~~~~~~~~~~~~~ -Deletes the draft version of dataset ``$id``. Only the draft version can be deleted:: +Deletes the draft version of dataset ``$ID``. Only the draft version can be deleted: - DELETE http://$SERVER/api/datasets/$id/versions/:draft?key=$apiKey +.. code-block:: bash -Set Citation Date Field for a Dataset -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 -Sets the dataset field type to be used as the citation date for the given dataset (if the dataset does not include the dataset field type, the default logic is used). The name of the dataset field type should be sent in the body of the request. -To revert to the default logic, use ``:publicationDate`` as the ``$datasetFieldTypeName``. -Note that the dataset field used has to be a date field:: + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/versions/:draft - PUT http://$SERVER/api/datasets/$id/citationdate?key=$apiKey --data "$datasetFieldTypeName" +The fully expanded example above (without environment variables) looks like this: -Revert Citation Date Field to Default for Dataset -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. code-block:: bash -Restores the default logic of the field type to be used as the citation date. Same as ``PUT`` with ``:publicationDate`` body:: + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/versions/:draft - DELETE http://$SERVER/api/datasets/$id/citationdate?key=$apiKey +Set Citation Date Field Type for a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -List Role Assignments for a Dataset -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Sets the dataset citation date field type for a given dataset. ``:publicationDate`` is the default. +Note that the dataset citation date field type must be a date field. + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export DATASET_FIELD_TYPE_NAME=:dateOfDeposit + + curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/$ID/citationdate --data "$DATASET_FIELD_TYPE_NAME" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/24/citationdate --data ":dateOfDeposit" + +Revert Citation Date Field Type to Default for Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Restores the default citation date field type, ``:publicationDate``, for a given dataset. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/citationdate + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/citationdate + +.. _list-roles-on-a-dataset-api: + +List Role Assignments in a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Lists all role assignments on a given dataset: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=2347 + + curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/datasets/$ID/assignments + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash -List all the role assignments at the given dataset:: + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/datasets/2347/assignments + +.. _assign-role-on-a-dataset-api: + +Assign a New Role on a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Assigns a new role, based on the POSTed JSON: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=2347 + + curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-Type: application/json" $SERVER_URL/api/datasets/$ID/assignments --upload-file role.json + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-Type: application/json" https://demo.dataverse.org/api/datasets/2347/assignments --upload-file role.json + +POSTed JSON example (the content of ``role.json`` file):: + + { + "assignee": "@uma", + "role": "curator" + } + +.. _revoke-role-on-a-dataset-api: + +Delete Role Assignment from a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Delete the assignment whose id is ``$id``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=2347 + export ASSIGNMENT_ID=6 + + curl -H X-Dataverse-key:$API_TOKEN -X DELETE $SERVER_URL/api/datasets/$ID/assignments/$ASSIGNMENT_ID + +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X DELETE https://demo.dataverse.org/api/datasets/2347/assignments/6 - GET http://$SERVER/api/datasets/$id/assignments?key=$apiKey Create a Private URL for a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Create a Private URL (must be able to manage dataset permissions):: +Create a Private URL (must be able to manage dataset permissions): + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key: $API_TOKEN" -X POST $SERVER_URL/api/datasets/$ID/privateUrl + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - POST http://$SERVER/api/datasets/$id/privateUrl?key=$apiKey + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/24/privateUrl Get the Private URL for a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Get a Private URL from a dataset (if available):: +Get a Private URL from a dataset (if available): + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key: $API_TOKEN" $SERVER_URL/api/datasets/$ID/privateUrl + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - GET http://$SERVER/api/datasets/$id/privateUrl?key=$apiKey + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/datasets/24/privateUrl Delete the Private URL from a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Delete a Private URL from a dataset (if it exists):: +Delete a Private URL from a dataset (if it exists): + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/privateUrl + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - DELETE http://$SERVER/api/datasets/$id/privateUrl?key=$apiKey + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/privateUrl .. _add-file-api: @@ -645,402 +1189,1117 @@ The curl syntax above to upload a file is tricky and a Python version is provide In practice, you only need one the ``dataset_id`` or the ``persistentId``. The example below shows both uses. -.. code-block:: python +.. 
code-block:: python + + from datetime import datetime + import json + import requests # http://docs.python-requests.org/en/master/ + + # -------------------------------------------------- + # Update the 4 params below to run this code + # -------------------------------------------------- + dataverse_server = 'https://your dataverse server' # no trailing slash + api_key = 'api key' + dataset_id = 1 # database id of the dataset + persistentId = 'doi:10.5072/FK2/6XACVA' # doi or hdl of the dataset + + # -------------------------------------------------- + # Prepare "file" + # -------------------------------------------------- + file_content = 'content: %s' % datetime.now() + files = {'file': ('sample_file.txt', file_content)} + + # -------------------------------------------------- + # Using a "jsonData" parameter, add optional description + file tags + # -------------------------------------------------- + params = dict(description='Blue skies!', + categories=['Lily', 'Rosemary', 'Jack of Hearts']) + + params_as_json_string = json.dumps(params) + + payload = dict(jsonData=params_as_json_string) + + # -------------------------------------------------- + # Add file using the Dataset's id + # -------------------------------------------------- + url_dataset_id = '%s/api/datasets/%s/add?key=%s' % (dataverse_server, dataset_id, api_key) + + # ------------------- + # Make the request + # ------------------- + print '-' * 40 + print 'making request: %s' % url_dataset_id + r = requests.post(url_dataset_id, data=payload, files=files) + + # ------------------- + # Print the response + # ------------------- + print '-' * 40 + print r.json() + print r.status_code + + # -------------------------------------------------- + # Add file using the Dataset's persistentId (e.g. doi, hdl, etc) + # -------------------------------------------------- + url_persistent_id = '%s/api/datasets/:persistentId/add?persistentId=%s&key=%s' % (dataverse_server, persistentId, api_key) + + # ------------------- + # Update the file content to avoid a duplicate file error + # ------------------- + file_content = 'content2: %s' % datetime.now() + files = {'file': ('sample_file2.txt', file_content)} + + + # ------------------- + # Make the request + # ------------------- + print '-' * 40 + print 'making request: %s' % url_persistent_id + r = requests.post(url_persistent_id, data=payload, files=files) + + # ------------------- + # Print the response + # ------------------- + print '-' * 40 + print r.json() + print r.status_code + +Report the data (file) size of a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Shows the combined size in bytes of all the files uploaded into the dataset ``id``. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/datasets/$ID/storagesize + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/datasets/24/storagesize + +The size of published and unpublished files will be summed in the dataset specified. +By default, only the archival files are counted - i.e., the files uploaded by users (plus the tab-delimited versions generated for tabular data files on ingest). 
If the optional argument ``includeCached=true`` is specified, the API will also add the sizes of all the extra files generated and cached by Dataverse - the resized thumbnail versions for image files, the metadata exports for published datasets, etc. Because this deals with unpublished files the token supplied must have permission to view unpublished drafts. + + +Get the size of Downloading all the files of a Dataset Version +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Shows the combined size in bytes of all the files available for download from version ``versionId`` of dataset ``id``. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export VERSIONID=1.0 + + curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/datasets/$ID/versions/$VERSIONID/downloadsize + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize + +The size of all files available for download will be returned. +If :draft is passed as versionId the token supplied must have permission to view unpublished drafts. A token is not required for published datasets. Also restricted files will be included in this total regardless of whether the user has access to download the restricted file(s). + +Submit a Dataset for Review +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +When dataset authors do not have permission to publish directly, they can click the "Submit for Review" button in the web interface (see :doc:`/user/dataset-management`), or perform the equivalent operation via API: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB + + curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/datasets/:persistentId/submitForReview?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/:persistentId/submitForReview?persistentId=doi:10.5072/FK2/J8SJZB" + +The people who need to review the dataset (often curators or journal editors) can check their notifications periodically via API to see if any new datasets have been submitted for review and need their attention. See the :ref:`Notifications` section for details. Alternatively, these curators can simply check their email or notifications to know when datasets have been submitted (or resubmitted) for review. + +Return a Dataset to Author +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +After the curators or journal editors have reviewed a dataset that has been submitted for review (see "Submit for Review", above) they can either choose to publish the dataset (see the ``:publish`` "action" above) or return the dataset to its authors. In the web interface there is a "Return to Author" button (see :doc:`/user/dataset-management`), but the interface does not provide a way to explain **why** the dataset is being returned. There is a way to do this outside of this interface, however. Instead of clicking the "Return to Author" button in the UI, a curator can write a "reason for return" into the database via API. + +Here's how curators can send a "reason for return" to the dataset authors. 
First, the curator creates a JSON file that contains the reason for return: + +.. literalinclude:: ../_static/api/reason-for-return.json + +In the example below, the curator has saved the JSON file as :download:`reason-for-return.json <../_static/api/reason-for-return.json>` in their current working directory. Then, the curator sends this JSON file to the ``returnToAuthor`` API endpoint like this: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB + + curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/datasets/:persistentId/returnToAuthor?persistentId=$PERSISTENT_ID" -H "Content-type: application/json" -d @reason-for-return.json + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/:persistentId/returnToAuthor?persistentId=doi:10.5072/FK2/J8SJZB" -H "Content-type: application/json" -d @reason-for-return.json + +The review process can sometimes resemble a tennis match, with the authors submitting and resubmitting the dataset over and over until the curators are satisfied. Each time the curators send a "reason for return" via API, that reason is persisted into the database, stored at the dataset version level. + +Link a Dataset +~~~~~~~~~~~~~~ + +Creates a link between a dataset and a dataverse (see :ref:`dataset-linking` section of Dataverse Management in the User Guide for more information): + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export DATASET_ID=24 + export DATAVERSE_ID=test + + curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/$DATASET_ID/link/$DATAVERSE_ID + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/24/link/test + +Dataset Locks +~~~~~~~~~~~~~ + +To check if a dataset is locked: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl $SERVER_URL/api/datasets/$ID/locks + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl https://demo.dataverse.org/api/datasets/24/locks + +Optionally, you can check if there's a lock of a specific type on the dataset: + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export LOCK_TYPE=Ingest + + curl "$SERVER_URL/api/datasets/$ID/locks?type=$LOCK_TYPE" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/locks?type=Ingest" + +Currently implemented lock types are ``Ingest``, ``Workflow``, ``InReview``, ``DcmUpload``, ``pidRegister``, and ``EditInProgress``. + +The API will output the list of locks, for example:: + + {"status":"OK","data": + [ + { + "lockType":"Ingest", + "date":"Fri Aug 17 15:05:51 EDT 2018", + "user":"dataverseAdmin" + }, + { + "lockType":"Workflow", + "date":"Fri Aug 17 15:02:00 EDT 2018", + "user":"dataverseAdmin" + } + ] + } + +If the dataset is not locked (or if there is no lock of the requested type), the API will return an empty list. 
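+A common use of this call is to wait for a long-running operation (for example, ingest) to finish before taking the next step, such as publishing. A minimal sketch, assuming a Bash shell with ``curl`` and ``jq`` available and the placeholder values below, might poll the locks endpoint until the list comes back empty:
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=24
+
+  # poll every 5 seconds until the dataset reports no locks
+  until [ "$(curl -s $SERVER_URL/api/datasets/$ID/locks | jq '.data | length')" -eq 0 ]; do
+    echo "dataset $ID is still locked, waiting..."
+    sleep 5
+  done
+  echo "dataset $ID is unlocked"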
+ +The following API end point will lock a Dataset with a lock of specified type: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export LOCK_TYPE=Ingest + + curl -H "X-Dataverse-key: $API_TOKEN" -X POST $SERVER_URL/api/datasets/$ID/lock/$LOCK_TYPE + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/24/lock/Ingest + +Use the following API to unlock the dataset, by deleting all the locks currently on the dataset: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/locks + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/locks + +Or, to delete a lock of the type specified only: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export LOCK_TYPE=pidRegister + + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/locks?type=$LOCK_TYPE + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/locks?type=pidRegister + +If the dataset is not locked (or if there is no lock of the specified type), the API will exit with a warning message. + +(Note that the API calls above all support both the database id and persistent identifier notation for referencing the dataset) + +.. _dataset-metrics-api: + +Dataset Metrics +~~~~~~~~~~~~~~~ + +Please note that these dataset level metrics are only available if support for Make Data Count has been enabled in your installation of Dataverse. See the :ref:`Dataset Metrics ` in the :doc:`/user/dataset-management` section of the User Guide and the :doc:`/admin/make-data-count` section of the Admin Guide for details. + +.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + +To confirm that the environment variable was set properly, you can use ``echo`` like this: + +.. code-block:: bash + + echo $SERVER_URL + +Please note that for each of these endpoints except the "citations" endpoint, you can optionally pass the query parameter "country" with a two letter code (e.g. "country=us") and you can specify a particular month by adding it in yyyy-mm format after the requested metric (e.g. "viewsTotal/2019-02"). + +Retrieving Total Views for a Dataset +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Please note that "viewsTotal" is a combination of "viewsTotalRegular" and "viewsTotalMachine" which can be requested separately. + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB + + curl "$SERVER_URL/api/datasets/:persistentId/makeDataCount/viewsTotal?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/:persistentId/makeDataCount/viewsTotal?persistentId=10.5072/FK2/J8SJZB" + +Retrieving Unique Views for a Dataset +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Please note that "viewsUnique" is a combination of "viewsUniqueRegular" and "viewsUniqueMachine" which can be requested separately. + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB + + curl "$SERVER_URL/api/datasets/:persistentId/makeDataCount/viewsUnique?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/:persistentId/makeDataCount/viewsUnique?persistentId=10.5072/FK2/J8SJZB" + +Retrieving Total Downloads for a Dataset +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Please note that "downloadsTotal" is a combination of "downloadsTotalRegular" and "downloadsTotalMachine" which can be requested separately. + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB + + curl "$SERVER_URL/api/datasets/:persistentId/makeDataCount/downloadsTotal?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/:persistentId/makeDataCount/downloadsTotal?persistentId=10.5072/FK2/J8SJZB" + +Retrieving Unique Downloads for a Dataset +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Please note that "downloadsUnique" is a combination of "downloadsUniqueRegular" and "downloadsUniqueMachine" which can be requested separately. + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB + + curl "$SERVER_URL/api/datasets/:persistentId/makeDataCount/downloadsUnique?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/:persistentId/makeDataCount/downloadsUnique?persistentId=10.5072/FK2/J8SJZB" + +Retrieving Citations for a Dataset +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB + + curl "$SERVER_URL/api/datasets/:persistentId/makeDataCount/citations?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/:persistentId/makeDataCount/citations?persistentId=10.5072/FK2/J8SJZB" + +Delete Unpublished Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Delete the dataset whose id is passed: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24 + +Delete Published Dataset +~~~~~~~~~~~~~~~~~~~~~~~~ + +Normally published datasets should not be deleted, but there exists a "destroy" API endpoint for superusers which will act on a dataset given a persistent ID or dataset database ID: + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/:persistentId/destroy/?persistentId=$PERSISTENT_ID + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/:persistentId/destroy/?persistentId=doi:10.5072/FK2/AAA000 + +Delete with dataset identifier: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/destroy + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/destroy + +Calling the destroy endpoint is permanent and irreversible. It will remove the dataset and its datafiles, then re-index the parent dataverse in Solr. This endpoint requires the API token of a superuser. + +Files +----- + +Adding Files +~~~~~~~~~~~~ + +.. Note:: Files can be added via the native API but the operation is performed on the parent object, which is a dataset. Please see the Datasets_ endpoint above for more information. + +Accessing (downloading) files +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. Note:: Access API has its own section in the Guide: :doc:`/api/dataaccess` + +**Note** Data Access API calls can now be made using persistent identifiers (in addition to database ids). This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. + +Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB* + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB + + curl "$SERVER_URL/api/access/datafile/:persistentId/?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/access/datafile/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB" + +Note: you can use the combination of cURL's ``-J`` (``--remote-header-name``) and ``-O`` (``--remote-name``) options to save the file in its original file name, such as + +.. code-block:: bash + + curl -J -O "https://demo.dataverse.org/api/access/datafile/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB" + +Restrict Files +~~~~~~~~~~~~~~ + +Restrict or unrestrict an existing file where ``id`` is the database id of the file or ``pid`` is the persistent id (DOI or Handle) of the file to restrict. Note that some Dataverse installations do not allow the ability to restrict files. + +A curl example using an ``id`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -d true $SERVER_URL/api/files/$ID/restrict + +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT -d true https://demo.dataverse.org/api/files/24/restrict + +A curl example using a ``pid`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -d true $SERVER_URL/api/files/:persistentId/restrict?persistentId=$PERSISTENT_ID + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT -d true "https://demo.dataverse.org/api/files/:persistentId/restrict?persistentId=doi:10.5072/FK2/AAA000" + +Uningest a File +~~~~~~~~~~~~~~~ + +Reverse the tabular data ingest process performed on a file where ``ID`` is the database id or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file to process. Note that this requires "superuser" credentials. + +A curl example using an ``ID``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/$ID/uningest + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/files/24/uningest + +A curl example using a ``PERSISTENT_ID``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/:persistentId/uningest?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/uningest?persistentId=doi:10.5072/FK2/AAA000" + +Reingest a File +~~~~~~~~~~~~~~~ + +Attempt to ingest an existing datafile as tabular data. This API can be used on a file that was not ingested as tabular back when it was uploaded. For example, a Stata v.14 file that was uploaded before ingest support for Stata 14 was added (in Dataverse v.4.9). It can also be used on a file that failed to ingest due to a bug in the ingest plugin that has since been fixed (hence the name "reingest"). + +Note that this requires "superuser" credentials. + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/$ID/reingest + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/files/24/reingest + +A curl example using a ``PERSISTENT_ID`` + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/:persistentId/reingest?persistentId=$PERSISTENT_ID + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/reingest?persistentId=doi:10.5072/FK2/AAA000" + +Note: at present, the API cannot be used on a file that's already successfully ingested as tabular. + +.. _redetect-file-type: + +Redetect File Type +~~~~~~~~~~~~~~~~~~ + +Dataverse uses a variety of methods for determining file types (MIME types or content types) and these methods (listed below) are updated periodically. If you have files that have an unknown file type, you can have Dataverse attempt to redetect the file type. + +When using the curl command below, you can pass ``dryRun=true`` if you don't want any changes to be saved to the database. Change this to ``dryRun=false`` (or omit it) to save the change. + +A curl example using an ``id`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$ID/redetect?dryRun=true" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/24/redetect?dryRun=true" + +A curl example using a ``pid`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/:persistentId/redetect?persistentId=$PERSISTENT_ID&dryRun=true" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/redetect?persistentId=doi:10.5072/FK2/AAA000&dryRun=true" + +Currently the following methods are used to detect file types: + +- The file type detected by the browser (or sent via API). +- JHOVE: http://jhove.openpreservation.org +- As a last resort the file extension (e.g. ".ipybn") is used, defined in a file called ``MimeTypeDetectionByFileExtension.properties``. + +Replacing Files +~~~~~~~~~~~~~~~ + +Replace an existing file where ``ID`` is the database id of the file to replace or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires the ``file`` to be passed as well as a ``jsonString`` expressing the new metadata. Note that metadata such as description, directoryLabel (File Path) and tags are not carried over from the file being replaced. + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F 'file=@file.extension' -F 'jsonData={json}' $SERVER_URL/api/files/$ID/metadata + +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -F 'file=@data.tsv' \ + -F 'jsonData={"description":"My description.","categories":["Data"],"forceReplace":false}' \ + https://demo.dataverse.org/api/files/24/replace + +A curl example using a ``PERSISTENT_ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F 'file=@file.extension' -F 'jsonData={json}' \ + "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -F 'file=@data.tsv' \ + -F 'jsonData={"description":"My description.","categories":["Data"],"forceReplace":false}' \ + "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" + +Getting File Metadata +~~~~~~~~~~~~~~~~~~~~~ + +Provides a json representation of the file metadata for an existing file where ``ID`` is the database id of the file to get metadata from or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl $SERVER_URL/api/files/$ID/metadata + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl https://demo.dataverse.org/api/files/24/metadata + +A curl example using a ``PERSISTENT_ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" + +The current draft can also be viewed if you have permissions and pass your API token + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/files/$ID/metadata/draft - from datetime import datetime - import json - import requests # http://docs.python-requests.org/en/master/ +The fully expanded example above (without environment variables) looks like this: - # -------------------------------------------------- - # Update the 4 params below to run this code - # -------------------------------------------------- - dataverse_server = 'https://your dataverse server' # no trailing slash - api_key = 'api key' - dataset_id = 1 # database id of the dataset - persistentId = 'doi:10.5072/FK2/6XACVA' # doi or hdl of the dataset +.. 
code-block:: bash - # -------------------------------------------------- - # Prepare "file" - # -------------------------------------------------- - file_content = 'content: %s' % datetime.now() - files = {'file': ('sample_file.txt', file_content)} + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/24/metadata/draft - # -------------------------------------------------- - # Using a "jsonData" parameter, add optional description + file tags - # -------------------------------------------------- - params = dict(description='Blue skies!', - categories=['Lily', 'Rosemary', 'Jack of Hearts']) +A curl example using a ``PERSISTENT_ID`` - params_as_json_string = json.dumps(params) +.. code-block:: bash - payload = dict(jsonData=params_as_json_string) + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 - # -------------------------------------------------- - # Add file using the Dataset's id - # -------------------------------------------------- - url_dataset_id = '%s/api/datasets/%s/add?key=%s' % (dataverse_server, dataset_id, api_key) + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/:persistentId/metadata/draft?persistentId=$PERSISTENT_ID" - # ------------------- - # Make the request - # ------------------- - print '-' * 40 - print 'making request: %s' % url_dataset_id - r = requests.post(url_dataset_id, data=payload, files=files) +The fully expanded example above (without environment variables) looks like this: - # ------------------- - # Print the response - # ------------------- - print '-' * 40 - print r.json() - print r.status_code +.. code-block:: bash - # -------------------------------------------------- - # Add file using the Dataset's persistentId (e.g. doi, hdl, etc) - # -------------------------------------------------- - url_persistent_id = '%s/api/datasets/:persistentId/add?persistentId=%s&key=%s' % (dataverse_server, persistentId, api_key) + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/metadata/draft?persistentId=doi:10.5072/FK2/AAA000" - # ------------------- - # Update the file content to avoid a duplicate file error - # ------------------- - file_content = 'content2: %s' % datetime.now() - files = {'file': ('sample_file2.txt', file_content)} +Note: The ``id`` returned in the json response is the id of the file metadata version. +Updating File Metadata +~~~~~~~~~~~~~~~~~~~~~~ - # ------------------- - # Make the request - # ------------------- - print '-' * 40 - print 'making request: %s' % url_persistent_id - r = requests.post(url_persistent_id, data=payload, files=files) +Updates the file metadata for an existing file where ``ID`` is the database id of the file to update or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires a ``jsonString`` expressing the new metadata. No metadata from the previous version of this file will be persisted, so if you want to update a specific field first get the json with the above command and alter the fields you want. - # ------------------- - # Print the response - # ------------------- - print '-' * 40 - print r.json() - print r.status_code +A curl example using an ``ID`` -Submit a Dataset for Review -~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. 
code-block:: bash -When dataset authors do not have permission to publish directly, they can click the "Submit for Review" button in the web interface (see :doc:`/user/dataset-management`), or perform the equivalent operation via API:: + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 - curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/datasets/:persistentId/submitForReview?persistentId=$DOI_OR_HANDLE_OF_DATASET" + curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + $SERVER_URL/api/files/$ID/metadata -The people who need to review the dataset (often curators or journal editors) can check their notifications periodically via API to see if any new datasets have been submitted for review and need their attention. See the :ref:`Notifications` section for details. Alternatively, these curators can simply check their email or notifications to know when datasets have been submitted (or resubmitted) for review. +The fully expanded example above (without environment variables) looks like this: -Return a Dataset to Author -~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. code-block:: bash -After the curators or journal editors have reviewed a dataset that has been submitted for review (see "Submit for Review", above) they can either choose to publish the dataset (see the ``:publish`` "action" above) or return the dataset to its authors. In the web interface there is a "Return to Author" button (see :doc:`/user/dataset-management`), but the interface does not provide a way to explain **why** the dataset is being returned. There is a way to do this outside of this interface, however. Instead of clicking the "Return to Author" button in the UI, a curator can write a "reason for return" into the database via API. + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + http://demo.dataverse.org/api/files/24/metadata -Here's how curators can send a "reason for return" to the dataset authors. First, the curator creates a JSON file that contains the reason for return: +A curl example using a ``PERSISTENT_ID`` -.. literalinclude:: ../_static/api/reason-for-return.json +.. code-block:: bash -In the example below, the curator has saved the JSON file as :download:`reason-for-return.json <../_static/api/reason-for-return.json>` in their current working directory. Then, the curator sends this JSON file to the ``returnToAuthor`` API endpoint like this:: + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 - curl -H "Content-type:application/json" -d @reason-for-return.json -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/datasets/:persistentId/returnToAuthor?persistentId=$DOI_OR_HANDLE_OF_DATASET" + curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID" -The review process can sometimes resemble a tennis match, with the authors submitting and resubmitting the dataset over and over until the curators are satisfied. 
Each time the curators send a "reason for return" via API, that reason is persisted into the database, stored at the dataset version level. +The fully expanded example above (without environment variables) looks like this: -Link a Dataset -~~~~~~~~~~~~~~ +.. code-block:: bash -Creates a link between a dataset and a dataverse (see the Linked Dataverses + Linked Datasets section of the :doc:`/user/dataverse-management` guide for more information). :: + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" - curl -H "X-Dataverse-key: $API_TOKEN" -X PUT http://$SERVER/api/datasets/$linked-dataset-id/link/$linking-dataverse-alias +Also note that dataFileTags are not versioned and changes to these will update the published version of the file. -Dataset Locks -~~~~~~~~~~~~~ +.. _EditingVariableMetadata: -To check if a dataset is locked:: +Editing Variable Level Metadata +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - curl "$SERVER_URL/api/datasets/{database_id}/locks +Updates variable level metadata using ddi xml ``FILE``, where ``ID`` is file id. -Optionally, you can check if there's a lock of a specific type on the dataset:: +A curl example using an ``ID`` - curl "$SERVER_URL/api/datasets/{database_id}/locks?type={lock_type} +.. code-block:: bash -Currently implemented lock types are ``Ingest, Workflow, InReview, DcmUpload, pidRegister, and EditInProgress``. + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export FILE=dct.xml -The API will output the list of locks, for example:: + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT $SERVER_URL/api/edit/$ID --upload-file $FILE - {"status":"OK","data": - [ - { - "lockType":"Ingest", - "date":"Fri Aug 17 15:05:51 EDT 2018", - "user":"dataverseAdmin" - }, - { - "lockType":"Workflow", - "date":"Fri Aug 17 15:02:00 EDT 2018", - "user":"dataverseAdmin" - } - ] - } +The fully expanded example above (without environment variables) looks like this: -If the dataset is not locked (or if there is no lock of the requested type), the API will return an empty list. +.. code-block:: bash -The following API end point will lock a Dataset with a lock of specified type:: + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/edit/24 --upload-file dct.xml - POST /api/datasets/{database_id}/lock/{lock_type} +You can download :download:`dct.xml <../../../../src/test/resources/xml/dct.xml>` from the example above to see what the XML looks like. -For example:: +Provenance +~~~~~~~~~~ - curl -X POST "$SERVER_URL/api/datasets/1234/lock/Ingest?key=$ADMIN_API_TOKEN" - or - curl -X POST -H "X-Dataverse-key: $ADMIN_API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/lock/Ingest?persistentId=$DOI_OR_HANDLE_OF_DATASET" +Get Provenance JSON for an uploaded file +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Use the following API to unlock the dataset, by deleting all the locks currently on the dataset:: +A curl example using an ``ID`` - DELETE /api/datasets/{database_id}/locks +.. 
code-block:: bash -Or, to delete a lock of the type specified only:: + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 - DELETE /api/datasets/{database_id}/locks?type={lock_type} + curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/files/$ID/prov-json -For example:: +The fully expanded example above (without environment variables) looks like this: - curl -X DELETE -H "X-Dataverse-key: $ADMIN_API_TOKEN" "$SERVER_URL/api/datasets/1234/locks?type=pidRegister" +.. code-block:: bash -If the dataset is not locked (or if there is no lock of the specified type), the API will exit with a warning message. + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/24/prov-json -(Note that the API calls above all support both the database id and persistent identifier notation for referencing the dataset) +A curl example using a ``PERSISTENT_ID`` -.. _dataset-metrics-api: +.. code-block:: bash -Dataset Metrics -~~~~~~~~~~~~~~~ + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 -Please note that these dataset level metrics are only available if support for Make Data Count has been enabled in your installation of Dataverse. See the :ref:`Dataset Metrics ` in the :doc:`/user/dataset-management` section of the User Guide and the :doc:`/admin/make-data-count` section of the Admin Guide for details. + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/:persistentId/prov-json?persistentId=$PERSISTENT_ID" -.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of ``export`` below. +The fully expanded example above (without environment variables) looks like this: -``export DV_BASE_URL=https://demo.dataverse.org`` +.. code-block:: bash -To confirm that the environment variable was set properly, you can use ``echo`` like this: + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/prov-json?persistentId=doi:10.5072/FK2/AAA000" -``echo $DV_BASE_URL`` +Get Provenance Description for an uploaded file +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Please note that for each of these endpoints except the "citations" endpoint, you can optionally pass the query parameter "country" with a two letter code (e.g. "country=us") and you can specify a particular month by adding it in yyyy-mm format after the requested metric (e.g. "viewsTotal/2019-02"). +A curl example using an ``ID`` -Retrieving Total Views for a Dataset -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +.. code-block:: bash -Please note that "viewsTotal" is a combination of "viewsTotalRegular" and "viewsTotalMachine" which can be requested separately. + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 -``curl "$DV_BASE_URL/api/datasets/:persistentId/makeDataCount/viewsTotal?persistentId=$DOI"`` + curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/files/$ID/prov-freeform -Retrieving Unique Views for a Dataset -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +The fully expanded example above (without environment variables) looks like this: -Please note that "viewsUnique" is a combination of "viewsUniqueRegular" and "viewsUniqueMachine" which can be requested separately. +.. 
code-block:: bash -``curl "$DV_BASE_URL/api/datasets/:persistentId/makeDataCount/viewsUnique?persistentId=$DOI"`` + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/24/prov-freeform -Retrieving Total Downloads for a Dataset -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +A curl example using a ``PERSISTENT_ID`` -Please note that "downloadsTotal" is a combination of "downloadsTotalRegular" and "downloadsTotalMachine" which can be requested separately. +.. code-block:: bash -``curl "$DV_BASE_URL/api/datasets/:persistentId/makeDataCount/downloadsTotal?persistentId=$DOI"`` + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 -Retrieving Unique Downloads for a Dataset -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/:persistentId/prov-freeform?persistentId=$PERSISTENT_ID" -Please note that "downloadsUnique" is a combination of "downloadsUniqueRegular" and "downloadsUniqueMachine" which can be requested separately. +The fully expanded example above (without environment variables) looks like this: -``curl "$DV_BASE_URL/api/datasets/:persistentId/makeDataCount/downloadsUnique?persistentId=$DOI"`` +.. code-block:: bash -Retrieving Citations for a Dataset -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/prov-freeform?persistentId=doi:10.5072/FK2/AAA000" -``curl "$DV_BASE_URL/api/datasets/:persistentId/makeDataCount/citations?persistentId=$DOI"`` +Create/Update Provenance JSON and provide related entity name for an uploaded file +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Delete Unpublished Dataset -~~~~~~~~~~~~~~~~~~~~~~~~~~ +A curl example using an ``ID`` -Delete the dataset whose id is passed: +.. code-block:: bash -``curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE http://$SERVER/api/datasets/$id`` + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export ENTITY_NAME="..." + export FILE_PATH=provenance.json -Delete Published Dataset -~~~~~~~~~~~~~~~~~~~~~~~~ + curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/$ID/prov-json?entityName=$ENTITY_NAME -H "Content-type:application/json" --upload-file $FILE_PATH -Normally published datasets should not be deleted, but there exists a "destroy" API endpoint for superusers which will act on a dataset given a persistent ID or dataset database ID: +The fully expanded example above (without environment variables) looks like this: -``curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE http://$SERVER/api/datasets/:persistentId/destroy/?persistentId=doi:10.5072/FK2/AAA000`` - -``curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE http://$SERVER/api/datasets/999/destroy`` - -Calling the destroy endpoint is permanent and irreversible. It will remove the dataset and its datafiles, then re-index the parent dataverse in Solr. This endpoint requires the API token of a superuser. +.. code-block:: bash -Files ------ + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/24/prov-json?entityName=..." -H "Content-type:application/json" --upload-file provenance.json -Adding Files -~~~~~~~~~~~~ +A curl example using a ``PERSISTENT_ID`` -.. 
Note:: Files can be added via the native API but the operation is performed on the parent object, which is a dataset. Please see the Datasets_ endpoint above for more information. +.. code-block:: bash -Accessing (downloading) files -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + export ENTITY_NAME="..." + export FILE_PATH=provenance.json -.. Note:: Access API has its own section in the Guide: :doc:`/api/dataaccess` + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/:persistentId/prov-json?persistentId=$PERSISTENT_ID&entityName=$ENTITY_NAME" -H "Content-type:application/json" --upload-file $FILE_PATH -**Note** Data Access API calls can now be made using persistent identifiers (in addition to database ids). This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. +The fully expanded example above (without environment variables) looks like this: - Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB* :: +.. code-block:: bash - GET http://$SERVER/api/access/datafile/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/prov-json?persistentId=doi:10.5072/FK2/AAA000&entityName=..." -H "Content-type:application/json" --upload-file provenance.json +Create/Update Provenance Description for an uploaded file +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Restrict Files -~~~~~~~~~~~~~~ +Requires a JSON file with the description connected to a key named "text" -Restrict or unrestrict an existing file where ``id`` is the database id of the file or ``pid`` is the persistent id (DOI or Handle) of the file to restrict. Note that some Dataverse installations do not allow the ability to restrict files. +A curl example using an ``ID`` -A curl example using an ``id``:: +.. code-block:: bash - curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -d true http://$SERVER/api/files/{id}/restrict + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export FILE_PATH=provenance.json -A curl example using a ``pid``:: + curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/$ID/prov-freeform -H "Content-type:application/json" --upload-file $FILE_PATH - curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -d true http://$SERVER/api/files/:persistentId/restrict?persistentId={pid} - -Uningest a File -~~~~~~~~~~~~~~~ +The fully expanded example above (without environment variables) looks like this: -Reverse the tabular data ingest process performed on a file where ``{id}`` is the database id of the file to process. Note that this requires "superuser" credentials:: +.. code-block:: bash - POST http://$SERVER/api/files/{id}/uningest?key={apiKey} + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/files/24/prov-freeform -H "Content-type:application/json" --upload-file provenance.json -Reingest a File -~~~~~~~~~~~~~~~ +A curl example using a ``PERSISTENT_ID`` -Attempt to ingest an existing datafile as tabular data. This API can be used on a file that was not ingested as tabular back when it was uploaded. 
For example, a Stata v.14 file that was uploaded before ingest support for Stata 14 was added (in Dataverse v.4.9). It can also be used on a file that failed to ingest due to a bug in the ingest plugin that has since been fixed (hence the name "reingest"). +.. code-block:: bash -Note that this requires "superuser" credentials:: + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + export FILE_PATH=provenance.json - POST http://$SERVER/api/files/{id}/reingest?key={apiKey} + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/:persistentId/prov-freeform?persistentId=$PERSISTENT_ID" -H "Content-type:application/json" --upload-file $FILE_PATH -(``{id}`` is the database id of the file to process) +The fully expanded example above (without environment variables) looks like this: -Note: at present, the API cannot be used on a file that's already successfully ingested as tabular. +.. code-block:: bash -.. _redetect-file-type: + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/prov-freeform?persistentId=doi:10.5072/FK2/AAA000" -H "Content-type:application/json" --upload-file provenance.json -Redetect File Type -~~~~~~~~~~~~~~~~~~ +See a sample JSON file :download:`file-provenance.json <../_static/api/file-provenance.json>` from http://openprovenance.org (c.f. Huynh, Trung Dong and Moreau, Luc (2014) ProvStore: a public provenance repository. At 5th International Provenance and Annotation Workshop (IPAW'14), Cologne, Germany, 09-13 Jun 2014. pp. 275-277). -Dataverse uses a variety of methods for determining file types (MIME types or content types) and these methods (listed below) are updated periodically. If you have files that have an unknown file type, you can have Dataverse attempt to redetect the file type. +Delete Provenance JSON for an uploaded file +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -When using the curl command below, you can pass ``dryRun=true`` if you don't want any changes to be saved to the database. Change this to ``dryRun=false`` (or omit it) to save the change. In the example below, the file is identified by database id "42". +A curl example using an ``ID`` -``export FILE_ID=42`` +.. code-block:: bash -``curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/$FILE_ID/redetect?dryRun=true`` + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 -Currently the following methods are used to detect file types: + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/files/$ID/prov-json -- The file type detected by the browser (or sent via API). -- JHOVE: http://jhove.openpreservation.org -- As a last resort the file extension (e.g. ".ipybn") is used, defined in a file called ``MimeTypeDetectionByFileExtension.properties``. +The fully expanded example above (without environment variables) looks like this: -Replacing Files -~~~~~~~~~~~~~~~ +.. code-block:: bash -Replace an existing file where ``id`` is the database id of the file to replace or ``pid`` is the persistent id (DOI or Handle) of the file. Requires the ``file`` to be passed as well as a ``jsonString`` expressing the new metadata. 
Note that metadata such as description, directoryLabel (File Path) and tags are not carried over from the file being replaced:: + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/files/24/prov-json - POST -F 'file=@file.extension' -F 'jsonData={json}' http://$SERVER/api/files/{id}/metadata?key={apiKey} +A curl example using a ``PERSISTENT_ID`` -Example:: +.. code-block:: bash - curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F 'file=@data.tsv' \ - -F 'jsonData={"description":"My description.","categories":["Data"],"forceReplace":false}'\ - "https://demo.dataverse.org/api/files/$FILE_ID/replace" + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 -Getting File Metadata -~~~~~~~~~~~~~~~~~~~~~ + curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/files/:persistentId/prov-json?persistentId=$PERSISTENT_ID" -Provides a json representation of the file metadata for an existing file where ``id`` is the database id of the file to replace or ``pid`` is the persistent id (DOI or Handle) of the file:: +The fully expanded example above (without environment variables) looks like this: - GET http://$SERVER/api/files/{id}/metadata +.. code-block:: bash -The current draft can also be viewed if you have permissions and pass your ``apiKey``:: + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/files/:persistentId/prov-json?persistentId=doi:10.5072/FK2/AAA000" - GET http://$SERVER/api/files/{id}/metadata/draft?key={apiKey} +Datafile Integrity +~~~~~~~~~~~~~~~~~~ -Note: The ``id`` returned in the json response is the id of the file metadata version. +Starting the release 4.10 the size of the saved original file (for an ingested tabular datafile) is stored in the database. The following API will retrieve and permanently store the sizes for any already existing saved originals: -Updating File Metadata -~~~~~~~~~~~~~~~~~~~~~~ +.. code-block:: bash -Updates the file metadata for an existing file where ``id`` is the database id of the file to replace or ``pid`` is the persistent id (DOI or Handle) of the file. Requires a ``jsonString`` expressing the new metadata. No metadata from the previous version of this file will be persisted, so if you want to update a specific field first get the json with the above command and alter the fields you want:: + export SERVER_URL=https://localhost - POST -F 'jsonData={json}' http://$SERVER/api/files/{id}/metadata?key={apiKey} + curl $SERVER_URL/api/admin/datafiles/integrity/fixmissingoriginalsizes -Example:: +with limit parameter: - curl -H "X-Dataverse-key:{apiKey}" -X POST -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' 'http://localhost:8080/api/files/264/metadata' +.. code-block:: bash -Also note that dataFileTags are not versioned and changes to these will update the published version of the file. + export SERVER_URL=https://localhost + export LIMIT=10 -.. _EditingVariableMetadata: + curl "$SERVER_URL/api/admin/datafiles/integrity/fixmissingoriginalsizes?limit=$LIMIT" -Editing Variable Level Metadata -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The fully expanded example above (without environment variables) looks like this: -Updates variable level metadata using ddi xml ``$file``, where ``$id`` is file id:: +.. 
code-block:: bash - PUT https://$SERVER/api/edit/$id --upload-file $file + curl https://localhost/api/admin/datafiles/integrity/fixmissingoriginalsizes" -Example: ``curl -H "X-Dataverse-key:$API_TOKEN" -X PUT http://localhost:8080/api/edit/95 --upload-file dct.xml`` +with limit parameter: -You can download :download:`dct.xml <../../../../src/test/resources/xml/dct.xml>` from the example above to see what the XML looks like. +.. code-block:: bash -Provenance -~~~~~~~~~~ -Get Provenance JSON for an uploaded file:: + curl https://localhost/api/admin/datafiles/integrity/fixmissingoriginalsizes?limit=10" - GET http://$SERVER/api/files/{id}/prov-json?key=$apiKey +Note the optional "limit" parameter. Without it, the API will attempt to populate the sizes for all the saved originals that don't have them in the database yet. Otherwise it will do so for the first N such datafiles. -Get Provenance Description for an uploaded file:: +By default, the admin API calls are blocked and can only be called from localhost. See more details in :ref:`:BlockedApiEndpoints <:BlockedApiEndpoints>` and :ref:`:BlockedApiPolicy <:BlockedApiPolicy>` settings in :doc:`/installation/config`. - GET http://$SERVER/api/files/{id}/prov-freeform?key=$apiKey +Users Token Management +---------------------- -Create/Update Provenance JSON and provide related entity name for an uploaded file:: +The following endpoints will allow users to manage their API tokens. - POST http://$SERVER/api/files/{id}/prov-json?key=$apiKey&entityName=$entity -H "Content-type:application/json" --upload-file $filePath +Find a Token's Expiration Date +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Create/Update Provenance Description for an uploaded file. Requires a JSON file with the description connected to a key named "text":: +In order to obtain the expiration date of a token use:: - POST http://$SERVER/api/files/{id}/prov-freeform?key=$apiKey -H "Content-type:application/json" --upload-file $filePath + curl -H X-Dataverse-key:$API_TOKEN -X GET $SERVER_URL/api/users/token -Delete Provenance JSON for an uploaded file:: +Recreate a Token +~~~~~~~~~~~~~~~~ - DELETE http://$SERVER/api/files/{id}/prov-json?key=$apiKey +In order to obtain a new token use:: -Datafile Integrity -~~~~~~~~~~~~~~~~~~ + curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/users/token/recreate -Starting the release 4.10 the size of the saved original file (for an ingested tabular datafile) is stored in the database. The following API will retrieve and permanently store the sizes for any already existing saved originals:: +Delete a Token +~~~~~~~~~~~~~~~~ - GET http://$SERVER/api/admin/datafiles/integrity/fixmissingoriginalsizes{?limit=N} +In order to delete a token use:: -Note the optional "limit" parameter. Without it, the API will attempt to populate the sizes for all the saved originals that don't have them in the database yet. Otherwise it will do so for the first N such datafiles. + curl -H X-Dataverse-key:$API_TOKEN -X DELETE $SERVER_URL/api/users/token + + Builtin Users ------------- -Builtin users are known as "Username/Email and Password" users in the :doc:`/user/account` of the User Guide. Dataverse stores a password (encrypted, of course) for these users, which differs from "remote" users such as Shibboleth or OAuth users where the password is stored elsewhere. See also "Auth Modes: Local vs. Remote vs. Both" in the :doc:`/installation/config` section of the Installation Guide. It's a valid configuration of Dataverse to not use builtin users at all. 
+Builtin users are known as "Username/Email and Password" users in the :doc:`/user/account` of the User Guide. Dataverse stores a password (encrypted, of course) for these users, which differs from "remote" users such as Shibboleth or OAuth users where the password is stored elsewhere. See also :ref:`auth-modes` section of Configuration in the Installation Guide. It's a valid configuration of Dataverse to not use builtin users at all. Create a Builtin User ~~~~~~~~~~~~~~~~~~~~~ -For security reasons, builtin users cannot be created via API unless the team who runs the Dataverse installation has populated a database setting called ``BuiltinUsers.KEY``, which is described under "Securing Your Installation" and "Database Settings" in the :doc:`/installation/config` section of the Installation Guide. You will need to know the value of ``BuiltinUsers.KEY`` before you can proceed. +For security reasons, builtin users cannot be created via API unless the team who runs the Dataverse installation has populated a database setting called ``BuiltinUsers.KEY``, which is described under :ref:`securing-your-installation` and :ref:`database-settings` sections of Configuration in the Installation Guide. You will need to know the value of ``BuiltinUsers.KEY`` before you can proceed. To create a builtin user via API, you must first construct a JSON document. You can download :download:`user-add.json <../_static/api/user-add.json>` or copy the text below as a starting point and edit as necessary. @@ -1148,6 +2407,8 @@ Shibboleth Groups Management of Shibboleth groups via API is documented in the :doc:`/installation/shibboleth` section of the Installation Guide. +.. _info: + Info ---- @@ -1192,7 +2453,7 @@ The fully expanded example above (without environment variables) looks like this Show Custom Popup Text for Publishing Datasets ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -For now, only the value for the ``:DatasetPublishPopupCustomText`` setting from the :doc:`/installation/config` section of the Installation Guide is exposed: +For now, only the value for the :ref:`:DatasetPublishPopupCustomText` setting from the Configuration section of the Installation Guide is exposed: .. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below. @@ -1256,10 +2517,12 @@ Each user can get a dump of their notifications by passing in their API token:: curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/notifications/all +.. _admin: + Admin ----- -This is the administrative part of the API. For security reasons, it is absolutely essential that you block it before allowing public access to a Dataverse installation. Blocking can be done using settings. See the ``post-install-api-block.sh`` script in the ``scripts/api`` folder for details. See also "Blocking API Endpoints" under "Securing Your Installation" in the :doc:`/installation/config` section of the Installation Guide. +This is the administrative part of the API. For security reasons, it is absolutely essential that you block it before allowing public access to a Dataverse installation. Blocking can be done using settings. See the ``post-install-api-block.sh`` script in the ``scripts/api`` folder for details. See :ref:`blocking-api-endpoints` in Securing Your Installation section of the Configuration page of the Installation Guide. 
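As a quick illustration (not a replacement for the ``post-install-api-block.sh`` script), the ``:BlockedApiEndpoints`` and ``:BlockedApiPolicy`` settings mentioned above can be set with curl. The sketch below assumes it is run on the Dataverse server itself and that the application answers on the default port 8080:

.. code-block:: bash

    export SERVER_URL=http://localhost:8080

    # block the admin and builtin-users endpoint groups ...
    curl -X PUT -d "admin,builtin-users" $SERVER_URL/api/admin/settings/:BlockedApiEndpoints

    # ... and only allow the blocked endpoints to be called from localhost
    curl -X PUT -d localhost-only $SERVER_URL/api/admin/settings/:BlockedApiPolicy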
List All Database Settings ~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1531,7 +2794,7 @@ Change User Identifier Changes identifier for user in ``AuthenticatedUser``, ``BuiltinUser``, ``AuthenticatedUserLookup`` & ``RoleAssignment``. Allows them to log in with the new identifier. Only accessible to superusers.:: - PUT http://$SERVER/api/users/$oldIdentifier/changeIdentifier/$newIdentifier + POST http://$SERVER/api/users/$oldIdentifier/changeIdentifier/$newIdentifier Example: ``curl -H "X-Dataverse-key: $API_TOKEN" -X POST https://demo.dataverse.org/api/users/johnsmith/changeIdentifier/jsmith`` @@ -1543,6 +2806,21 @@ Make User a SuperUser Toggles superuser mode on the ``AuthenticatedUser`` whose ``identifier`` (without the ``@`` sign) is passed. :: POST http://$SERVER/api/admin/superuser/$identifier + +Delete a User +~~~~~~~~~~~~~ + +Deletes an ``AuthenticatedUser`` whose ``identifier`` (without the ``@`` sign) is passed. :: + + DELETE http://$SERVER/api/admin/authenticatedUsers/$identifier + +Deletes an ``AuthenticatedUser`` whose ``id`` is passed. :: + + DELETE http://$SERVER/api/admin/authenticatedUsers/id/$id + +Note: If the user has performed certain actions such as creating or contributing to a Dataset or downloading a file they cannot be deleted. + + List Role Assignments of a Role Assignee ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/api/search.rst b/doc/sphinx-guides/source/api/search.rst index ff2da576577..25bad2b8091 100755 --- a/doc/sphinx-guides/source/api/search.rst +++ b/doc/sphinx-guides/source/api/search.rst @@ -6,7 +6,7 @@ Search API The Search API supports the same searching, sorting, and faceting operations as the Dataverse web interface. -Unlike the web interface, this new API is limited to *published* data. +To search unpublished content, you must pass in an API token as described in the :doc:`auth` section. The parameters and JSON response are partly inspired by the `GitHub Search API `_. @@ -35,7 +35,6 @@ show_relevance boolean Whether or not to show details of which fields were ma show_facets boolean Whether or not to show facets that can be operated on by the "fq" parameter. False by default. See :ref:`advanced search example `. fq string A filter query on the search term. Multiple "fq" parameters can be used. See :ref:`advanced search example `. show_entity_ids boolean Whether or not to show the database IDs of the search results (for developer use). -query_entities boolean Whether entities are queried via direct database calls (for developer use). 
=============== ======= =========== Basic Search Example @@ -49,7 +48,7 @@ https://demo.dataverse.org/api/search?q=trees "status":"OK", "data":{ "q":"trees", - "total_count":4, + "total_count":5, "start":0, "spelling_alternatives":{ "trees":"[tree]" @@ -99,16 +98,57 @@ https://demo.dataverse.org/api/search?q=trees "identifier":"birds", "description":"A bird dataverse with some trees", "published_at":"2016-05-10T12:57:27Z" - } + }, + { + "name":"Darwin's Finches", + "type":"dataset", + "url":"https://doi.org/10.70122/FK2/MB5VGR", + "global_id":"doi:10.70122/FK2/MB5VGR", + "description":"Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds.", + "published_at":"2019-12-11T15:26:10Z", + "publisher":"dvbe69f5e1", + "citationHtml":"Finch, Fiona; Spruce, Sabrina; Poe, Edgar Allen; Mulligan, Hercules, 2019, \"Darwin's Finches\", https://doi.org/10.70122/FK2/MB5VGR, Root, V3", + "identifier_of_dataverse":"dvbe69f5e1", + "name_of_dataverse":"dvbe69f5e1", + "citation":"Finch, Fiona; Spruce, Sabrina; Poe, Edgar Allen; Mulligan, Hercules, 2019, \"Darwin's Finches\", https://doi.org/10.70122/FK2/MB5VGR, Root, V3", + "storageIdentifier":"file://10.70122/FK2/MB5VGR", + "subjects":[ + "Astronomy and Astrophysics", + "Other" + ], + "fileCount":3, + "versionId":1260, + "versionState":"RELEASED", + "majorVersion":3, + "minorVersion":0, + "createdAt":"2019-09-20T18:08:29Z", + "updatedAt":"2019-12-11T15:26:10Z", + "contacts":[ + { + "name":"Finch, Fiona", + "affiliation":"" + } + ], + "producers":[ + "Allen, Irwin", + "Spielberg, Stephen" + ], + "authors":[ + "Finch, Fiona", + "Spruce, Sabrina", + "Poe, Edgar Allen", + "Mulligan, Hercules" + ] + } ], - "count_in_response":4 + "count_in_response":5 } } .. _advancedsearch-example: -Advanced Search Example ------------------------ +Advanced Search Examples +------------------------ https://demo.dataverse.org/api/search?q=finch&show_relevance=true&show_facets=true&fq=publicationDate:2016&subtree=birds @@ -222,6 +262,100 @@ In this example, ``show_relevance=true`` matches per field are shown. Available } } +https://demo.dataverse.org/api/search?q=finch&fq=publicationStatus:Published&type=dataset + +The above example ``fq=publicationStatus:Published`` retrieves only "RELEASED" versions of datasets. The same could be done to retrieve "DRAFT" versions, ``fq=publicationStatus:Draft`` + +.. 
code-block:: json + + { + "status": "OK", + "data": { + "q": "finch", + "total_count": 2, + "start": 0, + "spelling_alternatives": {}, + "items": [ + { + "name": "Darwin's Finches", + "type": "dataset", + "url": "https://doi.org/10.70122/FK2/GUAS41", + "global_id": "doi:10.70122/FK2/GUAS41", + "description": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds.", + "published_at": "2019-12-24T08:05:02Z", + "publisher": "mdmizanur rahman Dataverse", + "citationHtml": "Finch, Fiona, 2019, \"Darwin's Finches\", https://doi.org/10.70122/FK2/GUAS41, Demo Dataverse, V1", + "identifier_of_dataverse": "rahman", + "name_of_dataverse": "mdmizanur rahman Dataverse", + "citation": "Finch, Fiona, 2019, \"Darwin's Finches\", https://doi.org/10.70122/FK2/GUAS41, Demo Dataverse, V1", + "storageIdentifier": "file://10.70122/FK2/GUAS41", + "subjects": [ + "Medicine, Health and Life Sciences" + ], + "fileCount":6, + "versionId": 53001, + "versionState": "RELEASED", + "majorVersion": 1, + "minorVersion": 0, + "createdAt": "2019-12-05T09:18:30Z", + "updatedAt": "2019-12-24T08:38:00Z", + "contacts": [ + { + "name": "Finch, Fiona", + "affiliation": "" + } + ], + "authors": [ + "Finch, Fiona" + ] + }, + { + "name": "Darwin's Finches", + "type": "dataset", + "url": "https://doi.org/10.70122/FK2/7ZXYRH", + "global_id": "doi:10.70122/FK2/7ZXYRH", + "description": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds.", + "published_at": "2020-01-22T21:47:34Z", + "publisher": "Demo Dataverse", + "citationHtml": "Finch, Fiona, 2020, \"Darwin's Finches\", https://doi.org/10.70122/FK2/7ZXYRH, Demo Dataverse, V1", + "identifier_of_dataverse": "demo", + "name_of_dataverse": "Demo Dataverse", + "citation": "Finch, Fiona, 2020, \"Darwin's Finches\", https://doi.org/10.70122/FK2/7ZXYRH, Demo Dataverse, V1", + "storageIdentifier": "file://10.70122/FK2/7ZXYRH", + "subjects": [ + "Medicine, Health and Life Sciences" + ], + "fileCount":9, + "versionId": 53444, + "versionState": "RELEASED", + "majorVersion": 1, + "minorVersion": 0, + "createdAt": "2020-01-22T21:23:43Z", + "updatedAt": "2020-01-22T21:47:34Z", + "contacts": [ + { + "name": "Finch, Fiona", + "affiliation": "" + } + ], + "authors": [ + "Finch, Fiona" + ] + } + ], + "count_in_response": 2 + } + } + +.. _search-date-range: + +Date Range Search Example +------------------------- + +Below is an example of searching across a date range of dataverses, datasets, and files that were published in 2018. + +`https://demo.dataverse.org/api/search?q=*&per_page=1000&sort=date&order=asc&q=*&fq=dateSort:[2018-01-01T00\:00\:00Z+TO+2019-01-01T00\:00\:00Z] `_ + .. _iteration-example: Iteration diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index b73d1fdc4ac..6ca7856c063 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -65,9 +65,9 @@ # built documents. # # The short X.Y version. -version = '4.17' +version = '4.20' # The full version, including alpha/beta/rc tags. -release = '4.17' +release = '4.20' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
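Returning to the date range search example above: escaping the colons in the ``dateSort`` filter query by hand is easy to get wrong, so a rough equivalent that lets curl do the URL encoding (assuming the same demo server) is:

.. code-block:: bash

    export SERVER_URL=https://demo.dataverse.org

    curl -G "$SERVER_URL/api/search" \
      --data-urlencode 'q=*' \
      --data-urlencode 'fq=dateSort:[2018-01-01T00:00:00Z TO 2019-01-01T00:00:00Z]' \
      --data-urlencode 'sort=date' \
      --data-urlencode 'order=asc' \
      --data-urlencode 'per_page=1000'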
diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst index 37a794e804e..c1c2969a60a 100644 --- a/doc/sphinx-guides/source/developers/big-data-support.rst +++ b/doc/sphinx-guides/source/developers/big-data-support.rst @@ -6,7 +6,52 @@ Big data support is highly experimental. Eventually this content will move to th .. contents:: |toctitle| :local: -Various components need to be installed and configured for big data support. +Various components need to be installed and/or configured for big data support. + +S3 Direct Upload and Download +----------------------------- + +A lightweight option for supporting file sizes beyond a few gigabytes - a size that can cause performance issues when uploaded through the Dataverse server itself - is to configure an S3 store to provide direct upload and download via 'pre-signed URLs'. When these options are configured, file uploads and downloads are made directly to and from a configured S3 store using secure (https) connections that enforce Dataverse's access controls. (The upload and download URLs are signed with a unique key that only allows access for a short time period and Dataverse will only generate such a URL if the user has permission to upload/download the specific file in question.) + +This option can handle files >40GB and could be appropriate for files up to a TB. Other options can scale farther, but this option has the advantages that it is simple to configure and does not require any user training - uploads and downloads are done via the same interface as normal uploads to Dataverse. + +To configure these options, an administrator must set two JVM options for the Dataverse server using the same process as for other configuration options: + +``./asadmin create-jvm-options "-Ddataverse.files..download-redirect=true"`` +``./asadmin create-jvm-options "-Ddataverse.files..upload-redirect=true"`` + + +With multiple stores configured, it is possible to configure one S3 store with direct upload and/or download to support large files (in general or for specific dataverses) while configuring only direct download, or no direct access for another store. + +It is also possible to set file upload size limits per store. See the :MaxFileUploadSizeInBytes setting described in the :doc:`/installation/config` guide. + +At present, one potential drawback for direct-upload is that files are only partially 'ingested', tabular and FITS files are processed, but zip files are not unzipped, and the file contents are not inspected to evaluate their mimetype. This could be appropriate for large files, or it may be useful to completely turn off ingest processing for performance reasons (ingest processing requires a copy of the file to be retrieved by Dataverse from the S3 store). A store using direct upload can be configured to disable all ingest processing for files above a given size limit: + +``./asadmin create-jvm-options "-Ddataverse.files..ingestsizelimit="`` + + +**IMPORTANT:** One additional step that is required to enable direct download to work with previewers is to allow cross site (CORS) requests on your S3 store. +The example below shows how to enable the minimum needed CORS rules on a bucket using the AWS CLI command line tool. Note that you may need to add more methods and/or locations, if you also need to support certain previewers and external tools. 
+ +``aws s3api put-bucket-cors --bucket --cors-configuration file://cors.json`` + +with the contents of the file cors.json as follows: + +.. code-block:: json + + { + "CORSRules": [ + { + "AllowedOrigins": ["https://"], + "AllowedHeaders": ["*"], + "AllowedMethods": ["PUT", "GET"] + } + ] + } + +Alternatively, you can enable CORS using the AWS S3 web interface, using json-encoded rules as in the example above. + +Since the direct upload mechanism creates the final file rather than an intermediate temporary file, user actions, such as neither saving or canceling an upload session before closing the browser page, can leave an abandoned file in the store. The direct upload mechanism attempts to use S3 Tags to aid in identifying/removing such files. Upon upload, files are given a "dv-status":"temp" tag which is removed when the dataset changes are saved and the new file(s) are added in Dataverse. Note that not all S3 implementations support Tags: Minio does not. WIth such stores, direct upload works, but Tags are not used. Data Capture Module (DCM) ------------------------- @@ -18,7 +63,7 @@ Install a DCM Installation instructions can be found at https://github.com/sbgrid/data-capture-module/blob/master/doc/installation.md. Note that shared storage (posix or AWS S3) between Dataverse and your DCM is required. You cannot use a DCM with Swift at this point in time. -.. FIXME: Explain what ``dataverse.files.dcm-s3-bucket-name`` is for and what it has to do with ``dataverse.files.s3-bucket-name``. +.. FIXME: Explain what ``dataverse.files.dcm-s3-bucket-name`` is for and what it has to do with ``dataverse.files.s3.bucket-name``. Once you have installed a DCM, you will need to configure two database settings on the Dataverse side. These settings are documented in the :doc:`/installation/config` section of the Installation Guide: @@ -100,6 +145,7 @@ Optional steps for setting up the S3 Docker DCM Variant ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - Before: the default bucket for DCM to hold files in S3 is named test-dcm. It is coded into `post_upload_s3.bash` (line 30). Change to a different bucket if needed. +- Also Note: With the new support for multiple file store in Dataverse, DCM requires a store with id="s3" and DCM will only work with this store. 
- Add AWS bucket info to dcmsrv - Add AWS credentials to ``~/.aws/credentials`` @@ -115,6 +161,9 @@ Optional steps for setting up the S3 Docker DCM Variant - ``cd /opt/glassfish4/bin/`` - ``./asadmin delete-jvm-options "\-Ddataverse.files.storage-driver-id=file"`` - ``./asadmin create-jvm-options "\-Ddataverse.files.storage-driver-id=s3"`` + - ``./asadmin create-jvm-options "\-Ddataverse.files.s3.type=s3"`` + - ``./asadmin create-jvm-options "\-Ddataverse.files.s3.label=s3"`` + - Add AWS bucket info to Dataverse - Add AWS credentials to ``~/.aws/credentials`` @@ -132,7 +181,7 @@ Optional steps for setting up the S3 Docker DCM Variant - S3 bucket for Dataverse - - ``/usr/local/glassfish4/glassfish/bin/asadmin create-jvm-options "-Ddataverse.files.s3-bucket-name=iqsstestdcmbucket"`` + - ``/usr/local/glassfish4/glassfish/bin/asadmin create-jvm-options "-Ddataverse.files.s3.bucket-name=iqsstestdcmbucket"`` - S3 bucket for DCM (as Dataverse needs to do the copy over) diff --git a/doc/sphinx-guides/source/developers/deployment.rst b/doc/sphinx-guides/source/developers/deployment.rst index 9532e7c769f..5e830bfde5b 100755 --- a/doc/sphinx-guides/source/developers/deployment.rst +++ b/doc/sphinx-guides/source/developers/deployment.rst @@ -82,23 +82,26 @@ Download and Run the "Create Instance" Script Once you have done the configuration above, you are ready to try running the "ec2-create-instance.sh" script to spin up Dataverse in AWS. -Download :download:`ec2-create-instance.sh <../../../../scripts/installer/ec2-create-instance.sh>` and put it somewhere reasonable. For the purpose of these instructions we'll assume it's in the "Downloads" directory in your home directory. +Download :download:`ec2-create-instance.sh` and put it somewhere reasonable. For the purpose of these instructions we'll assume it's in the "Downloads" directory in your home directory. -ec2-create-instance accepts a number few command-line switches: +To run it with default values you just need the script, but you may also want a current copy of the ansible :download:`group vars`_ file. + +ec2-create-instance accepts a number of command-line switches, including: * -r: GitHub Repository URL (defaults to https://github.com/IQSS/dataverse.git) * -b: branch to build (defaults to develop) * -p: pemfile directory (defaults to $HOME) * -g: Ansible GroupVars file (if you wish to override role defaults) +* -h: help (displays usage for each available option) ``bash ~/Downloads/ec2-create-instance.sh -b develop -r https://github.com/scholarsportal/dataverse.git -g main.yml`` -Now you will need to wait around 15 minutes until the deployment is finished. Eventually, the output should tell you how to access the installation of Dataverse in a web browser or via ssh. It will also provide instructions on how to delete the instance when you are finished with it. Please be aware that AWS charges per minute for a running instance. You can also delete your instance from https://console.aws.amazon.com/console/home?region=us-east-1 . +You will need to wait for 15 minutes or so until the deployment is finished, longer if you've enabled sample data and/or the API test suite. Eventually, the output should tell you how to access the installation of Dataverse in a web browser or via SSH. It will also provide instructions on how to delete the instance when you are finished with it. Please be aware that AWS charges per minute for a running instance. You may also delete your instance from https://console.aws.amazon.com/console/home?region=us-east-1 . 
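For instance, a hypothetical invocation that builds your own fork and branch, reads the .pem file from ``~/.ssh``, and overrides role defaults with a local copy of ``main.yml`` could look like the following (the repository URL, branch name, and paths are placeholders to substitute with your own values):

.. code-block:: bash

    # placeholders: substitute your own fork, branch, pemfile directory and group vars file
    bash ~/Downloads/ec2-create-instance.sh \
      -r https://github.com/YOUR_NAME/dataverse.git \
      -b 123-COOL-FEATURE \
      -p ~/.ssh \
      -g ~/Downloads/main.yml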
-Caveats -~~~~~~~ +Caveat Recipiens +~~~~~~~~~~~~~~~~ -Please note that while the script should work fine on newish branches, older branches that have different dependencies such as an older version of Solr may not produce a working Dataverse installation. Your mileage may vary. +Please note that while the script should work well on new-ish branches, older branches that have different dependencies such as an older version of Solr may not produce a working Dataverse installation. Your mileage may vary. ---- diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst index 82b2f0bcc56..e7babf75b34 100755 --- a/doc/sphinx-guides/source/developers/dev-environment.rst +++ b/doc/sphinx-guides/source/developers/dev-environment.rst @@ -131,7 +131,7 @@ On Linux, you should just install PostgreSQL from your package manager without w Install Solr ~~~~~~~~~~~~ -`Solr `_ 7.3.1 is required. +`Solr `_ 7.7.2 is required. To install Solr, execute the following commands: @@ -141,29 +141,31 @@ To install Solr, execute the following commands: ``cd /usr/local/solr`` -``curl -O http://archive.apache.org/dist/lucene/solr/7.3.1/solr-7.3.1.tgz`` +``curl -O http://archive.apache.org/dist/lucene/solr/7.7.2/solr-7.7.2.tgz`` -``tar xvfz solr-7.3.1.tgz`` +``tar xvfz solr-7.7.2.tgz`` -``cd solr-7.3.1/server/solr`` +``cd solr-7.7.2/server/solr`` ``cp -r configsets/_default collection1`` -``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/7.3.1/schema.xml`` +``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/7.7.2/schema.xml`` -``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/7.3.1/schema_dv_mdb_fields.xml`` +``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/7.7.2/schema_dv_mdb_fields.xml`` -``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/7.3.1/schema_dv_mdb_copies.xml`` +``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/7.7.2/schema_dv_mdb_copies.xml`` ``mv schema*.xml collection1/conf`` -``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/7.3.1/solrconfig.xml`` +``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/7.7.2/solrconfig.xml`` ``mv solrconfig.xml collection1/conf/solrconfig.xml`` -``cd /usr/local/solr/solr-7.3.1`` +``cd /usr/local/solr/solr-7.7.2`` -``bin/solr start`` +(Please note that the extra jetty argument below is a security measure to limit connections to Solr to only your computer. For extra security, run a firewall.) + +``bin/solr start -j "-Djetty.host=127.0.0.1"`` ``bin/solr create_core -c collection1 -d server/solr/collection1/conf`` @@ -180,6 +182,8 @@ It's fine to accept the default values. After a while you will see ``Enter admin user name [Enter to accept default]>`` and you can just hit Enter. +**NEW in v.4.19:** We have added a new implementation of the installer script written in Python. It is intended to eventually replace the old installer above (written in Perl). For now it is being offered as an (experimental) alternative. See README_python.txt, in the scripts/installer directory, for more information on how to run it (please give it a try, and let us know if there are any problems and/or if you have any suggestions!) 
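If the installer (or Dataverse itself) has trouble talking to Solr, it can be worth confirming that Solr came up and that the ``collection1`` core answers queries. A minimal check, assuming the default port 8983 and the start command shown above, is:

.. code-block:: bash

    # from /usr/local/solr/solr-7.7.2: confirm the Solr process is running
    bin/solr status

    # confirm the collection1 core answers a basic query (expect a JSON response)
    curl "http://localhost:8983/solr/collection1/select?q=*:*"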
+ Verify Dataverse is Running ~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/developers/geospatial.rst b/doc/sphinx-guides/source/developers/geospatial.rst index 2857f7df9bf..8a19a0b11f2 100644 --- a/doc/sphinx-guides/source/developers/geospatial.rst +++ b/doc/sphinx-guides/source/developers/geospatial.rst @@ -10,7 +10,7 @@ Geoconnect Geoconnect works as a middle layer, allowing geospatial data files in Dataverse to be visualized with Harvard WorldMap. To set up a Geoconnect development environment, you can follow the steps outlined in the `local_setup.md `_ guide. You will need Python and a few other prerequisites. -As mentioned under "Architecture and Components" in the :doc:`/installation/prep` section of the Installation Guide, Geoconnect is an optional component of Dataverse, so this section is only necessary to follow it you are working on an issue related to this feature. +As mentioned under the :ref:`architecture` section of Preparation in the Installation Guide, Geoconnect is an optional component of Dataverse, so this section is only necessary to follow it you are working on an issue related to this feature. How Dataverse Ingests Shapefiles -------------------------------- diff --git a/doc/sphinx-guides/source/developers/intro.rst b/doc/sphinx-guides/source/developers/intro.rst index ea8e924b4ef..3ebfecd4a35 100755 --- a/doc/sphinx-guides/source/developers/intro.rst +++ b/doc/sphinx-guides/source/developers/intro.rst @@ -60,7 +60,7 @@ As a developer, you also may be interested in these projects related to Datavers - DVUploader - a stand-alone command-line Java application that uses the Dataverse API to support upload of files from local disk to a Dataset: https://github.com/IQSS/dataverse-uploader - dataverse-sample-data - populate your Dataverse installation with sample data: https://github.com/IQSS/dataverse-sample-data - dataverse-metrics - aggregate and visualize metrics for installations of Dataverse around the world: https://github.com/IQSS/dataverse-metrics -- Configuration management scripts - Ansible, Puppet, etc.: See "Advanced Installation" in the :doc:`/installation/prep` section of the Installation Guide. +- Configuration management scripts - Ansible, Puppet, etc.: See :ref:`advanced` section in the Installation Guide. - :doc:`/developers/unf/index` (Java) - a Universal Numerical Fingerprint: https://github.com/IQSS/UNF - GeoConnect (Python) - create a map by uploading files to Dataverse: https://github.com/IQSS/geoconnect - `DataTags `_ (Java and Scala) - tag datasets with privacy levels: https://github.com/IQSS/DataTags diff --git a/doc/sphinx-guides/source/developers/make-data-count.rst b/doc/sphinx-guides/source/developers/make-data-count.rst index 95dac2f763b..253304d78ea 100644 --- a/doc/sphinx-guides/source/developers/make-data-count.rst +++ b/doc/sphinx-guides/source/developers/make-data-count.rst @@ -71,7 +71,7 @@ If all this is working and you want to send data to the test instance of the Dat ``curl --header "Content-Type: application/json; Accept: application/json" -H "Authorization: Bearer $JSON_WEB_TOKEN" -X POST https://api.test.datacite.org/reports/ -d @sushi_report.json`` -For how to put citations into your dev database and how to get them out again, see "Configuring Dataverse for Make Data Count Citations" in the :doc:`/admin/make-data-count` section of the Admin Guide. 
+For how to put citations into your dev database and how to get them out again, see :ref:`MDC-updateCitationsForDataset` section in Make Data Count of the Admin Guide. Testing Make Data Count and Dataverse ------------------------------------- @@ -88,6 +88,8 @@ To get the ``REPORT_ID``, look at the logs generated in ``/usr/local/counter-pro To read more about the Make Data Count api, see https://github.com/datacite/sashimi +You can compare the MDC metrics display with Dataverse's original by toggling the ``:DisplayMDCMetrics`` setting (true by default to display MDC metrics). + Resources --------- diff --git a/doc/sphinx-guides/source/developers/remote-users.rst b/doc/sphinx-guides/source/developers/remote-users.rst index 4a517c1beb2..66af0c71eda 100755 --- a/doc/sphinx-guides/source/developers/remote-users.rst +++ b/doc/sphinx-guides/source/developers/remote-users.rst @@ -8,7 +8,7 @@ Shibboleth and OAuth Shibboleth and OAuth -------------------- -If you are working on anything related to users, please keep in mind that your changes will likely affect Shibboleth and OAuth users. For some background on user accounts in Dataverse, see "Auth Modes: Local vs. Remote vs. Both" in the :doc:`/installation/config` section of the Installation Guide. +If you are working on anything related to users, please keep in mind that your changes will likely affect Shibboleth and OAuth users. For some background on user accounts in Dataverse, see :ref:`auth-modes` section of Configuration in the Installation Guide. Rather than setting up Shibboleth on your laptop, developers are advised to simply add a value to their database to enable Shibboleth "dev mode" like this: diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index fc3910ac36a..2894c457d85 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -105,6 +105,37 @@ Unfortunately, the term "integration tests" can mean different things to differe - Integration tests operate on an installation of Dataverse that is running and able to talk to both PostgreSQL and Solr. - Integration tests are written using REST Assured. +Running the Full API Test Suite Using EC2 +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To run the API test suite in an EC2 instance you should first follow the steps in the :doc:`deployment` section to get set up for AWS in general and EC2 in particular. + +You may always retrieve a current copy of the ec2-create-instance.sh script and accompanying group_var.yml file from the `dataverse-ansible repo`_: + +- `ec2-create-instance.sh`_ +- `main.yml`_ + +Edit ``main.yml`` to set the desired GitHub repo, branch, and to ensure that the API test suite is enabled: + +- ``dataverse_repo: https://github.com/IQSS/dataverse.git`` +- ``dataverse_branch: develop`` +- ``dataverse.api.test_suite: true`` +- ``dataverse.sampledata.enabled: true`` + +If you wish, you may pass the local path of a logging directory, which will tell ec2-create-instance.sh to `grab glassfish, maven and other logs`_ for your review. + +Finally, run the script: + +.. code-block:: bash + + $ ./ec2-create-instance.sh -g main.yml -l log_dir + +Near the beginning and at the end of the ec2-create-instance.sh output you will see instructions for connecting to the instance via SSH. 
If you are actively working on a branch and want to refresh the warfile after each commit, you may wish to call a `redeploy.sh`_ script placed by the Ansible role, which will do a "git pull" against your branch, build the warfile, deploy the warfile, then restart glassfish. By default this script is written to /tmp/dataverse/redeploy.sh. You may invoke the script by appending it to the SSH command in ec2-create's output: + +.. code-block:: bash + + $ ssh -i your_pem.pem user@ec2-host.aws.com /tmp/dataverse/redeploy.sh + Running the full API test suite using Docker ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -145,7 +176,7 @@ The root dataverse must be published for some of the REST Assured tests to run. dataverse.siteUrl ^^^^^^^^^^^^^^^^^ -When run locally (as opposed to a remote server), some of the REST Assured tests require the ``dataverse.siteUrl`` JVM option to be set to ``http://localhost:8080``. See "JVM Options" under the :doc:`/installation/config` section of the Installation Guide for advice changing JVM options. First you should check to check your JVM options with: +When run locally (as opposed to a remote server), some of the REST Assured tests require the ``dataverse.siteUrl`` JVM option to be set to ``http://localhost:8080``. See the :ref:`jvm-options` section in the Installation Guide for advice on changing JVM options. First you should check your JVM options with: ``./asadmin list-jvm-options | egrep 'dataverse|doi'`` diff --git a/doc/sphinx-guides/source/developers/tools.rst b/doc/sphinx-guides/source/developers/tools.rst index 767a4a91694..236dee2e3c9 100755 --- a/doc/sphinx-guides/source/developers/tools.rst +++ b/doc/sphinx-guides/source/developers/tools.rst @@ -25,6 +25,8 @@ Maven With Maven installed you can run ``mvn package`` and ``mvn test`` from the command line. It can be downloaded from https://maven.apache.org +.. _vagrant: + Vagrant +++++++ diff --git a/doc/sphinx-guides/source/developers/troubleshooting.rst b/doc/sphinx-guides/source/developers/troubleshooting.rst index ec49b442016..2182c9768ad 100755 --- a/doc/sphinx-guides/source/developers/troubleshooting.rst +++ b/doc/sphinx-guides/source/developers/troubleshooting.rst @@ -84,6 +84,8 @@ As another example, here is how to create a Mail Host via command line for Amazo - Delete: ``./asadmin delete-javamail-resource mail/MyMailSession`` - Create (remove brackets and replace the variables inside): ``./asadmin create-javamail-resource --mailhost email-smtp.us-east-1.amazonaws.com --mailuser [test\@test\.com] --fromaddress [test\@test\.com] --transprotocol aws --transprotocolclass com.amazonaws.services.simpleemail.AWSJavaMailTransport --property mail.smtp.auth=true:mail.smtp.user=[aws_access_key]:mail.smtp.password=[aws_secret_key]:mail.transport.protocol=smtp:mail.smtp.port=587:mail.smtp.starttls.enable=true mail/notifyMailSession`` +.. _rebuilding-dev-environment: + Rebuilding Your Dev Environment ------------------------------- @@ -96,7 +98,7 @@ If you have an old copy of the database and old Solr data and want to start fres - confirm http://localhost:8080 is up - If you want to set some dataset-specific facets, go to the root dataverse (or any dataverse; the selections can be inherited) and click "General Information" and make choices under "Select Facets". 
There is a ticket to automate this: https://github.com/IQSS/dataverse/issues/619 -You may also find https://github.com/IQSS/dataverse/blob/develop/scripts/deploy/phoenix.dataverse.org/deploy and related scripts interesting because they demonstrate how we have at least partially automated the process of tearing down a Dataverse installation and having it rise again, hence the name "phoenix." See also "Fresh Reinstall" in the :doc:`/installation/installation-main` section of the Installation Guide. +You may also find https://github.com/IQSS/dataverse/blob/develop/scripts/deploy/phoenix.dataverse.org/deploy and related scripts interesting because they demonstrate how we have at least partially automated the process of tearing down a Dataverse installation and having it rise again, hence the name "phoenix." See the :ref:`fresh-reinstall` section of the Installation Guide. DataCite -------- diff --git a/doc/sphinx-guides/source/developers/version-control.rst b/doc/sphinx-guides/source/developers/version-control.rst index 618468392b7..eaaf0fa1911 100644 --- a/doc/sphinx-guides/source/developers/version-control.rst +++ b/doc/sphinx-guides/source/developers/version-control.rst @@ -93,6 +93,65 @@ Now that you've made your pull request, your goal is to make sure it appears in Look at https://github.com/IQSS/dataverse/blob/master/CONTRIBUTING.md for various ways to reach out to developers who have enough access to the GitHub repo to move your issue and pull request to the "Code Review" column. +Summary of Git commands +~~~~~~~~~~~~~~~~~~~~~~~ + +This section provides sequences of Git commands for two scenarios: + +* preparing the first pull request, when the IQSS Dataverse repository and the forked repository are identical +* creating an additional pull request after some time, when the IQSS Dataverse repository is ahead of the forked repository + +In the examples we use 123-COOL-FEATURE as the name of the feature branch, and https://github.com/YOUR_NAME/dataverse.git as your forked repository's URL. In practice, modify both accordingly. + +**1st scenario: preparing the first pull request** + +.. code-block:: bash + + # clone Dataverse at GitHub.com ... then + + git clone https://github.com/YOUR_NAME/dataverse.git dataverse_fork + cd dataverse_fork + + # create a new branch locally for the pull request + git checkout -b 123-COOL-FEATURE + + # work on the branch ... then commit changes + git commit -am "#123 explanation of changes" + + # upload the new branch to https://github.com/YOUR_NAME/dataverse + git push -u origin 123-COOL-FEATURE + + # ... then create pull request at github.com/YOUR_NAME/dataverse + + +**2nd scenario: preparing another pull request some months later** + +.. code-block:: bash + + # register IQSS Dataverse repo + git remote add upstream https://github.com/IQSS/dataverse.git + + git checkout develop + + # update local develop branch from https://github.com/IQSS/dataverse + git fetch upstream develop + git rebase upstream/develop + + # update remote develop branch at https://github.com/YOUR_NAME/dataverse + git push + + # create a new branch locally for the pull request + git checkout -b 123-COOL-FEATURE + + # work on the branch and commit changes + git commit -am "#123 explanation of changes" + + # upload the new branch to https://github.com/YOUR_NAME/dataverse + git push -u origin 123-COOL-FEATURE + + # ... 
then create pull request at github.com/YOUR_NAME/dataverse + + How to Resolve Conflicts in Your Pull Request --------------------------------------------- diff --git a/doc/sphinx-guides/source/installation/advanced.rst b/doc/sphinx-guides/source/installation/advanced.rst index a1f559af57d..a60d7dbc23f 100644 --- a/doc/sphinx-guides/source/installation/advanced.rst +++ b/doc/sphinx-guides/source/installation/advanced.rst @@ -15,8 +15,8 @@ You should be conscious of the following when running multiple Glassfish servers - Only one Glassfish server can be the dedicated timer server, as explained in the :doc:`/admin/timers` section of the Admin Guide. - When users upload a logo or footer for their dataverse using the "theme" feature described in the :doc:`/user/dataverse-management` section of the User Guide, these logos are stored only on the Glassfish server the user happend to be on when uploading the logo. By default these logos and footers are written to the directory ``/usr/local/glassfish4/glassfish/domains/domain1/docroot/logos``. - When a sitemp is created by a Glassfish server it is written to the filesystem of just that Glassfish server. By default the sitemap is written to the directory ``/usr/local/glassfish4/glassfish/domains/domain1/docroot/sitemap``. -- If Make Data Count is used, its raw logs must be copied from each Glassfish server to single instance of Counter Processor. See also the ``:MDCLogPath`` database setting in the :doc:`config` section of this guide and the :doc:`/admin/make-data-count` section of the Admin Guide. -- Dataset draft version logging occurs separately on each Glassfish server. See "Edit Draft Versions Logging" in the :doc:`/admin/monitoring` section of the Admin Guide for details. +- If Make Data Count is used, its raw logs must be copied from each Glassfish server to single instance of Counter Processor. See also :ref:`:MDCLogPath` section in the Configuration section of this guide and the :doc:`/admin/make-data-count` section of the Admin Guide. +- Dataset draft version logging occurs separately on each Glassfish server. See :ref:`edit-draft-versions-logging` section in Monitoring of the Admin Guide for details. - Password aliases (``db_password_alias``, etc.) are stored per Glassfish server. Detecting Which Glassfish Server a User Is On @@ -34,4 +34,4 @@ If you have successfully installed multiple Glassfish servers behind a load bala You would repeat the steps above for all of your Glassfish servers. If users seem to be having a problem with a particular server, you can ask them to visit https://dataverse.example.edu/host.txt and let you know what they see there (e.g. "server1.example.edu") to help you know which server to troubleshoot. -Please note that "Network Ports" under the :doc:`config` section has more information on fronting Glassfish with Apache. The :doc:`shibboleth` section talks about the use of ``ProxyPassMatch``. +Please note that :ref:`network-ports` under the Configuration section has more information on fronting Glassfish with Apache. The :doc:`shibboleth` section talks about the use of ``ProxyPassMatch``. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 2c63d7ffd6e..fe559776500 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -11,6 +11,8 @@ Once you have finished securing and configuring your Dataverse installation, you .. contents:: |toctitle| :local: +.. 
_securing-your-installation: + Securing Your Installation -------------------------- @@ -19,14 +21,18 @@ Changing the Superuser Password The default password for the "dataverseAdmin" superuser account is "admin", as mentioned in the :doc:`installation-main` section, and you should change it, of course. +.. _blocking-api-endpoints: + Blocking API Endpoints ++++++++++++++++++++++ -The :doc:`/api/native-api` contains a useful but potentially dangerous API endpoint called "admin" that allows you to change system settings, make ordinary users into superusers, and more. The ``builtin-users`` endpoint lets people create a local/builtin user account if they know the ``BuiltinUsers.KEY`` value described below. +The :doc:`/api/native-api` contains a useful but potentially dangerous API endpoint called "admin" that allows you to change system settings, make ordinary users into superusers, and more. The "builtin-users" endpoint lets people create a local/builtin user account if they know the key defined in :ref:`BuiltinUsers.KEY`. The endpoint "test" is not used but is where testing code may be added (see https://github.com/IQSS/dataverse/issues/4137 ). + +By default, most APIs can be operated on remotely and a number of endpoints do not require authentication. The endpoints "admin" and "test" are limited to localhost out of the box by the settings :ref:`:BlockedApiEndpoints` and :ref:`:BlockedApiPolicy`. -By default, all APIs can be operated on remotely and a number of endpoints do not require authentication. https://github.com/IQSS/dataverse/issues/1886 was opened to explore changing these defaults, but until then it is very important to block both the "admin" endpoint (and at least consider blocking ``builtin-users``). For details please see also the section on ``:BlockedApiPolicy`` below. +It is very important to keep the block in place for the "admin" endpoint (and at least consider blocking "builtin-users"). Please note that documentation for the "admin" endpoint is spread across the :doc:`/api/native-api` section of the API Guide and the :doc:`/admin/index`. -It's also possible to prevent file uploads via API by adjusting the ``:UploadMethods`` database setting. +It's also possible to prevent file uploads via API by adjusting the :ref:`:UploadMethods` database setting. Forcing HTTPS +++++++++++++ @@ -45,17 +51,18 @@ Out of the box, Dataverse will list email addresses of the contacts for datasets Additional Recommendations ++++++++++++++++++++++++++ + Run Glassfish as a User Other Than Root -+++++++++++++++++++++++++++++++++++++++ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -See the Glassfish section of :doc:`prerequisites` for details and init scripts for running Glassfish as non-root. +See the :ref:`glassfish` section of :doc:`prerequisites` for details and init scripts for running Glassfish as non-root. Related to this is that you should remove ``/root/.glassfish/pass`` to ensure that Glassfish isn't ever accidentally started as root. Without the password, Glassfish won't be able to start as root, which is a good thing. Enforce Strong Passwords for User Accounts -++++++++++++++++++++++++++++++++++++++++++ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Dataverse only stores passwords (as salted hash, and using a strong hashing algorithm) for "builtin" users. You can increase the password complexity rules to meet your security needs. 
If you have configured your Dataverse installation to allow login from remote authentication providers such as Shibboleth, ORCID, GitHub or Google, you do not have any control over those remote providers' password complexity rules. See the "Auth Modes: Local vs. Remote vs. Both" section below for more on login options. +Dataverse only stores passwords (as salted hash, and using a strong hashing algorithm) for "builtin" users. You can increase the password complexity rules to meet your security needs. If you have configured your Dataverse installation to allow login from remote authentication providers such as Shibboleth, ORCID, GitHub or Google, you do not have any control over those remote providers' password complexity rules. See the :ref:`auth-modes` section below for more on login options. Even if you are satisfied with the out-of-the-box password complexity rules Dataverse ships with, for the "dataverseAdmin" account you should use a strong password so the hash cannot easily be cracked through dictionary attacks. @@ -70,6 +77,8 @@ Password complexity rules for "builtin" accounts can be adjusted with a variety - :ref:`:PVGoodStrength` - :ref:`:PVCustomPasswordResetAlertMessage` +.. _network-ports: + Network Ports ------------- @@ -106,7 +115,7 @@ Root Dataverse Permissions The user who creates a dataverse is given the "Admin" role on that dataverse. The root dataverse is created automatically for you by the installer and the "Admin" is the superuser account ("dataverseAdmin") we used in the :doc:`installation-main` section to confirm that we can log in. These next steps of configuring the root dataverse require the "Admin" role on the root dataverse, but not the much more powerful superuser attribute. In short, users with the "Admin" role are subject to the permission system. A superuser, on the other hand, completely bypasses the permission system. You can give non-superusers the "Admin" role on the root dataverse if you'd like them to configure the root dataverse. -In order for non-superusers to start creating dataverses or datasets, you need click "Edit" then "Permissions" and make choices about which users can add dataverses or datasets within the root dataverse. (There is an API endpoint for this operation as well.) Again, the user who creates a dataverse will be granted the "Admin" role on that dataverse. Non-superusers who are not "Admin" on the root dataverse will not be able to to do anything useful until the root dataverse has been published. +In order for non-superusers to start creating dataverses or datasets, you need click "Edit" then "Permissions" and make choices about which users can add dataverses or datasets within the root dataverse. (There is an API endpoint for this operation as well.) Again, the user who creates a dataverse will be granted the "Admin" role on that dataverse. Non-superusers who are not "Admin" on the root dataverse will not be able to do anything useful until the root dataverse has been published. As the person installing Dataverse you may or may not be a local metadata expert. You may want to have others sign up for accounts and grant them the "Admin" role at the root dataverse to configure metadata fields, templates, browse/search facets, guestbooks, etc. For more on these topics, consult the :doc:`/user/dataverse-management` section of the User Guide. 
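The root dataverse permissions paragraph above notes that role assignments can also be made via API. A rough sketch, assuming a local installation, a superuser API token in ``$API_TOKEN``, and a hypothetical user ``@jsmith`` who should be able to add dataverses and datasets under the root dataverse:

.. code-block:: bash

    # assign the fullContributor role on the root dataverse to @jsmith
    curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-Type: application/json" \
      http://localhost:8080/api/dataverses/root/assignments \
      -d '{"assignee": "@jsmith", "role": "fullContributor"}'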
@@ -142,6 +151,7 @@ Here are the configuration options for DOIs: - :ref:`doi.baseurlstring` - :ref:`doi.username` - :ref:`doi.password` +- :ref:`doi.mdcbaseurlstring` **Database Settings:** @@ -174,6 +184,8 @@ Here are the configuration options for handles: Note: If you are **minting your own handles** and plan to set up your own handle service, please refer to `Handle.Net documentation `_. +.. _auth-modes: + Auth Modes: Local vs. Remote vs. Both ------------------------------------- @@ -189,7 +201,7 @@ Both Local and Remote Auth The ``authenticationproviderrow`` database table controls which "authentication providers" are available within Dataverse. Out of the box, a single row with an id of "builtin" will be present. For each user in Dataverse, the ``authenticateduserlookup`` table will have a value under ``authenticationproviderid`` that matches this id. For example, the default "dataverseAdmin" user will have the value "builtin" under ``authenticationproviderid``. Why is this important? Users are tied to a specific authentication provider but conversion mechanisms are available to switch a user from one authentication provider to the other. As explained in the :doc:`/user/account` section of the User Guide, a graphical workflow is provided for end users to convert from the "builtin" authentication provider to a remote provider. Conversion from a remote authentication provider to the builtin provider can be performed by a sysadmin with access to the "admin" API. See the :doc:`/api/native-api` section of the API Guide for how to list users and authentication providers as JSON. -Adding and enabling a second authentication provider (:ref:`native-api-add-auth-provider` and :ref:`api-toggle-auth-provider`) will result in the Log In page showing additional providers for your users to choose from. By default, the Log In page will show the "builtin" provider, but you can adjust this via the :ref:`conf-default-auth-provider` configuration option. Further customization can be achieved by setting :ref:`conf-allow-signup` to "false", thus preventing users from creating local accounts via the web interface. Please note that local accounts can also be created via API, and the way to prevent this is to block the ``builtin-users`` endpoint (:ref:`conf-blocked-api-endpoints`) or scramble (or remove) the ``BuiltinUsers.KEY`` database setting (:ref:`conf-built-in-users-key`) per the :doc:`config` section. +Adding and enabling a second authentication provider (:ref:`native-api-add-auth-provider` and :ref:`api-toggle-auth-provider`) will result in the Log In page showing additional providers for your users to choose from. By default, the Log In page will show the "builtin" provider, but you can adjust this via the :ref:`conf-default-auth-provider` configuration option. Further customization can be achieved by setting :ref:`conf-allow-signup` to "false", thus preventing users from creating local accounts via the web interface. Please note that local accounts can also be created via API, and the way to prevent this is to block the ``builtin-users`` endpoint (:ref:`:BlockedApiEndpoints`) or scramble (or remove) the ``BuiltinUsers.KEY`` database setting (:ref:`BuiltinUsers.KEY`). To configure Shibboleth see the :doc:`shibboleth` section and to configure OAuth see the :doc:`oauth2` section. 
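To make the endpoint blocking referenced above concrete: the ``:BlockedApiEndpoints`` and ``:BlockedApiPolicy`` database settings are set through the admin settings API. A sketch, assuming a local installation and that both "admin" and "builtin-users" should be restricted to localhost:

.. code-block:: bash

    # block the admin and builtin-users endpoints except when called from localhost
    curl -X PUT -d "admin,builtin-users" http://localhost:8080/api/admin/settings/:BlockedApiEndpoints
    curl -X PUT -d localhost-only http://localhost:8080/api/admin/settings/:BlockedApiPolicy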
@@ -203,10 +215,48 @@ As for the "Remote only" authentication mode, it means that: - ``:DefaultAuthProvider`` has been set to use the desired authentication provider - The "builtin" authentication provider has been disabled (:ref:`api-toggle-auth-provider`). Note that disabling the "builtin" authentication provider means that the API endpoint for converting an account from a remote auth provider will not work. Converting directly from one remote authentication provider to another (i.e. from GitHub to Google) is not supported. Conversion from remote is always to "builtin". Then the user initiates a conversion from "builtin" to remote. Note that longer term, the plan is to permit multiple login options to the same Dataverse account per https://github.com/IQSS/dataverse/issues/3487 (so all this talk of conversion will be moot) but for now users can only use a single login option, as explained in the :doc:`/user/account` section of the User Guide. In short, "remote only" might work for you if you only plan to use a single remote authentication provider such that no conversion between remote authentication providers will be necessary. -File Storage: Local Filesystem vs. Swift vs. S3 ------------------------------------------------ +File Storage: Using a Local Filesystem and/or Swift and/or S3 object stores +--------------------------------------------------------------------------- + +By default, a Dataverse installation stores all data files (files uploaded by end users) on the filesystem at ``/usr/local/glassfish4/glassfish/domains/domain1/files``. This path can vary based on answers you gave to the installer (see the :ref:`dataverse-installer` section of the Installation Guide) or afterward by reconfiguring the ``dataverse.files.directory`` JVM option described below. + +Dataverse can alternately store files in a Swift or S3-compatible object store, and can now be configured to support multiple stores at once. With a multi-store configuration, the location for new files can be controlled on a per-dataverse basis. + +The following sections describe how to set up various types of stores and how to configure for multiple stores. + +Multi-store Basics ++++++++++++++++++ + +To support multiple stores, Dataverse now requires an id, type, and label for each store (even for a single store configuration). These are configured by defining two required jvm options: + +.. code-block:: none + + ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files..type=" + ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files..label=